repo_name (stringlengths 5–100) | path (stringlengths 4–299) | copies (stringclasses, 990 values) | size (stringlengths 4–7) | content (stringlengths 666–1.03M) | license (stringclasses, 15 values) | hash (int64, -9,223,351,895,964,839,000 to 9,223,297,778B) | line_mean (float64, 3.17–100) | line_max (int64, 7–1k) | alpha_frac (float64, 0.25–0.98) | autogenerated (bool, 1 class)
---|---|---|---|---|---|---|---|---|---|---|
dstftw/youtube-dl | youtube_dl/extractor/imgur.py | 20 | 5082 |
from __future__ import unicode_literals
import re
from .common import InfoExtractor
from ..utils import (
int_or_none,
js_to_json,
mimetype2ext,
ExtractorError,
)
class ImgurIE(InfoExtractor):
_VALID_URL = r'https?://(?:i\.)?imgur\.com/(?!(?:a|gallery|(?:t(?:opic)?|r)/[^/]+)/)(?P<id>[a-zA-Z0-9]+)'
_TESTS = [{
'url': 'https://i.imgur.com/A61SaA1.gifv',
'info_dict': {
'id': 'A61SaA1',
'ext': 'mp4',
'title': 're:Imgur GIF$|MRW gifv is up and running without any bugs$',
},
}, {
'url': 'https://imgur.com/A61SaA1',
'only_matching': True,
}, {
'url': 'https://i.imgur.com/crGpqCV.mp4',
'only_matching': True,
}, {
# no title
'url': 'https://i.imgur.com/jxBXAMC.gifv',
'only_matching': True,
}]
def _real_extract(self, url):
video_id = self._match_id(url)
webpage = self._download_webpage(
'https://i.imgur.com/{id}.gifv'.format(id=video_id), video_id)
width = int_or_none(self._og_search_property(
'video:width', webpage, default=None))
height = int_or_none(self._og_search_property(
'video:height', webpage, default=None))
video_elements = self._search_regex(
r'(?s)<div class="video-elements">(.*?)</div>',
webpage, 'video elements', default=None)
if not video_elements:
raise ExtractorError(
'No sources found for video %s. Maybe an image?' % video_id,
expected=True)
formats = []
for m in re.finditer(r'<source\s+src="(?P<src>[^"]+)"\s+type="(?P<type>[^"]+)"', video_elements):
formats.append({
'format_id': m.group('type').partition('/')[2],
'url': self._proto_relative_url(m.group('src')),
'ext': mimetype2ext(m.group('type')),
'width': width,
'height': height,
'http_headers': {
'User-Agent': 'youtube-dl (like wget)',
},
})
gif_json = self._search_regex(
r'(?s)var\s+videoItem\s*=\s*(\{.*?\})',
webpage, 'GIF code', fatal=False)
if gif_json:
gifd = self._parse_json(
gif_json, video_id, transform_source=js_to_json)
formats.append({
'format_id': 'gif',
'preference': -10,
'width': width,
'height': height,
'ext': 'gif',
'acodec': 'none',
'vcodec': 'gif',
'container': 'gif',
'url': self._proto_relative_url(gifd['gifUrl']),
'filesize': gifd.get('size'),
'http_headers': {
'User-Agent': 'youtube-dl (like wget)',
},
})
self._sort_formats(formats)
return {
'id': video_id,
'formats': formats,
'title': self._og_search_title(webpage, default=video_id),
}
class ImgurGalleryIE(InfoExtractor):
IE_NAME = 'imgur:gallery'
_VALID_URL = r'https?://(?:i\.)?imgur\.com/(?:gallery|(?:t(?:opic)?|r)/[^/]+)/(?P<id>[a-zA-Z0-9]+)'
_TESTS = [{
'url': 'http://imgur.com/gallery/Q95ko',
'info_dict': {
'id': 'Q95ko',
'title': 'Adding faces make every GIF better',
},
'playlist_count': 25,
}, {
'url': 'http://imgur.com/topic/Aww/ll5Vk',
'only_matching': True,
}, {
'url': 'https://imgur.com/gallery/YcAQlkx',
'info_dict': {
'id': 'YcAQlkx',
'ext': 'mp4',
'title': 'Classic Steve Carell gif...cracks me up everytime....damn the repost downvotes....',
}
}, {
'url': 'http://imgur.com/topic/Funny/N8rOudd',
'only_matching': True,
}, {
'url': 'http://imgur.com/r/aww/VQcQPhM',
'only_matching': True,
}]
def _real_extract(self, url):
gallery_id = self._match_id(url)
data = self._download_json(
'https://imgur.com/gallery/%s.json' % gallery_id,
gallery_id)['data']['image']
if data.get('is_album'):
entries = [
self.url_result('http://imgur.com/%s' % image['hash'], ImgurIE.ie_key(), image['hash'])
for image in data['album_images']['images'] if image.get('hash')]
return self.playlist_result(entries, gallery_id, data.get('title'), data.get('description'))
return self.url_result('http://imgur.com/%s' % gallery_id, ImgurIE.ie_key(), gallery_id)
class ImgurAlbumIE(ImgurGalleryIE):
IE_NAME = 'imgur:album'
_VALID_URL = r'https?://(?:i\.)?imgur\.com/a/(?P<id>[a-zA-Z0-9]+)'
_TESTS = [{
'url': 'http://imgur.com/a/j6Orj',
'info_dict': {
'id': 'j6Orj',
'title': 'A Literary Analysis of "Star Wars: The Force Awakens"',
},
'playlist_count': 12,
}]
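# Illustrative sketch (an addition, not part of the extractor): how the two
# _VALID_URL patterns above split imgur URLs between ImgurIE (single images)
# and ImgurGalleryIE (galleries/topics/subreddits). The patterns are copied
# verbatim from the classes; `re` is already imported at the top of the file.
def _demo_valid_url_routing():
    image_re = r'https?://(?:i\.)?imgur\.com/(?!(?:a|gallery|(?:t(?:opic)?|r)/[^/]+)/)(?P<id>[a-zA-Z0-9]+)'
    gallery_re = r'https?://(?:i\.)?imgur\.com/(?:gallery|(?:t(?:opic)?|r)/[^/]+)/(?P<id>[a-zA-Z0-9]+)'
    assert re.match(image_re, 'https://i.imgur.com/A61SaA1.gifv').group('id') == 'A61SaA1'
    # The negative lookahead keeps gallery URLs out of ImgurIE entirely:
    assert re.match(image_re, 'http://imgur.com/gallery/Q95ko') is None
    assert re.match(gallery_re, 'http://imgur.com/gallery/Q95ko').group('id') == 'Q95ko'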
| unlicense | -6,625,328,027,046,027,000 | 32 | 109 | 0.486029 | false |
franky88/emperioanimesta | env/Lib/site-packages/wheel/egg2wheel.py | 471 | 2633 |
#!/usr/bin/env python
import os.path
import re
import sys
import tempfile
import zipfile
import wheel.bdist_wheel
import shutil
import distutils.dist
from distutils.archive_util import make_archive
from argparse import ArgumentParser
from glob import iglob
egg_info_re = re.compile(r'''(?P<name>.+?)-(?P<ver>.+?)
(-(?P<pyver>.+?))?(-(?P<arch>.+?))?.egg''', re.VERBOSE)
def egg2wheel(egg_path, dest_dir):
egg_info = egg_info_re.match(os.path.basename(egg_path)).groupdict()
dir = tempfile.mkdtemp(suffix="_e2w")
if os.path.isfile(egg_path):
# assume we have a bdist_egg otherwise
egg = zipfile.ZipFile(egg_path)
egg.extractall(dir)
else:
# support buildout-style installed eggs directories
for pth in os.listdir(egg_path):
src = os.path.join(egg_path, pth)
if os.path.isfile(src):
shutil.copy2(src, dir)
else:
shutil.copytree(src, os.path.join(dir, pth))
dist_info = "%s-%s" % (egg_info['name'], egg_info['ver'])
abi = 'none'
pyver = egg_info['pyver'].replace('.', '')
arch = (egg_info['arch'] or 'any').replace('.', '_').replace('-', '_')
if arch != 'any':
# assume all binary eggs are for CPython
pyver = 'cp' + pyver[2:]
wheel_name = '-'.join((
dist_info,
pyver,
abi,
arch
))
bw = wheel.bdist_wheel.bdist_wheel(distutils.dist.Distribution())
bw.root_is_purelib = egg_info['arch'] is None
dist_info_dir = os.path.join(dir, '%s.dist-info' % dist_info)
bw.egg2dist(os.path.join(dir, 'EGG-INFO'),
dist_info_dir)
bw.write_wheelfile(dist_info_dir, generator='egg2wheel')
bw.write_record(dir, dist_info_dir)
filename = make_archive(os.path.join(dest_dir, wheel_name), 'zip', root_dir=dir)
os.rename(filename, filename[:-3] + 'whl')
shutil.rmtree(dir)
def main():
parser = ArgumentParser()
parser.add_argument('eggs', nargs='*', help="Eggs to convert")
parser.add_argument('--dest-dir', '-d', default=os.path.curdir,
help="Directory to store wheels (default %(default)s)")
parser.add_argument('--verbose', '-v', action='store_true')
args = parser.parse_args()
for pat in args.eggs:
for egg in iglob(pat):
if args.verbose:
sys.stdout.write("{0}... ".format(egg))
egg2wheel(egg, args.dest_dir)
if args.verbose:
sys.stdout.write("OK\n")
if __name__ == "__main__":
main()
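# Illustrative sketch (an addition, not part of the original module): how
# egg_info_re decomposes an egg filename into the pieces egg2wheel() uses to
# build the wheel name. The filename below is a made-up example.
def _demo_egg_name_parsing():
    info = egg_info_re.match('lxml-3.2.1-py2.7-win32.egg').groupdict()
    # info == {'name': 'lxml', 'ver': '3.2.1', 'pyver': 'py2.7', 'arch': 'win32'}
    pyver = info['pyver'].replace('.', '')                     # 'py27'
    arch = (info['arch'] or 'any').replace('.', '_').replace('-', '_')
    if arch != 'any':
        pyver = 'cp' + pyver[2:]   # binary eggs are assumed to be CPython
    return '-'.join(('%s-%s' % (info['name'], info['ver']), pyver, 'none', arch))
    # -> 'lxml-3.2.1-cp27-none-win32'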
| gpl-3.0 | -8,842,711,588,377,372,000 | 35.068493 | 84 | 0.569313 | false |
h2oai/h2o-2 | py/testdir_single_jvm/test_parse_small_many_fvec.py | 9 | 1652 |
import unittest, re, sys, random
sys.path.extend(['.','..','../..','py'])
import h2o, h2o_cmd, h2o_import as h2i
def writeRows(csvPathname, row, eol, repeat):
f = open(csvPathname, 'w')
    for r in range(repeat):
        f.write(row + eol)
    f.close()
class Basic(unittest.TestCase):
def tearDown(self):
h2o.check_sandbox_for_errors()
@classmethod
def setUpClass(cls):
global SEED
# SEED = h2o.setup_random_seed()
SEED = 6204672511291494176
h2o.init(1)
@classmethod
def tearDownClass(cls):
h2o.tear_down_cloud()
def test_parse_small_many_fvec(self):
SYNDATASETS_DIR = h2o.make_syn_dir()
# can try the other two possibilities also
eol = "\n"
row = "a,b,c,d,e,f,g"
# need unique key name for upload and for parse, each time
# maybe just upload it once?
timeoutSecs = 10
node = h2o.nodes[0]
# fail rate is one in 200?
# need at least two rows (parser)
for sizeTrial in range(10):
size = random.randint(2,129)
print "\nparsing with rows:", size
csvFilename = "p" + "_" + str(size)
csvPathname = SYNDATASETS_DIR + "/" + csvFilename
            writeRows(csvPathname, row, eol, size)
src_key = csvFilename
for trial in range(5):
hex_key = csvFilename + "_" + str(trial) + ".hex"
parseResult = h2i.import_parse(path=csvPathname, schema='put', src_key=src_key, hex_key=hex_key)
sys.stdout.write('.')
sys.stdout.flush()
if __name__ == '__main__':
h2o.unit_main()
| apache-2.0 | -76,470,845,546,272,100 | 30.169811 | 112 | 0.559322 | false |
MrLoick/python-for-android | python-modules/twisted/twisted/internet/endpoints.py | 49 | 33917 |
# -*- test-case-name: twisted.internet.test.test_endpoints -*-
# Copyright (c) 2007-2010 Twisted Matrix Laboratories.
# See LICENSE for details.
"""
Implementations of L{IStreamServerEndpoint} and L{IStreamClientEndpoint} that
wrap the L{IReactorTCP}, L{IReactorSSL}, and L{IReactorUNIX} interfaces.
This also implements an extensible mini-language for describing endpoints,
parsed by the L{clientFromString} and L{serverFromString} functions.
@since: 10.1
"""
from zope.interface import implements, directlyProvides
import warnings
from twisted.internet import interfaces, defer, error
from twisted.internet.protocol import ClientFactory, Protocol
from twisted.plugin import getPlugins
from twisted.internet.interfaces import IStreamServerEndpointStringParser
from twisted.internet.interfaces import IStreamClientEndpointStringParser
from twisted.python.filepath import FilePath
__all__ = ["clientFromString", "serverFromString",
"TCP4ServerEndpoint", "TCP4ClientEndpoint",
"UNIXServerEndpoint", "UNIXClientEndpoint",
"SSL4ServerEndpoint", "SSL4ClientEndpoint"]
class _WrappingProtocol(Protocol):
"""
Wrap another protocol in order to notify my user when a connection has
been made.
@ivar _connectedDeferred: The L{Deferred} that will callback
with the C{wrappedProtocol} when it is connected.
@ivar _wrappedProtocol: An L{IProtocol} provider that will be
connected.
"""
def __init__(self, connectedDeferred, wrappedProtocol):
"""
@param connectedDeferred: The L{Deferred} that will callback
with the C{wrappedProtocol} when it is connected.
@param wrappedProtocol: An L{IProtocol} provider that will be
connected.
"""
self._connectedDeferred = connectedDeferred
self._wrappedProtocol = wrappedProtocol
if interfaces.IHalfCloseableProtocol.providedBy(
self._wrappedProtocol):
directlyProvides(self, interfaces.IHalfCloseableProtocol)
def connectionMade(self):
"""
Connect the C{self._wrappedProtocol} to our C{self.transport} and
callback C{self._connectedDeferred} with the C{self._wrappedProtocol}
"""
self._wrappedProtocol.makeConnection(self.transport)
self._connectedDeferred.callback(self._wrappedProtocol)
def dataReceived(self, data):
"""
Proxy C{dataReceived} calls to our C{self._wrappedProtocol}
"""
return self._wrappedProtocol.dataReceived(data)
def connectionLost(self, reason):
"""
Proxy C{connectionLost} calls to our C{self._wrappedProtocol}
"""
return self._wrappedProtocol.connectionLost(reason)
def readConnectionLost(self):
"""
Proxy L{IHalfCloseableProtocol.readConnectionLost} to our
C{self._wrappedProtocol}
"""
self._wrappedProtocol.readConnectionLost()
def writeConnectionLost(self):
"""
Proxy L{IHalfCloseableProtocol.writeConnectionLost} to our
C{self._wrappedProtocol}
"""
self._wrappedProtocol.writeConnectionLost()
class _WrappingFactory(ClientFactory):
"""
Wrap a factory in order to wrap the protocols it builds.
@ivar _wrappedFactory: A provider of I{IProtocolFactory} whose
buildProtocol method will be called and whose resulting protocol
will be wrapped.
@ivar _onConnection: An L{Deferred} that fires when the protocol is
connected
"""
protocol = _WrappingProtocol
def __init__(self, wrappedFactory, canceller):
"""
@param wrappedFactory: A provider of I{IProtocolFactory} whose
buildProtocol method will be called and whose resulting protocol
will be wrapped.
@param canceller: An object that will be called to cancel the
L{self._onConnection} L{Deferred}
"""
self._wrappedFactory = wrappedFactory
self._onConnection = defer.Deferred(canceller=canceller)
def buildProtocol(self, addr):
"""
Proxy C{buildProtocol} to our C{self._wrappedFactory} or errback
the C{self._onConnection} L{Deferred}.
@return: An instance of L{_WrappingProtocol} or C{None}
"""
try:
proto = self._wrappedFactory.buildProtocol(addr)
except:
self._onConnection.errback()
else:
return self.protocol(self._onConnection, proto)
def clientConnectionFailed(self, connector, reason):
"""
Errback the C{self._onConnection} L{Deferred} when the
client connection fails.
"""
self._onConnection.errback(reason)
class TCP4ServerEndpoint(object):
"""
TCP server endpoint with an IPv4 configuration
@ivar _reactor: An L{IReactorTCP} provider.
@type _port: int
@ivar _port: The port number on which to listen for incoming connections.
@type _backlog: int
@ivar _backlog: size of the listen queue
@type _interface: str
@ivar _interface: the hostname to bind to, defaults to '' (all)
"""
implements(interfaces.IStreamServerEndpoint)
def __init__(self, reactor, port, backlog=50, interface=''):
"""
@param reactor: An L{IReactorTCP} provider.
        @param port: The port number used for listening
@param backlog: size of the listen queue
@param interface: the hostname to bind to, defaults to '' (all)
"""
self._reactor = reactor
self._port = port
self._listenArgs = dict(backlog=50, interface='')
self._backlog = backlog
self._interface = interface
def listen(self, protocolFactory):
"""
Implement L{IStreamServerEndpoint.listen} to listen on a TCP socket
"""
return defer.execute(self._reactor.listenTCP,
self._port,
protocolFactory,
backlog=self._backlog,
interface=self._interface)
class TCP4ClientEndpoint(object):
"""
TCP client endpoint with an IPv4 configuration.
@ivar _reactor: An L{IReactorTCP} provider.
@type _host: str
@ivar _host: The hostname to connect to as a C{str}
@type _port: int
@ivar _port: The port to connect to as C{int}
@type _timeout: int
@ivar _timeout: number of seconds to wait before assuming the
connection has failed.
@type _bindAddress: tuple
    @ivar _bindAddress: a (host, port) tuple of local address to bind
to, or None.
"""
implements(interfaces.IStreamClientEndpoint)
def __init__(self, reactor, host, port, timeout=30, bindAddress=None):
"""
@param reactor: An L{IReactorTCP} provider
@param host: A hostname, used when connecting
@param port: The port number, used when connecting
@param timeout: number of seconds to wait before assuming the
connection has failed.
        @param bindAddress: a (host, port) tuple of local address to bind to,
or None.
"""
self._reactor = reactor
self._host = host
self._port = port
self._timeout = timeout
self._bindAddress = bindAddress
def connect(self, protocolFactory):
"""
Implement L{IStreamClientEndpoint.connect} to connect via TCP.
"""
def _canceller(deferred):
connector.stopConnecting()
deferred.errback(
error.ConnectingCancelledError(connector.getDestination()))
try:
wf = _WrappingFactory(protocolFactory, _canceller)
connector = self._reactor.connectTCP(
self._host, self._port, wf,
timeout=self._timeout, bindAddress=self._bindAddress)
return wf._onConnection
except:
return defer.fail()
class SSL4ServerEndpoint(object):
"""
SSL secured TCP server endpoint with an IPv4 configuration.
@ivar _reactor: An L{IReactorSSL} provider.
@type _host: str
@ivar _host: The hostname to connect to as a C{str}
@type _port: int
@ivar _port: The port to connect to as C{int}
@type _sslContextFactory: L{OpenSSLCertificateOptions}
    @ivar _sslContextFactory: SSL configuration information as an
L{OpenSSLCertificateOptions}
@type _backlog: int
@ivar _backlog: size of the listen queue
@type _interface: str
@ivar _interface: the hostname to bind to, defaults to '' (all)
"""
implements(interfaces.IStreamServerEndpoint)
def __init__(self, reactor, port, sslContextFactory,
backlog=50, interface=''):
"""
@param reactor: An L{IReactorSSL} provider.
        @param port: The port number used for listening
@param sslContextFactory: An instance of
L{twisted.internet._sslverify.OpenSSLCertificateOptions}.
        @param backlog: size of the listen queue
        @param interface: the hostname to bind to, defaults to '' (all)
"""
self._reactor = reactor
self._port = port
self._sslContextFactory = sslContextFactory
self._backlog = backlog
self._interface = interface
def listen(self, protocolFactory):
"""
Implement L{IStreamServerEndpoint.listen} to listen for SSL on a
TCP socket.
"""
return defer.execute(self._reactor.listenSSL, self._port,
protocolFactory,
contextFactory=self._sslContextFactory,
backlog=self._backlog,
interface=self._interface)
class SSL4ClientEndpoint(object):
"""
SSL secured TCP client endpoint with an IPv4 configuration
@ivar _reactor: An L{IReactorSSL} provider.
@type _host: str
@ivar _host: The hostname to connect to as a C{str}
@type _port: int
@ivar _port: The port to connect to as C{int}
@type _sslContextFactory: L{OpenSSLCertificateOptions}
    @ivar _sslContextFactory: SSL configuration information as an
L{OpenSSLCertificateOptions}
@type _timeout: int
@ivar _timeout: number of seconds to wait before assuming the
connection has failed.
@type _bindAddress: tuple
@ivar _bindAddress: a (host, port) tuple of local address to bind
to, or None.
"""
implements(interfaces.IStreamClientEndpoint)
def __init__(self, reactor, host, port, sslContextFactory,
timeout=30, bindAddress=None):
"""
@param reactor: An L{IReactorSSL} provider.
@param host: A hostname, used when connecting
@param port: The port number, used when connecting
@param sslContextFactory: SSL Configuration information as An instance
of L{OpenSSLCertificateOptions}.
@param timeout: number of seconds to wait before assuming the
connection has failed.
        @param bindAddress: a (host, port) tuple of local address to bind to,
or None.
"""
self._reactor = reactor
self._host = host
self._port = port
self._sslContextFactory = sslContextFactory
self._timeout = timeout
self._bindAddress = bindAddress
def connect(self, protocolFactory):
"""
Implement L{IStreamClientEndpoint.connect} to connect with SSL over
TCP.
"""
def _canceller(deferred):
connector.stopConnecting()
deferred.errback(
error.ConnectingCancelledError(connector.getDestination()))
try:
wf = _WrappingFactory(protocolFactory, _canceller)
connector = self._reactor.connectSSL(
self._host, self._port, wf, self._sslContextFactory,
timeout=self._timeout, bindAddress=self._bindAddress)
return wf._onConnection
except:
return defer.fail()
class UNIXServerEndpoint(object):
"""
UnixSocket server endpoint.
    @type _address: str
    @ivar _address: a path to a unix socket on the filesystem.
    @ivar _reactor: An L{IReactorUNIX} provider.
"""
implements(interfaces.IStreamServerEndpoint)
def __init__(self, reactor, address, backlog=50, mode=0666, wantPID=0):
"""
@param reactor: An L{IReactorUNIX} provider.
@param address: The path to the Unix socket file, used when listening
@param backlog: number of connections to allow in backlog.
@param mode: mode to set on the unix socket. This parameter is
deprecated. Permissions should be set on the directory which
contains the UNIX socket.
@param wantPID: if True, create a pidfile for the socket.
"""
self._reactor = reactor
self._address = address
self._backlog = backlog
self._mode = mode
self._wantPID = wantPID
def listen(self, protocolFactory):
"""
Implement L{IStreamServerEndpoint.listen} to listen on a UNIX socket.
"""
return defer.execute(self._reactor.listenUNIX, self._address,
protocolFactory,
backlog=self._backlog,
mode=self._mode,
wantPID=self._wantPID)
class UNIXClientEndpoint(object):
"""
UnixSocket client endpoint.
@type _path: str
@ivar _path: a path to a unix socket on the filesystem.
@type _timeout: int
@ivar _timeout: number of seconds to wait before assuming the connection
has failed.
@type _checkPID: bool
@ivar _checkPID: if True, check for a pid file to verify that a server
is listening.
    @ivar _reactor: An L{IReactorUNIX} provider.
"""
implements(interfaces.IStreamClientEndpoint)
def __init__(self, reactor, path, timeout=30, checkPID=0):
"""
@param reactor: An L{IReactorUNIX} provider.
@param path: The path to the Unix socket file, used when connecting
@param timeout: number of seconds to wait before assuming the
connection has failed.
@param checkPID: if True, check for a pid file to verify that a server
is listening.
"""
self._reactor = reactor
self._path = path
self._timeout = timeout
self._checkPID = checkPID
def connect(self, protocolFactory):
"""
Implement L{IStreamClientEndpoint.connect} to connect via a
UNIX Socket
"""
def _canceller(deferred):
connector.stopConnecting()
deferred.errback(
error.ConnectingCancelledError(connector.getDestination()))
try:
wf = _WrappingFactory(protocolFactory, _canceller)
connector = self._reactor.connectUNIX(
self._path, wf,
timeout=self._timeout,
checkPID=self._checkPID)
return wf._onConnection
except:
return defer.fail()
def _parseTCP(factory, port, interface="", backlog=50):
"""
Internal parser function for L{_parseServer} to convert the string
arguments for a TCP(IPv4) stream endpoint into the structured arguments.
@param factory: the protocol factory being parsed, or C{None}. (This was a
leftover argument from when this code was in C{strports}, and is now
mostly None and unused.)
@type factory: L{IProtocolFactory} or C{NoneType}
@param port: the integer port number to bind
@type port: C{str}
@param interface: the interface IP to listen on
@param backlog: the length of the listen queue
@type backlog: C{str}
@return: a 2-tuple of (args, kwargs), describing the parameters to
L{IReactorTCP.listenTCP} (or, modulo argument 2, the factory, arguments
        to L{TCP4ServerEndpoint}).
"""
return (int(port), factory), {'interface': interface,
'backlog': int(backlog)}
def _parseUNIX(factory, address, mode='666', backlog=50, lockfile=True):
"""
Internal parser function for L{_parseServer} to convert the string
arguments for a UNIX (AF_UNIX/SOCK_STREAM) stream endpoint into the
structured arguments.
@param factory: the protocol factory being parsed, or C{None}. (This was a
leftover argument from when this code was in C{strports}, and is now
mostly None and unused.)
@type factory: L{IProtocolFactory} or C{NoneType}
@param address: the pathname of the unix socket
@type address: C{str}
@param backlog: the length of the listen queue
@type backlog: C{str}
@param lockfile: A string '0' or '1', mapping to True and False
respectively. See the C{wantPID} argument to C{listenUNIX}
@return: a 2-tuple of (args, kwargs), describing the parameters to
        L{IReactorUNIX.listenUNIX} (or, modulo argument 2, the factory,
        arguments to L{UNIXServerEndpoint}).
"""
return (
(address, factory),
{'mode': int(mode, 8), 'backlog': int(backlog),
'wantPID': bool(int(lockfile))})
def _parseSSL(factory, port, privateKey="server.pem", certKey=None,
sslmethod=None, interface='', backlog=50):
"""
Internal parser function for L{_parseServer} to convert the string
arguments for an SSL (over TCP/IPv4) stream endpoint into the structured
arguments.
@param factory: the protocol factory being parsed, or C{None}. (This was a
leftover argument from when this code was in C{strports}, and is now
mostly None and unused.)
@type factory: L{IProtocolFactory} or C{NoneType}
@param port: the integer port number to bind
@type port: C{str}
@param interface: the interface IP to listen on
@param backlog: the length of the listen queue
@type backlog: C{str}
@param privateKey: The file name of a PEM format private key file.
@type privateKey: C{str}
@param certKey: The file name of a PEM format certificate file.
@type certKey: C{str}
@param sslmethod: The string name of an SSL method, based on the name of a
constant in C{OpenSSL.SSL}. Must be one of: "SSLv23_METHOD",
"SSLv2_METHOD", "SSLv3_METHOD", "TLSv1_METHOD".
@type sslmethod: C{str}
@return: a 2-tuple of (args, kwargs), describing the parameters to
L{IReactorSSL.listenSSL} (or, modulo argument 2, the factory, arguments
        to L{SSL4ServerEndpoint}).
"""
from twisted.internet import ssl
if certKey is None:
certKey = privateKey
kw = {}
if sslmethod is not None:
kw['sslmethod'] = getattr(ssl.SSL, sslmethod)
cf = ssl.DefaultOpenSSLContextFactory(privateKey, certKey, **kw)
return ((int(port), factory, cf),
{'interface': interface, 'backlog': int(backlog)})
_serverParsers = {"tcp": _parseTCP,
"unix": _parseUNIX,
"ssl": _parseSSL}
_OP, _STRING = range(2)
def _tokenize(description):
"""
Tokenize a strports string and yield each token.
@param description: a string as described by L{serverFromString} or
L{clientFromString}.
@return: an iterable of 2-tuples of (L{_OP} or L{_STRING}, string). Tuples
starting with L{_OP} will contain a second element of either ':' (i.e.
'next parameter') or '=' (i.e. 'assign parameter value'). For example,
the string 'hello:greet\=ing=world' would result in a generator
yielding these values::
_STRING, 'hello'
_OP, ':'
_STRING, 'greet=ing'
_OP, '='
_STRING, 'world'
"""
current = ''
ops = ':='
nextOps = {':': ':=', '=': ':'}
description = iter(description)
for n in description:
if n in ops:
yield _STRING, current
yield _OP, n
current = ''
ops = nextOps[n]
elif n == '\\':
current += description.next()
else:
current += n
yield _STRING, current
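# Illustrative sketch (an addition): _tokenize() applied to the example from
# its docstring. The escaped '=' survives as literal text inside the token.
def _demo_tokenize():
    tokens = list(_tokenize('hello:greet\\=ing=world'))
    assert tokens == [(_STRING, 'hello'), (_OP, ':'),
                      (_STRING, 'greet=ing'), (_OP, '='),
                      (_STRING, 'world')]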
def _parse(description):
"""
Convert a description string into a list of positional and keyword
parameters, using logic vaguely like what Python does.
@param description: a string as described by L{serverFromString} or
L{clientFromString}.
@return: a 2-tuple of C{(args, kwargs)}, where 'args' is a list of all
':'-separated C{str}s not containing an '=' and 'kwargs' is a map of
all C{str}s which do contain an '='. For example, the result of
C{_parse('a:b:d=1:c')} would be C{(['a', 'b', 'c'], {'d': '1'})}.
"""
args, kw = [], {}
def add(sofar):
if len(sofar) == 1:
args.append(sofar[0])
else:
kw[sofar[0]] = sofar[1]
sofar = ()
for (type, value) in _tokenize(description):
if type is _STRING:
sofar += (value,)
elif value == ':':
add(sofar)
sofar = ()
add(sofar)
return args, kw
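# Illustrative sketch (an addition): the docstring example above, executed.
# Positional and keyword parameters come back from one pass over the tokens.
def _demo_parse():
    args, kw = _parse('a:b:d=1:c')
    assert args == ['a', 'b', 'c']
    assert kw == {'d': '1'}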
# Mappings from description "names" to endpoint constructors.
_endpointServerFactories = {
'TCP': TCP4ServerEndpoint,
'SSL': SSL4ServerEndpoint,
'UNIX': UNIXServerEndpoint,
}
_endpointClientFactories = {
'TCP': TCP4ClientEndpoint,
'SSL': SSL4ClientEndpoint,
'UNIX': UNIXClientEndpoint,
}
_NO_DEFAULT = object()
def _parseServer(description, factory, default=None):
"""
    Parse a strports description into a 3-tuple of (name or plugin,
    arguments, keyword arguments).
@param description: A description in the format explained by
L{serverFromString}.
@type description: C{str}
@param factory: A 'factory' argument; this is left-over from
twisted.application.strports, it's not really used.
@type factory: L{IProtocolFactory} or L{None}
@param default: Deprecated argument, specifying the default parser mode to
use for unqualified description strings (those which do not have a ':'
and prefix).
@type default: C{str} or C{NoneType}
@return: a 3-tuple of (plugin or name, arguments, keyword arguments)
"""
args, kw = _parse(description)
if not args or (len(args) == 1 and not kw):
deprecationMessage = (
"Unqualified strport description passed to 'service'."
"Use qualified endpoint descriptions; for example, 'tcp:%s'."
% (description,))
if default is None:
default = 'tcp'
warnings.warn(
deprecationMessage, category=DeprecationWarning, stacklevel=4)
elif default is _NO_DEFAULT:
raise ValueError(deprecationMessage)
# If the default has been otherwise specified, the user has already
# been warned.
args[0:0] = [default]
endpointType = args[0]
parser = _serverParsers.get(endpointType)
if parser is None:
for plugin in getPlugins(IStreamServerEndpointStringParser):
if plugin.prefix == endpointType:
return (plugin, args[1:], kw)
raise ValueError("Unknown endpoint type: '%s'" % (endpointType,))
return (endpointType.upper(),) + parser(factory, *args[1:], **kw)
def _serverFromStringLegacy(reactor, description, default):
"""
Underlying implementation of L{serverFromString} which avoids exposing the
deprecated 'default' argument to anything but L{strports.service}.
"""
nameOrPlugin, args, kw = _parseServer(description, None, default)
if type(nameOrPlugin) is not str:
plugin = nameOrPlugin
return plugin.parseStreamServer(reactor, *args, **kw)
else:
name = nameOrPlugin
# Chop out the factory.
args = args[:1] + args[2:]
return _endpointServerFactories[name](reactor, *args, **kw)
def serverFromString(reactor, description):
"""
Construct a stream server endpoint from an endpoint description string.
The format for server endpoint descriptions is a simple string. It is a
prefix naming the type of endpoint, then a colon, then the arguments for
that endpoint.
For example, you can call it like this to create an endpoint that will
listen on TCP port 80::
serverFromString(reactor, "tcp:80")
Additional arguments may be specified as keywords, separated with colons.
For example, you can specify the interface for a TCP server endpoint to
bind to like this::
serverFromString(reactor, "tcp:80:interface=127.0.0.1")
SSL server endpoints may be specified with the 'ssl' prefix, and the
private key and certificate files may be specified by the C{privateKey} and
C{certKey} arguments::
serverFromString(reactor, "ssl:443:privateKey=key.pem:certKey=crt.pem")
If a private key file name (C{privateKey}) isn't provided, a "server.pem"
file is assumed to exist which contains the private key. If the certificate
file name (C{certKey}) isn't provided, the private key file is assumed to
contain the certificate as well.
You may escape colons in arguments with a backslash, which you will need to
use if you want to specify a full pathname argument on Windows::
serverFromString(reactor,
"ssl:443:privateKey=C\\:/key.pem:certKey=C\\:/cert.pem")
    Finally, the 'unix' prefix may be used to specify a filesystem UNIX socket,
optionally with a 'mode' argument to specify the mode of the socket file
created by C{listen}::
serverFromString(reactor, "unix:/var/run/finger")
serverFromString(reactor, "unix:/var/run/finger:mode=660")
This function is also extensible; new endpoint types may be registered as
L{IStreamServerEndpointStringParser} plugins. See that interface for more
information.
@param reactor: The server endpoint will be constructed with this reactor.
@param description: The strports description to parse.
@return: A new endpoint which can be used to listen with the parameters
        given by C{description}.
@rtype: L{IStreamServerEndpoint<twisted.internet.interfaces.IStreamServerEndpoint>}
@raise ValueError: when the 'description' string cannot be parsed.
@since: 10.2
"""
return _serverFromStringLegacy(reactor, description, _NO_DEFAULT)
def quoteStringArgument(argument):
"""
    Quote an argument to L{serverFromString} and L{clientFromString}. Since
    arguments are separated with colons and colons are escaped with
    backslashes, some care is necessary. If, for example, you have a
    pathname, you may be tempted to interpolate it into a string like this::
serverFromString("ssl:443:privateKey=%s" % (myPathName,))
This may appear to work, but will have portability issues (Windows
pathnames, for example). Usually you should just construct the appropriate
endpoint type rather than interpolating strings, which in this case would
be L{SSL4ServerEndpoint}. There are some use-cases where you may need to
generate such a string, though; for example, a tool to manipulate a
configuration file which has strports descriptions in it. To be correct in
those cases, do this instead::
serverFromString("ssl:443:privateKey=%s" %
(quoteStringArgument(myPathName),))
@param argument: The part of the endpoint description string you want to
pass through.
@type argument: C{str}
@return: The quoted argument.
@rtype: C{str}
"""
return argument.replace('\\', '\\\\').replace(':', '\\:')
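# Illustrative sketch (an addition): round-tripping a pathname containing a
# colon through quoteStringArgument() and back out of _parse().
def _demo_quoting():
    quoted = quoteStringArgument('C:/key.pem')
    assert quoted == 'C\\:/key.pem'
    description = 'ssl:443:privateKey=%s' % (quoted,)
    assert _parse(description)[1]['privateKey'] == 'C:/key.pem'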
def _parseClientTCP(**kwargs):
"""
Perform any argument value coercion necessary for TCP client parameters.
Valid keyword arguments to this function are all L{IReactorTCP.connectTCP}
arguments.
@return: The coerced values as a C{dict}.
"""
kwargs['port'] = int(kwargs['port'])
try:
kwargs['timeout'] = int(kwargs['timeout'])
except KeyError:
pass
return kwargs
def _loadCAsFromDir(directoryPath):
"""
Load certificate-authority certificate objects in a given directory.
@param directoryPath: a L{FilePath} pointing at a directory to load .pem
files from.
@return: a C{list} of L{OpenSSL.crypto.X509} objects.
"""
from twisted.internet import ssl
caCerts = {}
for child in directoryPath.children():
if not child.basename().split('.')[-1].lower() == 'pem':
continue
try:
data = child.getContent()
except IOError:
# Permission denied, corrupt disk, we don't care.
continue
try:
theCert = ssl.Certificate.loadPEM(data)
except ssl.SSL.Error:
# Duplicate certificate, invalid certificate, etc. We don't care.
pass
else:
caCerts[theCert.digest()] = theCert.original
return caCerts.values()
def _parseClientSSL(**kwargs):
"""
Perform any argument value coercion necessary for SSL client parameters.
Valid keyword arguments to this function are all L{IReactorSSL.connectSSL}
arguments except for C{contextFactory}. Instead, C{certKey} (the path name
    of the certificate file) and C{privateKey} (the path name of the private key
associated with the certificate) are accepted and used to construct a
context factory.
@param caCertsDir: The one parameter which is not part of
L{IReactorSSL.connectSSL}'s signature, this is a path name used to
construct a list of certificate authority certificates. The directory
will be scanned for files ending in C{.pem}, all of which will be
considered valid certificate authorities for this connection.
@type caCertsDir: C{str}
@return: The coerced values as a C{dict}.
"""
from twisted.internet import ssl
kwargs = _parseClientTCP(**kwargs)
certKey = kwargs.pop('certKey', None)
privateKey = kwargs.pop('privateKey', None)
caCertsDir = kwargs.pop('caCertsDir', None)
if certKey is not None:
certx509 = ssl.Certificate.loadPEM(
FilePath(certKey).getContent()).original
else:
certx509 = None
if privateKey is not None:
privateKey = ssl.PrivateCertificate.loadPEM(
FilePath(privateKey).getContent()).privateKey.original
else:
privateKey = None
if caCertsDir is not None:
verify = True
caCerts = _loadCAsFromDir(FilePath(caCertsDir))
else:
verify = False
caCerts = None
kwargs['sslContextFactory'] = ssl.CertificateOptions(
method=ssl.SSL.SSLv23_METHOD,
certificate=certx509,
privateKey=privateKey,
verify=verify,
caCerts=caCerts
)
return kwargs
def _parseClientUNIX(**kwargs):
"""
Perform any argument value coercion necessary for UNIX client parameters.
Valid keyword arguments to this function are all L{IReactorUNIX.connectUNIX}
arguments except for C{checkPID}. Instead, C{lockfile} is accepted and has
the same meaning.
@return: The coerced values as a C{dict}.
"""
try:
kwargs['checkPID'] = bool(int(kwargs.pop('lockfile')))
except KeyError:
pass
try:
kwargs['timeout'] = int(kwargs['timeout'])
except KeyError:
pass
return kwargs
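# Illustrative sketch (an addition): the coercions _parseClientUNIX applies.
# The strports 'lockfile' flag becomes the boolean checkPID argument and
# 'timeout' is coerced from str to int.
def _demo_parse_client_unix():
    coerced = _parseClientUNIX(path='/var/run/app.sock', lockfile='1',
                               timeout='30')
    assert coerced == {'path': '/var/run/app.sock', 'checkPID': True,
                       'timeout': 30}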
_clientParsers = {
'TCP': _parseClientTCP,
'SSL': _parseClientSSL,
'UNIX': _parseClientUNIX,
}
def clientFromString(reactor, description):
"""
Construct a client endpoint from a description string.
Client description strings are much like server description strings,
although they take all of their arguments as keywords, since even the
simplest client endpoint (plain TCP) requires at least 2 arguments (host
and port) to construct.
You can create a TCP client endpoint with the 'host' and 'port' arguments,
like so::
clientFromString(reactor, "tcp:host=www.example.com:port=80")
or an SSL client endpoint with those arguments, plus the arguments used by
the server SSL, for a client certificate::
clientFromString(reactor, "ssl:host=web.example.com:port=443:"
"privateKey=foo.pem:certKey=foo.pem")
to specify your certificate trust roots, you can identify a directory with
PEM files in it with the C{caCertsDir} argument::
clientFromString(reactor, "ssl:host=web.example.com:port=443:"
"caCertsDir=/etc/ssl/certs")
This function is also extensible; new endpoint types may be registered as
L{IStreamClientEndpointStringParser} plugins. See that interface for more
information.
@param reactor: The client endpoint will be constructed with this reactor.
@param description: The strports description to parse.
@return: A new endpoint which can be used to connect with the parameters
        given by C{description}.
@rtype: L{IStreamClientEndpoint<twisted.internet.interfaces.IStreamClientEndpoint>}
@since: 10.2
"""
args, kwargs = _parse(description)
aname = args.pop(0)
name = aname.upper()
for plugin in getPlugins(IStreamClientEndpointStringParser):
if plugin.prefix.upper() == name:
return plugin.parseStreamClient(*args, **kwargs)
if name not in _clientParsers:
raise ValueError("Unknown endpoint type: %r" % (aname,))
kwargs = _clientParsers[name](*args, **kwargs)
return _endpointClientFactories[name](reactor, **kwargs)
| apache-2.0 | 4,291,482,568,986,889,700 | 32.714712 | 87 | 0.64475 | false |
gotcha/Selenium2Library | demo/demoapp/server.py | 48 | 2648 |
#!/usr/bin/env python
# Copyright 2008-2011 Nokia Siemens Networks Oyj
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Simple HTTP server requiring only Python and no other preconditions.
Server is started by running this script with argument 'start' and optional
host and port (default localhost:7272). Server root is the same
directory where this script is situated. Server can be stopped either
using Ctrl-C or running this script with argument 'stop' and the same
host and port as when starting it.
"""
import os
import sys
import httplib
import BaseHTTPServer
import SimpleHTTPServer
DEFAULT_PORT = 7272
DEFAULT_HOST = 'localhost'
class StoppableHttpServer(BaseHTTPServer.HTTPServer):
def serve_forever(self):
self.stop = False
while not self.stop:
try:
self.handle_request()
except KeyboardInterrupt:
break
class StoppableHttpRequestHandler(SimpleHTTPServer.SimpleHTTPRequestHandler):
def do_QUIT(self):
self.send_response(200)
self.end_headers()
self.server.stop = True
def do_POST(self):
        # We could also process parameters here using something like below.
# length = self.headers['Content-Length']
# print self.rfile.read(int(length))
self.do_GET()
def start_server(host=DEFAULT_HOST, port=DEFAULT_PORT):
print "Demo application starting on port %s" % port
root = os.path.dirname(os.path.abspath(__file__))
os.chdir(root)
server = StoppableHttpServer((host, int(port)), StoppableHttpRequestHandler)
server.serve_forever()
def stop_server(host=DEFAULT_HOST, port=DEFAULT_PORT):
print "Demo application on port %s stopping" % port
conn = httplib.HTTPConnection("%s:%s" % (host, port))
conn.request("QUIT", "/")
conn.getresponse()
def print_help():
print __doc__
if __name__ == '__main__':
try:
{'start': start_server,
'stop': stop_server,
'help': print_help}[sys.argv[1]](*sys.argv[2:])
except (IndexError, KeyError, TypeError):
print 'Usage: %s start|stop|help [port]' % os.path.basename(sys.argv[0])
| apache-2.0 | -6,505,144,494,339,511,000 | 30.152941 | 80 | 0.688444 | false |
jymannob/CouchPotatoServer | couchpotato/core/notifications/notifymywp.py | 25 | 2136 |
from couchpotato.core.helpers.variable import splitString
from couchpotato.core.logger import CPLog
from couchpotato.core.notifications.base import Notification
from pynmwp import PyNMWP
import six
log = CPLog(__name__)
autoload = 'NotifyMyWP'
class NotifyMyWP(Notification):
def notify(self, message = '', data = None, listener = None):
if not data: data = {}
keys = splitString(self.conf('api_key'))
p = PyNMWP(keys, self.conf('dev_key'))
response = p.push(application = self.default_title, event = message, description = message, priority = self.conf('priority'), batch_mode = len(keys) > 1)
for key in keys:
if not response[key]['Code'] == six.u('200'):
log.error('Could not send notification to NotifyMyWindowsPhone (%s). %s', (key, response[key]['message']))
return False
return response
config = [{
'name': 'notifymywp',
'groups': [
{
'tab': 'notifications',
'list': 'notification_providers',
'name': 'notifymywp',
'label': 'Windows Phone',
'options': [
{
'name': 'enabled',
'default': 0,
'type': 'enabler',
},
{
'name': 'api_key',
                'description': 'Multiple keys separated by a comma. Maximum of 5.'
},
{
'name': 'dev_key',
'advanced': True,
},
{
'name': 'priority',
'default': 0,
'type': 'dropdown',
'values': [('Very Low', -2), ('Moderate', -1), ('Normal', 0), ('High', 1), ('Emergency', 2)],
},
{
'name': 'on_snatch',
'default': 0,
'type': 'bool',
'advanced': True,
'description': 'Also send message when movie is snatched.',
},
],
}
],
}]
| gpl-3.0 | -7,817,837,923,442,169,000 | 30.411765 | 161 | 0.452715 | false |
ampax/edx-platform-backup | common/test/acceptance/pages/lms/edxnotes.py | 19 | 15856 |
from bok_choy.page_object import PageObject, PageLoadError, unguarded
from bok_choy.promise import BrokenPromise
from .course_page import CoursePage
from ...tests.helpers import disable_animations
from selenium.webdriver.common.action_chains import ActionChains
class NoteChild(PageObject):
url = None
BODY_SELECTOR = None
def __init__(self, browser, item_id):
super(NoteChild, self).__init__(browser)
self.item_id = item_id
def is_browser_on_page(self):
return self.q(css="{}#{}".format(self.BODY_SELECTOR, self.item_id)).present
def _bounded_selector(self, selector):
"""
Return `selector`, but limited to this particular `NoteChild` context
"""
return "{}#{} {}".format(
self.BODY_SELECTOR,
self.item_id,
selector,
)
def _get_element_text(self, selector):
element = self.q(css=self._bounded_selector(selector)).first
if element:
return element.text[0]
else:
return None
class EdxNotesPageGroup(NoteChild):
"""
Helper class that works with note groups on Note page of the course.
"""
BODY_SELECTOR = ".note-group"
@property
def title(self):
return self._get_element_text(".course-title")
@property
def subtitles(self):
return [section.title for section in self.children]
@property
def children(self):
children = self.q(css=self._bounded_selector('.note-section'))
return [EdxNotesPageSection(self.browser, child.get_attribute("id")) for child in children]
class EdxNotesPageSection(NoteChild):
"""
Helper class that works with note sections on Note page of the course.
"""
BODY_SELECTOR = ".note-section"
@property
def title(self):
return self._get_element_text(".course-subtitle")
@property
def children(self):
children = self.q(css=self._bounded_selector('.note'))
return [EdxNotesPageItem(self.browser, child.get_attribute("id")) for child in children]
@property
def notes(self):
return [section.text for section in self.children]
class EdxNotesPageItem(NoteChild):
"""
Helper class that works with note items on Note page of the course.
"""
BODY_SELECTOR = ".note"
UNIT_LINK_SELECTOR = "a.reference-unit-link"
def go_to_unit(self, unit_page=None):
self.q(css=self._bounded_selector(self.UNIT_LINK_SELECTOR)).click()
if unit_page is not None:
unit_page.wait_for_page()
@property
def unit_name(self):
return self._get_element_text(self.UNIT_LINK_SELECTOR)
@property
def text(self):
return self._get_element_text(".note-comment-p")
@property
def quote(self):
return self._get_element_text(".note-excerpt")
@property
def time_updated(self):
return self._get_element_text(".reference-updated-date")
class EdxNotesPageView(PageObject):
"""
Base class for EdxNotes views: Recent Activity, Location in Course, Search Results.
"""
url = None
BODY_SELECTOR = ".tab-panel"
TAB_SELECTOR = ".tab"
CHILD_SELECTOR = ".note"
CHILD_CLASS = EdxNotesPageItem
@unguarded
def visit(self):
"""
Open the page containing this page object in the browser.
Raises:
PageLoadError: The page did not load successfully.
Returns:
PageObject
"""
self.q(css=self.TAB_SELECTOR).first.click()
try:
return self.wait_for_page()
        except BrokenPromise:
raise PageLoadError("Timed out waiting to load page '{!r}'".format(self))
def is_browser_on_page(self):
return all([
self.q(css="{}".format(self.BODY_SELECTOR)).present,
self.q(css="{}.is-active".format(self.TAB_SELECTOR)).present,
not self.q(css=".ui-loading").visible,
])
@property
def is_closable(self):
"""
Indicates if tab is closable or not.
"""
return self.q(css="{} .action-close".format(self.TAB_SELECTOR)).present
def close(self):
"""
Closes the tab.
"""
self.q(css="{} .action-close".format(self.TAB_SELECTOR)).first.click()
@property
def children(self):
"""
Returns all notes on the page.
"""
children = self.q(css=self.CHILD_SELECTOR)
return [self.CHILD_CLASS(self.browser, child.get_attribute("id")) for child in children]
class RecentActivityView(EdxNotesPageView):
"""
Helper class for Recent Activity view.
"""
BODY_SELECTOR = "#recent-panel"
TAB_SELECTOR = ".tab#view-recent-activity"
class CourseStructureView(EdxNotesPageView):
"""
Helper class for Location in Course view.
"""
BODY_SELECTOR = "#structure-panel"
TAB_SELECTOR = ".tab#view-course-structure"
CHILD_SELECTOR = ".note-group"
CHILD_CLASS = EdxNotesPageGroup
class SearchResultsView(EdxNotesPageView):
"""
Helper class for Search Results view.
"""
BODY_SELECTOR = "#search-results-panel"
TAB_SELECTOR = ".tab#view-search-results"
class EdxNotesPage(CoursePage):
"""
EdxNotes page.
"""
url_path = "edxnotes/"
MAPPING = {
"recent": RecentActivityView,
"structure": CourseStructureView,
"search": SearchResultsView,
}
def __init__(self, *args, **kwargs):
super(EdxNotesPage, self).__init__(*args, **kwargs)
self.current_view = self.MAPPING["recent"](self.browser)
def is_browser_on_page(self):
return self.q(css=".wrapper-student-notes").present
def switch_to_tab(self, tab_name):
"""
Switches to the appropriate tab `tab_name(str)`.
"""
self.current_view = self.MAPPING[tab_name](self.browser)
self.current_view.visit()
def close_tab(self, tab_name):
"""
Closes the tab `tab_name(str)`.
"""
self.current_view.close()
self.current_view = self.MAPPING["recent"](self.browser)
def search(self, text):
"""
Runs search with `text(str)` query.
"""
self.q(css="#search-notes-form #search-notes-input").first.fill(text)
self.q(css='#search-notes-form .search-notes-submit').first.click()
# Frontend will automatically switch to Search results tab when search
# is running, so the view also needs to be changed.
self.current_view = self.MAPPING["search"](self.browser)
if text.strip():
self.current_view.wait_for_page()
@property
def tabs(self):
"""
Returns all tabs on the page.
"""
tabs = self.q(css=".tabs .tab-label")
if tabs:
return map(lambda x: x.replace("Current tab\n", ""), tabs.text)
else:
return None
@property
def is_error_visible(self):
"""
Indicates whether error message is visible or not.
"""
return self.q(css=".inline-error").visible
@property
def error_text(self):
"""
Returns error message.
"""
element = self.q(css=".inline-error").first
if element and self.is_error_visible:
return element.text[0]
else:
return None
@property
def notes(self):
"""
Returns all notes on the page.
"""
children = self.q(css='.note')
return [EdxNotesPageItem(self.browser, child.get_attribute("id")) for child in children]
@property
def groups(self):
"""
Returns all groups on the page.
"""
children = self.q(css='.note-group')
return [EdxNotesPageGroup(self.browser, child.get_attribute("id")) for child in children]
@property
def sections(self):
"""
Returns all sections on the page.
"""
children = self.q(css='.note-section')
return [EdxNotesPageSection(self.browser, child.get_attribute("id")) for child in children]
@property
def no_content_text(self):
"""
Returns no content message.
"""
element = self.q(css=".is-empty").first
if element:
return element.text[0]
else:
return None
class EdxNotesUnitPage(CoursePage):
"""
Page for the Unit with EdxNotes.
"""
url_path = "courseware/"
def is_browser_on_page(self):
return self.q(css="body.courseware .edx-notes-wrapper").present
def move_mouse_to(self, selector):
"""
Moves mouse to the element that matches `selector(str)`.
"""
body = self.q(css=selector)[0]
ActionChains(self.browser).move_to_element(body).release().perform()
return self
def click(self, selector):
"""
Clicks on the element that matches `selector(str)`.
"""
self.q(css=selector).first.click()
return self
def toggle_visibility(self):
"""
Clicks on the "Show notes" checkbox.
"""
self.q(css=".action-toggle-notes").first.click()
return self
@property
def components(self):
"""
Returns a list of annotatable components.
"""
components = self.q(css=".edx-notes-wrapper")
return [AnnotatableComponent(self.browser, component.get_attribute("id")) for component in components]
@property
def notes(self):
"""
Returns a list of notes for the page.
"""
notes = []
for component in self.components:
notes.extend(component.notes)
return notes
def refresh(self):
"""
Refreshes the page and returns a list of annotatable components.
"""
self.browser.refresh()
return self.components
class AnnotatableComponent(NoteChild):
"""
Helper class that works with annotatable components.
"""
BODY_SELECTOR = ".edx-notes-wrapper"
@property
def notes(self):
"""
Returns a list of notes for the component.
"""
notes = self.q(css=self._bounded_selector(".annotator-hl"))
return [EdxNoteHighlight(self.browser, note, self.item_id) for note in notes]
def create_note(self, selector=".annotate-id"):
"""
Create the note by the selector, return a context manager that will
show and save the note popup.
"""
for element in self.q(css=self._bounded_selector(selector)):
note = EdxNoteHighlight(self.browser, element, self.item_id)
note.select_and_click_adder()
yield note
note.save()
def edit_note(self, selector=".annotator-hl"):
"""
Edit the note by the selector, return a context manager that will
show and save the note popup.
"""
for element in self.q(css=self._bounded_selector(selector)):
note = EdxNoteHighlight(self.browser, element, self.item_id)
note.show().edit()
yield note
note.save()
def remove_note(self, selector=".annotator-hl"):
"""
Removes the note by the selector.
"""
for element in self.q(css=self._bounded_selector(selector)):
note = EdxNoteHighlight(self.browser, element, self.item_id)
note.show().remove()
class EdxNoteHighlight(NoteChild):
"""
Helper class that works with notes.
"""
BODY_SELECTOR = ""
ADDER_SELECTOR = ".annotator-adder"
VIEWER_SELECTOR = ".annotator-viewer"
EDITOR_SELECTOR = ".annotator-editor"
def __init__(self, browser, element, parent_id):
super(EdxNoteHighlight, self).__init__(browser, parent_id)
self.element = element
self.item_id = parent_id
disable_animations(self)
@property
def is_visible(self):
"""
Returns True if the note is visible.
"""
viewer_is_visible = self.q(css=self._bounded_selector(self.VIEWER_SELECTOR)).visible
editor_is_visible = self.q(css=self._bounded_selector(self.EDITOR_SELECTOR)).visible
return viewer_is_visible or editor_is_visible
def wait_for_adder_visibility(self):
"""
Waiting for visibility of note adder button.
"""
self.wait_for_element_visibility(
self._bounded_selector(self.ADDER_SELECTOR), "Adder is visible."
)
def wait_for_viewer_visibility(self):
"""
Waiting for visibility of note viewer.
"""
self.wait_for_element_visibility(
self._bounded_selector(self.VIEWER_SELECTOR), "Note Viewer is visible."
)
def wait_for_editor_visibility(self):
"""
Waiting for visibility of note editor.
"""
self.wait_for_element_visibility(
self._bounded_selector(self.EDITOR_SELECTOR), "Note Editor is visible."
)
def wait_for_notes_invisibility(self, text="Notes are hidden"):
"""
Waiting for invisibility of all notes.
"""
selector = self._bounded_selector(".annotator-outer")
self.wait_for_element_invisibility(selector, text)
def select_and_click_adder(self):
"""
Creates selection for the element and clicks `add note` button.
"""
ActionChains(self.browser).double_click(self.element).release().perform()
self.wait_for_adder_visibility()
self.q(css=self._bounded_selector(self.ADDER_SELECTOR)).first.click()
self.wait_for_editor_visibility()
return self
def click_on_highlight(self):
"""
Clicks on the highlighted text.
"""
ActionChains(self.browser).move_to_element(self.element).click().release().perform()
return self
def click_on_viewer(self):
"""
Clicks on the note viewer.
"""
self.q(css=self._bounded_selector(self.VIEWER_SELECTOR)).first.click()
return self
def show(self):
"""
Hover over highlighted text -> shows note.
"""
ActionChains(self.browser).move_to_element(self.element).release().perform()
self.wait_for_viewer_visibility()
return self
def cancel(self):
"""
Clicks cancel button.
"""
self.q(css=self._bounded_selector(".annotator-cancel")).first.click()
self.wait_for_notes_invisibility("Note is canceled.")
return self
def save(self):
"""
Clicks save button.
"""
self.q(css=self._bounded_selector(".annotator-save")).first.click()
self.wait_for_notes_invisibility("Note is saved.")
self.wait_for_ajax()
return self
def remove(self):
"""
Clicks delete button.
"""
self.q(css=self._bounded_selector(".annotator-delete")).first.click()
self.wait_for_notes_invisibility("Note is removed.")
self.wait_for_ajax()
return self
def edit(self):
"""
Clicks edit button.
"""
self.q(css=self._bounded_selector(".annotator-edit")).first.click()
self.wait_for_editor_visibility()
return self
@property
def text(self):
"""
Returns text of the note.
"""
self.show()
element = self.q(css=self._bounded_selector(".annotator-annotation > div"))
if element:
text = element.text[0].strip()
else:
text = None
self.q(css=("body")).first.click()
self.wait_for_notes_invisibility()
return text
@text.setter
def text(self, value):
"""
Sets text for the note.
"""
self.q(css=self._bounded_selector(".annotator-item textarea")).first.fill(value)
| agpl-3.0 | 6,007,041,009,201,993,000 | 28.362963 | 110 | 0.593718 | false |
redhat-openstack/nova | nova/api/openstack/compute/schemas/v3/flavor_access.py | 110 | 1710 |
# Copyright 2013 NEC Corporation.  All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
add_tenant_access = {
'type': 'object',
'properties': {
'addTenantAccess': {
'type': 'object',
'properties': {
'tenant': {
# defined from project_id in instance_type_projects table
'type': 'string', 'minLength': 1, 'maxLength': 255,
},
},
'required': ['tenant'],
'additionalProperties': False,
},
},
'required': ['addTenantAccess'],
'additionalProperties': False,
}
remove_tenant_access = {
'type': 'object',
'properties': {
'removeTenantAccess': {
'type': 'object',
'properties': {
'tenant': {
# defined from project_id in instance_type_projects table
'type': 'string', 'minLength': 1, 'maxLength': 255,
},
},
'required': ['tenant'],
'additionalProperties': False,
},
},
'required': ['removeTenantAccess'],
'additionalProperties': False,
}
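# Illustrative sketch (an addition): validating a request body against the
# add_tenant_access schema with the third-party jsonschema package.
def _demo_validate():
    import jsonschema
    body = {'addTenantAccess': {'tenant': 'my-project-id'}}
    jsonschema.validate(body, add_tenant_access)  # passes
    # A body with extra keys or a missing 'tenant' raises ValidationError.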
| apache-2.0 | -4,237,578,269,523,983,000 | 31.884615 | 78 | 0.55731 | false |
ericzhou2008/zulip | api/integrations/perforce/zulip_change-commit.py | 114 | 2744 |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Copyright © 2012-2014 Zulip, Inc.
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
'''Zulip notification change-commit hook.
In Perforce, the "change-commit" trigger is fired after the metadata has been
created, files have been transferred, and the changelist committed to the depot
database.
This specific trigger expects command-line arguments in the form:
%change% %changeroot%
For example:
1234 //depot/security/src/
'''
import os
import sys
import os.path
import git_p4
__version__ = "0.1"
sys.path.insert(0, os.path.dirname(__file__))
import zulip_perforce_config as config
if config.ZULIP_API_PATH is not None:
sys.path.append(config.ZULIP_API_PATH)
import zulip
client = zulip.Client(
email=config.ZULIP_USER,
site=config.ZULIP_SITE,
api_key=config.ZULIP_API_KEY,
client="ZulipPerforce/" + __version__)
try:
changelist = int(sys.argv[1])
changeroot = sys.argv[2]
except IndexError:
print >> sys.stderr, "Wrong number of arguments.\n\n",
print >> sys.stderr, __doc__
sys.exit(-1)
except ValueError:
print >> sys.stderr, "First argument must be an integer.\n\n",
print >> sys.stderr, __doc__
sys.exit(-1)
metadata = git_p4.p4_describe(changelist)
destination = config.commit_notice_destination(changeroot, changelist)
if destination is None:
# Don't forward the notice anywhere
sys.exit(0)
message = """**{0}** committed revision @{1} to `{2}`.
> {3}
""".format(metadata["user"], metadata["change"], changeroot, metadata["desc"])
message_data = {
"type": "stream",
"to": destination["stream"],
"subject": destination["subject"],
"content": message,
}
client.send_message(message_data)
| apache-2.0 | 8,180,969,851,187,401,000 | 30.170455 | 79 | 0.720744 | false |
redhat-openstack/neutron | neutron/plugins/mlnx/agent/eswitch_neutron_agent.py | 8 | 17266 | # Copyright 2013 Mellanox Technologies, Ltd
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import socket
import sys
import time
import eventlet
eventlet.monkey_patch()
from oslo.config import cfg
from neutron.agent import rpc as agent_rpc
from neutron.agent import securitygroups_rpc as sg_rpc
from neutron.common import config as common_config
from neutron.common import constants as q_constants
from neutron.common import rpc as n_rpc
from neutron.common import topics
from neutron.common import utils as q_utils
from neutron import context
from neutron.openstack.common.gettextutils import _LE, _LI
from neutron.openstack.common import log as logging
from neutron.openstack.common import loopingcall
from neutron.plugins.common import constants as p_const
from neutron.plugins.mlnx.agent import utils
from neutron.plugins.mlnx.common import config # noqa
from neutron.plugins.mlnx.common import exceptions
LOG = logging.getLogger(__name__)
class EswitchManager(object):
def __init__(self, interface_mappings, endpoint, timeout):
self.utils = utils.EswitchUtils(endpoint, timeout)
self.interface_mappings = interface_mappings
self.network_map = {}
self.utils.define_fabric_mappings(interface_mappings)
def get_port_id_by_mac(self, port_mac):
for network_id, data in self.network_map.iteritems():
for port in data['ports']:
if port['port_mac'] == port_mac:
return port['port_id']
err_msg = _("Agent cache inconsistency - port id "
"is not stored for %s") % port_mac
LOG.error(err_msg)
raise exceptions.MlnxException(err_msg=err_msg)
def get_vnics_mac(self):
return set(self.utils.get_attached_vnics().keys())
def vnic_port_exists(self, port_mac):
return port_mac in self.utils.get_attached_vnics()
def remove_network(self, network_id):
if network_id in self.network_map:
del self.network_map[network_id]
else:
LOG.debug(_("Network %s not defined on Agent."), network_id)
def port_down(self, network_id, physical_network, port_mac):
"""Sets port to down.
Check internal network map for port data.
If port exists set port to Down
"""
for network_id, data in self.network_map.iteritems():
for port in data['ports']:
if port['port_mac'] == port_mac:
self.utils.port_down(physical_network, port_mac)
return
LOG.info(_('Network %s is not available on this agent'), network_id)
def port_up(self, network_id, network_type,
physical_network, seg_id, port_id, port_mac):
"""Sets port to up.
Update internal network map with port data.
- Check if vnic defined
- configure eswitch vport
- set port to Up
"""
LOG.debug(_("Connecting port %s"), port_id)
if network_id not in self.network_map:
self.provision_network(port_id, port_mac,
network_id, network_type,
physical_network, seg_id)
net_map = self.network_map[network_id]
net_map['ports'].append({'port_id': port_id, 'port_mac': port_mac})
if network_type == p_const.TYPE_VLAN:
            LOG.info(_('Binding Segmentation ID %(seg_id)s '
                       'to eSwitch for vNIC mac_address %(mac)s'),
{'seg_id': seg_id,
'mac': port_mac})
self.utils.set_port_vlan_id(physical_network,
seg_id,
port_mac)
self.utils.port_up(physical_network, port_mac)
else:
LOG.error(_('Unsupported network type %s'), network_type)
def port_release(self, port_mac):
"""Clear port configuration from eSwitch."""
for network_id, net_data in self.network_map.iteritems():
for port in net_data['ports']:
if port['port_mac'] == port_mac:
self.utils.port_release(net_data['physical_network'],
port['port_mac'])
return
LOG.info(_('Port_mac %s is not available on this agent'), port_mac)
def provision_network(self, port_id, port_mac,
network_id, network_type,
physical_network, segmentation_id):
LOG.info(_("Provisioning network %s"), network_id)
if network_type == p_const.TYPE_VLAN:
LOG.debug(_("Creating VLAN Network"))
else:
LOG.error(_("Unknown network type %(network_type)s "
"for network %(network_id)s"),
{'network_type': network_type,
'network_id': network_id})
return
data = {
'physical_network': physical_network,
'network_type': network_type,
'ports': [],
'vlan_id': segmentation_id}
self.network_map[network_id] = data
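# Illustrative sketch (not part of the original module): constructing an
# EswitchManager directly. The interface mapping and daemon endpoint values
# are hypothetical, and the call requires a reachable eSwitchD at the
# endpoint; in the agent they come from configuration.
def _example_eswitch_manager():
    manager = EswitchManager({'physnet1': 'eth2'},
                             'tcp://127.0.0.1:60001', 5000)
    return manager.get_vnics_mac()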
class MlnxEswitchRpcCallbacks(n_rpc.RpcCallback,
sg_rpc.SecurityGroupAgentRpcCallbackMixin):
# Set RPC API version to 1.0 by default.
# history
# 1.1 Support Security Group RPC
RPC_API_VERSION = '1.1'
def __init__(self, context, agent):
super(MlnxEswitchRpcCallbacks, self).__init__()
self.context = context
self.agent = agent
self.eswitch = agent.eswitch
self.sg_agent = agent
def network_delete(self, context, **kwargs):
LOG.debug(_("network_delete received"))
network_id = kwargs.get('network_id')
if not network_id:
LOG.warning(_("Invalid Network ID, cannot remove Network"))
else:
LOG.debug(_("Delete network %s"), network_id)
self.eswitch.remove_network(network_id)
def port_update(self, context, **kwargs):
port = kwargs.get('port')
self.agent.add_port_update(port['mac_address'])
LOG.debug("port_update message processed for port with mac %s",
port['mac_address'])
class MlnxEswitchPluginApi(agent_rpc.PluginApi,
sg_rpc.SecurityGroupServerRpcApiMixin):
pass
class MlnxEswitchNeutronAgent(sg_rpc.SecurityGroupAgentRpcMixin):
# Set RPC API version to 1.0 by default.
#RPC_API_VERSION = '1.0'
def __init__(self, interface_mapping):
self._polling_interval = cfg.CONF.AGENT.polling_interval
self._setup_eswitches(interface_mapping)
configurations = {'interface_mappings': interface_mapping}
self.agent_state = {
'binary': 'neutron-mlnx-agent',
'host': cfg.CONF.host,
'topic': q_constants.L2_AGENT_TOPIC,
'configurations': configurations,
'agent_type': q_constants.AGENT_TYPE_MLNX,
'start_flag': True}
# Stores port update notifications for processing in main rpc loop
self.updated_ports = set()
self._setup_rpc()
self.init_firewall()
def _setup_eswitches(self, interface_mapping):
daemon = cfg.CONF.ESWITCH.daemon_endpoint
timeout = cfg.CONF.ESWITCH.request_timeout
self.eswitch = EswitchManager(interface_mapping, daemon, timeout)
def _report_state(self):
try:
devices = len(self.eswitch.get_vnics_mac())
self.agent_state.get('configurations')['devices'] = devices
self.state_rpc.report_state(self.context,
self.agent_state)
self.agent_state.pop('start_flag', None)
except Exception:
LOG.exception(_("Failed reporting state!"))
def _setup_rpc(self):
self.agent_id = 'mlnx-agent.%s' % socket.gethostname()
LOG.info(_("RPC agent_id: %s"), self.agent_id)
self.topic = topics.AGENT
self.plugin_rpc = MlnxEswitchPluginApi(topics.PLUGIN)
self.state_rpc = agent_rpc.PluginReportStateAPI(topics.PLUGIN)
# RPC network init
self.context = context.get_admin_context_without_session()
# Handle updates from service
self.endpoints = [MlnxEswitchRpcCallbacks(self.context, self)]
# Define the listening consumers for the agent
consumers = [[topics.PORT, topics.UPDATE],
[topics.NETWORK, topics.DELETE],
[topics.SECURITY_GROUP, topics.UPDATE]]
self.connection = agent_rpc.create_consumers(self.endpoints,
self.topic,
consumers)
report_interval = cfg.CONF.AGENT.report_interval
if report_interval:
heartbeat = loopingcall.FixedIntervalLoopingCall(
self._report_state)
heartbeat.start(interval=report_interval)
def add_port_update(self, port):
self.updated_ports.add(port)
def scan_ports(self, previous, sync):
cur_ports = self.eswitch.get_vnics_mac()
port_info = {'current': cur_ports}
updated_ports = self.updated_ports
self.updated_ports = set()
if sync:
# Either it's the first iteration or previous iteration had
# problems.
port_info['added'] = cur_ports
port_info['removed'] = ((previous['removed'] | previous['current'])
- cur_ports)
port_info['updated'] = ((previous['updated'] | updated_ports)
& cur_ports)
else:
# Shouldn't process updates for not existing ports
port_info['added'] = cur_ports - previous['current']
port_info['removed'] = previous['current'] - cur_ports
port_info['updated'] = updated_ports & cur_ports
return port_info
def process_network_ports(self, port_info):
resync_a = False
resync_b = False
device_added_updated = port_info['added'] | port_info['updated']
if device_added_updated:
resync_a = self.treat_devices_added_or_updated(
device_added_updated)
if port_info['removed']:
resync_b = self.treat_devices_removed(port_info['removed'])
        # If one of the above operations fails => resync with plugin
return (resync_a | resync_b)
def treat_vif_port(self, port_id, port_mac,
network_id, network_type,
physical_network, segmentation_id,
admin_state_up):
if self.eswitch.vnic_port_exists(port_mac):
if admin_state_up:
self.eswitch.port_up(network_id,
network_type,
physical_network,
segmentation_id,
port_id,
port_mac)
else:
self.eswitch.port_down(network_id, physical_network, port_mac)
else:
LOG.debug(_("No port %s defined on agent."), port_id)
def treat_devices_added_or_updated(self, devices):
try:
devs_details_list = self.plugin_rpc.get_devices_details_list(
self.context,
devices,
self.agent_id)
except Exception as e:
LOG.debug("Unable to get device details for devices "
"with MAC address %(devices)s: due to %(exc)s",
{'devices': devices, 'exc': e})
# resync is needed
return True
for dev_details in devs_details_list:
device = dev_details['device']
LOG.info(_("Adding or updating port with mac %s"), device)
if 'port_id' in dev_details:
LOG.info(_("Port %s updated"), device)
LOG.debug("Device details %s", str(dev_details))
self.treat_vif_port(dev_details['port_id'],
dev_details['device'],
dev_details['network_id'],
dev_details['network_type'],
dev_details['physical_network'],
dev_details['segmentation_id'],
dev_details['admin_state_up'])
if dev_details.get('admin_state_up'):
LOG.debug("Setting status for %s to UP", device)
self.plugin_rpc.update_device_up(
self.context, device, self.agent_id)
else:
LOG.debug("Setting status for %s to DOWN", device)
self.plugin_rpc.update_device_down(
self.context, device, self.agent_id)
else:
LOG.debug("Device with mac_address %s not defined "
"on Neutron Plugin", device)
return False
def treat_devices_removed(self, devices):
resync = False
for device in devices:
LOG.info(_("Removing device with mac_address %s"), device)
try:
port_id = self.eswitch.get_port_id_by_mac(device)
dev_details = self.plugin_rpc.update_device_down(self.context,
port_id,
self.agent_id,
cfg.CONF.host)
except Exception as e:
LOG.debug(_("Removing port failed for device %(device)s "
"due to %(exc)s"), {'device': device, 'exc': e})
resync = True
continue
if dev_details['exists']:
LOG.info(_("Port %s updated."), device)
else:
LOG.debug(_("Device %s not defined on plugin"), device)
self.eswitch.port_release(device)
return resync
def _port_info_has_changes(self, port_info):
return (port_info['added'] or
port_info['removed'] or
port_info['updated'])
def daemon_loop(self):
LOG.info(_("eSwitch Agent Started!"))
sync = True
port_info = {'current': set(),
'added': set(),
'removed': set(),
'updated': set()}
while True:
start = time.time()
try:
port_info = self.scan_ports(previous=port_info, sync=sync)
except exceptions.RequestTimeout:
LOG.exception(_("Request timeout in agent event loop "
"eSwitchD is not responding - exiting..."))
raise SystemExit(1)
if sync:
LOG.info(_LI("Agent out of sync with plugin!"))
sync = False
if self._port_info_has_changes(port_info):
LOG.debug("Starting to process devices in:%s", port_info)
try:
sync = self.process_network_ports(port_info)
except Exception:
LOG.exception(_LE("Error in agent event loop"))
sync = True
# sleep till end of polling interval
elapsed = (time.time() - start)
if (elapsed < self._polling_interval):
time.sleep(self._polling_interval - elapsed)
else:
LOG.debug(_("Loop iteration exceeded interval "
"(%(polling_interval)s vs. %(elapsed)s)"),
{'polling_interval': self._polling_interval,
'elapsed': elapsed})
def main():
common_config.init(sys.argv[1:])
common_config.setup_logging()
try:
interface_mappings = q_utils.parse_mappings(
cfg.CONF.ESWITCH.physical_interface_mappings)
except ValueError as e:
LOG.error(_("Parsing physical_interface_mappings failed: %s."
" Agent terminated!"), e)
sys.exit(1)
LOG.info(_("Interface mappings: %s"), interface_mappings)
try:
agent = MlnxEswitchNeutronAgent(interface_mappings)
except Exception as e:
LOG.error(_("Failed on Agent initialisation : %s."
" Agent terminated!"), e)
sys.exit(1)
# Start everything.
LOG.info(_("Agent initialised successfully, now running... "))
agent.daemon_loop()
sys.exit(0)
if __name__ == '__main__':
main()
| apache-2.0 | 4,030,902,914,062,779,000 | 39.530516 | 79 | 0.548998 | false |
boundary/boundary-plugin-aws-redshift | boundary_aws_plugin/boundary_plugin.py | 8 | 4162 | from __future__ import (absolute_import, division, print_function, unicode_literals)
import logging
import datetime
import time
import socket
import json
import multiprocessing
from contextlib import contextmanager
import sys
import os
HOSTNAME = socket.gethostname()
metric_log_file = None
plugin_params = None
keepalive_process = None
keepalive_lock = None
"""
If the plugin doesn't generate any output for 30 seconds (hard-coded), the
Boundary Relay thinks we're dead and kills us. Because we may not have any
data to output for much longer than that, we workaround this by outputting
a bogus metric every so often. This constant controls the delay between
bogus metrics; it should be significantly less than 30 seconds to prevent
any timing issues.
"""
KEEPALIVE_INTERVAL = 15
def log_metrics_to_file(filename):
"""
Logs all reported metrics to a file for debugging purposes.
@param filename File name to log to; specify None to disable logging.
"""
global metric_log_file
metric_log_file = filename
def unix_time(dt):
epoch = datetime.datetime.utcfromtimestamp(0)
delta = dt - epoch
return delta.days * 86400 + delta.seconds + delta.microseconds / 1e6
def unix_time_millis(dt):
return unix_time(dt) * 1000.0
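# Illustrative sketch (not part of the original module): converting a naive
# UTC datetime into the epoch milliseconds expected by the relay.
def _example_unix_time_millis():
    one_second_in = datetime.datetime(1970, 1, 1, 0, 0, 1)
    assert unix_time_millis(one_second_in) == 1000.0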
@contextmanager
def maybe_lock(lock):
if lock: lock.acquire()
yield
if lock: lock.release()
return
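# Illustrative sketch (not part of the original module): maybe_lock degrades to
# a no-op context manager when no lock has been created yet, which is why
# boundary_report_metric works before start_keepalive_subprocess() runs.
def _example_maybe_lock():
    with maybe_lock(None):
        pass  # no lock configured; body runs unsynchronized
    lock = multiprocessing.Lock()
    with maybe_lock(lock):
        pass  # body runs while holding the lock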
def boundary_report_metric(name, value, source=None, timestamp=None):
"""
Reports a metric to the Boundary relay.
@param name Metric name, as defined in the plugin's plugin.json file.
@param value Metric value, should be a number.
@param source Metric source. Defaults to the machine's hostname.
@param timestamp Timestamp of the metric as a Python datetime object. Defaults to none
(Boundary uses the current time in that case).
"""
with maybe_lock(keepalive_lock) as _:
source = source or HOSTNAME
if timestamp:
timestamp = unix_time_millis(timestamp)
out = "%s %s %s%s" % (name, value, source, (' %d' % timestamp) if timestamp else '')
print(out)
# Flush stdout before we release the lock so output doesn't get intermixed
sys.stdout.flush()
global metric_log_file
if metric_log_file:
with open(metric_log_file, 'a') as f:
f.write(out + "\n")
def report_alive():
"""
Reports a bogus metric just so the Boundary Relay doesn't think we're dead.
See notes on KEEPALIVE_INTERVAL for more information.
"""
boundary_report_metric('BOGUS_METRIC', 0)
def parse_params():
"""
Parses and returns the contents of the plugin's "param.json" file.
"""
global plugin_params
if not plugin_params:
with open('param.json') as f:
plugin_params = json.loads(f.read())
return plugin_params
def sleep_interval():
"""
Sleeps for the plugin's poll interval, as configured in the plugin's parameters.
"""
params = parse_params()
time.sleep(float(params.get("pollInterval", 1000) / 1000))
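# Illustrative sketch (not part of the original module): the main loop of a
# typical plugin built from these helpers; the metric name and sampling
# callback are hypothetical and would match the plugin's plugin.json.
def _example_plugin_loop(get_sample):
    start_keepalive_subprocess()
    while True:
        boundary_report_metric('EXAMPLE_METRIC', get_sample(),
                               timestamp=datetime.datetime.utcnow())
        sleep_interval()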
def __keepalive_process_main(parent_pid):
    # Workaround: on Linux, the Boundary Relay sends SIGTERM to kill the plugin, which kills the main process but
# doesn't kill the keepalive process. We work around this by identifying that our parent has died (and
# accordingly, our parent is now init) and killing ourselves.
# Note that os.getppid() doesn't exist on Windows, hence the getattr workaround.
while parent_pid == getattr(os, 'getppid', lambda: parent_pid)():
report_alive()
time.sleep(KEEPALIVE_INTERVAL)
def start_keepalive_subprocess():
"""
Starts the subprocess that keeps us alive by reporting bogus metrics.
This function should be called only once on plugin startup.
See notes on KEEPALIVE_INTERVAL for more information.
"""
global keepalive_lock, keepalive_process
assert not keepalive_lock and not keepalive_process
keepalive_lock = multiprocessing.Lock()
keepalive_process = multiprocessing.Process(target=__keepalive_process_main, args=(os.getpid(),))
keepalive_process.start()
| apache-2.0 | 1,283,864,263,078,800,400 | 31.263566 | 115 | 0.694137 | false |
musicrighter/CIS422-P2 | env/lib/python3.4/site-packages/pymongo/periodic_executor.py | 16 | 4679 | # Copyright 2014-2015 MongoDB, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you
# may not use this file except in compliance with the License. You
# may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied. See the License for the specific language governing
# permissions and limitations under the License.
"""Run a target function on a background thread."""
import atexit
import threading
import time
import weakref
from pymongo import thread_util
from pymongo.monotonic import time as _time
class PeriodicExecutor(object):
def __init__(self, condition_class, interval, min_interval, target):
""""Run a target function periodically on a background thread.
If the target's return value is false, the executor stops.
:Parameters:
- `condition_class`: A class like threading.Condition.
- `interval`: Seconds between calls to `target`.
- `min_interval`: Minimum seconds between calls if `wake` is
called very often.
- `target`: A function.
"""
self._event = thread_util.Event(condition_class)
self._interval = interval
self._min_interval = min_interval
self._target = target
self._stopped = False
self._thread = None
def open(self):
"""Start. Multiple calls have no effect.
Not safe to call from multiple threads at once.
"""
self._stopped = False
started = False
try:
started = self._thread and self._thread.is_alive()
except ReferenceError:
# Thread terminated.
pass
if not started:
thread = threading.Thread(target=self._run)
thread.daemon = True
self._thread = weakref.proxy(thread)
_register_executor(self)
thread.start()
def close(self, dummy=None):
"""Stop. To restart, call open().
The dummy parameter allows an executor's close method to be a weakref
callback; see monitor.py.
Since this can be called from a weakref callback during garbage
collection it must take no locks! That means it cannot call wake().
"""
self._stopped = True
def join(self, timeout=None):
if self._thread is not None:
try:
self._thread.join(timeout)
except ReferenceError:
# Thread already terminated.
pass
def wake(self):
"""Execute the target function soon."""
self._event.set()
def _run(self):
while not self._stopped:
try:
if not self._target():
self._stopped = True
break
except:
self._stopped = True
raise
deadline = _time() + self._interval
# Avoid running too frequently if wake() is called very often.
time.sleep(self._min_interval)
# Until the deadline, wake often to check if close() was called.
while not self._stopped and _time() < deadline:
# Our Event's wait returns True if set, else False.
if self._event.wait(0.1):
# Someone called wake().
break
self._event.clear()
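# Illustrative sketch (not part of the original module): an executor whose
# target returns False after three beats, which stops the loop; the interval
# values are arbitrary.
def _example_periodic_executor():
    state = {'beats': 0}
    def _beat():
        state['beats'] += 1
        return state['beats'] < 3
    executor = PeriodicExecutor(
        condition_class=threading.Condition,
        interval=10,
        min_interval=0.5,
        target=_beat)
    executor.open()
    return executor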
# _EXECUTORS has a weakref to each running PeriodicExecutor. Once started,
# an executor is kept alive by a strong reference from its thread and perhaps
# from other objects. When the thread dies and all other referrers are freed,
# the executor is freed and removed from _EXECUTORS. If any threads are
# running when the interpreter begins to shut down, we try to halt and join
# them to avoid spurious errors.
_EXECUTORS = set()
def _register_executor(executor):
ref = weakref.ref(executor, _on_executor_deleted)
_EXECUTORS.add(ref)
def _on_executor_deleted(ref):
_EXECUTORS.remove(ref)
def _shutdown_executors():
# Copy the set. Stopping threads has the side effect of removing executors.
executors = list(_EXECUTORS)
# First signal all executors to close...
for ref in executors:
executor = ref()
if executor:
executor.close()
# ...then try to join them.
for ref in executors:
executor = ref()
if executor:
executor.join(1)
executor = None
atexit.register(_shutdown_executors)
| artistic-2.0 | 2,810,738,558,260,062,000 | 30.193333 | 79 | 0.61573 | false |
nexiles/odoo | addons/l10n_be_coda/wizard/account_coda_import.py | 255 | 24127 | # -*- encoding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
#
# Copyright (c) 2012 Noviat nv/sa (www.noviat.be). All rights reserved.
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
import base64
import time
from openerp.osv import fields, osv
from openerp.tools.translate import _
from openerp import tools
import logging
_logger = logging.getLogger(__name__)
class account_coda_import(osv.osv_memory):
_name = 'account.coda.import'
_description = 'Import CODA File'
_columns = {
'coda_data': fields.binary('CODA File', required=True),
'coda_fname': fields.char('CODA Filename', required=True),
'note': fields.text('Log'),
}
_defaults = {
'coda_fname': 'coda.txt',
}
def coda_parsing(self, cr, uid, ids, context=None, batch=False, codafile=None, codafilename=None):
if context is None:
context = {}
if batch:
codafile = str(codafile)
codafilename = codafilename
else:
data = self.browse(cr, uid, ids)[0]
try:
codafile = data.coda_data
codafilename = data.coda_fname
except:
raise osv.except_osv(_('Error'), _('Wizard in incorrect state. Please hit the Cancel button'))
return {}
recordlist = unicode(base64.decodestring(codafile), 'windows-1252', 'strict').split('\n')
statements = []
globalisation_comm = {}
for line in recordlist:
if not line:
pass
elif line[0] == '0':
#Begin of a new Bank statement
statement = {}
statements.append(statement)
statement['version'] = line[127]
if statement['version'] not in ['1', '2']:
raise osv.except_osv(_('Error') + ' R001', _('CODA V%s statements are not supported, please contact your bank') % statement['version'])
statement['globalisation_stack'] = []
statement['lines'] = []
statement['date'] = time.strftime(tools.DEFAULT_SERVER_DATE_FORMAT, time.strptime(rmspaces(line[5:11]), '%d%m%y'))
statement['separateApplication'] = rmspaces(line[83:88])
elif line[0] == '1':
#Statement details
if statement['version'] == '1':
statement['acc_number'] = rmspaces(line[5:17])
statement['currency'] = rmspaces(line[18:21])
elif statement['version'] == '2':
if line[1] == '0': # Belgian bank account BBAN structure
statement['acc_number'] = rmspaces(line[5:17])
statement['currency'] = rmspaces(line[18:21])
elif line[1] == '1': # foreign bank account BBAN structure
raise osv.except_osv(_('Error') + ' R1001', _('Foreign bank accounts with BBAN structure are not supported '))
elif line[1] == '2': # Belgian bank account IBAN structure
statement['acc_number'] = rmspaces(line[5:21])
statement['currency'] = rmspaces(line[39:42])
elif line[1] == '3': # foreign bank account IBAN structure
raise osv.except_osv(_('Error') + ' R1002', _('Foreign bank accounts with IBAN structure are not supported '))
else: # Something else, not supported
raise osv.except_osv(_('Error') + ' R1003', _('Unsupported bank account structure '))
statement['journal_id'] = False
statement['bank_account'] = False
# Belgian Account Numbers are composed of 12 digits.
                # In OpenERP, the user can fill in the bank number in any format: with or without IBAN code, with or without spaces, with or without '-'.
                # The two following SQL requests handle those cases.
if len(statement['acc_number']) >= 12:
                    # If the Account Number is >= 12 digits, it is most likely a Belgian Account Number (with or without IBAN).
# The following request try to find the Account Number using a 'like' operator.
# So, if the Account Number is stored with IBAN code, it can be found thanks to this.
cr.execute("select id from res_partner_bank where replace(replace(acc_number,' ',''),'-','') like %s", ('%' + statement['acc_number'] + '%',))
else:
                    # This case is necessary to avoid cases where the Account Number in the CODA file is only a single digit or a few digits,
                    # in which case a 'like' operator would return the first account number in the database that matches.
cr.execute("select id from res_partner_bank where replace(replace(acc_number,' ',''),'-','') = %s", (statement['acc_number'],))
bank_ids = [id[0] for id in cr.fetchall()]
# Filter bank accounts which are not allowed
bank_ids = self.pool.get('res.partner.bank').search(cr, uid, [('id', 'in', bank_ids)])
if bank_ids and len(bank_ids) > 0:
bank_accs = self.pool.get('res.partner.bank').browse(cr, uid, bank_ids)
for bank_acc in bank_accs:
if bank_acc.journal_id.id and ((bank_acc.journal_id.currency.id and bank_acc.journal_id.currency.name == statement['currency']) or (not bank_acc.journal_id.currency.id and bank_acc.journal_id.company_id.currency_id.name == statement['currency'])):
statement['journal_id'] = bank_acc.journal_id
statement['bank_account'] = bank_acc
break
if not statement['bank_account']:
raise osv.except_osv(_('Error') + ' R1004', _("No matching Bank Account (with Account Journal) found.\n\nPlease set-up a Bank Account with as Account Number '%s' and as Currency '%s' and an Account Journal.") % (statement['acc_number'], statement['currency']))
statement['description'] = rmspaces(line[90:125])
statement['balance_start'] = float(rmspaces(line[43:58])) / 1000
if line[42] == '1': #1 = Debit, the starting balance is negative
statement['balance_start'] = - statement['balance_start']
statement['balance_start_date'] = time.strftime(tools.DEFAULT_SERVER_DATE_FORMAT, time.strptime(rmspaces(line[58:64]), '%d%m%y'))
statement['accountHolder'] = rmspaces(line[64:90])
statement['paperSeqNumber'] = rmspaces(line[2:5])
statement['codaSeqNumber'] = rmspaces(line[125:128])
elif line[0] == '2':
if line[1] == '1':
#New statement line
statementLine = {}
statementLine['ref'] = rmspaces(line[2:10])
statementLine['ref_move'] = rmspaces(line[2:6])
statementLine['ref_move_detail'] = rmspaces(line[6:10])
statementLine['sequence'] = len(statement['lines']) + 1
statementLine['transactionRef'] = rmspaces(line[10:31])
statementLine['debit'] = line[31] # 0 = Credit, 1 = Debit
statementLine['amount'] = float(rmspaces(line[32:47])) / 1000
if statementLine['debit'] == '1':
statementLine['amount'] = - statementLine['amount']
statementLine['transactionDate'] = time.strftime(tools.DEFAULT_SERVER_DATE_FORMAT, time.strptime(rmspaces(line[47:53]), '%d%m%y'))
statementLine['transaction_family'] = rmspaces(line[54:56])
statementLine['transaction_code'] = rmspaces(line[56:58])
statementLine['transaction_category'] = rmspaces(line[58:61])
if line[61] == '1':
#Structured communication
statementLine['communication_struct'] = True
statementLine['communication_type'] = line[62:65]
statementLine['communication'] = '+++' + line[65:68] + '/' + line[68:72] + '/' + line[72:77] + '+++'
else:
#Non-structured communication
statementLine['communication_struct'] = False
statementLine['communication'] = rmspaces(line[62:115])
statementLine['entryDate'] = time.strftime(tools.DEFAULT_SERVER_DATE_FORMAT, time.strptime(rmspaces(line[115:121]), '%d%m%y'))
statementLine['type'] = 'normal'
statementLine['globalisation'] = int(line[124])
if statementLine['globalisation'] > 0:
if statementLine['globalisation'] in statement['globalisation_stack']:
statement['globalisation_stack'].remove(statementLine['globalisation'])
else:
statementLine['type'] = 'globalisation'
statement['globalisation_stack'].append(statementLine['globalisation'])
globalisation_comm[statementLine['ref_move']] = statementLine['communication']
if not statementLine.get('communication'):
statementLine['communication'] = globalisation_comm.get(statementLine['ref_move'], '')
statement['lines'].append(statementLine)
elif line[1] == '2':
if statement['lines'][-1]['ref'][0:4] != line[2:6]:
raise osv.except_osv(_('Error') + 'R2004', _('CODA parsing error on movement data record 2.2, seq nr %s! Please report this issue via your Odoo support channel.') % line[2:10])
statement['lines'][-1]['communication'] += rmspaces(line[10:63])
statement['lines'][-1]['payment_reference'] = rmspaces(line[63:98])
statement['lines'][-1]['counterparty_bic'] = rmspaces(line[98:109])
elif line[1] == '3':
if statement['lines'][-1]['ref'][0:4] != line[2:6]:
raise osv.except_osv(_('Error') + 'R2005', _('CODA parsing error on movement data record 2.3, seq nr %s! Please report this issue via your Odoo support channel.') % line[2:10])
if statement['version'] == '1':
statement['lines'][-1]['counterpartyNumber'] = rmspaces(line[10:22])
statement['lines'][-1]['counterpartyName'] = rmspaces(line[47:73])
statement['lines'][-1]['counterpartyAddress'] = rmspaces(line[73:125])
statement['lines'][-1]['counterpartyCurrency'] = ''
else:
if line[22] == ' ':
statement['lines'][-1]['counterpartyNumber'] = rmspaces(line[10:22])
statement['lines'][-1]['counterpartyCurrency'] = rmspaces(line[23:26])
else:
statement['lines'][-1]['counterpartyNumber'] = rmspaces(line[10:44])
statement['lines'][-1]['counterpartyCurrency'] = rmspaces(line[44:47])
statement['lines'][-1]['counterpartyName'] = rmspaces(line[47:82])
statement['lines'][-1]['communication'] += rmspaces(line[82:125])
else:
# movement data record 2.x (x != 1,2,3)
raise osv.except_osv(_('Error') + 'R2006', _('\nMovement data records of type 2.%s are not supported ') % line[1])
elif line[0] == '3':
if line[1] == '1':
infoLine = {}
infoLine['entryDate'] = statement['lines'][-1]['entryDate']
infoLine['type'] = 'information'
infoLine['sequence'] = len(statement['lines']) + 1
infoLine['ref'] = rmspaces(line[2:10])
infoLine['transactionRef'] = rmspaces(line[10:31])
infoLine['transaction_family'] = rmspaces(line[32:34])
infoLine['transaction_code'] = rmspaces(line[34:36])
infoLine['transaction_category'] = rmspaces(line[36:39])
infoLine['communication'] = rmspaces(line[40:113])
statement['lines'].append(infoLine)
elif line[1] == '2':
if infoLine['ref'] != rmspaces(line[2:10]):
raise osv.except_osv(_('Error') + 'R3004', _('CODA parsing error on information data record 3.2, seq nr %s! Please report this issue via your Odoo support channel.') % line[2:10])
statement['lines'][-1]['communication'] += rmspaces(line[10:100])
elif line[1] == '3':
if infoLine['ref'] != rmspaces(line[2:10]):
raise osv.except_osv(_('Error') + 'R3005', _('CODA parsing error on information data record 3.3, seq nr %s! Please report this issue via your Odoo support channel.') % line[2:10])
statement['lines'][-1]['communication'] += rmspaces(line[10:100])
elif line[0] == '4':
comm_line = {}
comm_line['type'] = 'communication'
comm_line['sequence'] = len(statement['lines']) + 1
comm_line['ref'] = rmspaces(line[2:10])
comm_line['communication'] = rmspaces(line[32:112])
statement['lines'].append(comm_line)
elif line[0] == '8':
# new balance record
statement['debit'] = line[41]
statement['paperSeqNumber'] = rmspaces(line[1:4])
statement['balance_end_real'] = float(rmspaces(line[42:57])) / 1000
statement['balance_end_realDate'] = time.strftime(tools.DEFAULT_SERVER_DATE_FORMAT, time.strptime(rmspaces(line[57:63]), '%d%m%y'))
if statement['debit'] == '1': # 1=Debit
statement['balance_end_real'] = - statement['balance_end_real']
if statement['balance_end_realDate']:
period_id = self.pool.get('account.period').search(cr, uid, [('company_id', '=', statement['journal_id'].company_id.id), ('date_start', '<=', statement['balance_end_realDate']), ('date_stop', '>=', statement['balance_end_realDate'])])
else:
period_id = self.pool.get('account.period').search(cr, uid, [('company_id', '=', statement['journal_id'].company_id.id), ('date_start', '<=', statement['date']), ('date_stop', '>=', statement['date'])])
if not period_id and len(period_id) == 0:
raise osv.except_osv(_('Error') + 'R0002', _("The CODA Statement New Balance date doesn't fall within a defined Accounting Period! Please create the Accounting Period for date %s for the company %s.") % (statement['balance_end_realDate'], statement['journal_id'].company_id.name))
statement['period_id'] = period_id[0]
elif line[0] == '9':
statement['balanceMin'] = float(rmspaces(line[22:37])) / 1000
statement['balancePlus'] = float(rmspaces(line[37:52])) / 1000
if not statement.get('balance_end_real'):
statement['balance_end_real'] = statement['balance_start'] + statement['balancePlus'] - statement['balanceMin']
for i, statement in enumerate(statements):
statement['coda_note'] = ''
balance_start_check_date = (len(statement['lines']) > 0 and statement['lines'][0]['entryDate']) or statement['date']
cr.execute('SELECT balance_end_real \
FROM account_bank_statement \
WHERE journal_id = %s and date <= %s \
ORDER BY date DESC,id DESC LIMIT 1', (statement['journal_id'].id, balance_start_check_date))
res = cr.fetchone()
balance_start_check = res and res[0]
if balance_start_check == None:
if statement['journal_id'].default_debit_account_id and (statement['journal_id'].default_credit_account_id == statement['journal_id'].default_debit_account_id):
balance_start_check = statement['journal_id'].default_debit_account_id.balance
else:
raise osv.except_osv(_('Error'), _("Configuration Error in journal %s!\nPlease verify the Default Debit and Credit Account settings.") % statement['journal_id'].name)
if balance_start_check != statement['balance_start']:
statement['coda_note'] = _("The CODA Statement %s Starting Balance (%.2f) does not correspond with the previous Closing Balance (%.2f) in journal %s!") % (statement['description'] + ' #' + statement['paperSeqNumber'], statement['balance_start'], balance_start_check, statement['journal_id'].name)
if not(statement.get('period_id')):
                raise osv.except_osv(_('Error') + ' R3006', _('No transactions or no period in CODA file!'))
data = {
'name': statement['paperSeqNumber'],
'date': statement['date'],
'journal_id': statement['journal_id'].id,
'period_id': statement['period_id'],
'balance_start': statement['balance_start'],
'balance_end_real': statement['balance_end_real'],
}
statement['id'] = self.pool.get('account.bank.statement').create(cr, uid, data, context=context)
for line in statement['lines']:
if line['type'] == 'information':
statement['coda_note'] = "\n".join([statement['coda_note'], line['type'].title() + ' with Ref. ' + str(line['ref']), 'Date: ' + str(line['entryDate']), 'Communication: ' + line['communication'], ''])
elif line['type'] == 'communication':
statement['coda_note'] = "\n".join([statement['coda_note'], line['type'].title() + ' with Ref. ' + str(line['ref']), 'Ref: ', 'Communication: ' + line['communication'], ''])
elif line['type'] == 'normal':
note = []
if 'counterpartyName' in line and line['counterpartyName'] != '':
note.append(_('Counter Party') + ': ' + line['counterpartyName'])
else:
line['counterpartyName'] = False
if 'counterpartyNumber' in line and line['counterpartyNumber'] != '':
try:
if int(line['counterpartyNumber']) == 0:
line['counterpartyNumber'] = False
except:
pass
if line['counterpartyNumber']:
note.append(_('Counter Party Account') + ': ' + line['counterpartyNumber'])
else:
line['counterpartyNumber'] = False
if 'counterpartyAddress' in line and line['counterpartyAddress'] != '':
note.append(_('Counter Party Address') + ': ' + line['counterpartyAddress'])
partner_id = None
structured_com = False
bank_account_id = False
if line['communication_struct'] and 'communication_type' in line and line['communication_type'] == '101':
structured_com = line['communication']
if 'counterpartyNumber' in line and line['counterpartyNumber']:
account = str(line['counterpartyNumber'])
domain = [('acc_number', '=', account)]
iban = account[0:2].isalpha()
if iban:
n = 4
space_separated_account = ' '.join(account[i:i + n] for i in range(0, len(account), n))
domain = ['|', ('acc_number', '=', space_separated_account)] + domain
ids = self.pool.get('res.partner.bank').search(cr, uid, domain)
if ids:
bank_account_id = ids[0]
bank_account = self.pool.get('res.partner.bank').browse(cr, uid, bank_account_id, context=context)
line['counterpartyNumber'] = bank_account.acc_number
partner_id = bank_account.partner_id.id
else:
#create the bank account, not linked to any partner. The reconciliation will link the partner manually
#chosen at the bank statement final confirmation time.
try:
type_model, type_id = self.pool.get('ir.model.data').get_object_reference(cr, uid, 'base', 'bank_normal')
type_id = self.pool.get('res.partner.bank.type').browse(cr, uid, type_id, context=context)
bank_code = type_id.code
except ValueError:
bank_code = 'bank'
bank_account_id = self.pool.get('res.partner.bank').create(cr, uid, {'acc_number': str(line['counterpartyNumber']), 'state': bank_code}, context=context)
if line.get('communication', ''):
note.append(_('Communication') + ': ' + line['communication'])
data = {
'name': structured_com or (line.get('communication', '') != '' and line['communication'] or '/'),
'note': "\n".join(note),
'date': line['entryDate'],
'amount': line['amount'],
'partner_id': partner_id,
'partner_name': line['counterpartyName'],
'statement_id': statement['id'],
'ref': line['ref'],
'sequence': line['sequence'],
'bank_account_id': bank_account_id,
}
self.pool.get('account.bank.statement.line').create(cr, uid, data, context=context)
if statement['coda_note'] != '':
self.pool.get('account.bank.statement').write(cr, uid, [statement['id']], {'coda_note': statement['coda_note']}, context=context)
model, action_id = self.pool.get('ir.model.data').get_object_reference(cr, uid, 'account', 'action_bank_reconcile_bank_statements')
action = self.pool[model].browse(cr, uid, action_id, context=context)
statements_ids = [statement['id'] for statement in statements]
return {
'name': action.name,
'tag': action.tag,
'context': {'statement_ids': statements_ids},
'type': 'ir.actions.client',
}
def rmspaces(s):
return " ".join(s.split())
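# Illustrative sketch (not part of the original module): rmspaces collapses the
# run-length padding found in fixed-width CODA records.
def _example_rmspaces():
    assert rmspaces('  BE68 5390   0754 7034  ') == 'BE68 5390 0754 7034'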
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
| agpl-3.0 | -3,626,367,692,760,846,000 | 67.155367 | 312 | 0.527915 | false |
JioCloud/tempest | tempest/api/network/test_routers_negative.py | 11 | 5926 | # Copyright 2013 OpenStack Foundation
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import netaddr
from tempest_lib.common.utils import data_utils
from tempest_lib import exceptions as lib_exc
from tempest.api.network import base_routers as base
from tempest import config
from tempest import test
CONF = config.CONF
class RoutersNegativeTest(base.BaseRouterTest):
@classmethod
def skip_checks(cls):
super(RoutersNegativeTest, cls).skip_checks()
if not test.is_extension_enabled('router', 'network'):
msg = "router extension not enabled."
raise cls.skipException(msg)
@classmethod
def resource_setup(cls):
super(RoutersNegativeTest, cls).resource_setup()
cls.router = cls.create_router(data_utils.rand_name('router-'))
cls.network = cls.create_network()
cls.subnet = cls.create_subnet(cls.network)
cls.tenant_cidr = (CONF.network.tenant_network_cidr
if cls._ip_version == 4 else
CONF.network.tenant_network_v6_cidr)
@test.attr(type=['negative'])
@test.idempotent_id('37a94fc0-a834-45b9-bd23-9a81d2fd1e22')
def test_router_add_gateway_invalid_network_returns_404(self):
self.assertRaises(lib_exc.NotFound,
self.client.update_router,
self.router['id'],
external_gateway_info={
'network_id': self.router['id']})
@test.attr(type=['negative'])
@test.idempotent_id('11836a18-0b15-4327-a50b-f0d9dc66bddd')
def test_router_add_gateway_net_not_external_returns_400(self):
alt_network = self.create_network(
network_name=data_utils.rand_name('router-negative-'))
sub_cidr = netaddr.IPNetwork(self.tenant_cidr).next()
self.create_subnet(alt_network, cidr=sub_cidr)
self.assertRaises(lib_exc.BadRequest,
self.client.update_router,
self.router['id'],
external_gateway_info={
'network_id': alt_network['id']})
@test.attr(type=['negative'])
@test.idempotent_id('957751a3-3c68-4fa2-93b6-eb52ea10db6e')
def test_add_router_interfaces_on_overlapping_subnets_returns_400(self):
network01 = self.create_network(
network_name=data_utils.rand_name('router-network01-'))
network02 = self.create_network(
network_name=data_utils.rand_name('router-network02-'))
subnet01 = self.create_subnet(network01)
subnet02 = self.create_subnet(network02)
self._add_router_interface_with_subnet_id(self.router['id'],
subnet01['id'])
self.assertRaises(lib_exc.BadRequest,
self._add_router_interface_with_subnet_id,
self.router['id'],
subnet02['id'])
@test.attr(type=['negative'])
@test.idempotent_id('04df80f9-224d-47f5-837a-bf23e33d1c20')
def test_router_remove_interface_in_use_returns_409(self):
self.client.add_router_interface_with_subnet_id(
self.router['id'], self.subnet['id'])
self.assertRaises(lib_exc.Conflict,
self.client.delete_router,
self.router['id'])
@test.attr(type=['negative'])
@test.idempotent_id('c2a70d72-8826-43a7-8208-0209e6360c47')
def test_show_non_existent_router_returns_404(self):
router = data_utils.rand_name('non_exist_router')
self.assertRaises(lib_exc.NotFound, self.client.show_router,
router)
@test.attr(type=['negative'])
@test.idempotent_id('b23d1569-8b0c-4169-8d4b-6abd34fad5c7')
def test_update_non_existent_router_returns_404(self):
router = data_utils.rand_name('non_exist_router')
self.assertRaises(lib_exc.NotFound, self.client.update_router,
router, name="new_name")
@test.attr(type=['negative'])
@test.idempotent_id('c7edc5ad-d09d-41e6-a344-5c0c31e2e3e4')
def test_delete_non_existent_router_returns_404(self):
router = data_utils.rand_name('non_exist_router')
self.assertRaises(lib_exc.NotFound, self.client.delete_router,
router)
class RoutersNegativeIpV6Test(RoutersNegativeTest):
_ip_version = 6
class DvrRoutersNegativeTest(base.BaseRouterTest):
@classmethod
def skip_checks(cls):
super(DvrRoutersNegativeTest, cls).skip_checks()
if not test.is_extension_enabled('dvr', 'network'):
msg = "DVR extension not enabled."
raise cls.skipException(msg)
@classmethod
def resource_setup(cls):
super(DvrRoutersNegativeTest, cls).resource_setup()
cls.router = cls.create_router(data_utils.rand_name('router'))
cls.network = cls.create_network()
cls.subnet = cls.create_subnet(cls.network)
@test.attr(type=['negative'])
@test.idempotent_id('4990b055-8fc7-48ab-bba7-aa28beaad0b9')
def test_router_create_tenant_distributed_returns_forbidden(self):
self.assertRaises(lib_exc.Forbidden,
self.create_router,
data_utils.rand_name('router'),
distributed=True)
| apache-2.0 | -6,493,495,303,022,511,000 | 41.028369 | 78 | 0.625886 | false |
vivsh/django-ginger | ginger/html/forms.py | 1 | 6313 | from ginger.dataset import GingerDataSet
from django.forms.widgets import CheckboxInput
import re
from collections import namedtuple
from django.middleware import csrf
from django.utils import six
from django.utils.encoding import force_text
from ginger import utils
from . import common
__all__ = ["Choice", "Link", "form_csrf_tag", "form_attrs", "form_css_class",
"field_choices", "field_name_range", "field_links", "iter_fields", "widget_css_class",
"render_widget", "register_layout", "render_field", "field_css_class", "field_range",
"render_page", "wrap_csrf_token", "is_selected_choice", "make_css_class"]
Choice = namedtuple("Choice", ["name", "value", "content", "selected"])
class Link(object):
def __init__(self, url, content, is_active=False, **kwargs):
self.url = url
self.content = content
self.is_active = is_active
for k in kwargs:
setattr(self, k, kwargs[k])
_layouts = {}
def make_css_class(obj, suffix=""):
name = utils.camel_to_hyphen(re.sub(r'(?i)widget|field|ginger|form|input', '', obj.__class__.__name__, 1))
if suffix:
name = "%s%s" % (name, suffix)
return name
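# Illustrative sketch (not part of the original module), assuming
# utils.camel_to_hyphen turns CamelCase into hyphenated lowercase: a
# hypothetical SelectMultipleWidget instance would yield
# 'select-multiple-widget' after the framework words are stripped.
def _example_make_css_class():
    class SelectMultipleWidget(object):
        pass
    return make_css_class(SelectMultipleWidget(), '-widget')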
def is_selected_choice(values, choice):
if not isinstance(values, (list, tuple)):
values = (values, )
text_choice = force_text(choice)
for v in values:
if v == choice or text_choice == force_text(v):
return True
return False
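# Illustrative sketch (not part of the original module): values coming from a
# form post are strings, so selection is matched after text coercion, and
# multi-valued fields are accepted as lists or tuples.
def _example_is_selected_choice():
    assert is_selected_choice('2', 2)
    assert is_selected_choice(['1', '3'], 3)
    assert not is_selected_choice([], 3)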
def field_choices(field):
form_field = field.field
field_value = field.value()
name = field.html_name
for code, label in form_field.choices:
is_active = is_selected_choice(field_value, code)
yield Choice(name, code, label, is_active)
def field_links(request, field):
url = request.get_full_path()
form_field = field.field
field_value = field.value()
if hasattr(form_field, 'build_links'):
for value in form_field.build_links(request, field):
yield value
else:
for code, label in form_field.choices:
is_active = is_selected_choice(field_value, code)
link_url = utils.get_url_with_modified_params(url, {field.name: code})
yield Link(link_url, label, is_active, value=code)
def form_attrs(form, **kwargs):
attrs = kwargs
attrs.setdefault("method", "post")
classes = attrs.pop("class", "")
if isinstance(classes, six.string_types):
classes = classes.split(" ")
classes.append(form_css_class(form))
attrs["class"] = classes
attrs['enctype']='multipart/form-data' if form.is_multipart() else 'application/x-www-form-urlencoded'
return common.html_attrs(attrs)
def form_csrf_tag(request):
csrf_token = csrf.get_token(request)
el = common.input(type_="hidden", name="csrfmiddlewaretoken", value=csrf_token)
return el.render()
def wrap_csrf_token(token):
el = common.input(type_="hidden", name="csrfmiddlewaretoken", value=token)
return el.render()
def field_range(form, start, end, step=None, hidden=True):
field_names = field_name_range(form, start, end, step)
return iter_fields(form, field_names, hidden=hidden)
def field_name_range(form, first, last, step=None, field_names=None):
if field_names is None:
field_names = list(form.fields.keys())
keys = field_names
if first is not None and isinstance(first, six.string_types):
try:
first = keys.index(first)
except ValueError:
raise KeyError("%r is not a field for form %r" % (first, form.__class__.__name__))
if last is not None and isinstance(last, six.string_types):
try:
            last = keys.index(last) + 1  # include the named field in the slice
except ValueError:
raise KeyError("%r is not a field for form %r" % (last, form.__class__.__name__))
return keys[first:last:step]
def iter_fields(form, names, hidden=True):
for name in names:
field = form[name]
if hidden or not field.hidden:
yield field
def render_field(field, layout=None, **kwargs):
if field.is_hidden:
return field.as_hidden()
layout = _layouts.get(layout, default_layout)
template = layout(field)
ctx = {
"field": field,
"label": field.label,
"label_tag": common.label(class_="form-label", for_=field.id_for_label)[field.label] if field.label else "",
"widget": render_widget(field),
"help": field.help_text,
"help_tag": common.div(class_="form-help")[field.help_text],
"errors": field.errors
}
content = template.format(**ctx)
classes = ["form-field", field_css_class(field)]
if field.errors:
classes.append("has-error")
return common.div(class_=classes,
data_field=field.name, **kwargs)[content]
def render_widget(field, **attrs):
el = common.div(**attrs)[str(field)]
el.attrib.update(class_=[widget_css_class(field), "form-widget"])
return el.render()
def register_layout(name, func):
_layouts[name] = func
def default_layout(field):
if isinstance(field.field.widget, CheckboxInput):
return "{widget}{label_tag}{help}{errors}"
return "{label_tag}{widget}{help}{errors}"
def field_css_class(field):
return make_css_class(field.field, "-field")
def widget_css_class(field):
return make_css_class(field.field.widget, "-widget")
def form_css_class(form):
return make_css_class(form, "-form")
def render_page(request, page, previous="«", next="»", **kwargs):
if isinstance(page, GingerDataSet):
page = page.object_list
if page.paginator.num_pages <= 1:
return ""
H = common
nav = H.ul(class_="pagination", **kwargs)
if page.has_previous():
url = page.previous_link(request).url
previous_tag = H.li(aria_label="Previous")[H.a(href=url)[previous]]
nav.append(previous_tag)
for link in page.build_links(request):
if link.is_active:
el = H.li(class_="active")[H.span[link.content]]
else:
el = H.li[H.a(href=link.url)[link.content]]
nav.append(el)
if page.has_next():
url = page.next_link(request).url
next_tag = H.li(aria_label="Next")[H.a(href=url)[next]]
nav.append(next_tag)
return nav.render() | mit | 19,992,718,202,595,770 | 31.214286 | 116 | 0.628703 | false |
veroc/Bika-LIMS | bika/lims/exportimport/instruments/beckmancoulter/access/model2.py | 3 | 3313 | """ Beckman Coulter Access 2
"""
from bika.lims import bikaMessageFactory as _
from bika.lims.utils import t
from . import BeckmancoulterAccessCSVParser, BeckmancoulterAccessImporter
import json
import traceback
title = "Beckman Coulter Access 2"
def Import(context, request):
""" Beckman Coulter Access 2 analysis results
"""
infile = request.form['beckmancoulter_access_model2_file']
fileformat = request.form['beckmancoulter_access_model2_format']
artoapply = request.form['beckmancoulter_access_model2_artoapply']
override = request.form['beckmancoulter_access_model2_override']
sample = request.form.get('beckmancoulter_access_model2_sample',
'requestid')
instrument = request.form.get('beckmancoulter_access_model2_instrument', None)
errors = []
logs = []
warns = []
# Load the most suitable parser according to file extension/options/etc...
parser = None
if not hasattr(infile, 'filename'):
errors.append(_("No file selected"))
if fileformat == 'csv':
parser = BeckmancoulterAccess2CSVParser(infile)
else:
errors.append(t(_("Unrecognized file format ${fileformat}",
mapping={"fileformat": fileformat})))
if parser:
# Load the importer
status = ['sample_received', 'attachment_due', 'to_be_verified']
if artoapply == 'received':
status = ['sample_received']
elif artoapply == 'received_tobeverified':
status = ['sample_received', 'attachment_due', 'to_be_verified']
over = [False, False]
if override == 'nooverride':
over = [False, False]
elif override == 'override':
over = [True, False]
elif override == 'overrideempty':
over = [True, True]
sam = ['getRequestID', 'getSampleID', 'getClientSampleID']
if sample == 'requestid':
sam = ['getRequestID']
if sample == 'sampleid':
sam = ['getSampleID']
elif sample == 'clientsid':
sam = ['getClientSampleID']
elif sample == 'sample_clientsid':
sam = ['getSampleID', 'getClientSampleID']
importer = BeckmancoulterAccess2Importer(parser=parser,
context=context,
idsearchcriteria=sam,
allowed_ar_states=status,
allowed_analysis_states=None,
override=over,
instrument_uid=instrument)
tbex = ''
try:
importer.process()
except:
tbex = traceback.format_exc()
errors = importer.errors
logs = importer.logs
warns = importer.warns
if tbex:
errors.append(tbex)
results = {'errors': errors, 'log': logs, 'warns': warns}
return json.dumps(results)
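# Illustrative sketch (not part of the original module): the request.form keys
# the Import entry point reads, with the values the code accepts:
#   beckmancoulter_access_model2_file       uploaded results file
#   beckmancoulter_access_model2_format     'csv'
#   beckmancoulter_access_model2_artoapply  'received' or 'received_tobeverified'
#   beckmancoulter_access_model2_override   'nooverride', 'override' or 'overrideempty'
#   beckmancoulter_access_model2_sample     'requestid', 'sampleid', 'clientsid' or 'sample_clientsid'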
class BeckmancoulterAccess2CSVParser(BeckmancoulterAccessCSVParser):
def getAttachmentFileType(self):
return "Beckman Couter Access 2"
class BeckmancoulterAccess2Importer(BeckmancoulterAccessImporter):
def getKeywordsToBeExcluded(self):
return []
| agpl-3.0 | 3,867,281,841,500,889,000 | 35.811111 | 82 | 0.58195 | false |
uwdata/termite-data-server | web2py/gluon/contrib/login_methods/oneall_account.py | 33 | 4559 | #!/usr/bin/env python
# coding: utf8
"""
Oneall Authentication for web2py
Developed by Nathan Freeze (Copyright © 2013)
Email <[email protected]>
This file contains code to allow using oneall.com
authentication services with web2py
"""
import os
import base64
from gluon import *
from gluon.storage import Storage
from gluon.contrib.simplejson import JSONDecodeError
from gluon.tools import fetch
import gluon.contrib.simplejson as json
class OneallAccount(object):
"""
from gluon.contrib.login_methods.oneall_account import OneallAccount
auth.settings.actions_disabled=['register','change_password',
'request_reset_password']
auth.settings.login_form = OneallAccount(request,
public_key="...",
private_key="...",
domain="...",
url = "http://localhost:8000/%s/default/user/login" % request.application)
"""
def __init__(self, request, public_key="", private_key="", domain="",
url=None, providers=None, on_login_failure=None):
self.request = request
self.public_key = public_key
self.private_key = private_key
self.url = url
self.domain = domain
self.profile = None
self.on_login_failure = on_login_failure
self.providers = providers or ["facebook", "google", "yahoo", "openid"]
self.mappings = Storage()
def defaultmapping(profile):
name = profile.get('name',{})
dname = name.get('formatted',profile.get('displayName'))
email=profile.get('emails', [{}])[0].get('value')
reg_id=profile.get('identity_token','')
username=profile.get('preferredUsername',email)
first_name=name.get('givenName', dname.split(' ')[0])
            last_name=name.get('familyName', dname.split(' ')[1])
return dict(registration_id=reg_id,username=username,email=email,
first_name=first_name,last_name=last_name)
self.mappings.default = defaultmapping
def get_user(self):
request = self.request
user = None
if request.vars.connection_token:
auth_url = "https://%s.api.oneall.com/connections/%s.json" % \
(self.domain, request.vars.connection_token)
auth_pw = "%s:%s" % (self.public_key,self.private_key)
auth_pw = base64.b64encode(auth_pw)
headers = dict(Authorization="Basic %s" % auth_pw)
try:
auth_info_json = fetch(auth_url,headers=headers)
auth_info = json.loads(auth_info_json)
data = auth_info['response']['result']['data']
if data['plugin']['key'] == 'social_login':
if data['plugin']['data']['status'] == 'success':
userdata = data['user']
self.profile = userdata['identity']
source = self.profile['source']['key']
mapping = self.mappings.get(source,self.mappings['default'])
user = mapping(self.profile)
except (JSONDecodeError, KeyError):
pass
if user is None and self.on_login_failure:
redirect(self.on_login_failure)
return user
def login_form(self):
scheme = self.request.env.wsgi_url_scheme
oneall_url = scheme + "://%s.api.oneall.com/socialize/library.js" % self.domain
oneall_lib = SCRIPT(_src=oneall_url,_type='text/javascript')
container = DIV(_id="oa_social_login_container")
widget = SCRIPT('oneall.api.plugins.social_login.build("oa_social_login_container",',
'{providers : %s,' % self.providers,
'callback_uri: "%s"});' % self.url,
_type="text/javascript")
form = DIV(oneall_lib,container,widget)
return form
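# Illustrative sketch (not part of the original module): use_oneall below reads
# a single "domain:public_key:private_key" line from private/oneall.key; the
# values written here are hypothetical.
def _example_write_oneall_key(app_folder):
    with open(os.path.join(app_folder, 'private/oneall.key'), 'w') as keyfile:
        keyfile.write('mysite:pub-1234:priv-5678')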
def use_oneall(auth, filename='private/oneall.key', **kwargs):
path = os.path.join(current.request.folder, filename)
if os.path.exists(path):
request = current.request
domain, public_key, private_key = open(path, 'r').read().strip().split(':')
url = URL('default', 'user', args='login', scheme=True)
auth.settings.actions_disabled =\
['register', 'change_password', 'request_reset_password']
auth.settings.login_form = OneallAccount(
request, public_key=public_key,private_key=private_key,
domain=domain, url=url, **kwargs)
| bsd-3-clause | -2,138,309,296,084,541,000 | 41.598131 | 93 | 0.585344 | false |
nathanielvarona/airflow | tests/www/views/conftest.py | 1 | 6162 | #
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from contextlib import contextmanager
from typing import Any, Dict, Generator, List, NamedTuple
import flask
import jinja2
import pytest
from airflow import settings
from airflow.models import DagBag
from airflow.www.app import create_app
from tests.test_utils.api_connexion_utils import create_user, delete_roles
from tests.test_utils.decorators import dont_initialize_flask_app_submodules
from tests.test_utils.www import client_with_login
@pytest.fixture(autouse=True, scope="module")
def session():
settings.configure_orm()
yield settings.Session
@pytest.fixture(autouse=True, scope="module")
def examples_dag_bag(session):
DagBag(include_examples=True).sync_to_db()
dag_bag = DagBag(include_examples=True, read_dags_from_db=True)
session.commit()
yield dag_bag
@pytest.fixture(scope="module")
def app(examples_dag_bag):
@dont_initialize_flask_app_submodules(
skip_all_except=[
"init_api_connexion",
"init_appbuilder",
"init_appbuilder_links",
"init_appbuilder_views",
"init_flash_views",
"init_jinja_globals",
"init_plugins",
]
)
def factory():
return create_app(testing=True)
app = factory()
app.config["WTF_CSRF_ENABLED"] = False
app.dag_bag = examples_dag_bag
app.jinja_env.undefined = jinja2.StrictUndefined
security_manager = app.appbuilder.sm # pylint: disable=no-member
if not security_manager.find_user(username='test'):
security_manager.add_user(
username='test',
first_name='test',
last_name='test',
email='[email protected]',
role=security_manager.find_role('Admin'),
password='test',
)
if not security_manager.find_user(username='test_user'):
security_manager.add_user(
username='test_user',
first_name='test_user',
last_name='test_user',
email='[email protected]',
role=security_manager.find_role('User'),
password='test_user',
)
if not security_manager.find_user(username='test_viewer'):
security_manager.add_user(
username='test_viewer',
first_name='test_viewer',
last_name='test_viewer',
email='[email protected]',
role=security_manager.find_role('Viewer'),
password='test_viewer',
)
yield app
delete_roles(app)
@pytest.fixture()
def admin_client(app):
return client_with_login(app, username="test", password="test")
@pytest.fixture()
def viewer_client(app):
return client_with_login(app, username="test_viewer", password="test_viewer")
@pytest.fixture()
def user_client(app):
return client_with_login(app, username="test_user", password="test_user")
@pytest.fixture(scope="module")
def client_factory(app):
def factory(name, role_name, permissions):
create_user(app, name, role_name, permissions)
client = app.test_client()
resp = client.post("/login/", data={"username": name, "password": name})
assert resp.status_code == 302
return client
return factory
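# Example (assumption, for illustration only): a test that needs a client
# with custom permissions could build one like
#
#   client = client_factory("reader", "ReaderRole",
#                           [("can_read", "Website")])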
class _TemplateWithContext(NamedTuple):
template: jinja2.environment.Template
context: Dict[str, Any]
@property
def name(self):
return self.template.name
@property
def local_context(self):
"""Returns context without global arguments"""
result = self.context.copy()
keys_to_delete = [
# flask.templating._default_template_ctx_processor
'g',
'request',
'session',
# flask_wtf.csrf.CSRFProtect.init_app
'csrf_token',
# flask_login.utils._user_context_processor
'current_user',
# flask_appbuilder.baseviews.BaseView.render_template
'appbuilder',
'base_template',
# airflow.www.app.py.create_app (inner method - jinja_globals)
'server_timezone',
'default_ui_timezone',
'hostname',
'navbar_color',
'log_fetch_delay_sec',
'log_auto_tailing_offset',
'log_animation_speed',
'state_color_mapping',
'airflow_version',
'git_version',
'k8s_or_k8scelery_executor',
# airflow.www.static_config.configure_manifest_files
'url_for_asset',
# airflow.www.views.AirflowBaseView.render_template
'scheduler_job',
# airflow.www.views.AirflowBaseView.extra_args
'macros',
]
for key in keys_to_delete:
del result[key]
return result
@pytest.fixture(scope="module")
def capture_templates(app):
@contextmanager
def manager() -> Generator[List[_TemplateWithContext], None, None]:
recorded = []
def record(sender, template, context, **extra): # pylint: disable=unused-argument
recorded.append(_TemplateWithContext(template, context))
flask.template_rendered.connect(record, app) # type: ignore
try:
yield recorded
finally:
flask.template_rendered.disconnect(record, app) # type: ignore
assert recorded, "Failed to catch the templates"
return manager
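# Usage sketch (assumption, for illustration only): a test can combine the
# fixtures above to assert on which templates a view rendered, e.g.
#
#   def test_home_renders_html(admin_client, capture_templates):
#       with capture_templates() as templates:
#           admin_client.get("/home", follow_redirects=True)
#       assert templates and templates[0].name.endswith(".html")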
| apache-2.0 | -7,838,551,156,938,262,000 | 30.6 | 90 | 0.6321 | false |
nkrinner/nova | nova/filters.py | 21 | 3305 | # Copyright (c) 2011-2012 OpenStack Foundation
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
Filter support
"""
from nova import loadables
from nova.openstack.common.gettextutils import _
from nova.openstack.common import log as logging
LOG = logging.getLogger(__name__)
class BaseFilter(object):
"""Base class for all filter classes."""
def _filter_one(self, obj, filter_properties):
"""Return True if it passes the filter, False otherwise.
Override this in a subclass.
"""
return True
def filter_all(self, filter_obj_list, filter_properties):
"""Yield objects that pass the filter.
Can be overridden in a subclass, if you need to base filtering
decisions on all objects. Otherwise, one can just override
_filter_one() to filter a single object.
"""
for obj in filter_obj_list:
if self._filter_one(obj, filter_properties):
yield obj
# Set to true in a subclass if a filter only needs to be run once
# for each request rather than for each instance
run_filter_once_per_request = False
def run_filter_for_index(self, index):
"""Return True if the filter needs to be run for the "index-th"
instance in a request. Only need to override this if a filter
needs anything other than "first only" or "all" behaviour.
"""
if self.run_filter_once_per_request and index > 0:
return False
else:
return True
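# Minimal sketch (assumption, not an actual Nova filter): a concrete filter
# usually only overrides _filter_one(), e.g.
#
#   class EnabledHostFilter(BaseFilter):
#       def _filter_one(self, obj, filter_properties):
#           return getattr(obj, 'enabled', False)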
class BaseFilterHandler(loadables.BaseLoader):
"""Base class to handle loading filter classes.
This class should be subclassed where one needs to use filters.
"""
def get_filtered_objects(self, filter_classes, objs,
filter_properties, index=0):
list_objs = list(objs)
LOG.debug(_("Starting with %d host(s)"), len(list_objs))
for filter_cls in filter_classes:
cls_name = filter_cls.__name__
filter = filter_cls()
if filter.run_filter_for_index(index):
objs = filter.filter_all(list_objs,
filter_properties)
if objs is None:
LOG.debug(_("Filter %(cls_name)s says to stop filtering"),
{'cls_name': cls_name})
return
list_objs = list(objs)
if not list_objs:
LOG.info(_("Filter %s returned 0 hosts"), cls_name)
break
LOG.debug(_("Filter %(cls_name)s returned "
"%(obj_len)d host(s)"),
{'cls_name': cls_name, 'obj_len': len(list_objs)})
return list_objs
| apache-2.0 | -4,145,656,673,956,288,000 | 36.134831 | 78 | 0.603631 | false |
pygeek/django | django/contrib/sessions/backends/file.py | 4 | 5255 | import errno
import os
import tempfile
from django.conf import settings
from django.contrib.sessions.backends.base import SessionBase, CreateError
from django.core.exceptions import SuspiciousOperation, ImproperlyConfigured
class SessionStore(SessionBase):
"""
Implements a file based session store.
"""
def __init__(self, session_key=None):
self.storage_path = getattr(settings, "SESSION_FILE_PATH", None)
if not self.storage_path:
self.storage_path = tempfile.gettempdir()
# Make sure the storage path is valid.
if not os.path.isdir(self.storage_path):
raise ImproperlyConfigured(
"The session storage path %r doesn't exist. Please set your"
" SESSION_FILE_PATH setting to an existing directory in which"
" Django can store session data." % self.storage_path)
self.file_prefix = settings.SESSION_COOKIE_NAME
super(SessionStore, self).__init__(session_key)
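    # Example configuration (assumption, for illustration): a project selects
    # this backend and storage directory in its settings module, e.g.
    #
    #   SESSION_ENGINE = 'django.contrib.sessions.backends.file'
    #   SESSION_FILE_PATH = '/var/lib/myapp/sessions'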
VALID_KEY_CHARS = set("abcdef0123456789")
def _key_to_file(self, session_key=None):
"""
Get the file associated with this session key.
"""
if session_key is None:
session_key = self._get_or_create_session_key()
# Make sure we're not vulnerable to directory traversal. Session keys
# should always be md5s, so they should never contain directory
# components.
if not set(session_key).issubset(self.VALID_KEY_CHARS):
raise SuspiciousOperation(
"Invalid characters in session key")
return os.path.join(self.storage_path, self.file_prefix + session_key)
def load(self):
session_data = {}
try:
with open(self._key_to_file(), "rb") as session_file:
file_data = session_file.read()
# Don't fail if there is no data in the session file.
# We may have opened the empty placeholder file.
if file_data:
try:
session_data = self.decode(file_data)
except (EOFError, SuspiciousOperation):
self.create()
except IOError:
self.create()
return session_data
def create(self):
while True:
self._session_key = self._get_new_session_key()
try:
self.save(must_create=True)
except CreateError:
continue
self.modified = True
self._session_cache = {}
return
def save(self, must_create=False):
# Get the session data now, before we start messing
# with the file it is stored within.
session_data = self._get_session(no_load=must_create)
session_file_name = self._key_to_file()
try:
# Make sure the file exists. If it does not already exist, an
# empty placeholder file is created.
flags = os.O_WRONLY | os.O_CREAT | getattr(os, 'O_BINARY', 0)
if must_create:
flags |= os.O_EXCL
fd = os.open(session_file_name, flags)
os.close(fd)
except OSError as e:
if must_create and e.errno == errno.EEXIST:
raise CreateError
raise
# Write the session file without interfering with other threads
# or processes. By writing to an atomically generated temporary
# file and then using the atomic os.rename() to make the complete
# file visible, we avoid having to lock the session file, while
# still maintaining its integrity.
#
# Note: Locking the session file was explored, but rejected in part
# because in order to be atomic and cross-platform, it required a
# long-lived lock file for each session, doubling the number of
# files in the session storage directory at any given time. This
# rename solution is cleaner and avoids any additional overhead
# when reading the session data, which is the more common case
# unless SESSION_SAVE_EVERY_REQUEST = True.
#
# See ticket #8616.
dir, prefix = os.path.split(session_file_name)
try:
output_file_fd, output_file_name = tempfile.mkstemp(dir=dir,
prefix=prefix + '_out_')
renamed = False
try:
try:
os.write(output_file_fd, self.encode(session_data).encode())
finally:
os.close(output_file_fd)
os.rename(output_file_name, session_file_name)
renamed = True
finally:
if not renamed:
os.unlink(output_file_name)
except (OSError, IOError, EOFError):
pass
def exists(self, session_key):
return os.path.exists(self._key_to_file(session_key))
def delete(self, session_key=None):
if session_key is None:
if self.session_key is None:
return
session_key = self.session_key
try:
os.unlink(self._key_to_file(session_key))
except OSError:
pass
def clean(self):
pass
| bsd-3-clause | -7,450,481,783,163,705,000 | 35.493056 | 80 | 0.582493 | false |
jesramirez/odoo | openerp/addons/test_limits/models.py | 435 | 1034 | # -*- coding: utf-8 -*-
import time
import openerp
class m(openerp.osv.osv.Model):
""" This model exposes a few methods that will consume between 'almost no
resource' and 'a lot of resource'.
"""
_name = 'test.limits.model'
def consume_nothing(self, cr, uid, context=None):
return True
def consume_memory(self, cr, uid, size, context=None):
l = [0] * size
return True
def leak_memory(self, cr, uid, size, context=None):
if not hasattr(self, 'l'):
self.l = []
self.l.append([0] * size)
return True
def consume_time(self, cr, uid, seconds, context=None):
time.sleep(seconds)
return True
def consume_cpu_time(self, cr, uid, seconds, context=None):
t0 = time.clock()
t1 = time.clock()
while t1 - t0 < seconds:
for i in xrange(10000000):
x = i * i
t1 = time.clock()
return True
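# Usage sketch (assumption): these methods are typically driven over XML-RPC
# to stress the server, e.g.
#
#   sock.execute(db, uid, password,
#                'test.limits.model', 'consume_cpu_time', 5)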
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
| agpl-3.0 | 2,609,054,120,451,938,000 | 26.945946 | 77 | 0.573501 | false |
Samuc/Proyecto-IV | lib/python2.7/site-packages/pip/_vendor/requests/packages/charade/codingstatemachine.py | 2931 | 2318 | ######################## BEGIN LICENSE BLOCK ########################
# The Original Code is mozilla.org code.
#
# The Initial Developer of the Original Code is
# Netscape Communications Corporation.
# Portions created by the Initial Developer are Copyright (C) 1998
# the Initial Developer. All Rights Reserved.
#
# Contributor(s):
# Mark Pilgrim - port to Python
#
# This library is free software; you can redistribute it and/or
# modify it under the terms of the GNU Lesser General Public
# License as published by the Free Software Foundation; either
# version 2.1 of the License, or (at your option) any later version.
#
# This library is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this library; if not, write to the Free Software
# Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA
# 02110-1301 USA
######################### END LICENSE BLOCK #########################
from .constants import eStart
from .compat import wrap_ord
class CodingStateMachine:
def __init__(self, sm):
self._mModel = sm
self._mCurrentBytePos = 0
self._mCurrentCharLen = 0
self.reset()
def reset(self):
self._mCurrentState = eStart
def next_state(self, c):
# for each byte we get its class
# if it is first byte, we also get byte length
# PY3K: aBuf is a byte stream, so c is an int, not a byte
byteCls = self._mModel['classTable'][wrap_ord(c)]
if self._mCurrentState == eStart:
self._mCurrentBytePos = 0
self._mCurrentCharLen = self._mModel['charLenTable'][byteCls]
# from byte's class and stateTable, we get its next state
curr_state = (self._mCurrentState * self._mModel['classFactor']
+ byteCls)
self._mCurrentState = self._mModel['stateTable'][curr_state]
self._mCurrentBytePos += 1
return self._mCurrentState
def get_current_charlen(self):
return self._mCurrentCharLen
def get_coding_state_machine(self):
return self._mModel['name']
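# Usage sketch (assumption, for illustration): charset probers feed the
# machine one byte at a time and inspect the returned state, e.g.
#
#   sm = CodingStateMachine(some_sm_model)
#   for byte in data:
#       state = sm.next_state(byte)
#       if state != eStart:
#           break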
| gpl-2.0 | -1,093,339,577,341,680,500 | 37 | 73 | 0.656601 | false |
ujjwal96/mitmproxy | mitmproxy/websocket.py | 3 | 6332 | import time
import queue
from typing import List, Optional
from wsproto.frame_protocol import CloseReason
from wsproto.frame_protocol import Opcode
from mitmproxy import flow
from mitmproxy.net import websockets
from mitmproxy.coretypes import serializable
from mitmproxy.utils import strutils, human
class WebSocketMessage(serializable.Serializable):
"""
A WebSocket message sent from one endpoint to the other.
"""
def __init__(
self, type: int, from_client: bool, content: bytes, timestamp: Optional[int]=None, killed: bool=False
) -> None:
self.type = Opcode(type) # type: ignore
"""indicates either TEXT or BINARY (from wsproto.frame_protocol.Opcode)."""
self.from_client = from_client
"""True if this messages was sent by the client."""
self.content = content
"""A byte-string representing the content of this message."""
self.timestamp: int = timestamp or int(time.time())
"""Timestamp of when this message was received or created."""
self.killed = killed
"""True if this messages was killed and should not be sent to the other endpoint."""
@classmethod
def from_state(cls, state):
return cls(*state)
def get_state(self):
return int(self.type), self.from_client, self.content, self.timestamp, self.killed
def set_state(self, state):
self.type, self.from_client, self.content, self.timestamp, self.killed = state
        self.type = Opcode(self.type)  # replace bare int with enum
def __repr__(self):
if self.type == Opcode.TEXT:
return "text message: {}".format(repr(self.content))
else:
return "binary message: {}".format(strutils.bytes_to_escaped_str(self.content))
def kill(self):
"""
Kill this message.
It will not be sent to the other endpoint. This has no effect in streaming mode.
"""
self.killed = True
class WebSocketFlow(flow.Flow):
"""
    A WebSocketFlow is a simplified representation of a WebSocket connection.
"""
def __init__(self, client_conn, server_conn, handshake_flow, live=None):
super().__init__("websocket", client_conn, server_conn, live)
self.messages: List[WebSocketMessage] = []
"""A list containing all WebSocketMessage's."""
self.close_sender = 'client'
"""'client' if the client initiated connection closing."""
self.close_code = CloseReason.NORMAL_CLOSURE
"""WebSocket close code."""
self.close_message = '(message missing)'
"""WebSocket close message."""
self.close_reason = 'unknown status code'
"""WebSocket close reason."""
self.stream = False
"""True of this connection is streaming directly to the other endpoint."""
self.handshake_flow = handshake_flow
"""The HTTP flow containing the initial WebSocket handshake."""
self.ended = False
"""True when the WebSocket connection has been closed."""
self._inject_messages_client = queue.Queue(maxsize=1)
self._inject_messages_server = queue.Queue(maxsize=1)
if handshake_flow:
self.client_key = websockets.get_client_key(handshake_flow.request.headers)
self.client_protocol = websockets.get_protocol(handshake_flow.request.headers)
self.client_extensions = websockets.get_extensions(handshake_flow.request.headers)
self.server_accept = websockets.get_server_accept(handshake_flow.response.headers)
self.server_protocol = websockets.get_protocol(handshake_flow.response.headers)
self.server_extensions = websockets.get_extensions(handshake_flow.response.headers)
else:
self.client_key = ''
self.client_protocol = ''
self.client_extensions = ''
self.server_accept = ''
self.server_protocol = ''
self.server_extensions = ''
_stateobject_attributes = flow.Flow._stateobject_attributes.copy()
# mypy doesn't support update with kwargs
_stateobject_attributes.update(dict(
messages=List[WebSocketMessage],
close_sender=str,
close_code=int,
close_message=str,
close_reason=str,
client_key=str,
client_protocol=str,
client_extensions=str,
server_accept=str,
server_protocol=str,
server_extensions=str,
# Do not include handshake_flow, to prevent recursive serialization!
# Since mitmproxy-console currently only displays HTTPFlows,
# dumping the handshake_flow will include the WebSocketFlow too.
))
def get_state(self):
d = super().get_state()
d['close_code'] = int(d['close_code']) # replace enum with bare int
return d
@classmethod
def from_state(cls, state):
f = cls(None, None, None)
f.set_state(state)
return f
def __repr__(self):
return "<WebSocketFlow ({} messages)>".format(len(self.messages))
def message_info(self, message: WebSocketMessage) -> str:
return "{client} {direction} WebSocket {type} message {direction} {server}{endpoint}".format(
type=message.type,
client=human.format_address(self.client_conn.address),
server=human.format_address(self.server_conn.address),
direction="->" if message.from_client else "<-",
endpoint=self.handshake_flow.request.path,
)
def inject_message(self, endpoint, payload):
"""
Inject and send a full WebSocket message to the remote endpoint.
This might corrupt your WebSocket connection! Be careful!
The endpoint needs to be either flow.client_conn or flow.server_conn.
        If ``payload`` is of type ``bytes``, the message is flagged as
        binary. If it is of type ``str``, it is encoded as UTF-8 and sent
        as text.
:param payload: The message body to send.
:type payload: ``bytes`` or ``str``
"""
if endpoint == self.client_conn:
self._inject_messages_client.put(payload)
elif endpoint == self.server_conn:
self._inject_messages_server.put(payload)
else:
raise ValueError('Invalid endpoint')
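# Usage sketch (assumption, not a documented mitmproxy API): an addon's
# websocket_message hook could push an extra frame toward the server, e.g.
#
#   def websocket_message(flow):
#       if flow.messages[-1].from_client:
#           flow.inject_message(flow.server_conn, "extra message")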
| mit | -5,385,580,472,525,473,000 | 37.609756 | 109 | 0.637871 | false |
PandaWei/tp-qemu | qemu/tests/win_virtio_serial_data_transfer_reboot.py | 6 | 4731 | import os
import logging
from autotest.client import utils
from autotest.client.shared import error
from virttest import data_dir
from virttest import qemu_virtio_port
# This decorator makes the test function aware of context strings
@error.context_aware
def run(test, params, env):
"""
QEMU 'Windows virtio-serial data transfer' test
1) Start guest with one virtio-serial-pci and two virtio-serial-port.
2) Make sure vioser.sys verifier enabled in guest.
    3) Transferring data from host to guest via virtio-serial-port in a loop.
4) Reboot guest.
5) Repeat step 3.
6) Reboot guest by system_reset qmp command.
:param test: QEMU test object.
:param params: Dictionary with the test parameters.
:param env: Dictionary with test environment.
"""
def get_virtio_port_host_file(vm, port_name):
"""
        Return the host file of the virtserialport with the given name.
        :param vm: VM object
        :param port_name: name of the virtio serial port
        :return: host file path of the matching virtserialport
"""
for port in vm.virtio_ports:
if isinstance(port, qemu_virtio_port.VirtioSerial):
if port.name == port_name:
return port.hostfile
def receive_data(session, serial_receive_cmd, data_file):
output = session.cmd_output(serial_receive_cmd, timeout=30)
ori_data = file(data_file, "r").read()
if ori_data.strip() != output.strip():
err = "Data lost during transfer. Origin data is:\n%s" % ori_data
err += "Guest receive data:\n%s" % output
raise error.TestFail(err)
def transfer_data(session, receive_cmd, send_cmd, data_file, n_time):
txt = "Transfer data betwwen guest and host for %s times" % n_time
error.context(txt, logging.info)
        for num in xrange(n_time):
            logging.info("Data transfer repeat %s/%s." % (num + 1, n_time))
            guest_receive = None
            try:
                args = (session, receive_cmd, data_file)
                guest_receive = utils.InterruptedThread(receive_data, args)
                guest_receive.start()
                utils.system(send_cmd, timeout=30)
            finally:
                # join the receiver even if sending fails; guard against the
                # thread never having been created
                if guest_receive:
                    guest_receive.join(10)
vm = env.get_vm(params["main_vm"])
vm.verify_alive()
timeout = int(params.get("login_timeout", 360))
session = vm.wait_for_login(timeout=timeout)
check_cmd = params.get("check_vioser_status_cmd",
"verifier /querysettings")
output = session.cmd(check_cmd, timeout=360)
error.context("Make sure vioser.sys verifier enabled in guest.",
logging.info)
if "vioser.sys" not in output:
verify_cmd = params.get("vioser_verify_cmd",
"verifier.exe /standard /driver vioser.sys")
session.cmd(verify_cmd, timeout=360)
session = vm.reboot(session=session, timeout=timeout)
output = session.cmd(check_cmd, timeout=360)
if "vioser.sys" not in output:
error.TestError("Fail to veirfy vioser.sys driver.")
guest_scripts = params["guest_scripts"]
guest_path = params.get("guest_script_folder", "C:\\")
error.context("Copy test scripts to guest.", logging.info)
for script in guest_scripts.split(";"):
link = os.path.join(data_dir.get_deps_dir("win_serial"), script)
vm.copy_files_to(link, guest_path, timeout=60)
port_name = params["virtio_ports"].split()[0]
host_file = get_virtio_port_host_file(vm, port_name)
data_file = params["data_file"]
data_file = os.path.join(data_dir.get_deps_dir("win_serial"),
data_file)
send_script = params.get("host_send_script", "serial-host-send.py")
send_script = os.path.join(data_dir.get_deps_dir("win_serial"),
send_script)
serial_send_cmd = "python %s %s %s" % (send_script, host_file, data_file)
receive_script = params.get("guest_receive_script",
"VirtIoChannel_guest_recieve.py")
receive_script = "%s%s" % (guest_path, receive_script)
serial_receive_cmd = "python %s %s " % (receive_script, port_name)
n_time = int(params.get("repeat_times", 20))
transfer_data(session, serial_receive_cmd, serial_send_cmd,
data_file, n_time)
error.context("Reboot guest.", logging.info)
session = vm.reboot(session=session, timeout=timeout)
transfer_data(session, serial_receive_cmd, serial_send_cmd,
data_file, n_time)
error.context("Reboot guest by system_reset qmp command.", logging.info)
session = vm.reboot(session=session, method="system_reset",
timeout=timeout)
if session:
session.close()
| gpl-2.0 | 8,970,975,535,280,194,000 | 41.621622 | 77 | 0.617417 | false |
sebastic/QGIS | python/plugins/db_manager/db_plugins/postgis/info_model.py | 3 | 11691 | # -*- coding: utf-8 -*-
"""
/***************************************************************************
Name : DB Manager
Description : Database manager plugin for QGIS
Date : May 23, 2011
copyright : (C) 2011 by Giuseppe Sucameli
email : [email protected]
***************************************************************************/
/***************************************************************************
* *
* This program is free software; you can redistribute it and/or modify *
* it under the terms of the GNU General Public License as published by *
* the Free Software Foundation; either version 2 of the License, or *
* (at your option) any later version. *
* *
***************************************************************************/
"""
from PyQt4.QtGui import QApplication
from ..info_model import TableInfo, VectorTableInfo, RasterTableInfo
from ..html_elems import HtmlSection, HtmlParagraph, HtmlTable, HtmlTableHeader, HtmlTableCol
class PGTableInfo(TableInfo):
def __init__(self, table):
self.table = table
def generalInfo(self):
ret = []
# if the estimation is less than 100 rows, try to count them - it shouldn't take long time
if self.table.rowCount is None and self.table.estimatedRowCount < 100:
# row count information is not displayed yet, so just block
# table signals to avoid double refreshing (infoViewer->refreshRowCount->tableChanged->infoViewer)
self.table.blockSignals(True)
self.table.refreshRowCount()
self.table.blockSignals(False)
tbl = [
(QApplication.translate("DBManagerPlugin", "Relation type:"),
QApplication.translate("DBManagerPlugin", "View") if self.table._relationType == 'v' else
QApplication.translate("DBManagerPlugin", "Materialized view") if self.table._relationType == 'm' else
QApplication.translate("DBManagerPlugin", "Table")),
(QApplication.translate("DBManagerPlugin", "Owner:"), self.table.owner)
]
if self.table.comment:
tbl.append((QApplication.translate("DBManagerPlugin", "Comment:"), self.table.comment))
tbl.extend([
(QApplication.translate("DBManagerPlugin", "Pages:"), self.table.pages),
(QApplication.translate("DBManagerPlugin", "Rows (estimation):"), self.table.estimatedRowCount)
])
# privileges
# has the user access to this schema?
schema_priv = self.table.database().connector.getSchemaPrivileges(
self.table.schemaName()) if self.table.schema() else None
if schema_priv is None:
pass
elif not schema_priv[1]: # no usage privileges on the schema
tbl.append((QApplication.translate("DBManagerPlugin", "Privileges:"),
QApplication.translate("DBManagerPlugin",
"<warning> This user doesn't have usage privileges for this schema!")))
else:
table_priv = self.table.database().connector.getTablePrivileges((self.table.schemaName(), self.table.name))
privileges = []
if table_priv[0]:
privileges.append("select")
if self.table.rowCount is not None or self.table.rowCount >= 0:
tbl.append((QApplication.translate("DBManagerPlugin", "Rows (counted):"),
self.table.rowCount if self.table.rowCount is not None else QApplication.translate(
"DBManagerPlugin", 'Unknown (<a href="action:rows/count">find out</a>)')))
if table_priv[1]:
privileges.append("insert")
if table_priv[2]:
privileges.append("update")
if table_priv[3]:
privileges.append("delete")
priv_string = u", ".join(privileges) if len(privileges) > 0 else QApplication.translate("DBManagerPlugin",
'<warning> This user has no privileges!')
tbl.append((QApplication.translate("DBManagerPlugin", "Privileges:"), priv_string))
ret.append(HtmlTable(tbl))
if schema_priv is not None and schema_priv[1]:
if table_priv[0] and not table_priv[1] and not table_priv[2] and not table_priv[3]:
ret.append(HtmlParagraph(
QApplication.translate("DBManagerPlugin", "<warning> This user has read-only privileges.")))
if not self.table.isView:
if self.table.rowCount is not None:
if abs(self.table.estimatedRowCount - self.table.rowCount) > 1 and \
(self.table.estimatedRowCount > 2 * self.table.rowCount or
self.table.rowCount > 2 * self.table.estimatedRowCount):
ret.append(HtmlParagraph(QApplication.translate("DBManagerPlugin",
"<warning> There's a significant difference between estimated and real row count. "
'Consider running <a href="action:vacuumanalyze/run">VACUUM ANALYZE</a>.')))
# primary key defined?
if not self.table.isView:
if len(filter(lambda fld: fld.primaryKey, self.table.fields())) <= 0:
ret.append(HtmlParagraph(
QApplication.translate("DBManagerPlugin", "<warning> No primary key defined for this table!")))
return ret
def getSpatialInfo(self):
ret = []
info = self.db.connector.getSpatialInfo()
if info is None:
return
tbl = [
(QApplication.translate("DBManagerPlugin", "Library:"), info[0]),
(QApplication.translate("DBManagerPlugin", "Scripts:"), info[3]),
("GEOS:", info[1]),
("Proj:", info[2])
]
ret.append(HtmlTable(tbl))
if info[1] is not None and info[1] != info[2]:
ret.append(HtmlParagraph(QApplication.translate("DBManagerPlugin",
"<warning> Version of installed scripts doesn't match version of released scripts!\n"
"This is probably a result of incorrect PostGIS upgrade.")))
if not self.db.connector.has_geometry_columns:
ret.append(HtmlParagraph(
QApplication.translate("DBManagerPlugin", "<warning> geometry_columns table doesn't exist!\n"
"This table is essential for many GIS applications for enumeration of tables.")))
elif not self.db.connector.has_geometry_columns_access:
ret.append(HtmlParagraph(QApplication.translate("DBManagerPlugin",
"<warning> This user doesn't have privileges to read contents of geometry_columns table!\n"
"This table is essential for many GIS applications for enumeration of tables.")))
return ret
def fieldsDetails(self):
tbl = []
# define the table header
header = (
"#", QApplication.translate("DBManagerPlugin", "Name"), QApplication.translate("DBManagerPlugin", "Type"),
QApplication.translate("DBManagerPlugin", "Length"), QApplication.translate("DBManagerPlugin", "Null"),
QApplication.translate("DBManagerPlugin", "Default"))
tbl.append(HtmlTableHeader(header))
# add table contents
for fld in self.table.fields():
char_max_len = fld.charMaxLen if fld.charMaxLen is not None and fld.charMaxLen != -1 else ""
is_null_txt = "N" if fld.notNull else "Y"
# make primary key field underlined
attrs = {"class": "underline"} if fld.primaryKey else None
name = HtmlTableCol(fld.name, attrs)
tbl.append((fld.num, name, fld.type2String(), char_max_len, is_null_txt, fld.default2String()))
return HtmlTable(tbl, {"class": "header"})
def triggersDetails(self):
if self.table.triggers() is None or len(self.table.triggers()) <= 0:
return None
ret = []
tbl = []
# define the table header
header = (
QApplication.translate("DBManagerPlugin", "Name"), QApplication.translate("DBManagerPlugin", "Function"),
QApplication.translate("DBManagerPlugin", "Type"), QApplication.translate("DBManagerPlugin", "Enabled"))
tbl.append(HtmlTableHeader(header))
# add table contents
for trig in self.table.triggers():
name = u'%(name)s (<a href="action:trigger/%(name)s/%(action)s">%(action)s</a>)' % {"name": trig.name,
"action": "delete"}
(enabled, action) = (QApplication.translate("DBManagerPlugin", "Yes"), "disable") if trig.enabled else (
QApplication.translate("DBManagerPlugin", "No"), "enable")
txt_enabled = u'%(enabled)s (<a href="action:trigger/%(name)s/%(action)s">%(action)s</a>)' % {
"name": trig.name, "action": action, "enabled": enabled}
tbl.append((name, trig.function, trig.type2String(), txt_enabled))
ret.append(HtmlTable(tbl, {"class": "header"}))
ret.append(HtmlParagraph(QApplication.translate("DBManagerPlugin",
'<a href="action:triggers/enable">Enable all triggers</a> / <a href="action:triggers/disable">Disable all triggers</a>')))
return ret
def rulesDetails(self):
if self.table.rules() is None or len(self.table.rules()) <= 0:
return None
tbl = []
# define the table header
header = (
QApplication.translate("DBManagerPlugin", "Name"), QApplication.translate("DBManagerPlugin", "Definition"))
tbl.append(HtmlTableHeader(header))
# add table contents
for rule in self.table.rules():
name = u'%(name)s (<a href="action:rule/%(name)s/%(action)s">%(action)s</a>)' % {"name": rule.name,
"action": "delete"}
tbl.append((name, rule.definition))
return HtmlTable(tbl, {"class": "header"})
def getTableInfo(self):
ret = TableInfo.getTableInfo(self)
# rules
rules_details = self.rulesDetails()
if rules_details is None:
pass
else:
ret.append(HtmlSection(QApplication.translate("DBManagerPlugin", 'Rules'), rules_details))
return ret
class PGVectorTableInfo(PGTableInfo, VectorTableInfo):
def __init__(self, table):
VectorTableInfo.__init__(self, table)
PGTableInfo.__init__(self, table)
def spatialInfo(self):
return VectorTableInfo.spatialInfo(self)
class PGRasterTableInfo(PGTableInfo, RasterTableInfo):
def __init__(self, table):
RasterTableInfo.__init__(self, table)
PGTableInfo.__init__(self, table)
def spatialInfo(self):
return RasterTableInfo.spatialInfo(self)
| gpl-2.0 | 8,532,153,022,510,567,000 | 46.141129 | 178 | 0.550338 | false |
sensysnetworks/uClinux | user/python/Demo/classes/Range.py | 3 | 1684 | # Example of a generator: re-implement the built-in range function
# without actually constructing the list of values. (It turns out
# that the built-in function is about 20 times faster -- that's why
# it's built-in. :-)
# Wrapper function to emulate the complicated range() arguments
def range(*a):
if len(a) == 1:
start, stop, step = 0, a[0], 1
elif len(a) == 2:
start, stop = a
step = 1
elif len(a) == 3:
start, stop, step = a
else:
raise TypeError, 'range() needs 1-3 arguments'
return Range(start, stop, step)
# Class implementing a range object.
# To the user the instances feel like immutable sequences
# (and you can't concatenate or slice them)
class Range:
# initialization -- should be called only by range() above
def __init__(self, start, stop, step):
if step == 0:
raise ValueError, 'range() called with zero step'
self.start = start
self.stop = stop
self.step = step
        # round away from zero so a partial final step still counts
        if self.step > 0:
            self.len = max(0, int((self.stop - self.start + self.step - 1) / self.step))
        else:
            self.len = max(0, int((self.stop - self.start + self.step + 1) / self.step))
# implement `x` and is also used by print x
def __repr__(self):
return 'range' + `self.start, self.stop, self.step`
# implement len(x)
def __len__(self):
return self.len
# implement x[i]
def __getitem__(self, i):
if 0 <= i < self.len:
return self.start + self.step * i
else:
raise IndexError, 'range[i] index out of range'
# Small test program
def test():
import time, __builtin__
print range(10), range(-10, 10), range(0, 10, 2)
for i in range(100, -100, -10): print i,
print
t1 = time.time()
for i in range(1000):
pass
t2 = time.time()
for i in __builtin__.range(1000):
pass
t3 = time.time()
print t2-t1, 'sec (class)'
print t3-t2, 'sec (built-in)'
test()
| gpl-2.0 | 5,844,238,672,708,917,000 | 22.71831 | 67 | 0.652019 | false |
sammyshj/stem | test/unit/interpreter/arguments.py | 2 | 1729 | import unittest
from stem.interpreter.arguments import DEFAULT_ARGS, parse, get_help
class TestArgumentParsing(unittest.TestCase):
def test_that_we_get_default_values(self):
args = parse([])
for attr in DEFAULT_ARGS:
self.assertEqual(DEFAULT_ARGS[attr], getattr(args, attr))
def test_that_we_load_arguments(self):
args = parse(['--interface', '10.0.0.25:80'])
self.assertEqual('10.0.0.25', args.control_address)
self.assertEqual(80, args.control_port)
args = parse(['--interface', '80'])
self.assertEqual(DEFAULT_ARGS['control_address'], args.control_address)
self.assertEqual(80, args.control_port)
args = parse(['--socket', '/tmp/my_socket'])
self.assertEqual('/tmp/my_socket', args.control_socket)
args = parse(['--help'])
self.assertEqual(True, args.print_help)
def test_examples(self):
args = parse(['-i', '1643'])
self.assertEqual(1643, args.control_port)
args = parse(['-s', '~/.tor/socket'])
self.assertEqual('~/.tor/socket', args.control_socket)
def test_that_we_reject_unrecognized_arguments(self):
self.assertRaises(ValueError, parse, ['--blarg', 'stuff'])
def test_that_we_reject_invalid_interfaces(self):
invalid_inputs = (
'',
' ',
'blarg',
'127.0.0.1',
'127.0.0.1:',
':80',
'400.0.0.1:80',
'127.0.0.1:-5',
'127.0.0.1:500000',
)
for invalid_input in invalid_inputs:
self.assertRaises(ValueError, parse, ['--interface', invalid_input])
def test_get_help(self):
help_text = get_help()
self.assertTrue('Interactive interpreter for Tor.' in help_text)
self.assertTrue('change control interface from 127.0.0.1:default' in help_text)
| lgpl-3.0 | 4,954,452,864,760,776,000 | 29.333333 | 83 | 0.64199 | false |
mkost/djangocms-googlemap | schemamigration.py | 1 | 1460 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
import sys
INSTALLED_APPS = [
'django.contrib.contenttypes',
'django.contrib.auth',
'django.contrib.sessions',
'django.contrib.sites',
'django.contrib.admin',
'mptt',
'cms',
'menus',
'djangocms_googlemap',
'south',
]
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': ':memory:',
}
}
TEMPLATE_CONTEXT_PROCESSORS = [
'django.core.context_processors.auth',
'django.core.context_processors.i18n',
'django.core.context_processors.request',
'django.core.context_processors.media',
'django.core.context_processors.static',
'cms.context_processors.media',
'sekizai.context_processors.sekizai',
]
ROOT_URLCONF = 'cms.urls'
def schemamigration():
# turn ``schemamigration.py --initial`` into
    # ``manage.py schemamigration djangocms_googlemap --initial`` and set up
    # the environment
from django.conf import settings
from django.core.management import ManagementUtility
settings.configure(
INSTALLED_APPS=INSTALLED_APPS,
ROOT_URLCONF=ROOT_URLCONF,
DATABASES=DATABASES,
TEMPLATE_CONTEXT_PROCESSORS=TEMPLATE_CONTEXT_PROCESSORS
)
argv = list(sys.argv)
argv.insert(1, 'schemamigration')
argv.insert(2, 'djangocms_googlemap')
utility = ManagementUtility(argv)
utility.execute()
if __name__ == "__main__":
schemamigration()
| bsd-3-clause | -6,339,741,224,863,982,000 | 23.333333 | 77 | 0.655479 | false |
harshilasu/LinkurApp | y/google-cloud-sdk/platform/gsutil/gslib/addlhelp/apis.py | 4 | 2736 | # -*- coding: utf-8 -*-
# Copyright 2014 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Additional help about gsutil's interaction with Cloud Storage APIs."""
from gslib.help_provider import HelpProvider
_DETAILED_HELP_TEXT = ("""
<B>OVERVIEW</B>
Google Cloud Storage offers two APIs: an XML and a JSON API. Gsutil can
interact with both APIs. By default, gsutil versions starting with 4.0
interact with the JSON API. If it is not possible to perform a command using
one of the APIs (for example, the notification command is not supported in
the XML API), gsutil will silently fall back to using the other API. Also,
gsutil will automatically fall back to using the XML API when interacting
with cloud storage providers that only support that API.
<B>CONFIGURING WHICH API IS USED</B>
To use a certain API for interacting with Google Cloud Storage, you can set
  the 'prefer_api' variable in the "GSUtil" section of the .boto config file to
'xml' or 'json' like so:
prefer_api = json
This will cause gsutil to use that API where possible (falling back to the
other API in cases as noted above). This applies to the gsutil test command
as well; it will run integration tests against the preferred API.
<B>PERFORMANCE DIFFERENCES BETWEEN APIS</B>
The XML API uses the boto framework. This framework re-reads downloaded files
to compute an MD5 hash if one is not present. For objects that do not
include MD5 hashes in their metadata (for example Google Cloud Storage
composite objects), this doubles the bandwidth consumed and elapsed time
needed by the download. Therefore, if you are working with composite objects,
it is recommended that you use the default value for prefer_api.
""")
class CommandOptions(HelpProvider):
"""Additional help about gsutil's interaction with Cloud Storage APIs."""
# Help specification. See help_provider.py for documentation.
help_spec = HelpProvider.HelpSpec(
help_name='apis',
help_name_aliases=['XML', 'JSON', 'api', 'force_api', 'prefer_api'],
help_type='additional_help',
help_one_line_summary='Cloud Storage APIs',
help_text=_DETAILED_HELP_TEXT,
subcommand_help_text={},
)
| gpl-3.0 | 9,025,252,058,162,140,000 | 43.129032 | 80 | 0.744883 | false |
ewheeler/rapidpro | temba/values/tests.py | 1 | 20667 | from __future__ import unicode_literals
import json
from datetime import timedelta
from django.core.urlresolvers import reverse
from django.utils import timezone
from mock import patch
from temba.contacts.models import ContactField
from temba.flows.models import RuleSet
from temba.orgs.models import Language
from temba.tests import FlowFileTest
from .models import Value
class ResultTest(FlowFileTest):
def assertResult(self, result, index, category, count):
self.assertEquals(count, result['categories'][index]['count'])
self.assertEquals(category, result['categories'][index]['label'])
def test_field_results(self):
c1 = self.create_contact("Contact1", '0788111111')
c2 = self.create_contact("Contact2", '0788222222')
c3 = self.create_contact("Contact3", '0788333333')
self.create_contact("Contact4", '0788444444')
# create a gender field that uses strings
gender = ContactField.get_or_create(self.org, self.admin, 'gender', label="Gender", value_type=Value.TYPE_TEXT)
c1.set_field(self.user, 'gender', "Male")
c2.set_field(self.user, 'gender', "Female")
c3.set_field(self.user, 'gender', "Female")
result = Value.get_value_summary(contact_field=gender)[0]
self.assertEquals(2, len(result['categories']))
self.assertEquals(3, result['set'])
self.assertEquals(2, result['unset']) # this is two as we have the default contact created by our unit tests
self.assertFalse(result['open_ended'])
self.assertResult(result, 0, "Female", 2)
self.assertResult(result, 1, "Male", 1)
# create an born field that uses decimals
born = ContactField.get_or_create(self.org, self.admin, 'born', label="Born", value_type=Value.TYPE_DECIMAL)
c1.set_field(self.user, 'born', 1977)
c2.set_field(self.user, 'born', 1990)
c3.set_field(self.user, 'born', 1977)
result = Value.get_value_summary(contact_field=born)[0]
self.assertEquals(2, len(result['categories']))
self.assertEquals(3, result['set'])
self.assertEquals(2, result['unset'])
self.assertFalse(result['open_ended'])
self.assertResult(result, 0, "1977", 2)
self.assertResult(result, 1, "1990", 1)
# ok, state field!
state = ContactField.get_or_create(self.org, self.admin, 'state', label="State", value_type=Value.TYPE_STATE)
c1.set_field(self.user, 'state', "Kigali City")
c2.set_field(self.user, 'state', "Kigali City")
result = Value.get_value_summary(contact_field=state)[0]
self.assertEquals(1, len(result['categories']))
self.assertEquals(2, result['set'])
self.assertEquals(3, result['unset'])
self.assertResult(result, 0, "1708283", 2)
reg_date = ContactField.get_or_create(self.org, self.admin, 'reg_date', label="Registration Date", value_type=Value.TYPE_DATETIME)
now = timezone.now()
c1.set_field(self.user, 'reg_date', now.replace(hour=9))
c2.set_field(self.user, 'reg_date', now.replace(hour=4))
c3.set_field(self.user, 'reg_date', now - timedelta(days=1))
result = Value.get_value_summary(contact_field=reg_date)[0]
self.assertEquals(2, len(result['categories']))
self.assertEquals(3, result['set'])
self.assertEquals(2, result['unset'])
self.assertResult(result, 0, now.replace(hour=0, minute=0, second=0, microsecond=0), 2)
self.assertResult(result, 1, (now - timedelta(days=1)).replace(hour=0, minute=0, second=0, microsecond=0), 1)
# make sure categories returned are sorted by count, not name
c2.set_field(self.user, 'gender', "Male")
result = Value.get_value_summary(contact_field=gender)[0]
self.assertEquals(2, len(result['categories']))
self.assertEquals(3, result['set'])
self.assertEquals(2, result['unset']) # this is two as we have the default contact created by our unit tests
self.assertFalse(result['open_ended'])
self.assertResult(result, 0, "Male", 2)
self.assertResult(result, 1, "Female", 1)
# check the modified date is tracked for fields
original_value = Value.objects.get(contact=c1, contact_field=gender)
c1.set_field(self.user, 'gender', 'unknown')
new_value = Value.objects.get(contact=c1, contact_field=gender)
self.assertTrue(new_value.modified_on > original_value.modified_on)
self.assertNotEqual(new_value.string_value, original_value.string_value)
def run_color_gender_flow(self, contact, color, gender, age):
self.assertEqual(self.send_message(self.flow, color, contact=contact, restart_participants=True), "What is your gender?")
self.assertEqual(self.send_message(self.flow, gender, contact=contact), "What is your age?")
self.assertEqual(self.send_message(self.flow, age, contact=contact), "Thanks.")
def setup_color_gender_flow(self):
self.flow = self.get_flow('color_gender_age')
(self.c1, self.c2, self.c3, self.c4) = (self.create_contact("Contact1", '0788111111'),
self.create_contact("Contact2", '0788222222'),
self.create_contact("Contact3", '0788333333'),
self.create_contact("Contact4", '0788444444'))
def test_category_results(self):
self.setup_color_gender_flow()
# create a state field:
# assign c1 and c2 to Kigali
ContactField.get_or_create(self.org, self.admin, 'state', label="State", value_type=Value.TYPE_STATE)
ContactField.get_or_create(self.org, self.admin, 'district', label="District", value_type=Value.TYPE_DISTRICT)
self.c1.set_field(self.user, 'state', "Kigali City")
self.c1.set_field(self.user, 'district', "Nyarugenge")
self.c2.set_field(self.user, 'state', "Kigali City")
self.c2.set_field(self.user, 'district', "Nyarugenge")
self.run_color_gender_flow(self.c1, "red", "male", "16")
self.run_color_gender_flow(self.c2, "blue", "female", "19")
self.run_color_gender_flow(self.c3, "green", "male", "75")
self.run_color_gender_flow(self.c4, "maroon", "female", "50")
# create a group of the women
ladies = self.create_group("Ladies", [self.c2, self.c4])
# get our rulesets
color = RuleSet.objects.get(flow=self.flow, label="Color")
gender = RuleSet.objects.get(flow=self.flow, label="Gender")
age = RuleSet.objects.get(flow=self.flow, label="Age")
# fetch our results through the view
self.login(self.admin)
response = self.client.get(reverse('flows.ruleset_results', args=[color.pk]))
response = json.loads(response.content)
categories = response['results'][0]['categories']
self.assertEqual('Red', categories[0]['label'])
self.assertEqual('Blue', categories[1]['label'])
self.assertEqual('Green', categories[2]['label'])
self.assertEqual(2, categories[0]['count'])
self.assertEqual(1, categories[1]['count'])
self.assertEqual(1, categories[2]['count'])
# categories should be in the same order as our rules, should have correct counts
result = Value.get_value_summary(ruleset=color)[0]
self.assertEquals(3, len(result['categories']))
self.assertFalse(result['open_ended'])
self.assertResult(result, 0, "Red", 2)
self.assertResult(result, 1, "Blue", 1)
self.assertResult(result, 2, "Green", 1)
# check our age category as well
result = Value.get_value_summary(ruleset=age)[0]
self.assertEquals(3, len(result['categories']))
self.assertFalse(result['open_ended'])
self.assertResult(result, 0, "Child", 1)
self.assertResult(result, 1, "Adult", 2)
self.assertResult(result, 2, "Senior", 1)
# and our gender categories
result = Value.get_value_summary(ruleset=gender)[0]
self.assertEquals(2, len(result['categories']))
self.assertFalse(result['open_ended'])
self.assertResult(result, 0, "Male", 2)
self.assertResult(result, 1, "Female", 2)
# now filter the results and only get responses by men
result = Value.get_value_summary(ruleset=color, filters=[dict(ruleset=gender.pk, categories=["Male"])])[0]
self.assertResult(result, 0, "Red", 1)
self.assertResult(result, 1, "Blue", 0)
self.assertResult(result, 2, "Green", 1)
# what about men that are adults?
result = Value.get_value_summary(ruleset=color, filters=[dict(ruleset=gender.pk, categories=["Male"]),
dict(ruleset=age.pk, categories=["Adult"])])[0]
self.assertResult(result, 0, "Red", 0)
self.assertResult(result, 1, "Blue", 0)
self.assertResult(result, 2, "Green", 0)
# union of all genders
result = Value.get_value_summary(ruleset=color, filters=[dict(ruleset=gender.pk, categories=["Male", "Female"]),
dict(ruleset=age.pk, categories=["Adult"])])[0]
self.assertResult(result, 0, "Red", 1)
self.assertResult(result, 1, "Blue", 1)
self.assertResult(result, 2, "Green", 0)
# just women adults by group
result = Value.get_value_summary(ruleset=color, filters=[dict(groups=[ladies.pk]), dict(ruleset=age.pk, categories="Adult")])[0]
self.assertResult(result, 0, "Red", 1)
self.assertResult(result, 1, "Blue", 1)
self.assertResult(result, 2, "Green", 0)
# remove one of the women from the group
ladies.update_contacts(self.user, [self.c2], False)
# get a new summary
result = Value.get_value_summary(ruleset=color, filters=[dict(groups=[ladies.pk]), dict(ruleset=age.pk, categories="Adult")])[0]
self.assertResult(result, 0, "Red", 1)
self.assertResult(result, 1, "Blue", 0)
self.assertResult(result, 2, "Green", 0)
# ok, back in she goes
ladies.update_contacts(self.user, [self.c2], True)
# do another run for contact 1
self.run_color_gender_flow(self.c1, "blue", "male", "16")
# totals should reflect the new value, not the old
result = Value.get_value_summary(ruleset=color)[0]
self.assertResult(result, 0, "Red", 1)
self.assertResult(result, 1, "Blue", 2)
self.assertResult(result, 2, "Green", 1)
# what if we do a partial run?
self.send_message(self.flow, "red", contact=self.c1, restart_participants=True)
# should change our male/female breakdown since c1 now no longer has a gender
result = Value.get_value_summary(ruleset=gender)[0]
self.assertEquals(2, len(result['categories']))
self.assertResult(result, 0, "Male", 1)
self.assertResult(result, 1, "Female", 2)
# back to a full flow
self.run_color_gender_flow(self.c1, "blue", "male", "16")
# ok, now segment by gender
result = Value.get_value_summary(ruleset=color, filters=[], segment=dict(ruleset=gender.pk, categories=["Male", "Female"]))
male_result = result[0]
self.assertResult(male_result, 0, "Red", 0)
self.assertResult(male_result, 1, "Blue", 1)
self.assertResult(male_result, 2, "Green", 1)
female_result = result[1]
self.assertResult(female_result, 0, "Red", 1)
self.assertResult(female_result, 1, "Blue", 1)
self.assertResult(female_result, 2, "Green", 0)
# segment by gender again, but use the contact field to do so
result = Value.get_value_summary(ruleset=color, filters=[], segment=dict(contact_field="Gender", values=["MALE", "Female"]))
male_result = result[0]
self.assertResult(male_result, 0, "Red", 0)
self.assertResult(male_result, 1, "Blue", 1)
self.assertResult(male_result, 2, "Green", 1)
female_result = result[1]
self.assertResult(female_result, 0, "Red", 1)
self.assertResult(female_result, 1, "Blue", 1)
self.assertResult(female_result, 2, "Green", 0)
# add in a filter at the same time
result = Value.get_value_summary(ruleset=color, filters=[dict(ruleset=color.pk, categories=["Blue"])],
segment=dict(ruleset=gender.pk, categories=["Male", "Female"]))
male_result = result[0]
self.assertResult(male_result, 0, "Red", 0)
self.assertResult(male_result, 1, "Blue", 1)
self.assertResult(male_result, 2, "Green", 0)
female_result = result[1]
self.assertResult(female_result, 0, "Red", 0)
self.assertResult(female_result, 1, "Blue", 1)
self.assertResult(female_result, 2, "Green", 0)
# ok, try segmenting by location instead
result = Value.get_value_summary(ruleset=color, segment=dict(location="State"))
eastern_result = result[0]
self.assertEquals('171591', eastern_result['boundary'])
self.assertEquals('Eastern Province', eastern_result['label'])
self.assertResult(eastern_result, 0, "Red", 0)
self.assertResult(eastern_result, 1, "Blue", 0)
self.assertResult(eastern_result, 2, "Green", 0)
kigali_result = result[1]
self.assertEquals('1708283', kigali_result['boundary'])
self.assertEquals('Kigali City', kigali_result['label'])
self.assertResult(kigali_result, 0, "Red", 0)
self.assertResult(kigali_result, 1, "Blue", 2)
self.assertResult(kigali_result, 2, "Green", 0)
# updating state location leads to updated data
self.c2.set_field(self.user, 'state', "Eastern Province")
result = Value.get_value_summary(ruleset=color, segment=dict(location="State"))
eastern_result = result[0]
self.assertEquals('171591', eastern_result['boundary'])
self.assertEquals('Eastern Province', eastern_result['label'])
self.assertResult(eastern_result, 0, "Red", 0)
self.assertResult(eastern_result, 1, "Blue", 1)
self.assertResult(eastern_result, 2, "Green", 0)
kigali_result = result[1]
self.assertEquals('1708283', kigali_result['boundary'])
self.assertEquals('Kigali City', kigali_result['label'])
self.assertResult(kigali_result, 0, "Red", 0)
self.assertResult(kigali_result, 1, "Blue", 1)
self.assertResult(kigali_result, 2, "Green", 0)
# segment by district instead
result = Value.get_value_summary(ruleset=color, segment=dict(parent="1708283", location="District"))
# only on district in kigali
self.assertEquals(1, len(result))
kigali_result = result[0]
self.assertEquals('3963734', kigali_result['boundary'])
self.assertEquals('Nyarugenge', kigali_result['label'])
self.assertResult(kigali_result, 0, "Red", 0)
self.assertResult(kigali_result, 1, "Blue", 2)
self.assertResult(kigali_result, 2, "Green", 0)
# do a sanity check on our choropleth view
self.login(self.admin)
response = self.client.get(reverse('flows.ruleset_choropleth', args=[color.pk]) +
"?_format=json&boundary=" + self.org.country.osm_id)
# response should be valid json
response = json.loads(response.content)
# should have breaks
self.assertTrue('breaks' in response)
# should have two categories, Blue and Others
self.assertEquals(2, len(response['categories']))
self.assertEquals("Blue", response['categories'][0])
self.assertEquals("Others", response['categories'][1])
# assert our kigali result
kigali_result = response['scores']['1708283']
self.assertEquals(1, kigali_result['score'])
self.assertEquals("Kigali City", kigali_result['name'])
self.assertEquals("Blue", kigali_result['results'][0]['label'])
self.assertEquals("Others", kigali_result['results'][1]['label'])
self.assertEquals(1, kigali_result['results'][0]['count'])
self.assertEquals(0, kigali_result['results'][1]['count'])
self.assertEquals(100, kigali_result['results'][0]['percentage'])
self.assertEquals(0, kigali_result['results'][1]['percentage'])
with patch('temba.values.models.Value.get_value_summary') as mock:
mock.return_value = []
response = self.client.get(reverse('flows.ruleset_choropleth', args=[color.pk]) +
"?_format=json&boundary=" + self.org.country.osm_id)
# response should be valid json
response = json.loads(response.content)
# should have two categories, Blue and Others
self.assertEquals(2, len(response['categories']))
self.assertEquals("", response['categories'][0])
self.assertEquals("", response['categories'][1])
# all counts and percentage are 0
self.assertEquals(0, response['totals']['count'])
self.assertEquals(0, response['totals']['results'][0]['count'])
self.assertEquals(0, response['totals']['results'][0]['percentage'])
self.assertEquals(0, response['totals']['results'][1]['count'])
self.assertEquals(0, response['totals']['results'][1]['percentage'])
# and empty string labels
self.assertEquals("", response['totals']['results'][0]['label'])
self.assertEquals("", response['totals']['results'][1]['label'])
# also check our analytics view
response = self.client.get(reverse('flows.ruleset_analytics'))
# make sure we have only one flow in it
flows = json.loads(response.context['flows'])
self.assertEquals(1, len(flows))
self.assertEquals(3, len(flows[0]['rules']))
def test_open_ended_word_frequencies(self):
flow = self.get_flow('random_word')
def run_flow(contact, word):
self.assertEquals("Thank you", self.send_message(flow, word, contact=contact, restart_participants=True))
(c1, c2, c3, c4, c5, c6) = (self.create_contact("Contact1", '0788111111'),
self.create_contact("Contact2", '0788222222'),
self.create_contact("Contact3", '0788333333'),
self.create_contact("Contact4", '0788444444'),
self.create_contact("Contact5", '0788555555'),
self.create_contact("Contact6", '0788666666', is_test=True))
run_flow(c1, "1 better place")
run_flow(c2, "the great coffee")
run_flow(c3, "1 cup of black tea")
run_flow(c4, "awesome than this encore")
run_flow(c5, "from an awesome place in kigali")
run_flow(c6, "awesome coffee")
random = RuleSet.objects.get(flow=flow, label="Random")
result = Value.get_value_summary(ruleset=random)[0]
self.assertEquals(10, len(result['categories']))
self.assertTrue(result['open_ended'])
self.assertResult(result, 0, "awesome", 2)
self.assertResult(result, 1, "place", 2)
self.assertResult(result, 2, "better", 1)
self.assertResult(result, 3, "black", 1)
self.assertResult(result, 4, "coffee", 1)
self.assertResult(result, 5, "cup", 1)
self.assertResult(result, 6, "encore", 1)
self.assertResult(result, 7, "great", 1)
self.assertResult(result, 8, "kigali", 1)
self.assertResult(result, 9, "tea", 1)
# add French to org languages
Language.create(self.org, self.admin, 'French', 'fre')
# make sure we cleared the cache
Value.invalidate_cache(ruleset=random)
# encore is a French stop word and should not be included this time
result = Value.get_value_summary(ruleset=random)[0]
self.assertEquals(9, len(result['categories']))
self.assertTrue(result['open_ended'])
self.assertResult(result, 0, "awesome", 2)
self.assertResult(result, 1, "place", 2)
self.assertResult(result, 2, "better", 1)
self.assertResult(result, 3, "black", 1)
self.assertResult(result, 4, "coffee", 1)
self.assertResult(result, 5, "cup", 1)
self.assertResult(result, 6, "great", 1)
self.assertResult(result, 7, "kigali", 1)
self.assertResult(result, 8, "tea", 1)
| agpl-3.0 | -7,767,810,077,481,635,000 | 46.292906 | 138 | 0.620942 | false |
rdelval/aurora | src/test/python/apache/thermos/cli/commands/test_simplerun.py | 13 | 1425 | #
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import getpass
import mock
from apache.thermos.cli.commands.simplerun import simplerun
@mock.patch('apache.thermos.cli.commands.simplerun.really_run')
def test_simplerun(really_run_mock):
options_mock = mock.Mock(
spec_set=('root', 'user', 'name', 'task_id', 'prebound_ports', 'bindings', 'daemon'))
options_mock.root = '/tmp/root'
options_mock.user = getpass.getuser()
options_mock.name = 'simple'
options_mock.task_id = None
options_mock.prebound_ports = []
options_mock.bindings = {}
options_mock.daemon = False
simplerun(['--', 'echo', 'hello', 'world'], options_mock)
args, kw = really_run_mock.call_args
thermos_task, root, sandbox = args
assert str(thermos_task.task.name()) == options_mock.name
assert str(thermos_task.task.processes()[0].cmdline()) == 'echo hello world'
assert root == '/tmp/root'
assert sandbox is not None
| apache-2.0 | 8,454,334,175,257,099,000 | 33.756098 | 91 | 0.718596 | false |
daskos/mentos | mentos/utils.py | 2 | 4957 | from __future__ import (absolute_import, division, print_function,
unicode_literals)
import logging
from binascii import a2b_base64, b2a_base64
from contextlib import contextmanager
from multiprocessing.pool import ThreadPool
from mentos.exceptions import (DetectorClosed, NoLeadingMaster,
NoRedirectException)
from tornado import gen, ioloop
from tornado.escape import json_decode, json_encode
from zoonado import Zoonado
log = logging.getLogger(__name__)
decode = json_decode
encode = json_encode
def encode_data(data):
return b2a_base64(data).strip().decode('ascii')
def decode_data(data):
return a2b_base64(data)
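# Round-trip sketch (editor's illustration, not in the original module):
# decode_data(encode_data(b'mesos')) == b'mesos'; encode_data returns an
# ASCII str, decode_data returns bytes.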
_workers = ThreadPool(10)
def run_background(func, callback, args=(), kwds={}):
def _callback(result):
ioloop.IOLoop.instance().add_callback(lambda: callback(result))
_workers.apply_async(func, args, kwds, _callback)
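# Usage sketch (hypothetical callables, not part of the original module):
# run a blocking call on the worker pool and hand its result back to the
# IOLoop thread:
#   run_background(fetch_state, lambda result: log.info(result))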
@contextmanager
def log_errors(pdb=False): # pragma: no cover
try:
yield
except (gen.Return):
raise
except Exception as e:
log.exception(e)
if pdb:
import pdb
pdb.set_trace()
raise
POSTFIX = {
'ns': 1e-9,
'us': 1e-6,
'ms': 1e-3,
'secs': 1,
'mins': 60,
'hrs': 60 * 60,
'days': 24 * 60 * 60,
'weeks': 7 * 24 * 60 * 60
}
def parse_duration(s):
s = s.strip()
unit = None
postfix = None
for postfix, unit in POSTFIX.items():
if s.endswith(postfix):
try:
return float(s[:-len(postfix)]) * unit
except ValueError: # pragma: no cover
continue
raise Exception('Unknown duration `{}`; supported units are {}'.format(
s, ','.join('`{}`'.format(n) for n in POSTFIX)))
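# Examples of the accepted grammar (assuming the POSTFIX table above):
# parse_duration('2 mins') == 120.0 and parse_duration('500ms') == 0.5;
# an unknown unit raises an Exception.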
class MasterInfo(object):
detector = None
def __init__(self, uri):
self.uri = uri
self.seq = None
self.info = {'address': {}}
self.closing = False
if 'zk://' in uri:
log.warn('Using Zookeeper for Discovery')
self.quorum = ','.join([zoo[zoo.index('://') + 3:]
for zoo in self.uri.split(',')])
self.detector = Zoonado(self.quorum, session_timeout=6000)
ioloop.IOLoop.current().add_callback(self.detector.start)
self.current_location = None
def redirected_uri(self, uri):
if not self.detector:
self.uri = uri
else:
raise NoRedirectException(
'Using Zookeeper, cannot set a redirect url')
@gen.coroutine
def get_endpoint(self, path=''):
if self.closing:
raise DetectorClosed('Detector is closed')
if self.detector:
children = yield self.detector.get_children('/mesos')
children = [child for child in children if child != 'log_replicas']
if not children: # pragma: no cover
log.error('No leading Master found in zookeeper')
raise NoLeadingMaster('No leading Master found in zookeeper')
self.seq = min(children)
data = yield self.detector.get_data('/mesos/' + self.seq)
self.info = decode(data)
else:
host_port = self.uri.split(':')
log.debug(host_port)
if len(host_port) == 2:
self.info['address']['hostname'] = host_port[0]
self.info['address']['port'] = int(host_port[1])
else:
self.info['address']['hostname'] = host_port[0]
self.info['address']['port'] = 5050
log.debug('Found new Master, info={info}'.format(info=self.info))
if 'hostname' in self.info['address']:
host = self.info['address']['hostname']
elif 'ip' in self.info['address']: # pragma: no cover
host = self.info['address']['ip']
port = self.info['address']['port']
self.current_location = '{host}:{port}'.format(host=host, port=port)
raise gen.Return('http://{current_location}{path}'.format(
current_location=self.current_location, path=path))
def close(self):
if self.closing:
return
self.closing = True
def on_complete(self):
log.debug('Closed detector')
run_background(self.detector.close, on_complete)
def drain(iterable):
'''Helper method that empties an iterable as it is iterated over.
Works for: dict, collections.deque, list, set
'''
if getattr(iterable, 'popleft', False):
def next_item(coll):
return coll.popleft()
elif getattr(iterable, 'popitem', False):
def next_item(coll):
return coll.popitem()
else:
def next_item(coll):
return coll.pop()
while True:
try:
yield next_item(iterable)
except (IndexError, KeyError):
# PEP 479: raising StopIteration inside a generator becomes a
# RuntimeError on Python 3.7+, so end the generator with a plain return.
return
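# Usage sketch (editor's illustration): consume a deque while iterating:
#   from collections import deque
#   d = deque([1, 2, 3])
#   list(drain(d)) == [1, 2, 3] and len(d) == 0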
| apache-2.0 | -7,923,254,968,304,999,000 | 27.819767 | 79 | 0.571313 | false |
mwrlabs/veripy | contrib/rfc3736/__init__.py | 1 | 6086 | from veripy.models import ComplianceTestSuite
from veripy.models.decorators import must, should
import client
#import relay_agent
import server
class StatelessDHCPv6ServiceClientSpecification(ComplianceTestSuite):
"""
Stateless Dynamic Host Configuration Protocol Service for IPv6 (DHCPv6 Client)
These tests are designed to verify the readiness of a DHCPv6 client implementation
vis-a-vis the Stateless Dynamic Host Configuration Protocol for IPv6 specification.
@private
Author: MWR
Source: IPv6 Ready DHCPv6 Interoperability Test Suite (Section 7)
"""
TestCase001 = client.basic_message_exchanges.BasicMessageExchangeTestCase
TestCase002 = client.implementation_of_dhcp_constants.MulticastAddressesTestCase
TestCase003 = client.implementation_of_dhcp_constants.ValidUDPPortTestCase
TestCase004 = client.implementation_of_dhcp_constants.InvalidUDPPortTestCase
TestCase005 = client.client_message_format.ClientMessageFormatTestCase
TestCase006 = client.option_request_option_format.DNSRecursiveNameServerOptionTestCase
TestCase007 = client.option_request_option_format.DomainSearchListOptionTestCase
TestCase008 = client.transmission_of_information_request_messages.ReliabilityOfDHCPv6RetransmissionTestCase
TestCase011 = client.reception_of_reply_messages_for_dns_configuration_options.DNSRecursiveNameServerOptionTestCase
TestCase012 = client.reception_of_reply_messages_for_dns_configuration_options.DomainSearchListOptionTestCase
TestCase013 = client.reception_of_invalid_reply_message.NoServerIdentifierOptionTestCase
TestCase014 = client.reception_of_invalid_reply_message.TransactionIDMismatchTestCase
#TestCase015 = client.client_message_validation.SolicitMessageTestCase
#TestCase016 = client.client_message_validation.RequestMessageTestCase
#TestCase017 = client.client_message_validation.ConfirmMessageTestCase
#TestCase018 = client.client_message_validation.RenewMessageTestCase
#TestCase019 = client.client_message_validation.RebindMessageTestCase
#TestCase020 = client.client_message_validation.DeclineMessageTestCase
#TestCase021 = client.client_message_validation.ReleaseMessageTestCase
#TestCase022 = client.client_message_validation.InformationRequestMessageTestCase
#TestCase023 = client.client_message_validation.RelayForwardMessageTestCase
#TestCase024 = client.client_message_validation.RelayReplyMessageTestCase
TestCase025 = client.client_dhcp_unique_identifier.DUIDFormatTestCase
class StatelessDHCPv6ServiceServerSpecification(ComplianceTestSuite):
"""
Stateless Dynamic Host Configuration Protocol Service for IPv6 (DHCPv6 Server)
These tests are designed to verify the readiness of a DHCPv6 server implementation
vis-a-vis the Stateless Dynamic Host Configuration Protocol for IPv6 specification.
@private
Author: MWR
Source: IPv6 Ready DHCPv6 Interoperability Test Suite (Section 8)
"""
TestCase001 = server.basic_message_exchanges.BasicMessageExchangeTestCase
TestCase002 = server.transaction_id_consistency.TransactionIDConsistencyTestCase
TestCase003 = server.implementation_of_dhcp_constants.ValidUDPPortTestCase
TestCase004 = server.implementation_of_dhcp_constants.InvalidUDPPortTestCase
TestCase005 = server.server_message_format.ClientServerMessageFormatTestCase
TestCase006 = server.server_message_format.RelayAgentServerMessageFormatTestCase
TestCase007 = server.server_identifier_option_format.ServerIdentifierOptionFormatTestCase
TestCase008 = server.dhcp_unique_identifier_contents.DHCPUniqueIdentifierContentsTestCase
TestCase009 = server.dns_recursive_name_server_option_format.DNSRecursiveNameServerOptionFormatTestCase
TestCase010 = server.domain_search_list_option_format.DomainSearchListOptionFormatTestCase
TestCase011 = server.interface_id_option_format.InterfaceIDOptionFormatTestCase
TestCase012 = server.relay_message_option_format.RelayMessageOptionFormatTestCase
TestCase013 = should(server.configuration_of_dns_options.ReturningDNSRecursiveNameServerOptionTestCase)
TestCase014 = server.configuration_of_dns_options.ReturningDNSServerandDomainSearchListOptionsTestCase
TestCase015 = should(server.creation_and_transmission_of_reply_messages.ReplyMessageTransmissionTestCase)
TestCase016 = server.creation_and_transmission_of_reply_messages.ReplyMessageTransmissionWithDNSRNSOptionTestCase
TestCase017 = server.creation_and_transmission_of_reply_messages.ReplyMessageTransmissionWithDomainSearchListOptionTestCase
TestCase018 = server.creation_and_transmission_of_reply_messages.RelayReplyMessageWithoutInterfaceIDTestCase
TestCase019 = server.creation_and_transmission_of_reply_messages.RelayReplyMessageWithInterfaceIDTestCase
TestCase020 = server.creation_and_transmission_of_relay_reply_messages.RelayReplyMessageTransmissionTestCase
TestCase021 = server.creation_and_transmission_of_relay_reply_messages.MultipleRelayReplyMessageTransmissionTestCase
TestCase022 = server.creation_and_transmission_of_relay_reply_messages.EncapsulatedRelayReplyMessageTransmissionTestCase
TestCase023 = server.reception_of_invalid_information_request_message.ReceptionOfInformationRequestMessageViaUnicastTestCase
TestCase024 = server.reception_of_invalid_information_request_message.ContainsServerIdentifierOptionTestCase
TestCase025 = server.reception_of_invalid_information_request_message.ContainsIANAOptionTestCase
TestCase026 = server.server_message_validation.AdvertiseMessageTestCase
TestCase027 = server.server_message_validation.ReplyMessageTestCase
TestCase028 = server.server_message_validation.RelayReplyMessageTestCase
ComplianceTestSuite.register('stateless-dhcpv6-client', StatelessDHCPv6ServiceClientSpecification)
#ComplianceTestSuite.register('dhcpv6-relay-agent', StatelessDHCPv6ServiceRelayAgentSpecification)
ComplianceTestSuite.register('stateless-dhcpv6-server', StatelessDHCPv6ServiceServerSpecification)
| gpl-3.0 | -650,322,378,410,412,400 | 68.159091 | 128 | 0.836674 | false |
tpokorra/pykolab | pykolab/cli/cmd_remove_mailaddress.py | 1 | 3257 | # -*- coding: utf-8 -*-
# Copyright 2010-2013 Kolab Systems AG (http://www.kolabsys.com)
#
# Jeroen van Meeuwen (Kolab Systems) <vanmeeuwen a kolabsys.com>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
import sys
import commands
import pykolab
from pykolab.auth import Auth
from pykolab import utils
from pykolab.translate import _
log = pykolab.getLogger('pykolab.cli')
conf = pykolab.getConf()
def __init__():
commands.register('remove_mail', execute, description=description())
def description():
return """Remove a recipient's mail address."""
def execute(*args, **kw):
try:
email_address = conf.cli_args.pop(0)
except IndexError, errmsg:
email_address = utils.ask_question("Email address to remove")
# Get the domain from the email address
if len(email_address.split('@')) > 1:
domain = email_address.split('@')[1]
else:
log.error(_("Invalid or unqualified email address."))
sys.exit(1)
auth = Auth()
auth.connect(domain=domain)
recipients = auth.find_recipient(email_address)
if len(recipients) == 0:
log.error(_("No recipient found for email address %r") % (email_address))
sys.exit(1)
log.debug(_("Found the following recipient(s): %r") % (recipients), level=8)
mail_attributes = conf.get_list(domain, 'mail_attributes')
if mail_attributes is None or len(mail_attributes) < 1:
mail_attributes = conf.get_list(conf.get('kolab', 'auth_mechanism'), 'mail_attributes')
log.debug(_("Using the following mail attributes: %r") % (mail_attributes), level=8)
if isinstance(recipients, basestring):
recipient = recipients
# Only a single recipient found, remove the address
attributes = auth.get_entry_attributes(domain, recipient, mail_attributes)
# See which attribute holds the value we're trying to remove
for attribute in attributes.keys():
if isinstance(attributes[attribute], list):
if email_address in attributes[attribute]:
attributes[attribute].pop(attributes[attribute].index(email_address))
replace_attributes = {
attribute: attributes[attribute]
}
auth.set_entry_attributes(domain, recipient, replace_attributes)
else:
if email_address == attributes[attribute]:
auth.set_entry_attributes(domain, recipient, {attribute: None})
pass
else:
print >> sys.stderr, _("Found the following recipients:")
for recipient in recipients:
print recipient
| gpl-3.0 | -5,227,219,519,920,948,000 | 34.021505 | 95 | 0.658274 | false |
CoDEmanX/ArangoDB | 3rdParty/V8-4.3.61/build/gyp/test/win/gyptest-link-pgo.py | 239 | 2993 | #!/usr/bin/env python
# Copyright (c) 2013 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""
Make sure PGO is working properly.
"""
import TestGyp
import os
import sys
if sys.platform == 'win32':
test = TestGyp.TestGyp(formats=['msvs', 'ninja'])
CHDIR = 'linker-flags'
test.run_gyp('pgo.gyp', chdir=CHDIR)
def IsPGOAvailable():
"""Returns true if the Visual Studio available here supports PGO."""
test.build('pgo.gyp', 'gen_linker_option', chdir=CHDIR)
tmpfile = test.read(test.built_file_path('linker_options.txt', chdir=CHDIR))
# str.find() returns -1 (truthy) when absent, and iterating a string
# yields characters, so do a substring check per line instead.
return any('PGOPTIMIZE' in line for line in tmpfile.splitlines())
# Test generated build files look fine.
if test.format == 'ninja':
ninja = test.built_file_path('obj/test_pgo_instrument.ninja', chdir=CHDIR)
test.must_contain(ninja, '/LTCG:PGINSTRUMENT')
test.must_contain(ninja, 'test_pgo.pgd')
ninja = test.built_file_path('obj/test_pgo_optimize.ninja', chdir=CHDIR)
test.must_contain(ninja, '/LTCG:PGOPTIMIZE')
test.must_contain(ninja, 'test_pgo.pgd')
ninja = test.built_file_path('obj/test_pgo_update.ninja', chdir=CHDIR)
test.must_contain(ninja, '/LTCG:PGUPDATE')
test.must_contain(ninja, 'test_pgo.pgd')
elif test.format == 'msvs':
LTCG_FORMAT = '<LinkTimeCodeGeneration>%s</LinkTimeCodeGeneration>'
vcproj = test.workpath('linker-flags/test_pgo_instrument.vcxproj')
test.must_contain(vcproj, LTCG_FORMAT % 'PGInstrument')
test.must_contain(vcproj, 'test_pgo.pgd')
vcproj = test.workpath('linker-flags/test_pgo_optimize.vcxproj')
test.must_contain(vcproj, LTCG_FORMAT % 'PGOptimization')
test.must_contain(vcproj, 'test_pgo.pgd')
vcproj = test.workpath('linker-flags/test_pgo_update.vcxproj')
test.must_contain(vcproj, LTCG_FORMAT % 'PGUpdate')
test.must_contain(vcproj, 'test_pgo.pgd')
# When PGO is available, try building binaries with PGO.
if IsPGOAvailable():
pgd_path = test.built_file_path('test_pgo.pgd', chdir=CHDIR)
# Test if 'PGInstrument' generates PGD (Profile-Guided Database) file.
if os.path.exists(pgd_path):
test.unlink(pgd_path)
test.must_not_exist(pgd_path)
test.build('pgo.gyp', 'test_pgo_instrument', chdir=CHDIR)
test.must_exist(pgd_path)
# Test if 'PGOptimize' works well
test.build('pgo.gyp', 'test_pgo_optimize', chdir=CHDIR)
test.must_contain_any_line(test.stdout(), ['profiled functions'])
# Test if 'PGUpdate' works well
test.build('pgo.gyp', 'test_pgo_update', chdir=CHDIR)
# With 'PGUpdate', linker should not complain that sources are changed after
# the previous training run.
test.touch(test.workpath('linker-flags/inline_test_main.cc'))
test.unlink(test.built_file_path('test_pgo_update.exe', chdir=CHDIR))
test.build('pgo.gyp', 'test_pgo_update', chdir=CHDIR)
test.must_contain_any_line(test.stdout(), ['profiled functions'])
test.pass_test()
| apache-2.0 | 1,721,852,941,685,641,200 | 38.906667 | 80 | 0.696625 | false |
toshywoshy/ansible | lib/ansible/plugins/lookup/k8s.py | 38 | 11371 | #
# Copyright 2018 Red Hat | Ansible
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
DOCUMENTATION = """
lookup: k8s
version_added: "2.5"
short_description: Query the K8s API
description:
- Uses the OpenShift Python client to fetch a specific object by name, all matching objects within a
namespace, or all matching objects for all namespaces, as well as information about the cluster.
- Provides access to the full range of K8s APIs.
- Enables authentication via config file, certificates, password or token.
options:
cluster_info:
description:
- Use to specify the type of cluster information you are attempting to retrieve. Will take priority
over all the other options.
api_version:
description:
- Use to specify the API version. If I(resource_definition) is provided, the I(apiVersion) from the
I(resource_definition) will override this option.
default: v1
kind:
description:
- Use to specify an object model. If I(resource_definition) is provided, the I(kind) from a
I(resource_definition) will override this option.
required: true
resource_name:
description:
- Fetch a specific object by name. If I(resource_definition) is provided, the I(metadata.name) value
from the I(resource_definition) will override this option.
namespace:
description:
- Limit the objects returned to a specific namespace. If I(resource_definition) is provided, the
I(metadata.namespace) value from the I(resource_definition) will override this option.
label_selector:
description:
- Additional labels to include in the query. Ignored when I(resource_name) is provided.
field_selector:
description:
- Specific fields on which to query. Ignored when I(resource_name) is provided.
resource_definition:
description:
- "Provide a YAML configuration for an object. NOTE: I(kind), I(api_version), I(resource_name),
and I(namespace) will be overwritten by corresponding values found in the provided I(resource_definition)."
src:
description:
- "Provide a path to a file containing a valid YAML definition of an object dated. Mutually
exclusive with I(resource_definition). NOTE: I(kind), I(api_version), I(resource_name), and I(namespace)
will be overwritten by corresponding values found in the configuration read in from the I(src) file."
- Reads from the local file system. To read from the Ansible controller's file system, use the file lookup
plugin or template lookup plugin, combined with the from_yaml filter, and pass the result to
I(resource_definition). See Examples below.
host:
description:
- Provide a URL for accessing the API. Can also be specified via K8S_AUTH_HOST environment variable.
api_key:
description:
- Token used to authenticate with the API. Can also be specified via K8S_AUTH_API_KEY environment variable.
kubeconfig:
description:
- Path to an existing Kubernetes config file. If not provided, and no other connection
options are provided, the openshift client will attempt to load the default
configuration file from I(~/.kube/config.json). Can also be specified via K8S_AUTH_KUBECONFIG environment
variable.
context:
description:
- The name of a context found in the config file. Can also be specified via K8S_AUTH_CONTEXT environment
variable.
username:
description:
- Provide a username for authenticating with the API. Can also be specified via K8S_AUTH_USERNAME environment
variable.
password:
description:
- Provide a password for authenticating with the API. Can also be specified via K8S_AUTH_PASSWORD environment
variable.
client_cert:
description:
- Path to a certificate used to authenticate with the API. Can also be specified via K8S_AUTH_CERT_FILE
environment
variable.
aliases: [ cert_file ]
client_key:
description:
- Path to a key file used to authenticate with the API. Can also be specified via K8S_AUTH_KEY_FILE environment
variable.
aliases: [ key_file ]
ca_cert:
description:
- Path to a CA certificate used to authenticate with the API. Can also be specified via K8S_AUTH_SSL_CA_CERT
environment variable.
aliases: [ ssl_ca_cert ]
validate_certs:
description:
- Whether or not to verify the API server's SSL certificates. Can also be specified via K8S_AUTH_VERIFY_SSL
environment variable.
type: bool
aliases: [ verify_ssl ]
requirements:
- "python >= 2.7"
- "openshift >= 0.6"
- "PyYAML >= 3.11"
notes:
- "The OpenShift Python client wraps the K8s Python client, providing full access to
all of the APIS and models available on both platforms. For API version details and
additional information visit https://github.com/openshift/openshift-restclient-python"
"""
EXAMPLES = """
- name: Fetch a list of namespaces
set_fact:
projects: "{{ lookup('k8s', api_version='v1', kind='Namespace') }}"
- name: Fetch all deployments
set_fact:
deployments: "{{ lookup('k8s', kind='Deployment') }}"
- name: Fetch all deployments in a namespace
set_fact:
deployments: "{{ lookup('k8s', kind='Deployment', namespace='testing') }}"
- name: Fetch a specific deployment by name
set_fact:
deployments: "{{ lookup('k8s', kind='Deployment', namespace='testing', resource_name='elastic') }}"
- name: Fetch with label selector
set_fact:
service: "{{ lookup('k8s', kind='Service', label_selector='app=galaxy') }}"
# Use parameters from a YAML config
- name: Load config from the Ansible controller filesystem
set_fact:
config: "{{ lookup('file', 'service.yml') | from_yaml }}"
- name: Using the config (loaded from a file in prior task), fetch the latest version of the object
set_fact:
service: "{{ lookup('k8s', resource_definition=config) }}"
- name: Use a config from the local filesystem
set_fact:
service: "{{ lookup('k8s', src='service.yml') }}"
"""
RETURN = """
_list:
description:
- One or more object definitions returned from the API.
type: complex
contains:
api_version:
description: The versioned schema of this representation of an object.
returned: success
type: str
kind:
description: Represents the REST resource this object represents.
returned: success
type: str
metadata:
description: Standard object metadata. Includes name, namespace, annotations, labels, etc.
returned: success
type: complex
spec:
description: Specific attributes of the object. Will vary based on the I(api_version) and I(kind).
returned: success
type: complex
status:
description: Current status details for the object.
returned: success
type: complex
"""
from ansible.plugins.lookup import LookupBase
from ansible.module_utils.k8s.common import K8sAnsibleMixin
from ansible.errors import AnsibleError
try:
from openshift.dynamic import DynamicClient
from openshift.dynamic.exceptions import NotFoundError
HAS_K8S_MODULE_HELPER = True
k8s_import_exception = None
except ImportError as e:
HAS_K8S_MODULE_HELPER = False
k8s_import_exception = e
try:
import yaml
HAS_YAML = True
except ImportError:
HAS_YAML = False
class KubernetesLookup(K8sAnsibleMixin):
def __init__(self):
if not HAS_K8S_MODULE_HELPER:
raise Exception(
"Requires the OpenShift Python client. Try `pip install openshift`. Detail: {0}".format(k8s_import_exception)
)
if not HAS_YAML:
raise Exception(
"Requires PyYAML. Try `pip install PyYAML`"
)
self.kind = None
self.name = None
self.namespace = None
self.api_version = None
self.label_selector = None
self.field_selector = None
self.include_uninitialized = None
self.resource_definition = None
self.helper = None
self.connection = {}
def fail(self, msg=None):
raise AnsibleError(msg)
def run(self, terms, variables=None, **kwargs):
self.params = kwargs
self.client = self.get_api_client()
cluster_info = kwargs.get('cluster_info')
if cluster_info == 'version':
return [self.client.version]
if cluster_info == 'api_groups':
return [self.client.resources.api_groups]
self.kind = kwargs.get('kind')
self.name = kwargs.get('resource_name')
self.namespace = kwargs.get('namespace')
self.api_version = kwargs.get('api_version', 'v1')
self.label_selector = kwargs.get('label_selector')
self.field_selector = kwargs.get('field_selector')
self.include_uninitialized = kwargs.get('include_uninitialized', False)
resource_definition = kwargs.get('resource_definition')
src = kwargs.get('src')
if src:
resource_definition = self.load_resource_definitions(src)[0]
if resource_definition:
self.kind = resource_definition.get('kind', self.kind)
self.api_version = resource_definition.get('apiVersion', self.api_version)
self.name = resource_definition.get('metadata', {}).get('name', self.name)
self.namespace = resource_definition.get('metadata', {}).get('namespace', self.namespace)
if not self.kind:
raise AnsibleError(
"Error: no Kind specified. Use the 'kind' parameter, or provide an object YAML configuration "
"using the 'resource_definition' parameter."
)
resource = self.find_resource(self.kind, self.api_version, fail=True)
try:
k8s_obj = resource.get(name=self.name, namespace=self.namespace, label_selector=self.label_selector, field_selector=self.field_selector)
except NotFoundError:
return []
if self.name:
return [k8s_obj.to_dict()]
return k8s_obj.to_dict().get('items')
class LookupModule(LookupBase):
def run(self, terms, variables=None, **kwargs):
return KubernetesLookup().run(terms, variables=variables, **kwargs)
| gpl-3.0 | 5,912,794,864,356,581,000 | 37.0301 | 148 | 0.658253 | false |
nealtodd/django | tests/admin_custom_urls/models.py | 288 | 2513 | from functools import update_wrapper
from django.contrib import admin
from django.core.urlresolvers import reverse
from django.db import models
from django.http import HttpResponseRedirect
from django.utils.encoding import python_2_unicode_compatible
@python_2_unicode_compatible
class Action(models.Model):
name = models.CharField(max_length=50, primary_key=True)
description = models.CharField(max_length=70)
def __str__(self):
return self.name
class ActionAdmin(admin.ModelAdmin):
"""
A ModelAdmin for the Action model that changes the URL of the add_view
to '<app name>/<model name>/!add/'
The Action model has a CharField PK.
"""
list_display = ('name', 'description')
def remove_url(self, name):
"""
Remove all entries named 'name' from the ModelAdmin instance URL
patterns list
"""
return [url for url in super(ActionAdmin, self).get_urls() if url.name != name]
def get_urls(self):
# Add the URL of our custom 'add_view' view to the front of the URLs
# list. Remove the existing one(s) first
from django.conf.urls import url
def wrap(view):
def wrapper(*args, **kwargs):
return self.admin_site.admin_view(view)(*args, **kwargs)
return update_wrapper(wrapper, view)
info = self.model._meta.app_label, self.model._meta.model_name
view_name = '%s_%s_add' % info
return [
url(r'^!add/$', wrap(self.add_view), name=view_name),
] + self.remove_url(view_name)
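# For illustration (assumed mounting, not part of the original file): with
# this site served under /admin/, the add view for Action becomes
# /admin/admin_custom_urls/action/!add/ instead of the default .../action/add/.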
class Person(models.Model):
name = models.CharField(max_length=20)
class PersonAdmin(admin.ModelAdmin):
def response_post_save_add(self, request, obj):
return HttpResponseRedirect(
reverse('admin:admin_custom_urls_person_history', args=[obj.pk]))
def response_post_save_change(self, request, obj):
return HttpResponseRedirect(
reverse('admin:admin_custom_urls_person_delete', args=[obj.pk]))
class Car(models.Model):
name = models.CharField(max_length=20)
class CarAdmin(admin.ModelAdmin):
def response_add(self, request, obj, post_url_continue=None):
return super(CarAdmin, self).response_add(
request, obj, post_url_continue=reverse('admin:admin_custom_urls_car_history', args=[obj.pk]))
site = admin.AdminSite(name='admin_custom_urls')
site.register(Action, ActionAdmin)
site.register(Person, PersonAdmin)
site.register(Car, CarAdmin)
| bsd-3-clause | 898,295,780,507,409,400 | 29.277108 | 106 | 0.666932 | false |
jpwhite3/wilmu-linux-toolkit | lab_toolkit/includes/linecache2/tests/test_linecache.py | 17 | 6508 | """ Tests for the linecache module """
import linecache2 as linecache
import unittest2 as unittest
import os.path
import tempfile
from fixtures import NestedTempfile
FILENAME = os.__file__
if FILENAME.endswith('.pyc'):
FILENAME = FILENAME[:-1]
NONEXISTENT_FILENAME = FILENAME + '.missing'
INVALID_NAME = '!@$)(!@#_1'
EMPTY = ''
TESTS = 'inspect_fodder inspect_fodder2 mapping_tests'
TESTS = TESTS.split()
TEST_PATH = os.path.dirname(__file__)
MODULES = "linecache abc".split()
MODULE_PATH = os.path.dirname(FILENAME)
SOURCE_1 = '''
" Docstring "
def function():
return result
'''
SOURCE_2 = '''
def f():
return 1 + 1
a = f()
'''
SOURCE_3 = '''
def f():
return 3''' # No ending newline
class LineCacheTests(unittest.TestCase):
def setUp(self):
tempdir = NestedTempfile()
tempdir.setUp()
self.addCleanup(tempdir.cleanUp)
def test_getline(self):
getline = linecache.getline
# Bad values for line number should return an empty string
self.assertEqual(getline(FILENAME, 2**15), EMPTY)
self.assertEqual(getline(FILENAME, -1), EMPTY)
# Float values currently raise TypeError, should it?
self.assertRaises(TypeError, getline, FILENAME, 1.1)
# Bad filenames should return an empty string
self.assertEqual(getline(EMPTY, 1), EMPTY)
self.assertEqual(getline(INVALID_NAME, 1), EMPTY)
# Check whether lines correspond to those from file iteration
for entry in TESTS:
filename = os.path.join(TEST_PATH, entry) + '.py'
with open(filename) as file:
for index, line in enumerate(file):
self.assertEqual(line, getline(filename, index + 1))
# Check module loading
for entry in MODULES:
filename = os.path.join(MODULE_PATH, entry) + '.py'
with open(filename) as file:
for index, line in enumerate(file):
self.assertEqual(line, getline(filename, index + 1))
# Check that bogus data isn't returned (issue #1309567)
empty = linecache.getlines('a/b/c/__init__.py')
self.assertEqual(empty, [])
def test_no_ending_newline(self):
temp_file = tempfile.NamedTemporaryFile(
suffix='.py', mode='w', delete=False)
self.addCleanup(os.unlink, temp_file.name)
with open(temp_file.name, "w") as fp:
fp.write(SOURCE_3)
lines = linecache.getlines(temp_file.name)
self.assertEqual(lines, ["\n", "def f():\n", " return 3\n"])
def test_clearcache(self):
cached = []
for entry in TESTS:
filename = os.path.join(TEST_PATH, entry) + '.py'
cached.append(filename)
linecache.getline(filename, 1)
# Are all files cached?
cached_empty = [fn for fn in cached if fn not in linecache.cache]
self.assertEqual(cached_empty, [])
# Can we clear the cache?
linecache.clearcache()
cached_empty = [fn for fn in cached if fn in linecache.cache]
self.assertEqual(cached_empty, [])
def test_checkcache(self):
getline = linecache.getline
# Create a source file and cache its contents
temp_file = tempfile.NamedTemporaryFile(
suffix='.py', mode='w', delete=False)
source_name = temp_file.name
self.addCleanup(os.unlink, source_name)
with open(source_name, 'w') as source:
source.write(SOURCE_1)
getline(source_name, 1)
# Keep a copy of the old contents
source_list = []
with open(source_name) as source:
for index, line in enumerate(source):
self.assertEqual(line, getline(source_name, index + 1))
source_list.append(line)
with open(source_name, 'w') as source:
source.write(SOURCE_2)
# Try to update a bogus cache entry
linecache.checkcache('dummy')
# Check that the cache matches the old contents
for index, line in enumerate(source_list):
self.assertEqual(line, getline(source_name, index + 1))
# Update the cache and check whether it matches the new source file
linecache.checkcache(source_name)
with open(source_name) as source:
for index, line in enumerate(source):
self.assertEqual(line, getline(source_name, index + 1))
source_list.append(line)
def test_lazycache_no_globals(self):
lines = linecache.getlines(FILENAME)
linecache.clearcache()
self.assertEqual(False, linecache.lazycache(FILENAME, None))
self.assertEqual(lines, linecache.getlines(FILENAME))
@unittest.skipIf("__loader__" not in globals(), "Modules not PEP302 by default")
def test_lazycache_smoke(self):
lines = linecache.getlines(NONEXISTENT_FILENAME, globals())
linecache.clearcache()
self.assertEqual(
True, linecache.lazycache(NONEXISTENT_FILENAME, globals()))
self.assertEqual(1, len(linecache.cache[NONEXISTENT_FILENAME]))
# Note here that we're looking up a non-existent filename with no
# globals: this would error if the lazy value wasn't resolved.
self.assertEqual(lines, linecache.getlines(NONEXISTENT_FILENAME))
def test_lazycache_provide_after_failed_lookup(self):
linecache.clearcache()
lines = linecache.getlines(NONEXISTENT_FILENAME, globals())
linecache.clearcache()
linecache.getlines(NONEXISTENT_FILENAME)
linecache.lazycache(NONEXISTENT_FILENAME, globals())
self.assertEqual(lines, linecache.updatecache(NONEXISTENT_FILENAME))
def test_lazycache_check(self):
linecache.clearcache()
linecache.lazycache(NONEXISTENT_FILENAME, globals())
linecache.checkcache()
def test_lazycache_bad_filename(self):
linecache.clearcache()
self.assertEqual(False, linecache.lazycache('', globals()))
self.assertEqual(False, linecache.lazycache('<foo>', globals()))
@unittest.skipIf("__loader__" not in globals(), "Modules not PEP302 by default")
def test_lazycache_already_cached(self):
linecache.clearcache()
lines = linecache.getlines(NONEXISTENT_FILENAME, globals())
self.assertEqual(
False,
linecache.lazycache(NONEXISTENT_FILENAME, globals()))
self.assertEqual(4, len(linecache.cache[NONEXISTENT_FILENAME]))
| mit | -2,525,976,616,793,523,700 | 34.369565 | 84 | 0.633067 | false |
daviddupont69/CouchPotatoServer | libs/sqlalchemy/sql/__init__.py | 18 | 1138 | # sql/__init__.py
# Copyright (C) 2005-2013 the SQLAlchemy authors and contributors <see AUTHORS file>
#
# This module is part of SQLAlchemy and is released under
# the MIT License: http://www.opensource.org/licenses/mit-license.php
from sqlalchemy.sql.expression import (
Alias,
ClauseElement,
ColumnCollection,
ColumnElement,
CompoundSelect,
Delete,
FromClause,
Insert,
Join,
Select,
Selectable,
TableClause,
Update,
alias,
and_,
asc,
between,
bindparam,
case,
cast,
collate,
column,
delete,
desc,
distinct,
except_,
except_all,
exists,
extract,
false,
func,
insert,
intersect,
intersect_all,
join,
label,
literal,
literal_column,
modifier,
not_,
null,
or_,
outerjoin,
outparam,
over,
select,
subquery,
table,
text,
true,
tuple_,
type_coerce,
union,
union_all,
update,
)
from sqlalchemy.sql.visitors import ClauseVisitor
__tmp = locals().keys()
__all__ = sorted([i for i in __tmp if not i.startswith('__')])
| gpl-3.0 | -7,069,498,983,474,220,000 | 15.492754 | 84 | 0.596661 | false |
robynbergeron/ansible-modules-extras | network/f5/bigip_node.py | 77 | 13267 | #!/usr/bin/python
# -*- coding: utf-8 -*-
# (c) 2013, Matt Hite <[email protected]>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
DOCUMENTATION = '''
---
module: bigip_node
short_description: "Manages F5 BIG-IP LTM nodes"
description:
- "Manages F5 BIG-IP LTM nodes via iControl SOAP API"
version_added: "1.4"
author: "Matt Hite (@mhite)"
notes:
- "Requires BIG-IP software version >= 11"
- "F5 developed module 'bigsuds' required (see http://devcentral.f5.com)"
- "Best run as a local_action in your playbook"
requirements:
- bigsuds
options:
server:
description:
- BIG-IP host
required: true
default: null
choices: []
aliases: []
user:
description:
- BIG-IP username
required: true
default: null
choices: []
aliases: []
password:
description:
- BIG-IP password
required: true
default: null
choices: []
aliases: []
validate_certs:
description:
- If C(no), SSL certificates will not be validated. This should only be used
on personally controlled sites using self-signed certificates.
required: false
default: 'yes'
choices: ['yes', 'no']
version_added: 2.0
state:
description:
- Pool member state
required: true
default: present
choices: ['present', 'absent']
aliases: []
session_state:
description:
- Set new session availability status for node
version_added: "1.9"
required: false
default: null
choices: ['enabled', 'disabled']
aliases: []
monitor_state:
description:
- Set monitor availability status for node
version_added: "1.9"
required: false
default: null
choices: ['enabled', 'disabled']
aliases: []
partition:
description:
- Partition
required: false
default: 'Common'
choices: []
aliases: []
name:
description:
- "Node name"
required: false
default: null
choices: []
host:
description:
- "Node IP. Required when state=present and node does not exist. Error when state=absent."
required: true
default: null
choices: []
aliases: ['address', 'ip']
description:
description:
- "Node description."
required: false
default: null
choices: []
'''
EXAMPLES = '''
## playbook task examples:
---
# file bigip-test.yml
# ...
- hosts: bigip-test
tasks:
- name: Add node
local_action: >
bigip_node
server=lb.mydomain.com
user=admin
password=mysecret
state=present
partition=matthite
host="{{ ansible_default_ipv4["address"] }}"
name="{{ ansible_default_ipv4["address"] }}"
# Note that the BIG-IP automatically names the node using the
# IP address specified in previous play's host parameter.
# Future plays referencing this node no longer use the host
# parameter but instead use the name parameter.
# Alternatively, you could have specified a name with the
# name parameter when state=present.
- name: Modify node description
local_action: >
bigip_node
server=lb.mydomain.com
user=admin
password=mysecret
state=present
partition=matthite
name="{{ ansible_default_ipv4["address"] }}"
description="Our best server yet"
- name: Delete node
local_action: >
bigip_node
server=lb.mydomain.com
user=admin
password=mysecret
state=absent
partition=matthite
name="{{ ansible_default_ipv4["address"] }}"
# The BIG-IP GUI doesn't map directly to the API calls for "Node ->
# General Properties -> State". The following states map to API monitor
# and session states.
#
# Enabled (all traffic allowed):
# monitor_state=enabled, session_state=enabled
# Disabled (only persistent or active connections allowed):
# monitor_state=enabled, session_state=disabled
# Forced offline (only active connections allowed):
# monitor_state=disabled, session_state=disabled
#
# See https://devcentral.f5.com/questions/icontrol-equivalent-call-for-b-node-down
- name: Force node offline
local_action: >
bigip_node
server=lb.mydomain.com
user=admin
password=mysecret
state=present
session_state=disabled
monitor_state=disabled
partition=matthite
name="{{ ansible_default_ipv4["address"] }}"
'''
def node_exists(api, address):
# hack to determine if node exists
result = False
try:
api.LocalLB.NodeAddressV2.get_object_status(nodes=[address])
result = True
except bigsuds.OperationFailed, e:
if "was not found" in str(e):
result = False
else:
# genuine exception
raise
return result
def create_node_address(api, address, name):
try:
api.LocalLB.NodeAddressV2.create(nodes=[name], addresses=[address], limits=[0])
result = True
desc = ""
except bigsuds.OperationFailed, e:
if "already exists" in str(e):
result = False
desc = "referenced name or IP already in use"
else:
# genuine exception
raise
return (result, desc)
def get_node_address(api, name):
return api.LocalLB.NodeAddressV2.get_address(nodes=[name])[0]
def delete_node_address(api, address):
try:
api.LocalLB.NodeAddressV2.delete_node_address(nodes=[address])
result = True
desc = ""
except bigsuds.OperationFailed, e:
if "is referenced by a member of pool" in str(e):
result = False
desc = "node referenced by pool"
else:
# genuine exception
raise
return (result, desc)
def set_node_description(api, name, description):
api.LocalLB.NodeAddressV2.set_description(nodes=[name],
descriptions=[description])
def get_node_description(api, name):
return api.LocalLB.NodeAddressV2.get_description(nodes=[name])[0]
def set_node_session_enabled_state(api, name, session_state):
session_state = "STATE_%s" % session_state.strip().upper()
api.LocalLB.NodeAddressV2.set_session_enabled_state(nodes=[name],
states=[session_state])
def get_node_session_status(api, name):
result = api.LocalLB.NodeAddressV2.get_session_status(nodes=[name])[0]
result = result.split("SESSION_STATUS_")[-1].lower()
return result
def set_node_monitor_state(api, name, monitor_state):
monitor_state = "STATE_%s" % monitor_state.strip().upper()
api.LocalLB.NodeAddressV2.set_monitor_state(nodes=[name],
states=[monitor_state])
def get_node_monitor_status(api, name):
result = api.LocalLB.NodeAddressV2.get_monitor_status(nodes=[name])[0]
result = result.split("MONITOR_STATUS_")[-1].lower()
return result
def main():
argument_spec = f5_argument_spec()
argument_spec.update(dict(
session_state = dict(type='str', choices=['enabled', 'disabled']),
monitor_state = dict(type='str', choices=['enabled', 'disabled']),
name = dict(type='str', required=True),
host = dict(type='str', aliases=['address', 'ip']),
description = dict(type='str')
)
)
module = AnsibleModule(
argument_spec = argument_spec,
supports_check_mode=True
)
(server,user,password,state,partition,validate_certs) = f5_parse_arguments(module)
session_state = module.params['session_state']
monitor_state = module.params['monitor_state']
host = module.params['host']
name = module.params['name']
address = fq_name(partition, name)
description = module.params['description']
if state == 'absent' and host is not None:
module.fail_json(msg="host parameter invalid when state=absent")
try:
api = bigip_api(server, user, password)
result = {'changed': False} # default
if state == 'absent':
if node_exists(api, address):
if not module.check_mode:
deleted, desc = delete_node_address(api, address)
if not deleted:
module.fail_json(msg="unable to delete: %s" % desc)
else:
result = {'changed': True}
else:
# check-mode return value
result = {'changed': True}
elif state == 'present':
if not node_exists(api, address):
if host is None:
module.fail_json(msg="host parameter required when " \
"state=present and node does not exist")
if not module.check_mode:
created, desc = create_node_address(api, address=host, name=address)
if not created:
module.fail_json(msg="unable to create: %s" % desc)
else:
result = {'changed': True}
if session_state is not None:
set_node_session_enabled_state(api, address,
session_state)
result = {'changed': True}
if monitor_state is not None:
set_node_monitor_state(api, address, monitor_state)
result = {'changed': True}
if description is not None:
set_node_description(api, address, description)
result = {'changed': True}
else:
# check-mode return value
result = {'changed': True}
else:
# node exists -- potentially modify attributes
if host is not None:
if get_node_address(api, address) != host:
module.fail_json(msg="Changing the node address is " \
"not supported by the API; " \
"delete and recreate the node.")
if session_state is not None:
session_status = get_node_session_status(api, address)
if session_state == 'enabled' and \
session_status == 'forced_disabled':
if not module.check_mode:
set_node_session_enabled_state(api, address,
session_state)
result = {'changed': True}
elif session_state == 'disabled' and \
session_status != 'forced_disabled':
if not module.check_mode:
set_node_session_enabled_state(api, address,
session_state)
result = {'changed': True}
if monitor_state is not None:
monitor_status = get_node_monitor_status(api, address)
if monitor_state == 'enabled' and \
monitor_status == 'forced_down':
if not module.check_mode:
set_node_monitor_state(api, address,
monitor_state)
result = {'changed': True}
elif monitor_state == 'disabled' and \
monitor_status != 'forced_down':
if not module.check_mode:
set_node_monitor_state(api, address,
monitor_state)
result = {'changed': True}
if description is not None:
if get_node_description(api, address) != description:
if not module.check_mode:
set_node_description(api, address, description)
result = {'changed': True}
except Exception, e:
module.fail_json(msg="received exception: %s" % e)
module.exit_json(**result)
# import module snippets
from ansible.module_utils.basic import *
from ansible.module_utils.f5 import *
main()
| gpl-3.0 | -6,276,996,959,391,056,000 | 33.730366 | 102 | 0.555363 | false |
lonvia/osgende | tools/osgende-mapserv-falcon.py | 1 | 9619 | # This file is part of Osgende
# Copyright (C) 2020 Sarah Hoffmann
#
# This is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA.
"""
Falcon-based tile server for tile databases generated with osgende-mapgen.
Use with uWSGI.
"""
import datetime
import os
import sys
import threading
import hashlib
from math import pi,exp,atan
import falcon
import mapnik
RAD_TO_DEG = 180/pi
class TileProjection:
def __init__(self,levels=18):
self.Bc = []
self.Cc = []
self.zc = []
self.Ac = []
c = 256
for d in range(0,levels + 1):
e = c/2
self.Bc.append(c/360.0)
self.Cc.append(c/(2 * pi))
self.zc.append((e,e))
self.Ac.append(c)
c *= 2
def fromTileToLL(self, zoom, x, y):
e = self.zc[zoom]
f = (x*256.0 - e[0])/self.Bc[zoom]
g = (y*256.0 - e[1])/-self.Cc[zoom]
h = RAD_TO_DEG * ( 2 * atan(exp(g)) - 0.5 * pi)
return (f,h)
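# Example (editor's sketch): at zoom 0 the single tile's upper-left corner
# maps back to roughly the Web Mercator limits in lon/lat:
#   TileProjection().fromTileToLL(0, 0, 0) ~= (-180.0, 85.0511)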
def mk_tileid(zoom, x, y):
"""Create a unique 64 bit tile ID.
Works up to zoom level 24."
"""
return zoom + (x << 5) + (y << (5 + zoom))
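# Bit-layout illustration (editor's sketch of the packing coded above):
#   mk_tileid(2, 3, 1) == 2 + (3 << 5) + (1 << 7) == 226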
class DummyCache(object):
""" A tile cache that does not remember any tiles.
Useful when testing out a new style.
"""
def __init__(self, config):
pass
def get(self, zoom, x, y, fmt):
return None
def set(self, zoom, x, y, fmt, image=None):
pass
class PostgresCache(object):
""" A cache that saves tiles in postgres.
"""
def __init__(self, config):
self.empty = dict()
for fmt, fname in config['empty_tile'].items():
with open(fname, 'rb') as myfile:
self.empty[fmt] = myfile.read()
self.max_zoom = config.get('max_zoom', 100)
self.pg = __import__('psycopg2')
self.dba = config['dba']
self.cmd_get = "SELECT pixbuf FROM %s WHERE id=%%s" % config['table']
self.cmd_check = "SELECT count(*) FROM %s WHERE id=%%s" % config['table']
self.cmd_set = "UPDATE %s SET pixbuf=%%s WHERE id=%%s AND pixbuf is Null" % config['table']
self.thread_data = threading.local()
def get_db(self):
if not hasattr(self.thread_data, 'cache_db'):
self.thread_data.cache_db = self.pg.connect(self.dba)
# enable autocommit so that tiles can still be read while the db is updated
self.thread_data.cache_db.autocommit = True
self.thread_data.cache_db.cursor().execute("SET synchronous_commit TO OFF")
return self.thread_data.cache_db
def get(self, zoom, x, y, fmt):
c = self.get_db().cursor()
if zoom > self.max_zoom:
shift = zoom - self.max_zoom
c.execute(self.cmd_check,
(mk_tileid(self.max_zoom, x >> shift, y >> shift), ))
if c.fetchone()[0]:
return None
else:
c.execute(self.cmd_get, (mk_tileid(zoom, x, y), ))
if c.rowcount > 0:
tile = c.fetchone()[0]
return bytes(tile) if tile is not None else None
return self.empty[fmt]
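# Overzoom sketch of the logic above (max_zoom=16 is an assumed config
# value): a request at zoom 18 checks the ancestor tile id at zoom 16
# (x >> 2, y >> 2); if that row exists, None is returned so the caller
# renders live, otherwise the static empty tile is served.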
def set(self, zoom, x, y, fmt, image=None):
if zoom <= self.max_zoom:
c = self.get_db().cursor()
c.execute(self.cmd_set, (image, mk_tileid(zoom, x, y)))
class MapnikRenderer(object):
def __init__(self, name, config, styleconfig):
self.name = name
# defaults
self.config = dict({ 'formats' : [ 'png' ],
'tile_size' : (256, 256),
'max_zoom' : 18
})
self.stylecfg = dict()
# local configuration
if config is not None:
self.config.update(config)
if styleconfig is not None:
self.stylecfg.update(styleconfig)
if self.config['source_type'] == 'xml':
self.create_map = self._create_map_xml
if self.config['source_type'] == 'python':
self.python_map =__import__(self.config['source'])
self.create_map = self._create_map_python
m = mapnik.Map(*self.config['tile_size'])
self.create_map(m)
self.mproj = mapnik.Projection(m.srs)
self.gproj = TileProjection(self.config['max_zoom'])
self.thread_data = threading.local()
def get_map(self):
self.thread_map()
return self.thread_data.map
def thread_map(self):
if not hasattr(self.thread_data, 'map'):
m = mapnik.Map(*self.config['tile_size'])
self.create_map(m)
self.thread_data.map = m
def _create_map_xml(self, mapnik_map):
src = os.path.join(self.config['source'])
mapnik.load_map(mapnik_map, src)
def _create_map_python(self, mapnik_map):
self.python_map.construct_map(mapnik_map, self.stylecfg)
def split_url(self, zoom, x, y):
ypt = y.find('.')
if ypt < 0:
return None
tiletype = y[ypt+1:]
if tiletype not in self.config['formats']:
return None
try:
zoom = int(zoom)
x = int(x)
y = int(y[:ypt])
except ValueError:
return None
if zoom > self.config['max_zoom']:
return None
return (zoom, x, y, tiletype)
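# Example with the default config (formats=['png'], max_zoom=18):
#   split_url('12', '2138', '1434.png') -> (12, 2138, 1434, 'png')
# An unknown extension or zoom above max_zoom yields None.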
def render(self, zoom, x, y, fmt):
p0 = self.gproj.fromTileToLL(zoom, x, y+1)
p1 = self.gproj.fromTileToLL(zoom, x+1, y)
c0 = self.mproj.forward(mapnik.Coord(p0[0],p0[1]))
c1 = self.mproj.forward(mapnik.Coord(p1[0],p1[1]))
bbox = mapnik.Box2d(c0.x, c0.y, c1.x, c1.y)
im = mapnik.Image(256, 256)
m = self.get_map()
m.zoom_to_box(bbox)
mapnik.render(m, im)
return im.tostring('png256')
class TestMap(object):
DEFAULT_TESTMAP="""\
<!DOCTYPE html>
<html>
<head>
<title>Testmap - %(style)s</title>
<link rel="stylesheet" href="%(leaflet_path)s/leaflet.css" />
</head>
<body >
<div id="map" style="position: absolute; width: 99%%; height: 97%%"></div>
<script src="%(leaflet_path)s/leaflet.js"></script>
<script src="%(leaflet_path)s/leaflet-hash.js"></script>
<script>
var map = L.map('map').setView([47.3317, 8.5017], 13);
var hash = new L.Hash(map);
L.tileLayer('http://a.tile.openstreetmap.org/{z}/{x}/{y}.png', {
maxZoom: 18,
}).addTo(map);
L.tileLayer('%(script_name)s/%(style)s/{z}/{x}/{y}.png', {
maxZoom: 18,
}).addTo(map);
</script>
</body>
</html>
"""
def __init__(self, style, script):
self.map_config = {
'style' : style,
'script_name' : script,
'leaflet_path' : os.environ.get('LEAFLET_PATH',
'http://cdn.leafletjs.com/leaflet-0.7.5')
}
def on_get(self, req, resp):
resp.content_type = falcon.MEDIA_HTML
resp.body = self.DEFAULT_TESTMAP % self.map_config
class TileServer(object):
def __init__(self, style, config):
self.cachecfg = dict({ 'type' : 'DummyCache'})
if 'TILE_CACHE' in config:
self.cachecfg.update(config['TILE_CACHE'])
cacheclass = globals()[self.cachecfg['type']]
self.cache = cacheclass(self.cachecfg)
self.renderer = MapnikRenderer(style,
config.get('RENDERER'),
config.get('TILE_STYLE'))
def on_get(self, req, resp, zoom, x, y):
tile_desc = self.renderer.split_url(zoom, x, y)
if tile_desc is None:
raise falcon.HTTPNotFound()
tile = self.cache.get(*tile_desc)
if tile is None:
tile = self.renderer.render(*tile_desc)
self.cache.set(*tile_desc, image=tile)
# compute etag
m = hashlib.md5()
m.update(tile)
content_etag = m.hexdigest()
for etag in (req.if_none_match or []):
if etag == '*' or etag == content_etag:
resp.status = falcon.HTTP_304
return
resp.content_type = falcon.MEDIA_PNG
resp.expires = datetime.datetime.utcnow() + datetime.timedelta(hours=3)
resp.body = tile
resp.etag = content_etag
def setup_site(app, site, script_name=''):
try:
__import__(site)
except ImportError:
print("Missing config for site '%s'. Skipping." % site)
return
site_cfg = dict()
for var in dir(sys.modules[site]):
site_cfg[var] = getattr(sys.modules[site], var)
basename = site.split('.')[-1]
print("Setting up site", basename)
app.add_route('/' + basename + '/test-map', TestMap(basename, script_name))
app.add_route('/' + basename + '/{zoom}/{x}/{y}', TileServer(basename, site_cfg))
application = falcon.API()
for site in os.environ['TILE_SITES'].split(','):
setup_site(application, site)
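# Hedged usage sketch: the WSGI entry point above is driven entirely by the
# TILE_SITES environment variable, e.g. (module and server names are
# illustrative):
#
#   TILE_SITES=sites.osm,sites.hiking LEAFLET_PATH=/static/leaflet \
#       gunicorn wsgi_module:application
#
# Each listed module must be importable and supplies the per-site settings
# (TILE_CACHE, RENDERER, TILE_STYLE) that setup_site() reads.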
| gpl-3.0 | -4,295,489,087,702,811,000 | 29.731629 | 99 | 0.556087 | false |
Softmotions/edx-platform | common/test/acceptance/tests/studio/test_studio_settings_details.py | 19 | 7658 | """
Acceptance tests for Studio's Settings Details pages
"""
from unittest import skip
from ...fixtures.course import CourseFixture
from ...pages.studio.settings import SettingsPage
from ...pages.studio.overview import CourseOutlinePage
from ...tests.studio.base_studio_test import StudioCourseTest
from ..helpers import (
generate_course_key,
select_option_by_value,
is_option_value_selected,
element_has_text,
)
class SettingsMilestonesTest(StudioCourseTest):
"""
Tests for milestones feature in Studio's settings tab
"""
def setUp(self, is_staff=True):
super(SettingsMilestonesTest, self).setUp(is_staff=is_staff)
self.settings_detail = SettingsPage(
self.browser,
self.course_info['org'],
self.course_info['number'],
self.course_info['run']
)
# Before every test, make sure to visit the page first
self.settings_detail.visit()
self.assertTrue(self.settings_detail.is_browser_on_page())
def test_page_has_prerequisite_field(self):
"""
Test to make sure page has pre-requisite course field if milestones app is enabled.
"""
self.assertTrue(self.settings_detail.pre_requisite_course_options)
def test_prerequisite_course_save_successfully(self):
"""
        Scenario: Selecting a course from the Pre-Requisite course drop down saves the selected
        course as the pre-requisite course.
Given that I am on the Schedule & Details page on studio
When I select an item in pre-requisite course drop down and click Save Changes button
Then My selected item should be saved as pre-requisite course
        And My selected item should be selected after refreshing the page.
"""
course_number = self.unique_id
CourseFixture(
org='test_org',
number=course_number,
run='test_run',
display_name='Test Course' + course_number
).install()
pre_requisite_course_key = generate_course_key(
org='test_org',
number=course_number,
run='test_run'
)
pre_requisite_course_id = unicode(pre_requisite_course_key)
        # Refresh the page to load the new course fixture and populate the prerequisite course dropdown
# Then select the prerequisite course and save the changes
self.settings_detail.refresh_page()
self.settings_detail.wait_for_prerequisite_course_options()
select_option_by_value(
browser_query=self.settings_detail.pre_requisite_course_options,
value=pre_requisite_course_id
)
self.settings_detail.save_changes()
self.assertEqual(
'Your changes have been saved.',
self.settings_detail.alert_confirmation_title.text
)
# Refresh the page again and confirm the prerequisite course selection is properly reflected
self.settings_detail.refresh_page()
self.settings_detail.wait_for_prerequisite_course_options()
self.assertTrue(is_option_value_selected(
browser_query=self.settings_detail.pre_requisite_course_options,
value=pre_requisite_course_id
))
# Set the prerequisite course back to None and save the changes
select_option_by_value(
browser_query=self.settings_detail.pre_requisite_course_options,
value=''
)
self.settings_detail.save_changes()
self.assertEqual(
'Your changes have been saved.',
self.settings_detail.alert_confirmation_title.text
)
# Refresh the page again to confirm the None selection is properly reflected
self.settings_detail.refresh_page()
self.settings_detail.wait_for_prerequisite_course_options()
self.assertTrue(is_option_value_selected(
browser_query=self.settings_detail.pre_requisite_course_options,
value=''
))
# Re-pick the prerequisite course and confirm no errors are thrown (covers a discovered bug)
select_option_by_value(
browser_query=self.settings_detail.pre_requisite_course_options,
value=pre_requisite_course_id
)
self.settings_detail.save_changes()
self.assertEqual(
'Your changes have been saved.',
self.settings_detail.alert_confirmation_title.text
)
# Refresh the page again to confirm the prerequisite course selection is properly reflected
self.settings_detail.refresh_page()
self.settings_detail.wait_for_prerequisite_course_options()
dropdown_status = is_option_value_selected(
browser_query=self.settings_detail.pre_requisite_course_options,
value=pre_requisite_course_id
)
self.assertTrue(dropdown_status)
def test_page_has_enable_entrance_exam_field(self):
"""
Test to make sure page has 'enable entrance exam' field.
"""
self.assertTrue(self.settings_detail.entrance_exam_field)
@skip('Passes in devstack, passes individually in Jenkins, fails in suite in Jenkins.')
def test_enable_entrance_exam_for_course(self):
"""
        Test that the entrance exam is created after checking the 'enable entrance exam' checkbox,
        and that it is destroyed after deselecting the checkbox.
"""
self.settings_detail.require_entrance_exam(required=True)
self.settings_detail.save_changes()
# getting the course outline page.
course_outline_page = CourseOutlinePage(
self.browser, self.course_info['org'], self.course_info['number'], self.course_info['run']
)
course_outline_page.visit()
# title with text 'Entrance Exam' should be present on page.
self.assertTrue(element_has_text(
page=course_outline_page,
css_selector='span.section-title',
text='Entrance Exam'
))
# Delete the currently created entrance exam.
self.settings_detail.visit()
self.settings_detail.require_entrance_exam(required=False)
self.settings_detail.save_changes()
course_outline_page.visit()
self.assertFalse(element_has_text(
page=course_outline_page,
css_selector='span.section-title',
text='Entrance Exam'
))
def test_entrance_exam_has_unit_button(self):
"""
        Test that the entrance exam is created after checking the 'enable entrance exam' checkbox,
        and that the user then has the option to add only units instead of subsections.
"""
self.settings_detail.require_entrance_exam(required=True)
self.settings_detail.save_changes()
# getting the course outline page.
course_outline_page = CourseOutlinePage(
self.browser, self.course_info['org'], self.course_info['number'], self.course_info['run']
)
course_outline_page.visit()
course_outline_page.wait_for_ajax()
# button with text 'New Unit' should be present.
self.assertTrue(element_has_text(
page=course_outline_page,
css_selector='.add-item a.button-new',
text='New Unit'
))
# button with text 'New Subsection' should not be present.
self.assertFalse(element_has_text(
page=course_outline_page,
css_selector='.add-item a.button-new',
text='New Subsection'
))
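# Hedged sketch of the helper contract the assertions above rely on. The
# signature matches the element_has_text import; the body is an illustrative
# stand-in built on the bok-choy query API, not the real implementation.
def _element_has_text_sketch(page, css_selector, text):
    return any(text == found for found in page.q(css=css_selector).text)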
| agpl-3.0 | -8,328,423,218,592,449,000 | 38.271795 | 113 | 0.643641 | false |
dhuang/incubator-airflow | airflow/www/api/experimental/endpoints.py | 2 | 8145 | # -*- coding: utf-8 -*-
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import airflow.api
from airflow.api.common.experimental import pool as pool_api
from airflow.api.common.experimental import trigger_dag as trigger
from airflow.api.common.experimental import delete_dag as delete
from airflow.api.common.experimental.get_task import get_task
from airflow.api.common.experimental.get_task_instance import get_task_instance
from airflow.exceptions import AirflowException
from airflow.utils.log.logging_mixin import LoggingMixin
from airflow.utils import timezone
from airflow.www.app import csrf
from flask import (
g, Markup, Blueprint, redirect, jsonify, abort,
request, current_app, send_file, url_for
)
_log = LoggingMixin().log
requires_authentication = airflow.api.api_auth.requires_authentication
api_experimental = Blueprint('api_experimental', __name__)
@csrf.exempt
@api_experimental.route('/dags/<string:dag_id>/dag_runs', methods=['POST'])
@requires_authentication
def trigger_dag(dag_id):
"""
Trigger a new dag run for a Dag with an execution date of now unless
specified in the data.
"""
data = request.get_json(force=True)
run_id = None
if 'run_id' in data:
run_id = data['run_id']
conf = None
if 'conf' in data:
conf = data['conf']
execution_date = None
if 'execution_date' in data and data['execution_date'] is not None:
execution_date = data['execution_date']
# Convert string datetime into actual datetime
try:
execution_date = timezone.parse(execution_date)
except ValueError:
error_message = (
'Given execution date, {}, could not be identified '
'as a date. Example date format: 2015-11-16T14:34:15+00:00'
.format(execution_date))
_log.info(error_message)
response = jsonify({'error': error_message})
response.status_code = 400
return response
try:
dr = trigger.trigger_dag(dag_id, run_id, conf, execution_date)
except AirflowException as err:
_log.error(err)
response = jsonify(error="{}".format(err))
response.status_code = 404
return response
if getattr(g, 'user', None):
_log.info("User {} created {}".format(g.user, dr))
response = jsonify(message="Created {}".format(dr))
return response
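# Hedged client-side sketch of exercising this endpoint (host, credentials and
# dag_id are placeholders; the payload keys mirror what the handler reads):
#
#   import json, requests
#   requests.post(
#       'http://localhost:8080/api/experimental/dags/example_dag/dag_runs',
#       data=json.dumps({'run_id': 'manual_1',
#                        'conf': {'key': 'value'},
#                        'execution_date': '2018-01-01T00:00:00+00:00'}))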
@csrf.exempt
@api_experimental.route('/dags/<string:dag_id>', methods=['DELETE'])
@requires_authentication
def delete_dag(dag_id):
"""
Delete all DB records related to the specified Dag.
"""
try:
count = delete.delete_dag(dag_id)
except AirflowException as e:
_log.error(e)
response = jsonify(error="{}".format(e))
response.status_code = getattr(e, 'status', 500)
return response
return jsonify(message="Removed {} record(s)".format(count), count=count)
@api_experimental.route('/test', methods=['GET'])
@requires_authentication
def test():
return jsonify(status='OK')
@api_experimental.route('/dags/<string:dag_id>/tasks/<string:task_id>', methods=['GET'])
@requires_authentication
def task_info(dag_id, task_id):
"""Returns a JSON with a task's public instance variables. """
try:
info = get_task(dag_id, task_id)
except AirflowException as err:
_log.info(err)
response = jsonify(error="{}".format(err))
response.status_code = 404
return response
# JSONify and return.
fields = {k: str(v)
for k, v in vars(info).items()
if not k.startswith('_')}
return jsonify(fields)
@api_experimental.route('/dags/<string:dag_id>/dag_runs/<string:execution_date>/tasks/<string:task_id>', methods=['GET'])
@requires_authentication
def task_instance_info(dag_id, execution_date, task_id):
"""
Returns a JSON with a task instance's public instance variables.
The format for the exec_date is expected to be
"YYYY-mm-DDTHH:MM:SS", for example: "2016-11-16T11:34:15". This will
of course need to have been encoded for URL in the request.
"""
# Convert string datetime into actual datetime
try:
execution_date = timezone.parse(execution_date)
except ValueError:
error_message = (
'Given execution date, {}, could not be identified '
'as a date. Example date format: 2015-11-16T14:34:15+00:00'
.format(execution_date))
_log.info(error_message)
response = jsonify({'error': error_message})
response.status_code = 400
return response
try:
info = get_task_instance(dag_id, task_id, execution_date)
except AirflowException as err:
_log.info(err)
response = jsonify(error="{}".format(err))
response.status_code = 404
return response
# JSONify and return.
fields = {k: str(v)
for k, v in vars(info).items()
if not k.startswith('_')}
return jsonify(fields)
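# Minimal runnable illustration of the public-attribute filtering idiom used
# by the two task endpoints above (_DemoTask is a stand-in object):
class _DemoTask(object):
    def __init__(self):
        self.state = 'success'
        self._secret = 'hidden'
assert ({k: str(v) for k, v in vars(_DemoTask()).items()
         if not k.startswith('_')} == {'state': 'success'})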
@api_experimental.route('/latest_runs', methods=['GET'])
@requires_authentication
def latest_dag_runs():
"""Returns the latest DagRun for each DAG formatted for the UI. """
from airflow.models import DagRun
dagruns = DagRun.get_latest_runs()
payload = []
for dagrun in dagruns:
if dagrun.execution_date:
payload.append({
'dag_id': dagrun.dag_id,
'execution_date': dagrun.execution_date.isoformat(),
'start_date': ((dagrun.start_date or '') and
dagrun.start_date.isoformat()),
'dag_run_url': url_for('airflow.graph', dag_id=dagrun.dag_id,
execution_date=dagrun.execution_date)
})
    return jsonify(items=payload)  # old flask versions don't support jsonifying arrays
@api_experimental.route('/pools/<string:name>', methods=['GET'])
@requires_authentication
def get_pool(name):
"""Get pool by a given name."""
try:
pool = pool_api.get_pool(name=name)
except AirflowException as e:
_log.error(e)
response = jsonify(error="{}".format(e))
response.status_code = getattr(e, 'status', 500)
return response
else:
return jsonify(pool.to_json())
@api_experimental.route('/pools', methods=['GET'])
@requires_authentication
def get_pools():
"""Get all pools."""
try:
pools = pool_api.get_pools()
except AirflowException as e:
_log.error(e)
response = jsonify(error="{}".format(e))
response.status_code = getattr(e, 'status', 500)
return response
else:
return jsonify([p.to_json() for p in pools])
@csrf.exempt
@api_experimental.route('/pools', methods=['POST'])
@requires_authentication
def create_pool():
"""Create a pool."""
params = request.get_json(force=True)
try:
pool = pool_api.create_pool(**params)
except AirflowException as e:
_log.error(e)
response = jsonify(error="{}".format(e))
response.status_code = getattr(e, 'status', 500)
return response
else:
return jsonify(pool.to_json())
@csrf.exempt
@api_experimental.route('/pools/<string:name>', methods=['DELETE'])
@requires_authentication
def delete_pool(name):
"""Delete pool."""
try:
pool = pool_api.delete_pool(name=name)
except AirflowException as e:
_log.error(e)
response = jsonify(error="{}".format(e))
response.status_code = getattr(e, 'status', 500)
return response
else:
return jsonify(pool.to_json())
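# Hedged summary of the pool surface defined above (paths are relative to the
# experimental API root; POST payload keys follow pool_api.create_pool):
#
#   GET    /pools          -> list all pools
#   GET    /pools/<name>   -> fetch a single pool
#   POST   /pools          with {"name": ..., "slots": ..., "description": ...}
#   DELETE /pools/<name>   -> remove the pool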
| apache-2.0 | 3,183,907,584,005,694,000 | 31.58 | 121 | 0.638306 | false |
jinzo/django-dbpool-backend | django_dbpool_backends/mysql/creation.py | 311 | 3019 | from django.db.backends.creation import BaseDatabaseCreation
class DatabaseCreation(BaseDatabaseCreation):
# This dictionary maps Field objects to their associated MySQL column
# types, as strings. Column-type strings can contain format strings; they'll
# be interpolated against the values of Field.__dict__ before being output.
# If a column type is set to None, it won't be included in the output.
data_types = {
'AutoField': 'integer AUTO_INCREMENT',
'BooleanField': 'bool',
'CharField': 'varchar(%(max_length)s)',
'CommaSeparatedIntegerField': 'varchar(%(max_length)s)',
'DateField': 'date',
'DateTimeField': 'datetime',
'DecimalField': 'numeric(%(max_digits)s, %(decimal_places)s)',
'FileField': 'varchar(%(max_length)s)',
'FilePathField': 'varchar(%(max_length)s)',
'FloatField': 'double precision',
'IntegerField': 'integer',
'BigIntegerField': 'bigint',
'IPAddressField': 'char(15)',
'NullBooleanField': 'bool',
'OneToOneField': 'integer',
'PositiveIntegerField': 'integer UNSIGNED',
'PositiveSmallIntegerField': 'smallint UNSIGNED',
'SlugField': 'varchar(%(max_length)s)',
'SmallIntegerField': 'smallint',
'TextField': 'longtext',
'TimeField': 'time',
}
def sql_table_creation_suffix(self):
suffix = []
if self.connection.settings_dict['TEST_CHARSET']:
suffix.append('CHARACTER SET %s' % self.connection.settings_dict['TEST_CHARSET'])
if self.connection.settings_dict['TEST_COLLATION']:
suffix.append('COLLATE %s' % self.connection.settings_dict['TEST_COLLATION'])
return ' '.join(suffix)
def sql_for_inline_foreign_key_references(self, field, known_models, style):
"All inline references are pending under MySQL"
return [], True
def sql_for_inline_many_to_many_references(self, model, field, style):
from django.db import models
opts = model._meta
qn = self.connection.ops.quote_name
table_output = [
' %s %s %s,' %
(style.SQL_FIELD(qn(field.m2m_column_name())),
style.SQL_COLTYPE(models.ForeignKey(model).db_type(connection=self.connection)),
style.SQL_KEYWORD('NOT NULL')),
' %s %s %s,' %
(style.SQL_FIELD(qn(field.m2m_reverse_name())),
style.SQL_COLTYPE(models.ForeignKey(field.rel.to).db_type(connection=self.connection)),
style.SQL_KEYWORD('NOT NULL'))
]
deferred = [
(field.m2m_db_table(), field.m2m_column_name(), opts.db_table,
opts.pk.column),
(field.m2m_db_table(), field.m2m_reverse_name(),
field.rel.to._meta.db_table, field.rel.to._meta.pk.column)
]
return table_output, deferred
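# Minimal illustration of how the data_types templates above are consumed: the
# column-type string is interpolated against the field's attributes (the
# max_length value here is a hypothetical example):
assert ('varchar(%(max_length)s)' % {'max_length': 100}) == 'varchar(100)'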
| bsd-3-clause | -6,064,831,720,178,194,000 | 45.446154 | 99 | 0.586618 | false |
tumbl3w33d/ansible | lib/ansible/modules/cloud/vmware/vmware_guest_disk.py | 9 | 34453 | #!/usr/bin/python
# -*- coding: utf-8 -*-
# Copyright: (c) 2018, Ansible Project
# Copyright: (c) 2018, Abhijeet Kasurde <[email protected]>
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
ANSIBLE_METADATA = {
'metadata_version': '1.1',
'status': ['preview'],
'supported_by': 'community'
}
DOCUMENTATION = '''
---
module: vmware_guest_disk
short_description: Manage disks related to virtual machine in given vCenter infrastructure
description:
- This module can be used to add, remove and update disks belonging to given virtual machine.
- All parameters and VMware object names are case sensitive.
    - This module is destructive in nature; please read the documentation carefully before proceeding.
- Be careful while removing disk specified as this may lead to data loss.
version_added: 2.8
author:
- Abhijeet Kasurde (@Akasurde) <[email protected]>
notes:
- Tested on vSphere 6.0 and 6.5
requirements:
- "python >= 2.6"
- PyVmomi
options:
name:
description:
- Name of the virtual machine.
- This is a required parameter, if parameter C(uuid) or C(moid) is not supplied.
type: str
uuid:
description:
- UUID of the instance to gather facts if known, this is VMware's unique identifier.
- This is a required parameter, if parameter C(name) or C(moid) is not supplied.
type: str
moid:
description:
- Managed Object ID of the instance to manage if known, this is a unique identifier only within a single vCenter instance.
- This is required if C(name) or C(uuid) is not supplied.
version_added: '2.9'
type: str
folder:
description:
- Destination folder, absolute or relative path to find an existing guest.
    - This is required only if multiple virtual machines are found with the same name.
    - The folder should include the datacenter. ESX's datacenter is ha-datacenter.
- 'Examples:'
- ' folder: /ha-datacenter/vm'
- ' folder: ha-datacenter/vm'
- ' folder: /datacenter1/vm'
- ' folder: datacenter1/vm'
- ' folder: /datacenter1/vm/folder1'
- ' folder: datacenter1/vm/folder1'
- ' folder: /folder1/datacenter1/vm'
- ' folder: folder1/datacenter1/vm'
- ' folder: /folder1/datacenter1/vm/folder2'
type: str
datacenter:
description:
- The datacenter name to which virtual machine belongs to.
required: True
type: str
use_instance_uuid:
description:
- Whether to use the VMware instance UUID rather than the BIOS UUID.
default: no
type: bool
version_added: '2.8'
disk:
description:
- A list of disks to add.
- The virtual disk related information is provided using this list.
- All values and parameters are case sensitive.
- 'Valid attributes are:'
- ' - C(size[_tb,_gb,_mb,_kb]) (integer): Disk storage size in specified unit.'
    - ' If C(size) is specified then the unit must be specified; no space is allowed between the number and the unit.'
    - ' Only the first occurrence in the disk element is considered, even if multiple size* parameters are available.'
- ' - C(type) (string): Valid values are:'
- ' - C(thin) thin disk'
- ' - C(eagerzeroedthick) eagerzeroedthick disk'
- ' - C(thick) thick disk'
- ' Default: C(thick) thick disk, no eagerzero.'
- ' - C(disk_mode) (string): Type of disk mode. Valid values are:'
- ' - C(persistent) Changes are immediately and permanently written to the virtual disk. This is default.'
- ' - C(independent_persistent) Same as persistent, but not affected by snapshots.'
- ' - C(independent_nonpersistent) Changes to virtual disk are made to a redo log and discarded at power off, but not affected by snapshots.'
- ' - C(datastore) (string): Name of datastore or datastore cluster to be used for the disk.'
- ' - C(autoselect_datastore) (bool): Select the less used datastore. Specify only if C(datastore) is not specified.'
- ' - C(scsi_controller) (integer): SCSI controller number. Valid value range from 0 to 3.'
- ' Only 4 SCSI controllers are allowed per VM.'
- ' Care should be taken while specifying C(scsi_controller) is 0 and C(unit_number) as 0 as this disk may contain OS.'
- ' - C(unit_number) (integer): Disk Unit Number. Valid value range from 0 to 15. Only 15 disks are allowed per SCSI Controller.'
- ' - C(scsi_type) (string): Type of SCSI controller. This value is required only for the first occurrence of SCSI Controller.'
- ' This value is ignored, if SCSI Controller is already present or C(state) is C(absent).'
- ' Valid values are C(buslogic), C(lsilogic), C(lsilogicsas) and C(paravirtual).'
- ' C(paravirtual) is default value for this parameter.'
- ' - C(state) (string): State of disk. This is either "absent" or "present".'
- ' If C(state) is set to C(absent), disk will be removed permanently from virtual machine configuration and from VMware storage.'
- ' If C(state) is set to C(present), disk will be added if not present at given SCSI Controller and Unit Number.'
- ' If C(state) is set to C(present) and disk exists with different size, disk size is increased.'
- ' Reducing disk size is not allowed.'
default: []
type: list
extends_documentation_fragment: vmware.documentation
'''
EXAMPLES = '''
- name: Add disks to virtual machine using UUID
vmware_guest_disk:
hostname: "{{ vcenter_hostname }}"
username: "{{ vcenter_username }}"
password: "{{ vcenter_password }}"
datacenter: "{{ datacenter_name }}"
validate_certs: no
uuid: 421e4592-c069-924d-ce20-7e7533fab926
disk:
- size_mb: 10
type: thin
datastore: datacluster0
state: present
scsi_controller: 1
unit_number: 1
scsi_type: 'paravirtual'
disk_mode: 'persistent'
- size_gb: 10
type: eagerzeroedthick
state: present
autoselect_datastore: True
scsi_controller: 2
scsi_type: 'buslogic'
unit_number: 12
disk_mode: 'independent_persistent'
- size: 10Gb
type: eagerzeroedthick
state: present
autoselect_datastore: True
scsi_controller: 2
scsi_type: 'buslogic'
unit_number: 1
disk_mode: 'independent_nonpersistent'
delegate_to: localhost
register: disk_facts
- name: Remove disks from virtual machine using name
vmware_guest_disk:
hostname: "{{ vcenter_hostname }}"
username: "{{ vcenter_username }}"
password: "{{ vcenter_password }}"
datacenter: "{{ datacenter_name }}"
validate_certs: no
name: VM_225
disk:
- state: absent
scsi_controller: 1
unit_number: 1
delegate_to: localhost
register: disk_facts
- name: Remove disks from virtual machine using moid
vmware_guest_disk:
hostname: "{{ vcenter_hostname }}"
username: "{{ vcenter_username }}"
password: "{{ vcenter_password }}"
datacenter: "{{ datacenter_name }}"
validate_certs: no
moid: vm-42
disk:
- state: absent
scsi_controller: 1
unit_number: 1
delegate_to: localhost
register: disk_facts
'''
RETURN = """
disk_status:
description: metadata about the virtual machine's disks after managing them
returned: always
type: dict
sample: {
"0": {
"backing_datastore": "datastore2",
"backing_disk_mode": "persistent",
"backing_eagerlyscrub": false,
"backing_filename": "[datastore2] VM_225/VM_225.vmdk",
"backing_thinprovisioned": false,
"backing_writethrough": false,
"capacity_in_bytes": 10485760,
"capacity_in_kb": 10240,
"controller_key": 1000,
"key": 2000,
"label": "Hard disk 1",
"summary": "10,240 KB",
"unit_number": 0
},
}
"""
import re
try:
from pyVmomi import vim
except ImportError:
pass
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils._text import to_native
from ansible.module_utils.vmware import PyVmomi, vmware_argument_spec, wait_for_task, find_obj, get_all_objs
class PyVmomiHelper(PyVmomi):
def __init__(self, module):
super(PyVmomiHelper, self).__init__(module)
self.desired_disks = self.params['disk'] # Match with vmware_guest parameter
self.vm = None
self.scsi_device_type = dict(lsilogic=vim.vm.device.VirtualLsiLogicController,
paravirtual=vim.vm.device.ParaVirtualSCSIController,
buslogic=vim.vm.device.VirtualBusLogicController,
lsilogicsas=vim.vm.device.VirtualLsiLogicSASController)
self.config_spec = vim.vm.ConfigSpec()
self.config_spec.deviceChange = []
def create_scsi_controller(self, scsi_type, scsi_bus_number):
"""
Create SCSI Controller with given SCSI Type and SCSI Bus Number
Args:
scsi_type: Type of SCSI
scsi_bus_number: SCSI Bus number to be assigned
Returns: Virtual device spec for SCSI Controller
"""
scsi_ctl = vim.vm.device.VirtualDeviceSpec()
scsi_ctl.operation = vim.vm.device.VirtualDeviceSpec.Operation.add
scsi_ctl.device = self.scsi_device_type[scsi_type]()
scsi_ctl.device.unitNumber = 3
scsi_ctl.device.busNumber = scsi_bus_number
scsi_ctl.device.hotAddRemove = True
scsi_ctl.device.sharedBus = 'noSharing'
scsi_ctl.device.scsiCtlrUnitNumber = 7
return scsi_ctl
@staticmethod
def create_scsi_disk(scsi_ctl_key, disk_index, disk_mode):
"""
Create Virtual Device Spec for virtual disk
Args:
scsi_ctl_key: Unique SCSI Controller Key
            disk_index: Disk unit number at which disk needs to be attached
            disk_mode: Disk persistence mode, e.g. 'persistent' or one of the independent modes
Returns: Virtual Device Spec for virtual disk
"""
disk_spec = vim.vm.device.VirtualDeviceSpec()
disk_spec.operation = vim.vm.device.VirtualDeviceSpec.Operation.add
disk_spec.fileOperation = vim.vm.device.VirtualDeviceSpec.FileOperation.create
disk_spec.device = vim.vm.device.VirtualDisk()
disk_spec.device.backing = vim.vm.device.VirtualDisk.FlatVer2BackingInfo()
disk_spec.device.backing.diskMode = disk_mode
disk_spec.device.controllerKey = scsi_ctl_key
disk_spec.device.unitNumber = disk_index
return disk_spec
def reconfigure_vm(self, config_spec, device_type):
"""
Reconfigure virtual machine after modifying device spec
Args:
config_spec: Config Spec
device_type: Type of device being modified
Returns: Boolean status 'changed' and actual task result
"""
changed, results = (False, '')
try:
# Perform actual VM reconfiguration
task = self.vm.ReconfigVM_Task(spec=config_spec)
changed, results = wait_for_task(task)
except vim.fault.InvalidDeviceSpec as invalid_device_spec:
self.module.fail_json(msg="Failed to manage %s on given virtual machine due to invalid"
" device spec : %s" % (device_type, to_native(invalid_device_spec.msg)),
details="Please check ESXi server logs for more details.")
except vim.fault.RestrictedVersion as e:
self.module.fail_json(msg="Failed to reconfigure virtual machine due to"
" product versioning restrictions: %s" % to_native(e.msg))
return changed, results
def ensure_disks(self, vm_obj=None):
"""
Manage internal state of virtual machine disks
Args:
vm_obj: Managed object of virtual machine
"""
# Set vm object
self.vm = vm_obj
# Sanitize user input
disk_data = self.sanitize_disk_inputs()
# Create stateful information about SCSI devices
current_scsi_info = dict()
results = dict(changed=False, disk_data=None, disk_changes=dict())
# Deal with SCSI Controller
for device in vm_obj.config.hardware.device:
if isinstance(device, tuple(self.scsi_device_type.values())):
# Found SCSI device
                device_bus_number = 1000 + device.busNumber
                if device_bus_number not in current_scsi_info:
                    current_scsi_info[device_bus_number] = dict(disks=dict())
scsi_changed = False
for disk in disk_data:
scsi_controller = disk['scsi_controller'] + 1000
if scsi_controller not in current_scsi_info and disk['state'] == 'present':
scsi_ctl = self.create_scsi_controller(disk['scsi_type'], disk['scsi_controller'])
current_scsi_info[scsi_controller] = dict(disks=dict())
self.config_spec.deviceChange.append(scsi_ctl)
scsi_changed = True
if scsi_changed:
self.reconfigure_vm(self.config_spec, 'SCSI Controller')
self.config_spec = vim.vm.ConfigSpec()
self.config_spec.deviceChange = []
# Deal with Disks
for device in vm_obj.config.hardware.device:
if isinstance(device, vim.vm.device.VirtualDisk):
# Found Virtual Disk device
if device.controllerKey not in current_scsi_info:
current_scsi_info[device.controllerKey] = dict(disks=dict())
current_scsi_info[device.controllerKey]['disks'][device.unitNumber] = device
vm_name = self.vm.name
disk_change_list = []
for disk in disk_data:
disk_change = False
scsi_controller = disk['scsi_controller'] + 1000 # VMware auto assign 1000 + SCSI Controller
if disk['disk_unit_number'] not in current_scsi_info[scsi_controller]['disks'] and disk['state'] == 'present':
# Add new disk
disk_spec = self.create_scsi_disk(scsi_controller, disk['disk_unit_number'], disk['disk_mode'])
disk_spec.device.capacityInKB = disk['size']
if disk['disk_type'] == 'thin':
disk_spec.device.backing.thinProvisioned = True
elif disk['disk_type'] == 'eagerzeroedthick':
disk_spec.device.backing.eagerlyScrub = True
disk_spec.device.backing.fileName = "[%s] %s/%s_%s_%s.vmdk" % (disk['datastore'].name,
vm_name, vm_name,
str(scsi_controller),
str(disk['disk_unit_number']))
disk_spec.device.backing.datastore = disk['datastore']
self.config_spec.deviceChange.append(disk_spec)
disk_change = True
current_scsi_info[scsi_controller]['disks'][disk['disk_unit_number']] = disk_spec.device
results['disk_changes'][disk['disk_index']] = "Disk created."
elif disk['disk_unit_number'] in current_scsi_info[scsi_controller]['disks']:
if disk['state'] == 'present':
disk_spec = vim.vm.device.VirtualDeviceSpec()
# set the operation to edit so that it knows to keep other settings
disk_spec.device = current_scsi_info[scsi_controller]['disks'][disk['disk_unit_number']]
# Edit and no resizing allowed
if disk['size'] < disk_spec.device.capacityInKB:
self.module.fail_json(msg="Given disk size at disk index [%s] is smaller than found (%d < %d)."
" Reducing disks is not allowed." % (disk['disk_index'],
disk['size'],
disk_spec.device.capacityInKB))
if disk['size'] != disk_spec.device.capacityInKB:
disk_spec.operation = vim.vm.device.VirtualDeviceSpec.Operation.edit
disk_spec.device.capacityInKB = disk['size']
self.config_spec.deviceChange.append(disk_spec)
disk_change = True
results['disk_changes'][disk['disk_index']] = "Disk size increased."
else:
results['disk_changes'][disk['disk_index']] = "Disk already exists."
elif disk['state'] == 'absent':
# Disk already exists, deleting
disk_spec = vim.vm.device.VirtualDeviceSpec()
disk_spec.operation = vim.vm.device.VirtualDeviceSpec.Operation.remove
disk_spec.fileOperation = vim.vm.device.VirtualDeviceSpec.FileOperation.destroy
disk_spec.device = current_scsi_info[scsi_controller]['disks'][disk['disk_unit_number']]
self.config_spec.deviceChange.append(disk_spec)
disk_change = True
results['disk_changes'][disk['disk_index']] = "Disk deleted."
if disk_change:
# Adding multiple disks in a single attempt raises weird errors
                # so we add a single disk at a time.
self.reconfigure_vm(self.config_spec, 'disks')
self.config_spec = vim.vm.ConfigSpec()
self.config_spec.deviceChange = []
disk_change_list.append(disk_change)
if any(disk_change_list):
results['changed'] = True
results['disk_data'] = self.gather_disk_facts(vm_obj=self.vm)
self.module.exit_json(**results)
def sanitize_disk_inputs(self):
"""
Check correctness of disk input provided by user
Returns: A list of dictionary containing disk information
"""
disks_data = list()
if not self.desired_disks:
self.module.exit_json(changed=False, msg="No disks provided for virtual"
" machine '%s' for management." % self.vm.name)
for disk_index, disk in enumerate(self.desired_disks):
# Initialize default value for disk
current_disk = dict(disk_index=disk_index,
state='present',
datastore=None,
autoselect_datastore=True,
disk_unit_number=0,
scsi_controller=0,
disk_mode='persistent')
# Check state
if 'state' in disk:
if disk['state'] not in ['absent', 'present']:
self.module.fail_json(msg="Invalid state provided '%s' for disk index [%s]."
" State can be either - 'absent', 'present'" % (disk['state'],
disk_index))
else:
current_disk['state'] = disk['state']
if current_disk['state'] == 'present':
# Select datastore or datastore cluster
if 'datastore' in disk:
if 'autoselect_datastore' in disk:
self.module.fail_json(msg="Please specify either 'datastore' "
"or 'autoselect_datastore' for disk index [%s]" % disk_index)
# Check if given value is datastore or datastore cluster
datastore_name = disk['datastore']
datastore_cluster = find_obj(self.content, [vim.StoragePod], datastore_name)
if datastore_cluster:
# If user specified datastore cluster so get recommended datastore
datastore_name = self.get_recommended_datastore(datastore_cluster_obj=datastore_cluster)
# Check if get_recommended_datastore or user specified datastore exists or not
datastore = find_obj(self.content, [vim.Datastore], datastore_name)
if datastore is None:
self.module.fail_json(msg="Failed to find datastore named '%s' "
"in given configuration." % disk['datastore'])
current_disk['datastore'] = datastore
current_disk['autoselect_datastore'] = False
elif 'autoselect_datastore' in disk:
# Find datastore which fits requirement
datastores = get_all_objs(self.content, [vim.Datastore])
if not datastores:
self.module.fail_json(msg="Failed to gather information about"
" available datastores in given datacenter.")
datastore = None
datastore_freespace = 0
for ds in datastores:
if ds.summary.freeSpace > datastore_freespace:
# If datastore field is provided, filter destination datastores
datastore = ds
datastore_freespace = ds.summary.freeSpace
current_disk['datastore'] = datastore
if 'datastore' not in disk and 'autoselect_datastore' not in disk:
self.module.fail_json(msg="Either 'datastore' or 'autoselect_datastore' is"
" required parameter while creating disk for "
"disk index [%s]." % disk_index)
if [x for x in disk.keys() if x.startswith('size_') or x == 'size']:
# size, size_tb, size_gb, size_mb, size_kb
disk_size_parse_failed = False
if 'size' in disk:
size_regex = re.compile(r'(\d+(?:\.\d+)?)([tgmkTGMK][bB])')
disk_size_m = size_regex.match(disk['size'])
if disk_size_m:
expected = disk_size_m.group(1)
unit = disk_size_m.group(2)
else:
disk_size_parse_failed = True
try:
if re.match(r'\d+\.\d+', expected):
# We found float value in string, let's typecast it
expected = float(expected)
else:
# We found int value in string, let's typecast it
expected = int(expected)
except (TypeError, ValueError, NameError):
disk_size_parse_failed = True
else:
# Even multiple size_ parameter provided by user,
# consider first value only
param = [x for x in disk.keys() if x.startswith('size_')][0]
unit = param.split('_')[-1]
disk_size = disk[param]
if isinstance(disk_size, (float, int)):
disk_size = str(disk_size)
try:
if re.match(r'\d+\.\d+', disk_size):
# We found float value in string, let's typecast it
expected = float(disk_size)
else:
# We found int value in string, let's typecast it
expected = int(disk_size)
except (TypeError, ValueError, NameError):
disk_size_parse_failed = True
if disk_size_parse_failed:
# Common failure
self.module.fail_json(msg="Failed to parse disk size for disk index [%s],"
" please review value provided"
" using documentation." % disk_index)
disk_units = dict(tb=3, gb=2, mb=1, kb=0)
unit = unit.lower()
if unit in disk_units:
current_disk['size'] = expected * (1024 ** disk_units[unit])
else:
self.module.fail_json(msg="%s is not a supported unit for disk size for disk index [%s]."
" Supported units are ['%s']." % (unit,
disk_index,
"', '".join(disk_units.keys())))
else:
# No size found but disk, fail
self.module.fail_json(msg="No size, size_kb, size_mb, size_gb or size_tb"
" attribute found into disk index [%s] configuration." % disk_index)
# Check SCSI controller key
if 'scsi_controller' in disk:
try:
temp_disk_controller = int(disk['scsi_controller'])
except ValueError:
self.module.fail_json(msg="Invalid SCSI controller ID '%s' specified"
" at index [%s]" % (disk['scsi_controller'], disk_index))
if temp_disk_controller not in range(0, 4):
# Only 4 SCSI controllers are allowed per VM
self.module.fail_json(msg="Invalid SCSI controller ID specified [%s],"
" please specify value between 0 to 3 only." % temp_disk_controller)
current_disk['scsi_controller'] = temp_disk_controller
else:
self.module.fail_json(msg="Please specify 'scsi_controller' under disk parameter"
" at index [%s], which is required while creating disk." % disk_index)
# Check for disk unit number
if 'unit_number' in disk:
try:
temp_disk_unit_number = int(disk['unit_number'])
except ValueError:
self.module.fail_json(msg="Invalid Disk unit number ID '%s'"
" specified at index [%s]" % (disk['unit_number'], disk_index))
if temp_disk_unit_number not in range(0, 16):
self.module.fail_json(msg="Invalid Disk unit number ID specified for disk [%s] at index [%s],"
" please specify value between 0 to 15"
" only (excluding 7)." % (temp_disk_unit_number, disk_index))
if temp_disk_unit_number == 7:
self.module.fail_json(msg="Invalid Disk unit number ID specified for disk at index [%s],"
" please specify value other than 7 as it is reserved"
"for SCSI Controller" % disk_index)
current_disk['disk_unit_number'] = temp_disk_unit_number
else:
self.module.fail_json(msg="Please specify 'unit_number' under disk parameter"
" at index [%s], which is required while creating disk." % disk_index)
# Type of Disk
disk_type = disk.get('type', 'thick').lower()
if disk_type not in ['thin', 'thick', 'eagerzeroedthick']:
self.module.fail_json(msg="Invalid 'disk_type' specified for disk index [%s]. Please specify"
" 'disk_type' value from ['thin', 'thick', 'eagerzeroedthick']." % disk_index)
current_disk['disk_type'] = disk_type
# Mode of Disk
temp_disk_mode = disk.get('disk_mode', 'persistent').lower()
if temp_disk_mode not in ['persistent', 'independent_persistent', 'independent_nonpersistent']:
self.module.fail_json(msg="Invalid 'disk_mode' specified for disk index [%s]. Please specify"
" 'disk_mode' value from ['persistent', 'independent_persistent', 'independent_nonpersistent']." % disk_index)
current_disk['disk_mode'] = temp_disk_mode
# SCSI Controller Type
scsi_contrl_type = disk.get('scsi_type', 'paravirtual').lower()
if scsi_contrl_type not in self.scsi_device_type.keys():
self.module.fail_json(msg="Invalid 'scsi_type' specified for disk index [%s]. Please specify"
" 'scsi_type' value from ['%s']" % (disk_index,
"', '".join(self.scsi_device_type.keys())))
current_disk['scsi_type'] = scsi_contrl_type
disks_data.append(current_disk)
return disks_data
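    # Doctest-style illustration of the size parsing performed above ('10Gb'
    # is an example input; the exponents mirror the disk_units table):
    #
    #   >>> import re
    #   >>> m = re.match(r'(\d+(?:\.\d+)?)([tgmkTGMK][bB])', '10Gb')
    #   >>> float(m.group(1)) * 1024 ** {'tb': 3, 'gb': 2, 'mb': 1, 'kb': 0}[m.group(2).lower()]
    #   10485760.0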
def get_recommended_datastore(self, datastore_cluster_obj):
"""
Return Storage DRS recommended datastore from datastore cluster
Args:
datastore_cluster_obj: datastore cluster managed object
Returns: Name of recommended datastore from the given datastore cluster,
Returns None if no datastore recommendation found.
"""
# Check if Datastore Cluster provided by user is SDRS ready
sdrs_status = datastore_cluster_obj.podStorageDrsEntry.storageDrsConfig.podConfig.enabled
if sdrs_status:
# We can get storage recommendation only if SDRS is enabled on given datastorage cluster
pod_sel_spec = vim.storageDrs.PodSelectionSpec()
pod_sel_spec.storagePod = datastore_cluster_obj
storage_spec = vim.storageDrs.StoragePlacementSpec()
storage_spec.podSelectionSpec = pod_sel_spec
storage_spec.type = 'create'
try:
rec = self.content.storageResourceManager.RecommendDatastores(storageSpec=storage_spec)
rec_action = rec.recommendations[0].action[0]
return rec_action.destination.name
except Exception:
# There is some error so we fall back to general workflow
pass
datastore = None
datastore_freespace = 0
for ds in datastore_cluster_obj.childEntity:
if ds.summary.freeSpace > datastore_freespace:
# If datastore field is provided, filter destination datastores
datastore = ds
datastore_freespace = ds.summary.freeSpace
if datastore:
return datastore.name
return None
@staticmethod
def gather_disk_facts(vm_obj):
"""
Gather facts about VM's disks
Args:
vm_obj: Managed object of virtual machine
Returns: A list of dict containing disks information
"""
disks_facts = dict()
if vm_obj is None:
return disks_facts
disk_index = 0
for disk in vm_obj.config.hardware.device:
if isinstance(disk, vim.vm.device.VirtualDisk):
disks_facts[disk_index] = dict(
key=disk.key,
label=disk.deviceInfo.label,
summary=disk.deviceInfo.summary,
backing_filename=disk.backing.fileName,
backing_datastore=disk.backing.datastore.name,
backing_disk_mode=disk.backing.diskMode,
backing_writethrough=disk.backing.writeThrough,
backing_thinprovisioned=disk.backing.thinProvisioned,
backing_eagerlyscrub=bool(disk.backing.eagerlyScrub),
controller_key=disk.controllerKey,
unit_number=disk.unitNumber,
capacity_in_kb=disk.capacityInKB,
capacity_in_bytes=disk.capacityInBytes,
)
disk_index += 1
return disks_facts
def main():
argument_spec = vmware_argument_spec()
argument_spec.update(
name=dict(type='str'),
uuid=dict(type='str'),
moid=dict(type='str'),
folder=dict(type='str'),
datacenter=dict(type='str', required=True),
disk=dict(type='list', default=[]),
use_instance_uuid=dict(type='bool', default=False),
)
module = AnsibleModule(
argument_spec=argument_spec,
required_one_of=[
['name', 'uuid', 'moid']
]
)
if module.params['folder']:
# FindByInventoryPath() does not require an absolute path
# so we should leave the input folder path unmodified
module.params['folder'] = module.params['folder'].rstrip('/')
pyv = PyVmomiHelper(module)
# Check if the VM exists before continuing
vm = pyv.get_vm()
if not vm:
# We unable to find the virtual machine user specified
# Bail out
vm_id = (module.params.get('name') or module.params.get('uuid') or module.params.get('moid'))
module.fail_json(msg="Unable to manage disks for non-existing"
" virtual machine '%s'." % vm_id)
# VM exists
try:
pyv.ensure_disks(vm_obj=vm)
except Exception as exc:
module.fail_json(msg="Failed to manage disks for virtual machine"
" '%s' with exception : %s" % (vm.name,
to_native(exc)))
if __name__ == '__main__':
main()
| gpl-3.0 | 7,138,436,697,907,119,000 | 47.118715 | 152 | 0.551012 | false |
ddayguerrero/blogme | flask/lib/python3.4/site-packages/pip/index.py | 45 | 40374 | """Routines related to PyPI, indexes"""
import sys
import os
import re
import mimetypes
import posixpath
from pip.log import logger
from pip.util import Inf, normalize_name, splitext, is_prerelease
from pip.exceptions import (DistributionNotFound, BestVersionAlreadyInstalled,
InstallationError, InvalidWheelFilename, UnsupportedWheel)
from pip.backwardcompat import urlparse, url2pathname
from pip.download import PipSession, url_to_path, path_to_url
from pip.wheel import Wheel, wheel_ext
from pip.pep425tags import supported_tags, supported_tags_noarch, get_platform
import html5lib
import requests
import pkg_resources
from requests.exceptions import SSLError
__all__ = ['PackageFinder']
DEFAULT_MIRROR_HOSTNAME = "last.pypi.python.org"
INSECURE_SCHEMES = {
"http": ["https"],
}
class PackageFinder(object):
"""This finds packages.
This is meant to match easy_install's technique for looking for
packages, by reading pages and looking for appropriate links
"""
def __init__(self, find_links, index_urls,
use_wheel=True, allow_external=[], allow_unverified=[],
allow_all_external=False, allow_all_prereleases=False,
process_dependency_links=False, session=None):
self.find_links = find_links
self.index_urls = index_urls
self.dependency_links = []
self.cache = PageCache()
# These are boring links that have already been logged somehow:
self.logged_links = set()
self.use_wheel = use_wheel
# Do we allow (safe and verifiable) externally hosted files?
self.allow_external = set(normalize_name(n) for n in allow_external)
# Which names are allowed to install insecure and unverifiable files?
self.allow_unverified = set(
normalize_name(n) for n in allow_unverified
)
# Anything that is allowed unverified is also allowed external
self.allow_external |= self.allow_unverified
# Do we allow all (safe and verifiable) externally hosted files?
self.allow_all_external = allow_all_external
# Stores if we ignored any external links so that we can instruct
# end users how to install them if no distributions are available
self.need_warn_external = False
# Stores if we ignored any unsafe links so that we can instruct
# end users how to install them if no distributions are available
self.need_warn_unverified = False
# Do we want to allow _all_ pre-releases?
self.allow_all_prereleases = allow_all_prereleases
# Do we process dependency links?
self.process_dependency_links = process_dependency_links
self._have_warned_dependency_links = False
# The Session we'll use to make requests
self.session = session or PipSession()
def add_dependency_links(self, links):
## FIXME: this shouldn't be global list this, it should only
## apply to requirements of the package that specifies the
## dependency_links value
## FIXME: also, we should track comes_from (i.e., use Link)
if self.process_dependency_links:
if not self._have_warned_dependency_links:
logger.deprecated(
"1.6",
"Dependency Links processing has been deprecated with an "
"accelerated time schedule and will be removed in pip 1.6",
)
self._have_warned_dependency_links = True
self.dependency_links.extend(links)
def _sort_locations(self, locations):
"""
Sort locations into "files" (archives) and "urls", and return
a pair of lists (files,urls)
"""
files = []
urls = []
# puts the url for the given file path into the appropriate list
def sort_path(path):
url = path_to_url(path)
if mimetypes.guess_type(url, strict=False)[0] == 'text/html':
urls.append(url)
else:
files.append(url)
for url in locations:
is_local_path = os.path.exists(url)
is_file_url = url.startswith('file:')
is_find_link = url in self.find_links
if is_local_path or is_file_url:
if is_local_path:
path = url
else:
path = url_to_path(url)
if is_find_link and os.path.isdir(path):
path = os.path.realpath(path)
for item in os.listdir(path):
sort_path(os.path.join(path, item))
elif is_file_url and os.path.isdir(path):
urls.append(url)
elif os.path.isfile(path):
sort_path(path)
else:
urls.append(url)
return files, urls
def _link_sort_key(self, link_tuple):
"""
Function used to generate link sort key for link tuples.
The greater the return value, the more preferred it is.
If not finding wheels, then sorted by version only.
If finding wheels, then the sort order is by version, then:
1. existing installs
2. wheels ordered via Wheel.support_index_min()
3. source archives
Note: it was considered to embed this logic into the Link
comparison operators, but then different sdist links
        with the same version would have to be considered equal
"""
parsed_version, link, _ = link_tuple
if self.use_wheel:
support_num = len(supported_tags)
if link == INSTALLED_VERSION:
pri = 1
elif link.ext == wheel_ext:
wheel = Wheel(link.filename) # can raise InvalidWheelFilename
if not wheel.supported():
raise UnsupportedWheel("%s is not a supported wheel for this platform. It can't be sorted." % wheel.filename)
pri = -(wheel.support_index_min())
else: # sdist
pri = -(support_num)
return (parsed_version, pri)
else:
return parsed_version
def _sort_versions(self, applicable_versions):
"""
Bring the latest version (and wheels) to the front, but maintain the existing ordering as secondary.
See the docstring for `_link_sort_key` for details.
This function is isolated for easier unit testing.
"""
return sorted(applicable_versions, key=self._link_sort_key, reverse=True)
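    # Doctest-style sketch of the resulting order: keys compare first by
    # parsed version, then by priority, so for equal versions an installed
    # copy (pri 1) beats a well-supported wheel (pri = -support_index_min()),
    # which beats an sdist (pri = -len(supported_tags)). With made-up keys:
    #
    #   >>> sorted([((1, 0), -30), ((1, 0), -2), ((0, 9), 1)], reverse=True)[0]
    #   ((1, 0), -2)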
def find_requirement(self, req, upgrade):
def mkurl_pypi_url(url):
loc = posixpath.join(url, url_name)
# For maximum compatibility with easy_install, ensure the path
# ends in a trailing slash. Although this isn't in the spec
# (and PyPI can handle it without the slash) some other index
# implementations might break if they relied on easy_install's behavior.
if not loc.endswith('/'):
loc = loc + '/'
return loc
url_name = req.url_name
# Only check main index if index URL is given:
main_index_url = None
if self.index_urls:
# Check that we have the url_name correctly spelled:
main_index_url = Link(mkurl_pypi_url(self.index_urls[0]), trusted=True)
# This will also cache the page, so it's okay that we get it again later:
page = self._get_page(main_index_url, req)
if page is None:
url_name = self._find_url_name(Link(self.index_urls[0], trusted=True), url_name, req) or req.url_name
if url_name is not None:
locations = [
mkurl_pypi_url(url)
for url in self.index_urls] + self.find_links
else:
locations = list(self.find_links)
for version in req.absolute_versions:
if url_name is not None and main_index_url is not None:
locations = [
posixpath.join(main_index_url.url, version)] + locations
file_locations, url_locations = self._sort_locations(locations)
_flocations, _ulocations = self._sort_locations(self.dependency_links)
file_locations.extend(_flocations)
# We trust every url that the user has given us whether it was given
# via --index-url or --find-links
locations = [Link(url, trusted=True) for url in url_locations]
# We explicitly do not trust links that came from dependency_links
locations.extend([Link(url) for url in _ulocations])
logger.debug('URLs to search for versions for %s:' % req)
for location in locations:
logger.debug('* %s' % location)
# Determine if this url used a secure transport mechanism
parsed = urlparse.urlparse(str(location))
if parsed.scheme in INSECURE_SCHEMES:
secure_schemes = INSECURE_SCHEMES[parsed.scheme]
if len(secure_schemes) == 1:
ctx = (location, parsed.scheme, secure_schemes[0],
parsed.netloc)
logger.warn("%s uses an insecure transport scheme (%s). "
"Consider using %s if %s has it available" %
ctx)
elif len(secure_schemes) > 1:
ctx = (location, parsed.scheme, ", ".join(secure_schemes),
parsed.netloc)
logger.warn("%s uses an insecure transport scheme (%s). "
"Consider using one of %s if %s has any of "
"them available" % ctx)
else:
ctx = (location, parsed.scheme)
logger.warn("%s uses an insecure transport scheme (%s)." %
ctx)
found_versions = []
found_versions.extend(
self._package_versions(
# We trust every directly linked archive in find_links
[Link(url, '-f', trusted=True) for url in self.find_links], req.name.lower()))
page_versions = []
for page in self._get_pages(locations, req):
logger.debug('Analyzing links from page %s' % page.url)
logger.indent += 2
try:
page_versions.extend(self._package_versions(page.links, req.name.lower()))
finally:
logger.indent -= 2
dependency_versions = list(self._package_versions(
[Link(url) for url in self.dependency_links], req.name.lower()))
if dependency_versions:
logger.info('dependency_links found: %s' % ', '.join([link.url for parsed, link, version in dependency_versions]))
file_versions = list(self._package_versions(
[Link(url) for url in file_locations], req.name.lower()))
if not found_versions and not page_versions and not dependency_versions and not file_versions:
logger.fatal('Could not find any downloads that satisfy the requirement %s' % req)
if self.need_warn_external:
logger.warn("Some externally hosted files were ignored (use "
"--allow-external %s to allow)." % req.name)
if self.need_warn_unverified:
logger.warn("Some insecure and unverifiable files were ignored"
" (use --allow-unverified %s to allow)." %
req.name)
raise DistributionNotFound('No distributions at all found for %s' % req)
installed_version = []
if req.satisfied_by is not None:
installed_version = [(req.satisfied_by.parsed_version, INSTALLED_VERSION, req.satisfied_by.version)]
if file_versions:
file_versions.sort(reverse=True)
logger.info('Local files found: %s' % ', '.join([url_to_path(link.url) for parsed, link, version in file_versions]))
#this is an intentional priority ordering
all_versions = installed_version + file_versions + found_versions + page_versions + dependency_versions
applicable_versions = []
for (parsed_version, link, version) in all_versions:
if version not in req.req:
logger.info("Ignoring link %s, version %s doesn't match %s"
% (link, version, ','.join([''.join(s) for s in req.req.specs])))
continue
elif is_prerelease(version) and not (self.allow_all_prereleases or req.prereleases):
# If this version isn't the already installed one, then
# ignore it if it's a pre-release.
if link is not INSTALLED_VERSION:
logger.info("Ignoring link %s, version %s is a pre-release (use --pre to allow)." % (link, version))
continue
applicable_versions.append((parsed_version, link, version))
applicable_versions = self._sort_versions(applicable_versions)
existing_applicable = bool([link for parsed_version, link, version in applicable_versions if link is INSTALLED_VERSION])
if not upgrade and existing_applicable:
if applicable_versions[0][1] is INSTALLED_VERSION:
logger.info('Existing installed version (%s) is most up-to-date and satisfies requirement'
% req.satisfied_by.version)
else:
logger.info('Existing installed version (%s) satisfies requirement (most up-to-date version is %s)'
% (req.satisfied_by.version, applicable_versions[0][2]))
return None
if not applicable_versions:
logger.fatal('Could not find a version that satisfies the requirement %s (from versions: %s)'
% (req, ', '.join([version for parsed_version, link, version in all_versions])))
if self.need_warn_external:
logger.warn("Some externally hosted files were ignored (use "
"--allow-external to allow).")
if self.need_warn_unverified:
logger.warn("Some insecure and unverifiable files were ignored"
" (use --allow-unverified %s to allow)." %
req.name)
raise DistributionNotFound('No distributions matching the version for %s' % req)
if applicable_versions[0][1] is INSTALLED_VERSION:
# We have an existing version, and its the best version
logger.info('Installed version (%s) is most up-to-date (past versions: %s)'
% (req.satisfied_by.version, ', '.join([version for parsed_version, link, version in applicable_versions[1:]]) or 'none'))
raise BestVersionAlreadyInstalled
if len(applicable_versions) > 1:
logger.info('Using version %s (newest of versions: %s)' %
(applicable_versions[0][2], ', '.join([version for parsed_version, link, version in applicable_versions])))
selected_version = applicable_versions[0][1]
if (selected_version.internal is not None
and not selected_version.internal):
logger.warn("%s an externally hosted file and may be "
"unreliable" % req.name)
if (selected_version.verifiable is not None
and not selected_version.verifiable):
logger.warn("%s is potentially insecure and "
"unverifiable." % req.name)
if selected_version._deprecated_regex:
logger.deprecated(
"1.7",
"%s discovered using a deprecated method of parsing, "
"in the future it will no longer be discovered" % req.name
)
return selected_version
def _find_url_name(self, index_url, url_name, req):
"""Finds the true URL name of a package, when the given name isn't quite correct.
This is usually used to implement case-insensitivity."""
if not index_url.url.endswith('/'):
# Vaguely part of the PyPI API... weird but true.
## FIXME: bad to modify this?
index_url.url += '/'
page = self._get_page(index_url, req)
if page is None:
logger.fatal('Cannot fetch index base URL %s' % index_url)
return
norm_name = normalize_name(req.url_name)
for link in page.links:
base = posixpath.basename(link.path.rstrip('/'))
if norm_name == normalize_name(base):
logger.notify('Real name of requirement %s is %s' % (url_name, base))
return base
return None
def _get_pages(self, locations, req):
"""
Yields (page, page_url) from the given locations, skipping
locations that have errors, and adding download/homepage links
"""
all_locations = list(locations)
seen = set()
while all_locations:
location = all_locations.pop(0)
if location in seen:
continue
seen.add(location)
page = self._get_page(location, req)
if page is None:
continue
yield page
for link in page.rel_links():
normalized = normalize_name(req.name).lower()
if (not normalized in self.allow_external
and not self.allow_all_external):
self.need_warn_external = True
logger.debug("Not searching %s for files because external "
"urls are disallowed." % link)
continue
if (link.trusted is not None
and not link.trusted
and not normalized in self.allow_unverified):
logger.debug("Not searching %s for urls, it is an "
"untrusted link and cannot produce safe or "
"verifiable files." % link)
self.need_warn_unverified = True
continue
all_locations.append(link)
_egg_fragment_re = re.compile(r'#egg=([^&]*)')
_egg_info_re = re.compile(r'([a-z0-9_.]+)-([a-z0-9_.-]+)', re.I)
_py_version_re = re.compile(r'-py([123]\.?[0-9]?)$')
def _sort_links(self, links):
"Returns elements of links in order, non-egg links first, egg links second, while eliminating duplicates"
eggs, no_eggs = [], []
seen = set()
for link in links:
if link not in seen:
seen.add(link)
if link.egg_fragment:
eggs.append(link)
else:
no_eggs.append(link)
return no_eggs + eggs
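    # Doctest-style examples of the module regexes defined above (inputs are
    # illustrative):
    #
    #   >>> PackageFinder._egg_info_re.search('simplejson-3.3.0').group(0)
    #   'simplejson-3.3.0'
    #   >>> PackageFinder._py_version_re.search('3.3.0-py2.7').group(1)
    #   '2.7'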
def _package_versions(self, links, search_name):
for link in self._sort_links(links):
for v in self._link_package_versions(link, search_name):
yield v
def _known_extensions(self):
extensions = ('.tar.gz', '.tar.bz2', '.tar', '.tgz', '.zip')
if self.use_wheel:
return extensions + (wheel_ext,)
return extensions
def _link_package_versions(self, link, search_name):
"""
Return an iterable of triples (pkg_resources_version_key,
link, python_version) that can be extracted from the given
link.
Meant to be overridden by subclasses, not called by clients.
"""
platform = get_platform()
version = None
if link.egg_fragment:
egg_info = link.egg_fragment
else:
egg_info, ext = link.splitext()
if not ext:
if link not in self.logged_links:
logger.debug('Skipping link %s; not a file' % link)
self.logged_links.add(link)
return []
if egg_info.endswith('.tar'):
# Special double-extension case:
egg_info = egg_info[:-4]
ext = '.tar' + ext
if ext not in self._known_extensions():
if link not in self.logged_links:
logger.debug('Skipping link %s; unknown archive format: %s' % (link, ext))
self.logged_links.add(link)
return []
if "macosx10" in link.path and ext == '.zip':
if link not in self.logged_links:
logger.debug('Skipping link %s; unsupported macosx10 zip' % link)
self.logged_links.add(link)
return []
if ext == wheel_ext:
try:
wheel = Wheel(link.filename)
except InvalidWheelFilename:
logger.debug('Skipping %s because the wheel filename is invalid' % link)
return []
if wheel.name.lower() != search_name.lower():
logger.debug('Skipping link %s; wrong project name (not %s)' % (link, search_name))
return []
if not wheel.supported():
logger.debug('Skipping %s because it is not compatible with this Python' % link)
return []
# This is a dirty hack to prevent installing Binary Wheels from
# PyPI unless it is a Windows or Mac Binary Wheel. This is
# paired with a change to PyPI disabling uploads for the
# same. Once we have a mechanism for enabling support for binary
# wheels on linux that deals with the inherent problems of
# binary distribution this can be removed.
comes_from = getattr(link, "comes_from", None)
if ((
not platform.startswith('win')
and not platform.startswith('macosx')
)
and comes_from is not None
and urlparse.urlparse(comes_from.url).netloc.endswith(
"pypi.python.org")):
if not wheel.supported(tags=supported_tags_noarch):
logger.debug(
"Skipping %s because it is a pypi-hosted binary "
"Wheel on an unsupported platform" % link
)
return []
version = wheel.version
if not version:
version = self._egg_info_matches(egg_info, search_name, link)
if version is None:
logger.debug('Skipping link %s; wrong project name (not %s)' % (link, search_name))
return []
if (link.internal is not None
and not link.internal
and normalize_name(search_name).lower() not in self.allow_external
and not self.allow_all_external):
# We have a link that we are sure is external, so we should skip
# it unless we are allowing externals
logger.debug("Skipping %s because it is externally hosted." % link)
self.need_warn_external = True
return []
if (link.verifiable is not None
and not link.verifiable
and normalize_name(search_name).lower() not in self.allow_unverified):
# We have a link whose integrity we are sure we cannot verify,
# so we should skip it unless we are allowing unsafe installs
# for this requirement.
logger.debug("Skipping %s because it is an insecure and "
"unverifiable file." % link)
self.need_warn_unverified = True
return []
match = self._py_version_re.search(version)
if match:
version = version[:match.start()]
py_version = match.group(1)
if py_version != sys.version[:3]:
logger.debug('Skipping %s because Python version is incorrect' % link)
return []
logger.debug('Found link %s, version: %s' % (link, version))
return [(pkg_resources.parse_version(version),
link,
version)]
def _egg_info_matches(self, egg_info, search_name, link):
match = self._egg_info_re.search(egg_info)
if not match:
logger.debug('Could not parse version from link: %s' % link)
return None
name = match.group(0).lower()
# To match the "safe" name that pkg_resources creates:
name = name.replace('_', '-')
# project name and version must be separated by a dash
look_for = search_name.lower() + "-"
if name.startswith(look_for):
return match.group(0)[len(look_for):]
else:
return None
def _get_page(self, link, req):
return HTMLPage.get_page(link, req,
cache=self.cache,
session=self.session,
)
class PageCache(object):
"""Cache of HTML pages"""
failure_limit = 3
def __init__(self):
self._failures = {}
self._pages = {}
self._archives = {}
def too_many_failures(self, url):
return self._failures.get(url, 0) >= self.failure_limit
def get_page(self, url):
return self._pages.get(url)
def is_archive(self, url):
return self._archives.get(url, False)
def set_is_archive(self, url, value=True):
self._archives[url] = value
def add_page_failure(self, url, level):
self._failures[url] = self._failures.get(url, 0)+level
def add_page(self, urls, page):
for url in urls:
self._pages[url] = page
class HTMLPage(object):
"""Represents one page, along with its URL"""
## FIXME: these regexes are horrible hacks:
_homepage_re = re.compile(r'<th>\s*home\s*page', re.I)
_download_re = re.compile(r'<th>\s*download\s+url', re.I)
_href_re = re.compile('href=(?:"([^"]*)"|\'([^\']*)\'|([^>\\s\\n]*))', re.I|re.S)
def __init__(self, content, url, headers=None, trusted=None):
self.content = content
self.parsed = html5lib.parse(self.content, namespaceHTMLElements=False)
self.url = url
self.headers = headers
self.trusted = trusted
def __str__(self):
return self.url
@classmethod
def get_page(cls, link, req, cache=None, skip_archives=True, session=None):
if session is None:
session = PipSession()
url = link.url
url = url.split('#', 1)[0]
if cache is not None and cache.too_many_failures(url):
return None
# Check for VCS schemes that do not support lookup as web pages.
from pip.vcs import VcsSupport
for scheme in VcsSupport.schemes:
if url.lower().startswith(scheme) and url[len(scheme)] in '+:':
logger.debug('Cannot look at %(scheme)s URL %(link)s' % locals())
return None
if cache is not None:
inst = cache.get_page(url)
if inst is not None:
return inst
try:
if skip_archives:
if cache is not None:
if cache.is_archive(url):
return None
filename = link.filename
for bad_ext in ['.tar', '.tar.gz', '.tar.bz2', '.tgz', '.zip']:
if filename.endswith(bad_ext):
content_type = cls._get_content_type(url,
session=session,
)
if content_type.lower().startswith('text/html'):
break
else:
logger.debug('Skipping page %s because of Content-Type: %s' % (link, content_type))
if cache is not None:
cache.set_is_archive(url)
return None
logger.debug('Getting page %s' % url)
# Tack index.html onto file:// URLs that point to directories
(scheme, netloc, path, params, query, fragment) = urlparse.urlparse(url)
if scheme == 'file' and os.path.isdir(url2pathname(path)):
# add trailing slash if not present so urljoin doesn't trim final segment
if not url.endswith('/'):
url += '/'
url = urlparse.urljoin(url, 'index.html')
logger.debug(' file: URL is directory, getting %s' % url)
resp = session.get(url, headers={"Accept": "text/html"})
resp.raise_for_status()
# The check for archives above only works if the url ends with
# something that looks like an archive. However that is not a
# requirement. For instance http://sourceforge.net/projects/docutils/files/docutils/0.8.1/docutils-0.8.1.tar.gz/download
# redirects to http://superb-dca3.dl.sourceforge.net/project/docutils/docutils/0.8.1/docutils-0.8.1.tar.gz
# Unless we issue a HEAD request on every url we cannot know
# ahead of time for sure if something is HTML or not. However we
# can check after we've downloaded it.
content_type = resp.headers.get('Content-Type', 'unknown')
if not content_type.lower().startswith("text/html"):
logger.debug('Skipping page %s because of Content-Type: %s' %
(link, content_type))
if cache is not None:
cache.set_is_archive(url)
return None
inst = cls(resp.text, resp.url, resp.headers, trusted=link.trusted)
except requests.HTTPError as exc:
level = 2 if exc.response.status_code == 404 else 1
cls._handle_fail(req, link, exc, url, cache=cache, level=level)
except requests.ConnectionError as exc:
cls._handle_fail(
req, link, "connection error: %s" % exc, url,
cache=cache,
)
except requests.Timeout:
cls._handle_fail(req, link, "timed out", url, cache=cache)
except SSLError as exc:
reason = ("There was a problem confirming the ssl certificate: "
"%s" % exc)
cls._handle_fail(req, link, reason, url,
cache=cache,
level=2,
meth=logger.notify,
)
else:
if cache is not None:
cache.add_page([url, resp.url], inst)
return inst
@staticmethod
def _handle_fail(req, link, reason, url, cache=None, level=1, meth=None):
if meth is None:
meth = logger.info
meth("Could not fetch URL %s: %s", link, reason)
meth("Will skip URL %s when looking for download links for %s" %
(link.url, req))
if cache is not None:
cache.add_page_failure(url, level)
@staticmethod
def _get_content_type(url, session=None):
"""Get the Content-Type of the given url, using a HEAD request"""
if session is None:
session = PipSession()
scheme, netloc, path, query, fragment = urlparse.urlsplit(url)
if scheme not in ('http', 'https', 'ftp', 'ftps'):
## FIXME: some warning or something?
## assertion error?
return ''
resp = session.head(url, allow_redirects=True)
resp.raise_for_status()
return resp.headers.get("Content-Type", "")
@property
def api_version(self):
if not hasattr(self, "_api_version"):
_api_version = None
metas = [x for x in self.parsed.findall(".//meta")
if x.get("name", "").lower() == "api-version"]
if metas:
try:
_api_version = int(metas[0].get("value", None))
except (TypeError, ValueError):
_api_version = None
self._api_version = _api_version
return self._api_version
@property
def base_url(self):
if not hasattr(self, "_base_url"):
base = self.parsed.find(".//base")
if base is not None and base.get("href"):
self._base_url = base.get("href")
else:
self._base_url = self.url
return self._base_url
@property
def links(self):
"""Yields all links in the page"""
for anchor in self.parsed.findall(".//a"):
if anchor.get("href"):
href = anchor.get("href")
url = self.clean_link(urlparse.urljoin(self.base_url, href))
# Determine if this link is internal. If that distinction
# doesn't make sense in this context, then we don't make
# any distinction.
internal = None
if self.api_version and self.api_version >= 2:
# Only api_versions >= 2 have a distinction between
# external and internal links
internal = bool(anchor.get("rel")
and "internal" in anchor.get("rel").split())
yield Link(url, self, internal=internal)
def rel_links(self):
for url in self.explicit_rel_links():
yield url
for url in self.scraped_rel_links():
yield url
def explicit_rel_links(self, rels=('homepage', 'download')):
"""Yields all links with the given relations"""
rels = set(rels)
for anchor in self.parsed.findall(".//a"):
if anchor.get("rel") and anchor.get("href"):
found_rels = set(anchor.get("rel").split())
# Determine the intersection between what rels were found and
# what rels were being looked for
if found_rels & rels:
href = anchor.get("href")
url = self.clean_link(urlparse.urljoin(self.base_url, href))
yield Link(url, self, trusted=False)
def scraped_rel_links(self):
# Can we get rid of this horrible horrible method?
for regex in (self._homepage_re, self._download_re):
match = regex.search(self.content)
if not match:
continue
href_match = self._href_re.search(self.content, pos=match.end())
if not href_match:
continue
url = href_match.group(1) or href_match.group(2) or href_match.group(3)
if not url:
continue
url = self.clean_link(urlparse.urljoin(self.base_url, url))
yield Link(url, self, trusted=False, _deprecated_regex=True)
_clean_re = re.compile(r'[^a-z0-9$&+,/:;=?@.#%_\\|-]', re.I)
def clean_link(self, url):
"""Makes sure a link is fully encoded. That is, if a ' ' shows up in
the link, it will be rewritten to %20 (while not over-quoting
% or other characters)."""
return self._clean_re.sub(
lambda match: '%%%02x' % ord(match.group(0)), url)
class Link(object):
def __init__(self, url, comes_from=None, internal=None, trusted=None,
_deprecated_regex=False):
self.url = url
self.comes_from = comes_from
self.internal = internal
self.trusted = trusted
self._deprecated_regex = _deprecated_regex
def __str__(self):
if self.comes_from:
return '%s (from %s)' % (self.url, self.comes_from)
else:
return str(self.url)
def __repr__(self):
return '<Link %s>' % self
def __eq__(self, other):
return self.url == other.url
def __ne__(self, other):
return self.url != other.url
def __lt__(self, other):
return self.url < other.url
def __le__(self, other):
return self.url <= other.url
def __gt__(self, other):
return self.url > other.url
def __ge__(self, other):
return self.url >= other.url
def __hash__(self):
return hash(self.url)
@property
def filename(self):
_, netloc, path, _, _ = urlparse.urlsplit(self.url)
name = posixpath.basename(path.rstrip('/')) or netloc
assert name, ('URL %r produced no filename' % self.url)
return name
@property
def scheme(self):
return urlparse.urlsplit(self.url)[0]
@property
def path(self):
return urlparse.urlsplit(self.url)[2]
def splitext(self):
return splitext(posixpath.basename(self.path.rstrip('/')))
@property
def ext(self):
return self.splitext()[1]
@property
def url_without_fragment(self):
scheme, netloc, path, query, fragment = urlparse.urlsplit(self.url)
return urlparse.urlunsplit((scheme, netloc, path, query, None))
_egg_fragment_re = re.compile(r'#egg=([^&]*)')
@property
def egg_fragment(self):
match = self._egg_fragment_re.search(self.url)
if not match:
return None
return match.group(1)
_hash_re = re.compile(r'(sha1|sha224|sha384|sha256|sha512|md5)=([a-f0-9]+)')
@property
def hash(self):
match = self._hash_re.search(self.url)
if match:
return match.group(2)
return None
@property
def hash_name(self):
match = self._hash_re.search(self.url)
if match:
return match.group(1)
return None
@property
def show_url(self):
return posixpath.basename(self.url.split('#', 1)[0].split('?', 1)[0])
@property
def verifiable(self):
"""
Returns True if this link can be verified after download, False if it
cannot, and None if we cannot determine.
"""
trusted = self.trusted or getattr(self.comes_from, "trusted", None)
if trusted is not None and trusted:
# This link came from a trusted source. It *may* be verifiable but
# first we need to see if this page is operating under the new
# API version.
try:
api_version = getattr(self.comes_from, "api_version", None)
api_version = int(api_version)
except (ValueError, TypeError):
api_version = None
if api_version is None or api_version <= 1:
# This link is either trusted, or it came from a trusted,
# however it is not operating under the API version 2 so
# we can't make any claims about if it's safe or not
return
if self.hash:
# This link came from a trusted source and it has a hash, so we
# can consider it safe.
return True
else:
# This link came from a trusted source, using the new API
# version, and it does not have a hash. It is NOT verifiable
return False
elif trusted is not None:
# This link came from an untrusted source and we cannot trust it
return False
# An object to represent the "link" for the installed version of a requirement.
# Using Inf as the url makes it sort higher.
INSTALLED_VERSION = Link(Inf)
def get_requirement_from_url(url):
"""Get a requirement from the URL, if possible. This looks for #egg
in the URL"""
link = Link(url)
egg_info = link.egg_fragment
if not egg_info:
egg_info = splitext(link.filename)[0]
return package_to_requirement(egg_info)
def package_to_requirement(package_name):
"""Translate a name like Foo-1.2 to Foo==1.3"""
match = re.search(r'^(.*?)-(dev|\d.*)', package_name)
if match:
name = match.group(1)
version = match.group(2)
else:
name = package_name
version = ''
if version:
return '%s==%s' % (name, version)
else:
return name
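# --- Illustrative usage sketch (not part of pip itself) ---
# A minimal, hypothetical demonstration of how Link fragment parsing and
# package_to_requirement() compose; the URL below is made up.
def _demo_egg_fragment_to_requirement():
    link = Link('http://example.com/Foo-1.2.tar.gz#egg=Foo-1.2')
    # egg_fragment extracts the '#egg=' part of the URL
    assert link.egg_fragment == 'Foo-1.2'
    # package_to_requirement() pins 'Foo-1.2' as 'Foo==1.2'
    assert package_to_requirement(link.egg_fragment) == 'Foo==1.2'
    # get_requirement_from_url() combines both steps
    assert get_requirement_from_url(link.url) == 'Foo==1.2'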
| mit | -8,495,584,003,430,750,000 | 39.781818 | 146 | 0.553772 | false |
jontrulson/upm | examples/python/aeotecdsb09104.py | 7 | 2756 | #!/usr/bin/python
# Author: Jon Trulson <[email protected]>
# Copyright (c) 2016 Intel Corporation.
#
# Permission is hereby granted, free of charge, to any person obtaining
# a copy of this software and associated documentation files (the
# "Software"), to deal in the Software without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Software, and to
# permit persons to whom the Software is furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be
# included in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
# NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE
# LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION
# OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION
# WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
from __future__ import print_function
import time, sys, signal, atexit
from upm import pyupm_ozw as sensorObj
def main():
# This function lets you run code on exit
def exitHandler():
print("Exiting")
sys.exit(0)
# Register exit handlers
atexit.register(exitHandler)
defaultDev = "/dev/ttyACM0"
if (len(sys.argv) > 1):
defaultDev = sys.argv[1]
print("Using device", defaultDev)
# Instantiate an Aeotec DSB09104 instance, on device node 12. You
# will almost certainly need to change this to reflect your own
# network. Use the ozwdump example to see what nodes are available.
sensor = sensorObj.AeotecDSB09104(12)
# The first thing to do is create options, then lock them when done.
sensor.optionsCreate()
sensor.optionsLock()
# Next, initialize it.
print("Initializing, this may take awhile depending on your ZWave network")
sensor.init(defaultDev)
print("Initialization complete")
print("Querying data...")
while (True):
sensor.update()
print("Watts, Channel 1: %0.03f W" % sensor.getWattsC1())
print("Watts, Channel 2: %0.03f W" % sensor.getWattsC2())
print("Watts, Channel 3: %0.03f W" % sensor.getWattsC3())
print("Energy, Channel 1: %0.03f kWh" % sensor.getEnergyC1())
print("Energy, Channel 2: %0.03f kWh" % sensor.getEnergyC2())
print("Energy, Channel 3: %0.03f kWh" % sensor.getEnergyC3())
print("Battery Level: %d\n" % sensor.getBatteryLevel())
time.sleep(3)
if __name__ == '__main__':
main()
| mit | -4,375,153,690,606,213,000 | 36.243243 | 79 | 0.698839 | false |
dshen1/trading-with-python | lib/functions.py | 76 | 11627 | # -*- coding: utf-8 -*-
"""
twp support functions
@author: Jev Kuznetsov
Licence: GPL v2
"""
from scipy import polyfit, polyval
import datetime as dt
#from datetime import datetime, date
from pandas import DataFrame, Index, Series
import csv
import matplotlib.pyplot as plt
import numpy as np
import pandas as pd
def nans(shape, dtype=float):
''' create a nan numpy array '''
a = np.empty(shape, dtype)
a.fill(np.nan)
return a
def plotCorrelationMatrix(price, thresh = None):
''' plot a correlation matrix as a heatmap image
inputs:
price: prices DataFrame
thresh: correlation threshold to use for checking, default None
'''
symbols = price.columns.tolist()
R = price.pct_change()
correlationMatrix = R.corr()
if thresh is not None:
correlationMatrix = correlationMatrix > thresh
plt.imshow(abs(correlationMatrix.values),interpolation='none')
plt.xticks(range(len(symbols)),symbols)
plt.yticks(range(len(symbols)),symbols)
plt.colorbar()
plt.title('Correlation matrix')
return correlationMatrix
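# Illustrative sketch (not part of the original module): correlation heatmap
# of two synthetic, randomly generated price series.
def _example_plotCorrelationMatrix():
    np.random.seed(2)
    price = DataFrame({'A': np.cumsum(np.random.randn(100)) + 100,
                       'B': np.cumsum(np.random.randn(100)) + 100})
    # with thresh set, the plotted matrix is boolean (corr > thresh)
    return plotCorrelationMatrix(price, thresh=0.5)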
def pca(A):
""" performs principal components analysis
(PCA) on the n-by-p DataFrame A
Rows of A correspond to observations, columns to variables.
Returns :
coeff : principal components, column-wise
transform: A in principal component space
latent : eigenvalues
"""
# computing eigenvalues and eigenvectors of covariance matrix
M = (A - A.mean()).T # subtract the mean (along columns)
[latent,coeff] = np.linalg.eig(np.cov(M)) # attention:not always sorted
idx = np.argsort(latent) # sort eigenvalues
idx = idx[::-1] # in descending order
coeff = coeff[:,idx]
latent = latent[idx]
score = np.dot(coeff.T,A.T) # projection of the data in the new space
transform = DataFrame(index = A.index, data = score.T)
return coeff,transform,latent
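# Illustrative sketch (not part of the original module): pca() on a small
# made-up DataFrame; returns the variance fraction of the first component.
def _example_pca():
    A = DataFrame({'x': [1.0, 2.0, 3.0, 4.0], 'y': [2.1, 3.9, 6.2, 8.0]})
    coeff, transform, latent = pca(A)
    return latent[0] / latent.sum()  # close to 1 for nearly collinear data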
def pos2pnl(price,position , ibTransactionCost=False ):
"""
calculate pnl based on price and position
Inputs:
---------
price: series or dataframe of price
position: number of shares at each time. Column names must be same as in price
ibTransactionCost: use bundled Interactive Brokers transaction cost of 0.005$/share
Returns a portfolio DataFrame
"""
delta=position.diff()
port = DataFrame(index=price.index)
if isinstance(price,Series): # no need to sum along 1 for series
port['cash'] = (-delta*price).cumsum()
port['stock'] = (position*price)
else: # dealing with DataFrame here
port['cash'] = (-delta*price).sum(axis=1).cumsum()
port['stock'] = (position*price).sum(axis=1)
if ibTransactionCost:
tc = -0.005*position.diff().abs() # basic transaction cost
tc[(tc>-1) & (tc<0)] = -1 # IB minimum: any cost under 1$ is rounded up to 1$
if isinstance(price,DataFrame):
tc = tc.sum(axis=1)
port['tc'] = tc.cumsum()
else:
port['tc'] = 0.
port['total'] = port['stock']+port['cash']+port['tc']
return port
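# Illustrative sketch (not part of the original module): pnl of a single
# long position on a made-up price series. Note the first bar of 'total'
# is NaN because position.diff() has no prior value there.
def _example_pos2pnl():
    price = Series([10.0, 11.0, 12.0])
    position = Series([0, 1, 1])  # buy one share after the first bar
    port = pos2pnl(price, position)
    return port['total']  # NaN, 0.0, 1.0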
def tradeBracket(price,entryBar,maxTradeLength,bracket):
'''
trade a symmetrical bracket on price series, return price delta and exit bar #
Input
------
price : series of price values
entryBar: entry bar number
maxTradeLength : max trade duration in bars
bracket : allowed price deviation
'''
lastBar = min(entryBar+maxTradeLength,len(price)-1)
p = price[entryBar:lastBar]-price[entryBar]
idxOutOfBound = np.nonzero(abs(p)>bracket) # find indices where price comes out of bracket
if idxOutOfBound[0].any(): # found match
priceDelta = p[idxOutOfBound[0][0]]
exitBar = idxOutOfBound[0][0]+entryBar
else: # all in bracket, exiting based on time
priceDelta = p[-1]
exitBar = lastBar
return priceDelta, exitBar
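# Illustrative sketch (not part of the original module): symmetric bracket
# exit on a made-up price path; exits once |price - entry| > bracket.
def _example_tradeBracket():
    price = np.array([100.0, 100.5, 101.2, 99.9, 98.0])
    delta, exitBar = tradeBracket(price, entryBar=0, maxTradeLength=4,
                                  bracket=1.0)
    return delta, exitBar  # ~1.2 at bar 2 for this path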
def estimateBeta(priceY,priceX,algo = 'standard'):
'''
estimate stock Y vs stock X beta. With algo='returns' an iterative
linear regression is used and outliers outside a 3 sigma boundary are
filtered out; 'log' fits log prices, 'standard' uses log-return covariance.
Parameters
--------
priceX : price series of x (usually market)
priceY : price series of y (estimate beta of this price)
Returns
--------
beta : stockY beta relative to stock X
'''
X = DataFrame({'x':priceX,'y':priceY})
if algo=='returns':
ret = (X/X.shift(1)-1).dropna().values
#print len(ret)
x = ret[:,0]
y = ret[:,1]
# filter high values
low = np.percentile(x,20)
high = np.percentile(x,80)
iValid = (x>low) & (x<high)
x = x[iValid]
y = y[iValid]
iteration = 1
nrOutliers = 1
while iteration < 10 and nrOutliers > 0 :
(a,b) = polyfit(x,y,1)
yf = polyval([a,b],x)
#plot(x,y,'x',x,yf,'r-')
err = yf-y
idxOutlier = abs(err) > 3*np.std(err)
nrOutliers =sum(idxOutlier)
beta = a
#print 'Iteration: %i beta: %.2f outliers: %i' % (iteration,beta, nrOutliers)
x = x[~idxOutlier]
y = y[~idxOutlier]
iteration += 1
elif algo=='log':
x = np.log(X['x'])
y = np.log(X['y'])
(a,b) = polyfit(x,y,1)
beta = a
elif algo=='standard':
ret =np.log(X).diff().dropna()
beta = ret['x'].cov(ret['y'])/ret['x'].var()
else:
raise TypeError("unknown algorithm type, use 'standard', 'log' or 'returns'")
return beta
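# Illustrative sketch (not part of the original module): beta of a synthetic
# series built to have exactly twice the market's log returns.
def _example_estimateBeta():
    np.random.seed(0)
    market = Series(100.0 * np.exp(np.cumsum(0.01 * np.random.randn(250))))
    stock = market ** 2 / market[0]  # log returns are 2x the market's
    return estimateBeta(stock, market, algo='standard')  # ~2.0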
def estimateVolatility(ohlc, N=10, algo='YangZhang'):
"""
Volatility estimation
Possible algorithms: ['YangZhang', 'CC']
"""
cc = np.log(ohlc.close/ohlc.close.shift(1))
if algo == 'YangZhang': # Yang-zhang volatility
ho = np.log(ohlc.high/ohlc.open)
lo = np.log(ohlc.low/ohlc.open)
co = np.log(ohlc.close/ohlc.open)
oc = np.log(ohlc.open/ohlc.close.shift(1))
oc_sq = oc**2
cc_sq = cc**2
rs = ho*(ho-co)+lo*(lo-co)
close_vol = pd.rolling_sum(cc_sq, window=N) * (1.0 / (N - 1.0))
open_vol = pd.rolling_sum(oc_sq, window=N) * (1.0 / (N - 1.0))
window_rs = pd.rolling_sum(rs, window=N) * (1.0 / (N - 1.0))
result = (open_vol + 0.164333 * close_vol + 0.835667 * window_rs).apply(np.sqrt) * np.sqrt(252)
result[:N-1] = np.nan
elif algo == 'CC': # standard close-close estimator
result = np.sqrt(252)*np.sqrt(((pd.rolling_sum(cc**2,N))/N))
else:
raise ValueError('Unknown algo type.')
return result*100
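# Illustrative sketch (not part of the original module): annualized
# close-close volatility (in %) on synthetic OHLC bars.
def _example_estimateVolatility():
    np.random.seed(1)
    close = 100.0 * np.exp(np.cumsum(0.01 * np.random.randn(60)))
    ohlc = DataFrame({'open': close, 'high': close * 1.01,
                      'low': close * 0.99, 'close': close})
    return estimateVolatility(ohlc, N=10, algo='CC')  # first N-1 values NaN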
def rank(current,past):
''' calculate a relative rank 0..1 for a value against series '''
return (current>past).sum()/float(past.count())
def returns(df):
return (df/df.shift(1)-1)
def logReturns(df):
t = np.log(df)
return t-t.shift(1)
def dateTimeToDate(idx):
''' convert datetime index to date '''
dates = []
for dtm in idx:
dates.append(dtm.date())
return dates
def readBiggerScreener(fName):
''' import data from Bigger Capital screener '''
with open(fName,'rb') as f:
reader = csv.reader(f)
rows = [row for row in reader]
header = rows[0]
data = [[] for i in range(len(header))]
for row in rows[1:]:
for i,elm in enumerate(row):
try:
data[i].append(float(elm))
except Exception:
data[i].append(str(elm))
return DataFrame(dict(zip(header,data)),index=Index(range(len(data[0]))))[header]
def sharpe(pnl):
return np.sqrt(250)*pnl.mean()/pnl.std()
def drawdown(s):
"""
calculate max drawdown and duration
Input:
s, price or cumulative pnl curve $
Returns:
drawdown : vector of drawdwon values
duration : vector of drawdown duration
"""
# convert to array if got pandas series, 10x speedup
if isinstance(s,pd.Series):
idx = s.index
s = s.values
returnSeries = True
else:
returnSeries = False
if s.min() < 0: # offset if signal minimum is less than zero
s = s-s.min()
highwatermark = np.zeros(len(s))
drawdown = np.zeros(len(s))
drawdowndur = np.zeros(len(s))
for t in range(1,len(s)):
highwatermark[t] = max(highwatermark[t-1], s[t])
drawdown[t] = (highwatermark[t]-s[t])
drawdowndur[t]= (0 if drawdown[t] == 0 else drawdowndur[t-1]+1)
if returnSeries:
return pd.Series(index=idx,data=drawdown), pd.Series(index=idx,data=drawdowndur)
else:
return drawdown , drawdowndur
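# Illustrative sketch (not part of the original module): max drawdown and
# its duration on a made-up equity curve.
def _example_drawdown():
    equity = pd.Series([0.0, 1.0, 3.0, 2.0, 2.5, 4.0])
    dd, duration = drawdown(equity)
    return dd.max(), duration.max()  # 1.0 and 2 bars for this curve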
def profitRatio(pnl):
'''
calculate profit ratio as sum(pnl)/drawdown
Input: pnl - daily pnl, Series or DataFrame
'''
def processVector(pnl): # process a single column
s = pnl.fillna(0)
dd = drawdown(s)[0]
p = s.sum()/dd.max()
return p
if isinstance(pnl,Series):
return processVector(pnl)
elif isinstance(pnl,DataFrame):
p = Series(index = pnl.columns)
for col in pnl.columns:
p[col] = processVector(pnl[col])
return p
else:
raise TypeError("Input must be DataFrame or Series, not "+str(type(pnl)))
def candlestick(df,width=0.5, colorup='b', colordown='r'):
''' plot a candlestick chart of a dataframe '''
O = df['open'].values
H = df['high'].values
L = df['low'].values
C = df['close'].values
fig = plt.gcf()
ax = plt.axes()
#ax.hold(True)
X = df.index
#plot high and low
ax.bar(X,height=H-L,bottom=L,width=0.1,color='k')
idxUp = C>O
ax.bar(X[idxUp],height=(C-O)[idxUp],bottom=O[idxUp],width=width,color=colorup)
idxDown = C<=O
ax.bar(X[idxDown],height=(O-C)[idxDown],bottom=C[idxDown],width=width,color=colordown)
try:
fig.autofmt_xdate()
except Exception: # pragma: no cover
pass
ax.grid(True)
#ax.bar(x,height=H-L,bottom=L,width=0.01,color='k')
def datetime2matlab(t):
''' convert datetime timestamp to matlab numeric timestamp '''
mdn = t + dt.timedelta(days = 366)
frac = (t-dt.datetime(t.year,t.month,t.day,0,0,0)).seconds / (24.0 * 60.0 * 60.0)
return mdn.toordinal() + frac
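# Illustrative sketch (not part of the original module): converting a
# Python datetime to a Matlab datenum (Matlab counts days from year 0).
def _example_datetime2matlab():
    t = dt.datetime(2000, 1, 1, 12, 0, 0)
    return datetime2matlab(t)  # 730486.5; the .5 encodes noon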
def getDataSources(fName = None):
''' return data sources directories for this machine.
directories are defined in datasources.ini or provided filepath'''
import socket
from ConfigParser import ConfigParser
pcName = socket.gethostname()
p = ConfigParser()
p.optionxform = str
if fName is None:
fName = 'datasources.ini'
p.read(fName)
if pcName not in p.sections():
raise NameError('Host name section %s not found in file %s' %(pcName,fName))
dataSources = {}
for option in p.options(pcName):
dataSources[option] = p.get(pcName,option)
return dataSources
if __name__ == '__main__':
df = DataFrame({'open':[1,2,3],'high':[5,6,7],'low':[-2,-1,0],'close':[2,1,4]})
plt.clf()
candlestick(df) | bsd-3-clause | -5,354,100,116,955,763,000 | 24.794931 | 103 | 0.570224 | false |
nicoboss/Floatmotion | pygame/tests/run_tests__tests/print_stdout/fake_3_test.py | 18 | 1249 | import sys
if __name__ == '__main__':
import os
pkg_dir = (os.path.split(
os.path.split(
os.path.split(
os.path.abspath(__file__))[0])[0])[0])
parent_dir, pkg_name = os.path.split(pkg_dir)
is_pygame_pkg = (pkg_name == 'tests' and
os.path.split(parent_dir)[1] == 'pygame')
if not is_pygame_pkg:
sys.path.insert(0, parent_dir)
else:
is_pygame_pkg = __name__.startswith('pygame.tests.')
if is_pygame_pkg:
from pygame.tests import test_utils
from pygame.tests.test_utils import unittest
else:
from test import test_utils
from test.test_utils import unittest
class KeyModuleTest(unittest.TestCase):
def test_get_focused(self):
self.assert_(True)
def test_get_mods(self):
self.assert_(True)
def test_get_pressed(self):
sys.stdout.write("jibberish ruins everything\n")
self.assert_(False)
def test_name(self):
sys.stdout.write("forgot to remove debug crap\n")
self.assert_(True)
def test_set_mods(self):
self.assert_(True)
def test_set_repeat(self):
self.assert_(True)
if __name__ == '__main__':
unittest.main()
| agpl-3.0 | -8,732,142,755,742,425,000 | 26.755556 | 65 | 0.577262 | false |
popazerty/e2-gui | lib/python/Components/Renderer/RollerCharLCD.py | 7 | 1894 | from Components.config import config
from Renderer import Renderer
from enigma import eLabel, eTimer
from boxbranding import getMachineProcModel
from Components.VariableText import VariableText
class RollerCharLCD(VariableText, Renderer):
def __init__(self):
Renderer.__init__(self)
VariableText.__init__(self)
if getMachineProcModel().startswith("ini-90"):
self.stringlength = 25
else:
self.stringlength = 16
GUI_WIDGET = eLabel
def connect(self, source):
Renderer.connect(self, source)
self.changed((self.CHANGED_DEFAULT,))
def changed(self, what):
if what[0] == self.CHANGED_CLEAR:
self.text = ''
else:
self.text = self.source.text
if len(self.text) > self.stringlength:
self.text = self.source.text + ' ' * self.stringlength + self.source.text[:self.stringlength + 1]
self.x = len(self.text) - self.stringlength
self.idx = 0
self.backtext = self.text
self.status = 'start'
self.moveTimerText = eTimer()
self.moveTimerText.timeout.get().append(self.moveTimerTextRun)
self.moveTimerText.start(2000)
else:
self.text = self.source.text
self.x = len(self.text)
self.idx = 0
self.backtext = self.text
def moveTimerTextRun(self):
self.moveTimerText.stop()
if self.x > 0:
txttmp = self.backtext[self.idx:]
self.text = txttmp[:self.stringlength]
self.idx += 1
self.x -= 1
if self.x == 0:
self.status = 'end'
self.text = self.backtext
if self.status != 'end':
self.scrollspeed = int(config.lcd.scroll_speed.value)
self.moveTimerText.start(self.scrollspeed)
if config.lcd.scroll_delay.value != 'noscrolling':
self.scrolldelay = int(config.lcd.scroll_delay.value)
self.delayTimer = eTimer()
self.delayTimer.timeout.get().append(self.delayTimergo)
self.delayTimer.start(self.scrolldelay)
def delayTimergo(self):
self.delayTimer.stop()
self.changed((self.CHANGED_DEFAULT,))
| gpl-2.0 | -2,155,141,426,947,937,500 | 29.063492 | 100 | 0.709609 | false |
molebot/brython | www/tests/test_strings.py | 4 | 2887 | # strings
assert 'a'.__class__ == str
assert isinstance('a',str)
hello = "This is a rather long string containing\n\
several lines of text just as you would do in C.\n\
Note that whitespace at the beginning of the line is\
significant."
hello = """\
Usage: thingy [OPTIONS]
-h Display this usage message
-H hostname Hostname to connect to
"""
hello = r"This is a rather long string containing\n\
several lines of text much as you would do in C."
word = 'Help' + 'A'
assert word=='HelpA'
assert '<' + word*5 + '>'=='<HelpAHelpAHelpAHelpAHelpA>'
x = 'str' 'ing'
assert x=='string'
assert 'str'.strip() + 'ing'=='string'
# string methods
x='fooss'
assert x.replace('o','X',20) == 'fXXss'
assert 'GhFF'.lower() == 'ghff'
assert x.lstrip('of') == 'ss'
x='aZjhkhZyuy'
assert x.find('Z')==1
assert x.rfind('Z')==6
assert x.rindex('Z')==6
try:
print(x.rindex('K'))
except ValueError:
pass
assert x.split() == [x]
assert x.split('h') == ['aZj', 'k', 'Zyuy']
#print(x.split('h',1))
assert x.startswith('aZ')
assert x.strip('auy') == 'ZjhkhZ'
assert x.upper()=='AZJHKHZYUY'
x = "zer"
assert x.capitalize() == "Zer"
assert str.capitalize(x) == "Zer"
x = "azert$t y t"
assert x.count('t')==3
assert str.count(x,'t')==3
assert x.endswith("y t")==True
assert x.find('t')==4
assert x.find('$')==5
assert x.find('p')==-1
assert x.index('t')==4
items = ['sd','kj']
assert '-'.join(items)=="sd-kj"
assert "ZER".lower()=="zer"
assert "azerty".lstrip('a')=="zerty"
assert "azerty".lstrip('za')=="erty"
assert "azaerty".lstrip('az')=="erty"
assert "$XE$".replace("$XE$", "!")=="!"
assert "$XE".replace("$XE", "!")=='!'
assert "XE$".replace("XE$", "!")=="!"
assert "XE$".replace("$", "!")=="XE!"
assert "$XE".replace("$", "!")=="!XE"
assert "?XE".replace("?", "!")=="!XE"
assert "XE?".replace("?", "!")=="XE!"
assert "XE!".replace("!", "?")=="XE?"
assert "azterty".find('t')==2
assert "azterty".rfind('t')==5
assert "azterty".rfind('p')==-1
assert "azterty".rindex('t')==5
try:
"azterty".rindex('p')
except ValueError:
pass
assert "azerty".rstrip('y')=="azert"
assert "azerty".rstrip('yt')=="azer"
assert "azeryty".rstrip('ty')=="azer"
assert "az er ty".split()==["az","er","ty"]
assert "azferfty".split('f')==["az","er","ty"]
assert " aBc dEf ".split(maxsplit=1)==['aBc','dEf ']
assert " aBc dEf ".split()==['aBc','dEf']
assert "az\ner\nty".splitlines()==["az","er","ty"]
assert "azerty".startswith('az')
assert " azerty ".strip() == "azerty"
assert "bghggbazertyhbg".strip("bhg") == "azerty"
assert "zer".upper() == "ZER"
assert r'(?:([\w ]+) ([\w.]+) .*\[.* ([\d.]+)\])' == (r'(?:([\w ]+) ([\w.]+) '
'.*'
'\[.* ([\d.]+)\])'), 'raw string continuation'
# issue 265
assert "" in "test"
assert "" in ""
assert not "a" in ""
# issue 285
assert "ab"[1:0:-1] == 'b'
print("passed all tests...")
| bsd-3-clause | -7,616,710,428,659,700,000 | 22.282258 | 79 | 0.573259 | false |
dyoung418/tensorflow | tensorflow/tools/dist_test/python/mnist_replica.py | 44 | 10714 | # Copyright 2016 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Distributed MNIST training and validation, with model replicas.
A simple softmax model with one hidden layer is defined. The parameters
(weights and biases) are located on one parameter server (ps), while the ops
are executed on two worker nodes by default. The TF sessions also run on the
worker node.
Multiple invocations of this script can be done in parallel, with different
values for --task_index. There should be exactly one invocation with
--task_index=0, which will create a master session that carries out variable
initialization. The other, non-master, sessions will wait for the master
session to finish the initialization before proceeding to the training stage.
The coordination between the multiple worker invocations occurs due to
the definition of the parameters on the same ps devices. The parameter updates
from one worker is visible to all other workers. As such, the workers can
perform forward computation and gradient calculation in parallel, which
should lead to increased training speed for the simple model.
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import math
import sys
import tempfile
import time
import tensorflow as tf
from tensorflow.examples.tutorials.mnist import input_data
flags = tf.app.flags
flags.DEFINE_string("data_dir", "/tmp/mnist-data",
"Directory for storing mnist data")
flags.DEFINE_boolean("download_only", False,
"Only perform downloading of data; Do not proceed to "
"session preparation, model definition or training")
flags.DEFINE_integer("task_index", None,
"Worker task index, should be >= 0. task_index=0 is "
"the master worker task the performs the variable "
"initialization ")
flags.DEFINE_integer("num_gpus", 1,
"Total number of gpus for each machine."
"If you don't use GPU, please set it to '0'")
flags.DEFINE_integer("replicas_to_aggregate", None,
"Number of replicas to aggregate before parameter update"
"is applied (For sync_replicas mode only; default: "
"num_workers)")
flags.DEFINE_integer("hidden_units", 100,
"Number of units in the hidden layer of the NN")
flags.DEFINE_integer("train_steps", 200,
"Number of (global) training steps to perform")
flags.DEFINE_integer("batch_size", 100, "Training batch size")
flags.DEFINE_float("learning_rate", 0.01, "Learning rate")
flags.DEFINE_boolean("sync_replicas", False,
"Use the sync_replicas (synchronized replicas) mode, "
"wherein the parameter updates from workers are aggregated "
"before applied to avoid stale gradients")
flags.DEFINE_boolean(
"existing_servers", False, "Whether servers already exists. If True, "
"will use the worker hosts via their GRPC URLs (one client process "
"per worker host). Otherwise, will create an in-process TensorFlow "
"server.")
flags.DEFINE_string("ps_hosts","localhost:2222",
"Comma-separated list of hostname:port pairs")
flags.DEFINE_string("worker_hosts", "localhost:2223,localhost:2224",
"Comma-separated list of hostname:port pairs")
flags.DEFINE_string("job_name", None,"job name: worker or ps")
FLAGS = flags.FLAGS
IMAGE_PIXELS = 28
def main(unused_argv):
mnist = input_data.read_data_sets(FLAGS.data_dir, one_hot=True)
if FLAGS.download_only:
sys.exit(0)
if FLAGS.job_name is None or FLAGS.job_name == "":
raise ValueError("Must specify an explicit `job_name`")
if FLAGS.task_index is None or FLAGS.task_index =="":
raise ValueError("Must specify an explicit `task_index`")
print("job name = %s" % FLAGS.job_name)
print("task index = %d" % FLAGS.task_index)
# Construct the cluster and start the server.
ps_spec = FLAGS.ps_hosts.split(",")
worker_spec = FLAGS.worker_hosts.split(",")
# Get the number of workers.
num_workers = len(worker_spec)
cluster = tf.train.ClusterSpec({
"ps": ps_spec,
"worker": worker_spec})
if not FLAGS.existing_servers:
# Not using existing servers. Create an in-process server.
server = tf.train.Server(
cluster, job_name=FLAGS.job_name, task_index=FLAGS.task_index)
if FLAGS.job_name == "ps":
server.join()
is_chief = (FLAGS.task_index == 0)
if FLAGS.num_gpus > 0:
# Avoid gpu allocation conflict: now allocate task_num -> #gpu
# for each worker in the corresponding machine
gpu = (FLAGS.task_index % FLAGS.num_gpus)
worker_device = "/job:worker/task:%d/gpu:%d" % (FLAGS.task_index, gpu)
elif FLAGS.num_gpus == 0:
# Just allocate the CPU to worker server
cpu = 0
worker_device = "/job:worker/task:%d/cpu:%d" % (FLAGS.task_index, cpu)
# The device setter will automatically place Variables ops on separate
# parameter servers (ps). The non-Variable ops will be placed on the workers.
# The ps use CPU and workers use corresponding GPU
with tf.device(
tf.train.replica_device_setter(
worker_device=worker_device,
ps_device="/job:ps/cpu:0",
cluster=cluster)):
global_step = tf.Variable(0, name="global_step", trainable=False)
# Variables of the hidden layer
hid_w = tf.Variable(
tf.truncated_normal(
[IMAGE_PIXELS * IMAGE_PIXELS, FLAGS.hidden_units],
stddev=1.0 / IMAGE_PIXELS),
name="hid_w")
hid_b = tf.Variable(tf.zeros([FLAGS.hidden_units]), name="hid_b")
# Variables of the softmax layer
sm_w = tf.Variable(
tf.truncated_normal(
[FLAGS.hidden_units, 10],
stddev=1.0 / math.sqrt(FLAGS.hidden_units)),
name="sm_w")
sm_b = tf.Variable(tf.zeros([10]), name="sm_b")
# Ops: located on the worker specified with FLAGS.task_index
x = tf.placeholder(tf.float32, [None, IMAGE_PIXELS * IMAGE_PIXELS])
y_ = tf.placeholder(tf.float32, [None, 10])
hid_lin = tf.nn.xw_plus_b(x, hid_w, hid_b)
hid = tf.nn.relu(hid_lin)
y = tf.nn.softmax(tf.nn.xw_plus_b(hid, sm_w, sm_b))
cross_entropy = -tf.reduce_sum(y_ * tf.log(tf.clip_by_value(y, 1e-10, 1.0)))
opt = tf.train.AdamOptimizer(FLAGS.learning_rate)
if FLAGS.sync_replicas:
if FLAGS.replicas_to_aggregate is None:
replicas_to_aggregate = num_workers
else:
replicas_to_aggregate = FLAGS.replicas_to_aggregate
opt = tf.train.SyncReplicasOptimizer(
opt,
replicas_to_aggregate=replicas_to_aggregate,
total_num_replicas=num_workers,
name="mnist_sync_replicas")
train_step = opt.minimize(cross_entropy, global_step=global_step)
if FLAGS.sync_replicas:
local_init_op = opt.local_step_init_op
if is_chief:
local_init_op = opt.chief_init_op
ready_for_local_init_op = opt.ready_for_local_init_op
# Initial token and chief queue runners required by the sync_replicas mode
chief_queue_runner = opt.get_chief_queue_runner()
sync_init_op = opt.get_init_tokens_op()
init_op = tf.global_variables_initializer()
train_dir = tempfile.mkdtemp()
if FLAGS.sync_replicas:
sv = tf.train.Supervisor(
is_chief=is_chief,
logdir=train_dir,
init_op=init_op,
local_init_op=local_init_op,
ready_for_local_init_op=ready_for_local_init_op,
recovery_wait_secs=1,
global_step=global_step)
else:
sv = tf.train.Supervisor(
is_chief=is_chief,
logdir=train_dir,
init_op=init_op,
recovery_wait_secs=1,
global_step=global_step)
sess_config = tf.ConfigProto(
allow_soft_placement=True,
log_device_placement=False,
device_filters=["/job:ps", "/job:worker/task:%d" % FLAGS.task_index])
# The chief worker (task_index==0) session will prepare the session,
# while the remaining workers will wait for the preparation to complete.
if is_chief:
print("Worker %d: Initializing session..." % FLAGS.task_index)
else:
print("Worker %d: Waiting for session to be initialized..." %
FLAGS.task_index)
if FLAGS.existing_servers:
server_grpc_url = "grpc://" + worker_spec[FLAGS.task_index]
print("Using existing server at: %s" % server_grpc_url)
sess = sv.prepare_or_wait_for_session(server_grpc_url,
config=sess_config)
else:
sess = sv.prepare_or_wait_for_session(server.target, config=sess_config)
print("Worker %d: Session initialization complete." % FLAGS.task_index)
if FLAGS.sync_replicas and is_chief:
# Chief worker will start the chief queue runner and call the init op.
sess.run(sync_init_op)
sv.start_queue_runners(sess, [chief_queue_runner])
# Perform training
time_begin = time.time()
print("Training begins @ %f" % time_begin)
local_step = 0
while True:
# Training feed
batch_xs, batch_ys = mnist.train.next_batch(FLAGS.batch_size)
train_feed = {x: batch_xs, y_: batch_ys}
_, step = sess.run([train_step, global_step], feed_dict=train_feed)
local_step += 1
now = time.time()
print("%f: Worker %d: training step %d done (global step: %d)" %
(now, FLAGS.task_index, local_step, step))
if step >= FLAGS.train_steps:
break
time_end = time.time()
print("Training ends @ %f" % time_end)
training_time = time_end - time_begin
print("Training elapsed time: %f s" % training_time)
# Validation feed
val_feed = {x: mnist.validation.images, y_: mnist.validation.labels}
val_xent = sess.run(cross_entropy, feed_dict=val_feed)
print("After %d training step(s), validation cross entropy = %g" %
(FLAGS.train_steps, val_xent))
if __name__ == "__main__":
tf.app.run()
| apache-2.0 | -192,293,613,748,258,780 | 37.401434 | 81 | 0.650924 | false |
nephila/django-knocker | docs/conf.py | 1 | 8313 | # -*- coding: utf-8 -*-
#
# django-knocker documentation build configuration file, created by
# sphinx-quickstart on Tue Jul 9 22:26:36 2013.
#
# This file is execfile()d with the current directory set to its containing dir.
#
# Note that not all possible configuration values are present in this
# autogenerated file.
#
# All configuration values have a default; values that are commented out
# serve to show the default.
import os
import sys
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
#sys.path.insert(0, os.path.abspath('.'))
parent = os.path.abspath(os.path.dirname(os.path.dirname(__file__)))
sys.path.insert(0, parent)
sys.path.insert(0, os.path.join(parent, 'tests'))
import cms_helper # isort:skip
import knocker # isort:skip
cms_helper.setup()
# -- General configuration -----------------------------------------------------
# If your documentation needs a minimal Sphinx version, state it here.
#needs_sphinx = '1.0'
# Add any Sphinx extension module names here, as strings. They can be extensions
# coming with Sphinx (named 'sphinx.ext.*') or your custom ones.
extensions = ['sphinx.ext.autodoc', 'sphinx.ext.viewcode']
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
# The suffix of source filenames.
source_suffix = '.rst'
# The encoding of source files.
#source_encoding = 'utf-8-sig'
# The master toctree document.
master_doc = 'index'
# General information about the project.
project = u'django-knocker'
copyright = u'2016, Iacopo Spalletti'
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The short X.Y version.
version = knocker.__version__
# The full version, including alpha/beta/rc tags.
release = knocker.__version__
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#language = None
# There are two options for replacing |today|: either, you set today to some
# non-false value, then it is used:
#today = ''
# Else, today_fmt is used as the format for a strftime call.
#today_fmt = '%B %d, %Y'
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
exclude_patterns = ['_build']
# The reST default role (used for this markup: `text`) to use for all documents.
#default_role = None
# If true, '()' will be appended to :func: etc. cross-reference text.
#add_function_parentheses = True
# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
#add_module_names = True
# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
#show_authors = False
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
# A list of ignored prefixes for module index sorting.
#modindex_common_prefix = []
# If true, keep warnings as "system message" paragraphs in the built documents.
#keep_warnings = False
# -- Options for HTML output ---------------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
html_theme = 'default'
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
#html_theme_options = {}
# Add any paths that contain custom themes here, relative to this directory.
#html_theme_path = []
# The name for this set of Sphinx documents. If None, it defaults to
# "<project> v<release> documentation".
#html_title = None
# A shorter title for the navigation bar. Default is the same as html_title.
#html_short_title = None
# The name of an image file (relative to this directory) to place at the top
# of the sidebar.
#html_logo = None
# The name of an image file (within the static path) to use as favicon of the
# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
# pixels large.
#html_favicon = None
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
#html_static_path = ['_static']
# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
# using the given strftime format.
#html_last_updated_fmt = '%b %d, %Y'
# If true, SmartyPants will be used to convert quotes and dashes to
# typographically correct entities.
#html_use_smartypants = True
# Custom sidebar templates, maps document names to template names.
#html_sidebars = {}
# Additional templates that should be rendered to pages, maps page names to
# template names.
#html_additional_pages = {}
# If false, no module index is generated.
#html_domain_indices = True
# If false, no index is generated.
#html_use_index = True
# If true, the index is split into individual pages for each letter.
#html_split_index = False
# If true, links to the reST sources are added to the pages.
#html_show_sourcelink = True
# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
#html_show_sphinx = True
# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
#html_show_copyright = True
# If true, an OpenSearch description file will be output, and all pages will
# contain a <link> tag referring to it. The value of this option must be the
# base URL from which the finished HTML is served.
#html_use_opensearch = ''
# This is the file name suffix for HTML files (e.g. ".xhtml").
#html_file_suffix = None
# Output file base name for HTML help builder.
htmlhelp_basename = 'django-knockerdoc'
# -- Options for LaTeX output --------------------------------------------------
latex_elements = {
# The paper size ('letterpaper' or 'a4paper').
#'papersize': 'letterpaper',
# The font size ('10pt', '11pt' or '12pt').
#'pointsize': '10pt',
# Additional stuff for the LaTeX preamble.
#'preamble': '',
}
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title, author, documentclass [howto/manual]).
latex_documents = [
('index', 'django-knocker.tex', u'django-knocker Documentation',
u'Iacopo Spalletti', 'manual'),
]
# The name of an image file (relative to this directory) to place at the top of
# the title page.
#latex_logo = None
# For "manual" documents, if this is true, then toplevel headings are parts,
# not chapters.
#latex_use_parts = False
# If true, show page references after internal links.
#latex_show_pagerefs = False
# If true, show URL addresses after external links.
#latex_show_urls = False
# Documents to append as an appendix to all manuals.
#latex_appendices = []
# If false, no module index is generated.
#latex_domain_indices = True
# -- Options for manual page output --------------------------------------------
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
('index', 'django-knocker', u'django-knocker Documentation',
[u'Iacopo Spalletti'], 1)
]
# If true, show URL addresses after external links.
#man_show_urls = False
# -- Options for Texinfo output ------------------------------------------------
# Grouping the document tree into Texinfo files. List of tuples
# (source start file, target name, title, author,
# dir menu entry, description, category)
texinfo_documents = [
('index', 'django-knocker', u'django-knocker Documentation',
u'Iacopo Spalletti', 'django-knocker', 'One line description of project.',
'Miscellaneous'),
]
# Documents to append as an appendix to all manuals.
#texinfo_appendices = []
# If false, no module index is generated.
#texinfo_domain_indices = True
# How to display URL addresses: 'footnote', 'no', or 'inline'.
#texinfo_show_urls = 'footnote'
# If true, do not generate a @detailmenu in the "Top" node's menu.
#texinfo_no_detailmenu = False
| bsd-3-clause | 4,269,550,155,118,463,500 | 31.346304 | 80 | 0.707566 | false |
Romain-Geissler-1A/avro | lang/py/src/avro/protocol.py | 64 | 7963 | # Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
Protocol implementation.
"""
try:
from hashlib import md5
except ImportError:
from md5 import md5
try:
import json
except ImportError:
import simplejson as json
from avro import schema
#
# Constants
#
# TODO(hammer): confirmed 'fixed' with Doug
VALID_TYPE_SCHEMA_TYPES = ('enum', 'record', 'error', 'fixed')
#
# Exceptions
#
class ProtocolParseException(schema.AvroException):
pass
#
# Base Classes
#
class Protocol(object):
"""An application protocol."""
def _parse_types(self, types, type_names):
type_objects = []
for type in types:
type_object = schema.make_avsc_object(type, type_names)
if type_object.type not in VALID_TYPE_SCHEMA_TYPES:
fail_msg = 'Type %s not an enum, fixed, record, or error.' % type
raise ProtocolParseException(fail_msg)
type_objects.append(type_object)
return type_objects
def _parse_messages(self, messages, names):
message_objects = {}
for name, body in messages.iteritems():
if message_objects.has_key(name):
fail_msg = 'Message name "%s" repeated.' % name
raise ProtocolParseException(fail_msg)
elif not(hasattr(body, 'get') and callable(body.get)):
fail_msg = 'Message name "%s" has non-object body %s.' % (name, body)
raise ProtocolParseException(fail_msg)
request = body.get('request')
response = body.get('response')
errors = body.get('errors')
message_objects[name] = Message(name, request, response, errors, names)
return message_objects
def __init__(self, name, namespace=None, types=None, messages=None):
# Ensure valid ctor args
if not name:
fail_msg = 'Protocols must have a non-empty name.'
raise ProtocolParseException(fail_msg)
elif not isinstance(name, basestring):
fail_msg = 'The name property must be a string.'
raise ProtocolParseException(fail_msg)
elif namespace is not None and not isinstance(namespace, basestring):
fail_msg = 'The namespace property must be a string.'
raise ProtocolParseException(fail_msg)
elif types is not None and not isinstance(types, list):
fail_msg = 'The types property must be a list.'
raise ProtocolParseException(fail_msg)
elif (messages is not None and
not(hasattr(messages, 'get') and callable(messages.get))):
fail_msg = 'The messages property must be a JSON object.'
raise ProtocolParseException(fail_msg)
self._props = {}
self.set_prop('name', name)
type_names = schema.Names()
if namespace is not None:
self.set_prop('namespace', namespace)
type_names.default_namespace = namespace
if types is not None:
self.set_prop('types', self._parse_types(types, type_names))
if messages is not None:
self.set_prop('messages', self._parse_messages(messages, type_names))
self._md5 = md5(str(self)).digest()
# read-only properties
name = property(lambda self: self.get_prop('name'))
namespace = property(lambda self: self.get_prop('namespace'))
fullname = property(lambda self:
schema.Name(self.name, self.namespace).fullname)
types = property(lambda self: self.get_prop('types'))
types_dict = property(lambda self: dict([(type.name, type)
for type in self.types]))
messages = property(lambda self: self.get_prop('messages'))
md5 = property(lambda self: self._md5)
props = property(lambda self: self._props)
# utility functions to manipulate properties dict
def get_prop(self, key):
return self.props.get(key)
def set_prop(self, key, value):
self.props[key] = value
def to_json(self):
to_dump = {}
to_dump['protocol'] = self.name
names = schema.Names(default_namespace=self.namespace)
if self.namespace:
to_dump['namespace'] = self.namespace
if self.types:
to_dump['types'] = [ t.to_json(names) for t in self.types ]
if self.messages:
messages_dict = {}
for name, body in self.messages.iteritems():
messages_dict[name] = body.to_json(names)
to_dump['messages'] = messages_dict
return to_dump
def __str__(self):
return json.dumps(self.to_json())
def __eq__(self, that):
to_cmp = json.loads(str(self))
return to_cmp == json.loads(str(that))
class Message(object):
"""A Protocol message."""
def _parse_request(self, request, names):
if not isinstance(request, list):
fail_msg = 'Request property not a list: %s' % request
raise ProtocolParseException(fail_msg)
return schema.RecordSchema(None, None, request, names, 'request')
def _parse_response(self, response, names):
if isinstance(response, basestring) and names.has_name(response, None):
return names.get_name(response, None)
else:
return schema.make_avsc_object(response, names)
def _parse_errors(self, errors, names):
if not isinstance(errors, list):
fail_msg = 'Errors property not a list: %s' % errors
raise ProtocolParseException(fail_msg)
errors_for_parsing = {'type': 'error_union', 'declared_errors': errors}
return schema.make_avsc_object(errors_for_parsing, names)
def __init__(self, name, request, response, errors=None, names=None):
self._name = name
self._props = {}
self.set_prop('request', self._parse_request(request, names))
self.set_prop('response', self._parse_response(response, names))
if errors is not None:
self.set_prop('errors', self._parse_errors(errors, names))
# read-only properties
name = property(lambda self: self._name)
request = property(lambda self: self.get_prop('request'))
response = property(lambda self: self.get_prop('response'))
errors = property(lambda self: self.get_prop('errors'))
props = property(lambda self: self._props)
# utility functions to manipulate properties dict
def get_prop(self, key):
return self.props.get(key)
def set_prop(self, key, value):
self.props[key] = value
def __str__(self):
return json.dumps(self.to_json())
def to_json(self, names=None):
if names is None:
names = schema.Names()
to_dump = {}
to_dump['request'] = self.request.to_json(names)
to_dump['response'] = self.response.to_json(names)
if self.errors:
to_dump['errors'] = self.errors.to_json(names)
return to_dump
def __eq__(self, that):
return self.name == that.name and self.props == that.props
def make_avpr_object(json_data):
"""Build Avro Protocol from data parsed out of JSON string."""
if hasattr(json_data, 'get') and callable(json_data.get):
name = json_data.get('protocol')
namespace = json_data.get('namespace')
types = json_data.get('types')
messages = json_data.get('messages')
return Protocol(name, namespace, types, messages)
else:
raise ProtocolParseException('Not a JSON object: %s' % json_data)
def parse(json_string):
"""Constructs the Protocol from the JSON text."""
try:
json_data = json.loads(json_string)
  except Exception:
raise ProtocolParseException('Error parsing JSON: %s' % json_string)
# construct the Avro Protocol object
return make_avpr_object(json_data)
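# ---- illustrative usage
# A minimal sketch of a parse() round trip; the "EchoService" protocol below
# is a hypothetical example, not an Avro-provided fixture.
if __name__ == '__main__':
  EXAMPLE_AVPR = """{
    "protocol": "EchoService",
    "namespace": "example.proto",
    "types": [{"name": "Ping", "type": "record", "fields": []}],
    "messages": {"echo": {"request": [{"name": "p", "type": "Ping"}],
                          "response": "Ping"}}}"""
  echo = parse(EXAMPLE_AVPR)
  print(echo.fullname)        # the protocol's full name: example.proto.EchoService
  print(list(echo.messages))  # the protocol's message names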
| apache-2.0 | -3,720,925,151,094,978,600 | 34.549107 | 77 | 0.676127 | false |
gtrensch/nest-simulator | pynest/nest/tests/test_facetshw_stdp.py | 20 | 5894 | # -*- coding: utf-8 -*-
#
# test_facetshw_stdp.py
#
# This file is part of NEST.
#
# Copyright (C) 2004 The NEST Initiative
#
# NEST is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 2 of the License, or
# (at your option) any later version.
#
# NEST is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with NEST. If not, see <http://www.gnu.org/licenses/>.
import nest
import numpy as np
import unittest
class FacetsTestCase(unittest.TestCase):
"""
This script is testing the accumulation of spike pairs and
the weight update mechanism as implemented in the FACETS hardware.
Author: Thomas Pfeil
Date of first version: 21.01.2013
"""
def test_facetshw_stdp(self):
nest.ResetKernel()
modelName = 'stdp_facetshw_synapse_hom'
# homogeneous parameters for all synapses
Wmax = 100.0
# see *.cpp file of synapse model and Pfeil et al. 2012 for LUT
# configuration
lut_0 = [2, 3, 4, 4, 5, 6, 7, 8, 9, 10, 11, 12, 13, 14, 14, 15]
lut_1 = [0, 0, 1, 2, 3, 4, 5, 6, 7, 8, 9, 10, 10, 11, 12, 13]
lut_2 = range(16) # identity
config_0 = [0, 0, 1, 0]
config_1 = [0, 1, 0, 0]
reset_pattern = 6 * [1] # reset all
# individual parameters for each synapse
        # threshold is reached every 36 spike pairs (36 * e^(-10/20) = 21.83510375)
lut_th_causal = 21.835
lut_th_acausal = lut_th_causal
# other parameters
startWeight = 0 # as digital value [0, 1, ..., 15]
tau = 20.0
timeBetweenPairs = 100.0
# frequency_of_pairs = 10Hz => delta_t(+) = 10ms, delta_t(-) = 90ms
delay = 5.0
spikesIn = np.arange(10.0, 60000.0, timeBetweenPairs)
synapseDict = {'tau_plus': tau,
'tau_minus_stdp': tau,
'Wmax': Wmax,
'synapses_per_driver': 50,
'driver_readout_time': 15.0,
'lookuptable_0': lut_0,
'lookuptable_1': lut_1,
'lookuptable_2': lut_2,
'configbit_0': config_0,
'configbit_1': config_1,
'reset_pattern': reset_pattern,
'a_thresh_th': lut_th_causal,
'a_thresh_tl': lut_th_acausal}
# build network
stim = nest.Create('spike_generator')
neuronA = nest.Create('parrot_neuron')
neuronB = nest.Create('parrot_neuron')
nest.SetStatus(stim, [{'spike_times': spikesIn}])
nest.SetDefaults(modelName, synapseDict)
# check if GetDefaults returns same values as have been set
synapseDictGet = nest.GetDefaults(modelName)
for key in synapseDict.keys():
self.assertTrue(
all(np.atleast_1d(synapseDictGet[key] == synapseDict[key])))
nest.Connect(stim, neuronA)
nest.Connect(neuronA, neuronB, syn_spec={
'weight': float(startWeight) / 15.0 * Wmax,
'delay': delay, 'synapse_model': modelName})
nest.Simulate(50.0)
weightTrace = []
for run in range(len(spikesIn)):
nest.Simulate(timeBetweenPairs)
connections = nest.GetConnections(neuronA)
if (connections.get('synapse_model') == modelName):
weightTrace.append(
[run, connections.get('weight'),
connections.get('a_causal'),
connections.get('a_acausal')])
# analysis
weightTrace = np.array(weightTrace)
# just before theoretical updates
weightTraceMod36pre = weightTrace[35::36]
# just after theoretical updates
weightTraceMod36 = weightTrace[::36]
weightIndex = int(startWeight)
for i in range(len(weightTraceMod36pre)):
# check weight value before update
# (after spike pair with index 35, 71, ...)
self.assertTrue(np.allclose(weightTraceMod36pre[i][1],
1.0 / 15.0 * weightIndex * Wmax,
atol=1e-6))
weightIndex = lut_0[weightIndex]
weightIndex = int(startWeight)
for i in range(len(weightTraceMod36)):
# check weight value after update
# (after spike pair with index 0, 36, 72, ...)
self.assertTrue(np.allclose(weightTraceMod36[i][1],
1.0 / 15.0 * weightIndex * Wmax,
atol=1e-6))
# check charge on causal capacitor
self.assertTrue(np.allclose(weightTraceMod36[i][2],
np.ones_like(weightTraceMod36[i][2]) *
np.exp(-2 * delay / tau), atol=1e-6))
weightIndex = lut_0[weightIndex]
# check charge on anti-causal capacitor after each pair
for i in range(len(weightTrace) - 1):
# TODO: global params
self.assertTrue(np.allclose(weightTrace[i, 3], ((i % 36) + 1) *
np.exp(-(timeBetweenPairs -
2 * delay) / tau),
atol=1e-6))
def suite():
suite = unittest.makeSuite(FacetsTestCase, 'test')
return suite
def run():
runner = unittest.TextTestRunner(verbosity=2)
runner.run(suite())
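def threshold_sketch(delay=5.0, tau=20.0, pairs=36):
    """A minimal sketch, not used by the suite (helper name and defaults are
    assumptions matching the test above): each spike pair at delta_t =
    2*delay deposits exp(-2*delay/tau) of charge on the causal capacitor,
    so lut_th_causal is the charge accumulated after 36 pairs:
    36 * exp(-2*5.0/20.0) = 21.83510375...
    """
    return pairs * np.exp(-2.0 * delay / tau)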
if __name__ == "__main__":
run()
| gpl-2.0 | 1,944,758,697,676,592,000 | 34.939024 | 78 | 0.54564 | false |
bixbydev/Bixby | google/dist/gdata-2.0.18/tests/gdata_tests/blogger/live_client_test.py | 39 | 5831 | #!/usr/bin/env python
#
# Copyright (C) 2009 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# This module is used for version 2 of the Google Data APIs.
# These tests attempt to connect to Google servers.
__author__ = '[email protected] (Jeff Scudder)'
import unittest
import gdata.blogger.client
import gdata.blogger.data
import gdata.gauth
import gdata.client
import atom.http_core
import atom.mock_http_core
import atom.core
import gdata.data
import gdata.test_config as conf
conf.options.register_option(conf.BLOG_ID_OPTION)
class BloggerClientTest(unittest.TestCase):
def setUp(self):
self.client = None
if conf.options.get_value('runlive') == 'true':
self.client = gdata.blogger.client.BloggerClient()
conf.configure_client(self.client, 'BloggerTest', 'blogger')
def tearDown(self):
conf.close_client(self.client)
def test_create_update_delete(self):
if not conf.options.get_value('runlive') == 'true':
return
# Either load the recording or prepare to make a live request.
conf.configure_cache(self.client, 'test_create_update_delete')
# Add a blog post.
created = self.client.add_post(conf.options.get_value('blogid'),
'test post from BloggerClientTest',
'Hey look, another test!',
labels=['test', 'python'])
self.assertEqual(created.title.text, 'test post from BloggerClientTest')
self.assertEqual(created.content.text, 'Hey look, another test!')
self.assertEqual(len(created.category), 2)
self.assert_(created.control is None)
# Change the title of the blog post we just added.
created.title.text = 'Edited'
updated = self.client.update(created)
self.assertEqual(updated.title.text, 'Edited')
self.assert_(isinstance(updated, gdata.blogger.data.BlogPost))
self.assertEqual(updated.content.text, created.content.text)
# Delete the test entry from the blog.
self.client.delete(updated)
def test_create_draft_post(self):
if not conf.options.get_value('runlive') == 'true':
return
conf.configure_cache(self.client, 'test_create_draft_post')
# Add a draft blog post.
created = self.client.add_post(conf.options.get_value('blogid'),
'draft test post from BloggerClientTest',
'This should only be a draft.',
labels=['test2', 'python'], draft=True)
self.assertEqual(created.title.text,
'draft test post from BloggerClientTest')
self.assertEqual(created.content.text, 'This should only be a draft.')
self.assertEqual(len(created.category), 2)
self.assert_(created.control is not None)
self.assert_(created.control.draft is not None)
self.assertEqual(created.control.draft.text, 'yes')
# Publish the blog post.
created.control.draft.text = 'no'
updated = self.client.update(created)
if updated.control is not None and updated.control.draft is not None:
self.assertNotEqual(updated.control.draft.text, 'yes')
# Delete the test entry from the blog using the URL instead of the entry.
self.client.delete(updated.find_edit_link())
def test_create_draft_page(self):
if not conf.options.get_value('runlive') == 'true':
return
conf.configure_cache(self.client, 'test_create_draft_page')
# List all pages on the blog.
pages_before = self.client.get_pages(conf.options.get_value('blogid'))
# Add a draft page to blog.
created = self.client.add_page(conf.options.get_value('blogid'),
'draft page from BloggerClientTest',
'draft content',
draft=True)
self.assertEqual(created.title.text, 'draft page from BloggerClientTest')
self.assertEqual(created.content.text, 'draft content')
self.assert_(created.control is not None)
self.assert_(created.control.draft is not None)
self.assertEqual(created.control.draft.text, 'yes')
self.assertEqual(str(int(created.get_page_id())), created.get_page_id())
# List all pages after adding one.
pages_after = self.client.get_pages(conf.options.get_value('blogid'))
self.assertEqual(len(pages_before.entry) + 1, len(pages_after.entry))
# Publish page.
created.control.draft.text = 'no'
updated = self.client.update(created)
if updated.control is not None and updated.control.draft is not None:
self.assertNotEqual(updated.control.draft.text, 'yes')
# Delete test page.
self.client.delete(updated.find_edit_link())
pages_after = self.client.get_pages(conf.options.get_value('blogid'))
self.assertEqual(len(pages_before.entry), len(pages_after.entry))
def test_retrieve_post_with_categories(self):
if not conf.options.get_value('runlive') == 'true':
return
conf.configure_cache(self.client, 'test_retrieve_post_with_categories')
query = gdata.blogger.client.Query(categories=["news"], strict=True)
posts = self.client.get_posts(conf.options.get_value('blogid'), query=query)
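def example_category_query(blog_id):
  """A minimal sketch, not exercised by the suite: builds the same kind of
  category query as the last test above.  The function name is hypothetical,
  blog_id must be supplied by the caller, and 'news' is an example label.
  """
  client = gdata.blogger.client.BloggerClient()
  query = gdata.blogger.client.Query(categories=['news'], strict=True)
  return client.get_posts(blog_id, query=query)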
def suite():
return conf.build_suite([BloggerClientTest])
if __name__ == '__main__':
unittest.TextTestRunner().run(suite())
| gpl-3.0 | 3,635,495,686,903,031,300 | 35.44375 | 80 | 0.673469 | false |
sdoran35/hate-to-hugs | venv/lib/python3.6/site-packages/pip/_vendor/appdirs.py | 327 | 22368 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
# Copyright (c) 2005-2010 ActiveState Software Inc.
# Copyright (c) 2013 Eddy Petrișor
"""Utilities for determining application-specific dirs.
See <http://github.com/ActiveState/appdirs> for details and usage.
"""
# Dev Notes:
# - MSDN on where to store app data files:
# http://support.microsoft.com/default.aspx?scid=kb;en-us;310294#XSLTH3194121123120121120120
# - macOS: http://developer.apple.com/documentation/MacOSX/Conceptual/BPFileSystem/index.html
# - XDG spec for Un*x: http://standards.freedesktop.org/basedir-spec/basedir-spec-latest.html
__version_info__ = (1, 4, 0)
__version__ = '.'.join(map(str, __version_info__))
import sys
import os
PY3 = sys.version_info[0] == 3
if PY3:
unicode = str
if sys.platform.startswith('java'):
import platform
os_name = platform.java_ver()[3][0]
if os_name.startswith('Windows'): # "Windows XP", "Windows 7", etc.
system = 'win32'
elif os_name.startswith('Mac'): # "macOS", etc.
system = 'darwin'
else: # "Linux", "SunOS", "FreeBSD", etc.
# Setting this to "linux2" is not ideal, but only Windows or Mac
# are actually checked for and the rest of the module expects
# *sys.platform* style strings.
system = 'linux2'
else:
system = sys.platform
def user_data_dir(appname=None, appauthor=None, version=None, roaming=False):
r"""Return full path to the user-specific data dir for this application.
"appname" is the name of application.
If None, just the system directory is returned.
"appauthor" (only used on Windows) is the name of the
appauthor or distributing body for this application. Typically
it is the owning company name. This falls back to appname. You may
pass False to disable it.
"version" is an optional version path element to append to the
path. You might want to use this if you want multiple versions
of your app to be able to run independently. If used, this
would typically be "<major>.<minor>".
Only applied when appname is present.
"roaming" (boolean, default False) can be set True to use the Windows
roaming appdata directory. That means that for users on a Windows
network setup for roaming profiles, this user data will be
sync'd on login. See
<http://technet.microsoft.com/en-us/library/cc766489(WS.10).aspx>
for a discussion of issues.
Typical user data directories are:
macOS: ~/Library/Application Support/<AppName>
Unix: ~/.local/share/<AppName> # or in $XDG_DATA_HOME, if defined
        Win XP (not roaming): C:\Documents and Settings\<username>\Local Settings\Application Data\<AppAuthor>\<AppName>
        Win XP (roaming): C:\Documents and Settings\<username>\Application Data\<AppAuthor>\<AppName>
Win 7 (not roaming): C:\Users\<username>\AppData\Local\<AppAuthor>\<AppName>
Win 7 (roaming): C:\Users\<username>\AppData\Roaming\<AppAuthor>\<AppName>
For Unix, we follow the XDG spec and support $XDG_DATA_HOME.
That means, by default "~/.local/share/<AppName>".
"""
if system == "win32":
if appauthor is None:
appauthor = appname
const = roaming and "CSIDL_APPDATA" or "CSIDL_LOCAL_APPDATA"
path = os.path.normpath(_get_win_folder(const))
if appname:
if appauthor is not False:
path = os.path.join(path, appauthor, appname)
else:
path = os.path.join(path, appname)
elif system == 'darwin':
path = os.path.expanduser('~/Library/Application Support/')
if appname:
path = os.path.join(path, appname)
else:
path = os.getenv('XDG_DATA_HOME', os.path.expanduser("~/.local/share"))
if appname:
path = os.path.join(path, appname)
if appname and version:
path = os.path.join(path, version)
return path
def site_data_dir(appname=None, appauthor=None, version=None, multipath=False):
"""Return full path to the user-shared data dir for this application.
"appname" is the name of application.
If None, just the system directory is returned.
"appauthor" (only used on Windows) is the name of the
appauthor or distributing body for this application. Typically
it is the owning company name. This falls back to appname. You may
pass False to disable it.
"version" is an optional version path element to append to the
path. You might want to use this if you want multiple versions
of your app to be able to run independently. If used, this
would typically be "<major>.<minor>".
Only applied when appname is present.
"multipath" is an optional parameter only applicable to *nix
which indicates that the entire list of data dirs should be
returned. By default, the first item from XDG_DATA_DIRS is
returned, or '/usr/local/share/<AppName>',
if XDG_DATA_DIRS is not set
    Typical site data directories are:
macOS: /Library/Application Support/<AppName>
Unix: /usr/local/share/<AppName> or /usr/share/<AppName>
Win XP: C:\Documents and Settings\All Users\Application Data\<AppAuthor>\<AppName>
Vista: (Fail! "C:\ProgramData" is a hidden *system* directory on Vista.)
Win 7: C:\ProgramData\<AppAuthor>\<AppName> # Hidden, but writeable on Win 7.
For Unix, this is using the $XDG_DATA_DIRS[0] default.
WARNING: Do not use this on Windows. See the Vista-Fail note above for why.
"""
if system == "win32":
if appauthor is None:
appauthor = appname
path = os.path.normpath(_get_win_folder("CSIDL_COMMON_APPDATA"))
if appname:
if appauthor is not False:
path = os.path.join(path, appauthor, appname)
else:
path = os.path.join(path, appname)
elif system == 'darwin':
path = os.path.expanduser('/Library/Application Support')
if appname:
path = os.path.join(path, appname)
else:
# XDG default for $XDG_DATA_DIRS
# only first, if multipath is False
path = os.getenv('XDG_DATA_DIRS',
os.pathsep.join(['/usr/local/share', '/usr/share']))
pathlist = [os.path.expanduser(x.rstrip(os.sep)) for x in path.split(os.pathsep)]
if appname:
if version:
appname = os.path.join(appname, version)
pathlist = [os.sep.join([x, appname]) for x in pathlist]
if multipath:
path = os.pathsep.join(pathlist)
else:
path = pathlist[0]
return path
if appname and version:
path = os.path.join(path, version)
return path
def user_config_dir(appname=None, appauthor=None, version=None, roaming=False):
r"""Return full path to the user-specific config dir for this application.
"appname" is the name of application.
If None, just the system directory is returned.
"appauthor" (only used on Windows) is the name of the
appauthor or distributing body for this application. Typically
it is the owning company name. This falls back to appname. You may
pass False to disable it.
"version" is an optional version path element to append to the
path. You might want to use this if you want multiple versions
of your app to be able to run independently. If used, this
would typically be "<major>.<minor>".
Only applied when appname is present.
"roaming" (boolean, default False) can be set True to use the Windows
roaming appdata directory. That means that for users on a Windows
network setup for roaming profiles, this user data will be
sync'd on login. See
<http://technet.microsoft.com/en-us/library/cc766489(WS.10).aspx>
for a discussion of issues.
    Typical user config directories are:
macOS: same as user_data_dir
Unix: ~/.config/<AppName> # or in $XDG_CONFIG_HOME, if defined
Win *: same as user_data_dir
For Unix, we follow the XDG spec and support $XDG_CONFIG_HOME.
    That means, by default "~/.config/<AppName>".
"""
if system in ["win32", "darwin"]:
path = user_data_dir(appname, appauthor, None, roaming)
else:
path = os.getenv('XDG_CONFIG_HOME', os.path.expanduser("~/.config"))
if appname:
path = os.path.join(path, appname)
if appname and version:
path = os.path.join(path, version)
return path
def site_config_dir(appname=None, appauthor=None, version=None, multipath=False):
"""Return full path to the user-shared data dir for this application.
"appname" is the name of application.
If None, just the system directory is returned.
"appauthor" (only used on Windows) is the name of the
appauthor or distributing body for this application. Typically
it is the owning company name. This falls back to appname. You may
pass False to disable it.
"version" is an optional version path element to append to the
path. You might want to use this if you want multiple versions
of your app to be able to run independently. If used, this
would typically be "<major>.<minor>".
Only applied when appname is present.
"multipath" is an optional parameter only applicable to *nix
which indicates that the entire list of config dirs should be
returned. By default, the first item from XDG_CONFIG_DIRS is
returned, or '/etc/xdg/<AppName>', if XDG_CONFIG_DIRS is not set
    Typical site config directories are:
macOS: same as site_data_dir
Unix: /etc/xdg/<AppName> or $XDG_CONFIG_DIRS[i]/<AppName> for each value in
$XDG_CONFIG_DIRS
Win *: same as site_data_dir
Vista: (Fail! "C:\ProgramData" is a hidden *system* directory on Vista.)
For Unix, this is using the $XDG_CONFIG_DIRS[0] default, if multipath=False
WARNING: Do not use this on Windows. See the Vista-Fail note above for why.
"""
if system in ["win32", "darwin"]:
path = site_data_dir(appname, appauthor)
if appname and version:
path = os.path.join(path, version)
else:
# XDG default for $XDG_CONFIG_DIRS
# only first, if multipath is False
path = os.getenv('XDG_CONFIG_DIRS', '/etc/xdg')
pathlist = [os.path.expanduser(x.rstrip(os.sep)) for x in path.split(os.pathsep)]
if appname:
if version:
appname = os.path.join(appname, version)
pathlist = [os.sep.join([x, appname]) for x in pathlist]
if multipath:
path = os.pathsep.join(pathlist)
else:
path = pathlist[0]
return path
def user_cache_dir(appname=None, appauthor=None, version=None, opinion=True):
r"""Return full path to the user-specific cache dir for this application.
"appname" is the name of application.
If None, just the system directory is returned.
"appauthor" (only used on Windows) is the name of the
appauthor or distributing body for this application. Typically
it is the owning company name. This falls back to appname. You may
pass False to disable it.
"version" is an optional version path element to append to the
path. You might want to use this if you want multiple versions
of your app to be able to run independently. If used, this
would typically be "<major>.<minor>".
Only applied when appname is present.
"opinion" (boolean) can be False to disable the appending of
"Cache" to the base app data dir for Windows. See
discussion below.
Typical user cache directories are:
macOS: ~/Library/Caches/<AppName>
Unix: ~/.cache/<AppName> (XDG default)
Win XP: C:\Documents and Settings\<username>\Local Settings\Application Data\<AppAuthor>\<AppName>\Cache
Vista: C:\Users\<username>\AppData\Local\<AppAuthor>\<AppName>\Cache
On Windows the only suggestion in the MSDN docs is that local settings go in
the `CSIDL_LOCAL_APPDATA` directory. This is identical to the non-roaming
app data dir (the default returned by `user_data_dir` above). Apps typically
put cache data somewhere *under* the given dir here. Some examples:
...\Mozilla\Firefox\Profiles\<ProfileName>\Cache
...\Acme\SuperApp\Cache\1.0
OPINION: This function appends "Cache" to the `CSIDL_LOCAL_APPDATA` value.
This can be disabled with the `opinion=False` option.
"""
if system == "win32":
if appauthor is None:
appauthor = appname
path = os.path.normpath(_get_win_folder("CSIDL_LOCAL_APPDATA"))
if appname:
if appauthor is not False:
path = os.path.join(path, appauthor, appname)
else:
path = os.path.join(path, appname)
if opinion:
path = os.path.join(path, "Cache")
elif system == 'darwin':
path = os.path.expanduser('~/Library/Caches')
if appname:
path = os.path.join(path, appname)
else:
path = os.getenv('XDG_CACHE_HOME', os.path.expanduser('~/.cache'))
if appname:
path = os.path.join(path, appname)
if appname and version:
path = os.path.join(path, version)
return path
def user_log_dir(appname=None, appauthor=None, version=None, opinion=True):
r"""Return full path to the user-specific log dir for this application.
"appname" is the name of application.
If None, just the system directory is returned.
"appauthor" (only used on Windows) is the name of the
appauthor or distributing body for this application. Typically
it is the owning company name. This falls back to appname. You may
pass False to disable it.
"version" is an optional version path element to append to the
path. You might want to use this if you want multiple versions
of your app to be able to run independently. If used, this
would typically be "<major>.<minor>".
Only applied when appname is present.
"opinion" (boolean) can be False to disable the appending of
"Logs" to the base app data dir for Windows, and "log" to the
base cache dir for Unix. See discussion below.
    Typical user log directories are:
macOS: ~/Library/Logs/<AppName>
Unix: ~/.cache/<AppName>/log # or under $XDG_CACHE_HOME if defined
Win XP: C:\Documents and Settings\<username>\Local Settings\Application Data\<AppAuthor>\<AppName>\Logs
Vista: C:\Users\<username>\AppData\Local\<AppAuthor>\<AppName>\Logs
On Windows the only suggestion in the MSDN docs is that local settings
go in the `CSIDL_LOCAL_APPDATA` directory. (Note: I'm interested in
examples of what some windows apps use for a logs dir.)
OPINION: This function appends "Logs" to the `CSIDL_LOCAL_APPDATA`
value for Windows and appends "log" to the user cache dir for Unix.
This can be disabled with the `opinion=False` option.
"""
if system == "darwin":
path = os.path.join(
os.path.expanduser('~/Library/Logs'),
appname)
elif system == "win32":
path = user_data_dir(appname, appauthor, version)
version = False
if opinion:
path = os.path.join(path, "Logs")
else:
path = user_cache_dir(appname, appauthor, version)
version = False
if opinion:
path = os.path.join(path, "log")
if appname and version:
path = os.path.join(path, version)
return path
class AppDirs(object):
"""Convenience wrapper for getting application dirs."""
def __init__(self, appname, appauthor=None, version=None, roaming=False,
multipath=False):
self.appname = appname
self.appauthor = appauthor
self.version = version
self.roaming = roaming
self.multipath = multipath
@property
def user_data_dir(self):
return user_data_dir(self.appname, self.appauthor,
version=self.version, roaming=self.roaming)
@property
def site_data_dir(self):
return site_data_dir(self.appname, self.appauthor,
version=self.version, multipath=self.multipath)
@property
def user_config_dir(self):
return user_config_dir(self.appname, self.appauthor,
version=self.version, roaming=self.roaming)
@property
def site_config_dir(self):
return site_config_dir(self.appname, self.appauthor,
version=self.version, multipath=self.multipath)
@property
def user_cache_dir(self):
return user_cache_dir(self.appname, self.appauthor,
version=self.version)
@property
def user_log_dir(self):
return user_log_dir(self.appname, self.appauthor,
version=self.version)
#---- internal support stuff
def _get_win_folder_from_registry(csidl_name):
"""This is a fallback technique at best. I'm not sure if using the
registry for this guarantees us the correct answer for all CSIDL_*
names.
"""
import _winreg
shell_folder_name = {
"CSIDL_APPDATA": "AppData",
"CSIDL_COMMON_APPDATA": "Common AppData",
"CSIDL_LOCAL_APPDATA": "Local AppData",
}[csidl_name]
key = _winreg.OpenKey(
_winreg.HKEY_CURRENT_USER,
r"Software\Microsoft\Windows\CurrentVersion\Explorer\Shell Folders"
)
dir, type = _winreg.QueryValueEx(key, shell_folder_name)
return dir
def _get_win_folder_with_pywin32(csidl_name):
from win32com.shell import shellcon, shell
dir = shell.SHGetFolderPath(0, getattr(shellcon, csidl_name), 0, 0)
# Try to make this a unicode path because SHGetFolderPath does
# not return unicode strings when there is unicode data in the
# path.
try:
dir = unicode(dir)
# Downgrade to short path name if have highbit chars. See
# <http://bugs.activestate.com/show_bug.cgi?id=85099>.
has_high_char = False
for c in dir:
if ord(c) > 255:
has_high_char = True
break
if has_high_char:
try:
import win32api
dir = win32api.GetShortPathName(dir)
except ImportError:
pass
except UnicodeError:
pass
return dir
def _get_win_folder_with_ctypes(csidl_name):
import ctypes
csidl_const = {
"CSIDL_APPDATA": 26,
"CSIDL_COMMON_APPDATA": 35,
"CSIDL_LOCAL_APPDATA": 28,
}[csidl_name]
buf = ctypes.create_unicode_buffer(1024)
ctypes.windll.shell32.SHGetFolderPathW(None, csidl_const, None, 0, buf)
# Downgrade to short path name if have highbit chars. See
# <http://bugs.activestate.com/show_bug.cgi?id=85099>.
has_high_char = False
for c in buf:
if ord(c) > 255:
has_high_char = True
break
if has_high_char:
buf2 = ctypes.create_unicode_buffer(1024)
if ctypes.windll.kernel32.GetShortPathNameW(buf.value, buf2, 1024):
buf = buf2
return buf.value
def _get_win_folder_with_jna(csidl_name):
import array
from com.sun import jna
from com.sun.jna.platform import win32
buf_size = win32.WinDef.MAX_PATH * 2
buf = array.zeros('c', buf_size)
shell = win32.Shell32.INSTANCE
shell.SHGetFolderPath(None, getattr(win32.ShlObj, csidl_name), None, win32.ShlObj.SHGFP_TYPE_CURRENT, buf)
dir = jna.Native.toString(buf.tostring()).rstrip("\0")
# Downgrade to short path name if have highbit chars. See
# <http://bugs.activestate.com/show_bug.cgi?id=85099>.
has_high_char = False
for c in dir:
if ord(c) > 255:
has_high_char = True
break
if has_high_char:
buf = array.zeros('c', buf_size)
kernel = win32.Kernel32.INSTANCE
        if kernel.GetShortPathName(dir, buf, buf_size):
dir = jna.Native.toString(buf.tostring()).rstrip("\0")
return dir
if system == "win32":
try:
import win32com.shell
_get_win_folder = _get_win_folder_with_pywin32
except ImportError:
try:
from ctypes import windll
_get_win_folder = _get_win_folder_with_ctypes
except ImportError:
try:
import com.sun.jna
_get_win_folder = _get_win_folder_with_jna
except ImportError:
_get_win_folder = _get_win_folder_from_registry
#---- self test code
if __name__ == "__main__":
appname = "MyApp"
appauthor = "MyCompany"
props = ("user_data_dir", "site_data_dir",
"user_config_dir", "site_config_dir",
"user_cache_dir", "user_log_dir")
print("-- app dirs (with optional 'version')")
dirs = AppDirs(appname, appauthor, version="1.0")
for prop in props:
print("%s: %s" % (prop, getattr(dirs, prop)))
print("\n-- app dirs (without optional 'version')")
dirs = AppDirs(appname, appauthor)
for prop in props:
print("%s: %s" % (prop, getattr(dirs, prop)))
print("\n-- app dirs (without optional 'appauthor')")
dirs = AppDirs(appname)
for prop in props:
print("%s: %s" % (prop, getattr(dirs, prop)))
print("\n-- app dirs (with disabled 'appauthor')")
dirs = AppDirs(appname, appauthor=False)
for prop in props:
print("%s: %s" % (prop, getattr(dirs, prop)))
| mit | 5,838,502,434,004,336,000 | 39.519928 | 122 | 0.617562 | false |
bingopodcast/bingos | bingo_emulator/graphics/super_7.py | 1 | 58578 |
import pygame
import random
pygame.display.set_caption("Multi Bingo")
screen = pygame.display.set_mode((0,0))
screen.fill([0,0,0])
pygame.mouse.set_visible(False)
meter = pygame.image.load('graphics/assets/black_register_cover.png').convert()
odds = pygame.image.load('super_7/assets/odds.png').convert_alpha()
eb = pygame.image.load('super_7/assets/eb.png').convert_alpha()
eb_number = pygame.image.load('super_7/assets/eb_number.png').convert_alpha()
extra_balls = pygame.image.load('super_7/assets/extra_balls.png').convert_alpha()
time = pygame.image.load('super_7/assets/time.png').convert_alpha()
ml_letter = pygame.image.load('super_7/assets/ml_letter.png').convert_alpha()
ml_arrow = pygame.image.load('super_7/assets/ml_arrow.png').convert_alpha()
ml_a = pygame.image.load('super_7/assets/ml_a.png').convert_alpha()
ml_b = pygame.image.load('super_7/assets/ml_b.png').convert_alpha()
ml_c = pygame.image.load('super_7/assets/ml_c.png').convert_alpha()
select_now = pygame.image.load('super_7/assets/select_now.png').convert_alpha()
tilt = pygame.image.load('super_7/assets/tilt.png').convert_alpha()
button = pygame.image.load('super_7/assets/pap.png').convert_alpha()
red_double = pygame.image.load('super_7/assets/red_double.png').convert_alpha()
green_double = pygame.image.load('super_7/assets/green_double.png').convert_alpha()
yellow_double = pygame.image.load('super_7/assets/yellow_double.png').convert_alpha()
blue_double = pygame.image.load('super_7/assets/blue_double.png').convert_alpha()
four_stars = pygame.image.load('super_7/assets/four_stars.png').convert_alpha()
six_stars = pygame.image.load('super_7/assets/six_stars.png').convert_alpha()
three_stars = pygame.image.load('super_7/assets/three_stars.png').convert_alpha()
three_red = pygame.image.load('super_7/assets/three_red.png').convert_alpha()
two_red = pygame.image.load('super_7/assets/two_red.png').convert_alpha()
red_letter = pygame.image.load('super_7/assets/red_letter.png').convert_alpha()
letter1 = pygame.image.load('super_7/assets/letter1.png').convert_alpha()
letter2 = pygame.image.load('super_7/assets/letter2.png').convert_alpha()
letter3 = pygame.image.load('super_7/assets/letter3.png').convert_alpha()
letter4 = pygame.image.load('super_7/assets/letter4.png').convert_alpha()
letter5 = pygame.image.load('super_7/assets/letter5.png').convert_alpha()
letter6 = pygame.image.load('super_7/assets/letter6.png').convert_alpha()
red_letter1 = pygame.image.load('super_7/assets/red_letter1.png').convert_alpha()
red_letter2 = pygame.image.load('super_7/assets/red_letter2.png').convert_alpha()
red_letter3 = pygame.image.load('super_7/assets/red_letter3.png').convert_alpha()
red_letter4 = pygame.image.load('super_7/assets/red_letter4.png').convert_alpha()
red_letter5 = pygame.image.load('super_7/assets/red_letter5.png').convert_alpha()
red_letter6 = pygame.image.load('super_7/assets/red_letter6.png').convert_alpha()
number_card = pygame.image.load('super_7/assets/number_card.png').convert_alpha()
number = pygame.image.load('super_7/assets/number.png').convert_alpha()
columnb1 = pygame.image.load('super_7/assets/columnb1.png').convert_alpha()
columnb2 = pygame.image.load('super_7/assets/columnb2.png').convert_alpha()
columna = pygame.image.load('super_7/assets/columna.png').convert_alpha()
columnc1 = pygame.image.load('super_7/assets/columnc1.png').convert_alpha()
columnc2 = pygame.image.load('super_7/assets/columnc2.png').convert_alpha()
double_triple = pygame.image.load('super_7/assets/double_triple.png').convert_alpha()
collected = pygame.image.load('super_7/assets/collected.png').convert_alpha()
special_odds = pygame.image.load('super_7/assets/special_odds.png').convert_alpha()
twin_number = pygame.image.load('super_7/assets/twin_number.png').convert_alpha()
seven_odds = pygame.image.load('super_7/assets/seven_odds.png').convert_alpha()
diamond = pygame.image.load('super_7/assets/diamond.png').convert_alpha()
diamond_7 = pygame.image.load('super_7/assets/diamond_7.png').convert_alpha()
ball = pygame.image.load('super_7/assets/ball.png').convert_alpha()
bg_menu = pygame.image.load('super_7/assets/super_7_menu.png').convert_alpha()
bg_gi = pygame.image.load('super_7/assets/super_7_gi.png').convert_alpha()
bg_off = pygame.image.load('super_7/assets/super_7_off.png').convert_alpha()
class scorereel():
""" Score Reels are used to count replays """
def __init__(self, pos, image):
self.position = pos
self.default_y = self.position[1]
self.image = pygame.image.load(image).convert()
reel1 = scorereel([110,797], "graphics/assets/white_reel.png")
reel10 = scorereel([91,797], "graphics/assets/white_reel.png")
reel100 = scorereel([72,797], "graphics/assets/white_reel.png")
reel1000 = scorereel([53,797], "graphics/assets/white_reel.png")
def display(s, replays=0, menu=False):
meter.set_colorkey((255,0,252))
meter_position = [44,797]
screen.blit(reel1.image, reel1.position)
screen.blit(reel10.image, reel10.position)
screen.blit(reel100.image, reel100.position)
screen.blit(reel1000.image, reel1000.position)
screen.blit(meter, meter_position)
if s.game.line2.position == 0:
p = [233,368]
screen.blit(columnb1, p)
p = [284,369]
screen.blit(columnb2, p)
else:
p = [233,368]
screen.blit(columnb2, p)
p = [284,369]
screen.blit(columnb1, p)
if s.game.line1.position == 0 or s.game.line1.position == 2:
p = [337,318]
screen.blit(columna, p)
elif s.game.line1.position == 1:
p = [337,368]
screen.blit(columna, p)
else:
p = [337,269]
screen.blit(columna, p)
if s.game.line3.position == 0:
p = [389,368]
screen.blit(columnc1, p)
p = [440,369]
screen.blit(columnc2, p)
else:
p = [389,368]
screen.blit(columnc2, p)
p = [440,369]
screen.blit(columnc1, p)
nc_p = [230,368]
screen.blit(number_card, nc_p)
backglass_position = [0, 0]
backglass = pygame.Surface(screen.get_size(), flags=pygame.SRCALPHA)
backglass.fill((0, 0, 0))
if menu == True:
screen.blit(bg_menu, backglass_position)
else:
if (s.game.anti_cheat.status == True):
screen.blit(bg_gi, backglass_position)
else:
screen.blit(bg_off, backglass_position)
if s.game.eb_play.status == True:
eb_position = [41,1040]
screen.blit(extra_balls, eb_position)
if s.game.extra_ball.position >= 1:
eb_position = [150,1040]
screen.blit(eb_number, eb_position)
if s.game.extra_ball.position >= 2:
eb_position = [201,1040]
screen.blit(eb, eb_position)
if s.game.extra_ball.position >= 3:
eb_position = [262,1040]
screen.blit(eb, eb_position)
if s.game.extra_ball.position >= 4:
eb_position = [323,1040]
screen.blit(eb_number, eb_position)
if s.game.extra_ball.position >= 5:
eb_position = [374,1040]
screen.blit(eb, eb_position)
if s.game.extra_ball.position >= 6:
eb_position = [436,1040]
screen.blit(eb, eb_position)
if s.game.extra_ball.position >= 7:
eb_position = [498,1040]
screen.blit(eb_number, eb_position)
if s.game.extra_ball.position >= 8:
eb_position = [548,1040]
screen.blit(eb, eb_position)
if s.game.extra_ball.position >= 9:
eb_position = [610,1040]
screen.blit(eb, eb_position)
if s.game.red_star.status == True:
rs_position = [18,460]
screen.blit(time, rs_position)
if s.game.yellow_star.status == True:
rs_position = [18,504]
screen.blit(time, rs_position)
if s.game.mystic_lines.position >= 4 or s.game.two_red_letter.status == True or s.game.three_red_letter.status == True:
if s.game.selection_feature.position < 7:
bfp = [18,548]
screen.blit(time, bfp)
elif s.game.selection_feature.position in [7,8]:
bfp = [19,416]
screen.blit(time, bfp)
elif s.game.selection_feature.position == 9:
bfp = [18,372]
screen.blit(time, bfp)
if s.game.ball_count.position < 1:
if s.game.odds_only.status == True:
b = [18,874]
screen.blit(button, b)
elif s.game.features.status == True:
b = [18,912]
screen.blit(button, b)
elif s.game.special.status == True:
b = [18,989]
screen.blit(button, b)
else:
b = [18,950]
screen.blit(button, b)
if s.game.mystic_lines.position == 1:
p = [203,680]
screen.blit(ml_arrow, p)
if s.game.mystic_lines.position == 2:
p = [236,680]
screen.blit(ml_arrow, p)
if s.game.mystic_lines.position == 3:
p = [267,680]
screen.blit(ml_arrow, p)
if s.game.mystic_lines.position >= 4:
p = [300,683]
screen.blit(ml_a, p)
p = [335,591]
screen.blit(ml_letter, p)
if s.game.mystic_lines.position == 5:
p = [334,680]
screen.blit(ml_arrow, p)
if s.game.mystic_lines.position == 6:
p = [360,681]
screen.blit(ml_arrow, p)
if s.game.mystic_lines.position >= 7:
p = [396,682]
screen.blit(ml_b, p)
p = [262,591]
screen.blit(ml_letter, p)
if s.game.mystic_lines.position == 8:
p = [430,680]
screen.blit(ml_arrow, p)
if s.game.mystic_lines.position == 9:
p = [459,680]
screen.blit(ml_arrow, p)
if s.game.mystic_lines.position == 10:
p = [492,682]
screen.blit(ml_c, p)
p = [410,591]
screen.blit(ml_letter, p)
if s.game.mystic_lines.position >= 4:
t = 3
if s.game.selection_feature.position in [7,8]:
t = 4
if s.game.selection_feature.position == 9:
t = 5
if s.game.ball_count.position == t:
s.cancel_delayed(name="blink")
blink([s,1,1])
else:
s.cancel_delayed(name="blink")
if s.game.tilt.status == False:
if s.holes:
if 1 in s.holes:
if s.game.line2.position == 0:
p = [284,526]
screen.blit(number, p)
else:
p = [234,529]
screen.blit(number, p)
if 2 in s.holes:
if s.game.line2.position == 0:
p = [282,377]
screen.blit(number, p)
else:
p = [232,378]
screen.blit(number, p)
if 3 in s.holes:
if s.game.line2.position == 0:
p = [232,427]
screen.blit(number, p)
else:
p = [282,426]
screen.blit(number, p)
if 4 in s.holes:
if s.game.line3.position == 0:
p = [387,378]
screen.blit(number, p)
else:
p = [440,378]
screen.blit(number, p)
if 5 in s.holes:
if s.game.line1.position == 0 or s.game.line1.position == 2:
p = [336,477]
screen.blit(number, p)
elif s.game.line1.position == 1:
p = [336,526]
screen.blit(number, p)
else:
p = [336,428]
screen.blit(number, p)
if 6 in s.holes:
if s.game.line3.position == 0:
p = [440,378]
screen.blit(number, p)
else:
p = [387,378]
screen.blit(number, p)
if 7 in s.holes:
if s.game.line1.position == 0 or s.game.line1.position == 2:
p = [336,526]
screen.blit(number, p)
elif s.game.line1.position == 1:
p = [334,377]
screen.blit(number, p)
else:
p = [336,476]
screen.blit(number, p)
if 8 in s.holes:
if s.game.line2.position == 0:
p = [232,378]
screen.blit(number, p)
else:
p = [282,378]
screen.blit(number, p)
if 9 in s.holes:
if s.game.line1.position == 0 or s.game.line1.position == 2:
p = [336,427]
screen.blit(number, p)
elif s.game.line1.position == 1:
p = [336,476]
screen.blit(number, p)
else:
p = [336,377]
screen.blit(number, p)
if 10 in s.holes:
if s.game.line3.position == 0:
p = [442,477]
screen.blit(number, p)
else:
p = [388,476]
screen.blit(number, p)
if 11 in s.holes:
if s.game.line3.position == 0:
p = [388,428]
screen.blit(number, p)
else:
p = [442,428]
screen.blit(number, p)
if 12 in s.holes:
if s.game.line3.position == 0:
p = [387,476]
screen.blit(number, p)
else:
p = [442,478]
screen.blit(number, p)
if 13 in s.holes:
if s.game.line3.position == 0:
p = [442,526]
screen.blit(number, p)
else:
p = [387,526]
screen.blit(number, p)
if 14 in s.holes:
if s.game.line3.position == 0:
p = [442,428]
screen.blit(number, p)
else:
p = [388,428]
screen.blit(number, p)
if 15 in s.holes:
if s.game.line2.position == 0:
p = [282,426]
screen.blit(number, p)
else:
p = [232,426]
screen.blit(number, p)
if 16 in s.holes:
if s.game.line1.position == 0 or s.game.line1.position == 2:
p = [336,378]
screen.blit(number, p)
elif s.game.line1.position == 1:
p = [336,428]
screen.blit(number, p)
else:
p = [336,526]
screen.blit(number, p)
if 17 in s.holes:
if s.game.line2.position == 0:
p = [285,479]
screen.blit(number, p)
else:
p = [233,479]
screen.blit(number, p)
if 18 in s.holes:
if s.game.line2.position == 0:
p = [233,479]
screen.blit(number, p)
else:
p = [285,479]
screen.blit(number, p)
if 19 in s.holes:
if s.game.line3.position == 0:
p = [387,526]
screen.blit(number, p)
else:
p = [442,526]
screen.blit(number, p)
if 20 in s.holes:
if s.game.line2.position == 0:
p = [232,528]
screen.blit(number, p)
else:
p = [284,526]
screen.blit(number, p)
        # All four odds reels share one row of x coordinates; only the y
        # position differs per color, so a lookup replaces the four chains.
        odds_x = [192, 230, 267, 305, 343, 385, 436, 483, 530, 578]
        for reel, y in [(s.game.red_odds, 783), (s.game.green_odds, 843),
                        (s.game.yellow_odds, 907), (s.game.blue_odds, 973)]:
            if 1 <= reel.position <= 10:
                screen.blit(odds, [odds_x[reel.position - 1], y])
p = [307,217]
screen.blit(letter1, p)
p = [346,217]
screen.blit(letter2, p)
p = [402,217]
screen.blit(letter3, p)
p = [451,217]
screen.blit(letter4, p)
p = [497,217]
screen.blit(letter5, p)
p = [572,217]
screen.blit(letter6, p)
if s.game.red_odds.position < 5:
p = [307,217]
screen.blit(red_letter1, p)
if s.game.red_odds.position in [5,6]:
p = [346,217]
screen.blit(red_letter2, p)
if s.game.red_odds.position == 7:
p = [402,217]
screen.blit(red_letter3, p)
if s.game.red_odds.position == 8:
p = [451,217]
screen.blit(red_letter4, p)
if s.game.red_odds.position == 9:
p = [497,217]
screen.blit(red_letter5, p)
if s.game.red_odds.position == 10:
p = [572,217]
screen.blit(red_letter6, p)
if s.game.two_red_letter.status == True:
p = [18,258]
screen.blit(red_letter, p)
p = [92,220]
screen.blit(two_red, p)
if s.game.three_red_letter.status == True:
p = [18,258]
screen.blit(red_letter, p)
p = [18,219]
screen.blit(three_red, p)
if s.game.three_stars.status == True:
p = [18,297]
screen.blit(four_stars, p)
p = [18,334]
screen.blit(three_stars, p)
if s.game.six_stars.status == True:
p = [18,297]
screen.blit(four_stars, p)
p = [92,334]
screen.blit(six_stars, p)
if s.game.double_red.status == True:
p = [20,610]
screen.blit(red_double, p)
if s.game.double_yellow.status == True:
p = [94,610]
screen.blit(yellow_double, p)
if s.game.double_green.status == True:
p = [20,683]
screen.blit(green_double, p)
if s.game.double_blue.status == True:
p = [94,683]
screen.blit(blue_double, p)
        if (s.game.double_red.status == True or s.game.double_yellow.status == True or
                s.game.double_green.status == True or s.game.double_blue.status == True):
            p = [52,647] if s.game.triple.status == True else [52,680]
            screen.blit(double_triple, p)
if s.game.tilt.status == True:
tilt_position = [652,817]
screen.blit(tilt, tilt_position)
# Special Game
if s.game.special_odds.position > 0:
if s.game.special_odds.position == 1:
p = [600,512]
screen.blit(special_odds, p)
p = [547,511]
screen.blit(seven_odds, p)
if s.game.special_odds.position == 2:
p = [599,482]
screen.blit(special_odds, p)
p = [547,482]
screen.blit(seven_odds, p)
if s.game.special_odds.position == 3:
p = [599,453]
screen.blit(special_odds, p)
p = [547,452]
screen.blit(seven_odds, p)
if s.game.special_odds.position == 4:
p = [599,424]
screen.blit(special_odds, p)
p = [547,424]
screen.blit(seven_odds, p)
if s.game.special_odds.position == 5:
p = [599,395]
screen.blit(special_odds, p)
p = [547,394]
screen.blit(seven_odds, p)
if s.game.special_odds.position == 6:
p = [598,366]
screen.blit(special_odds, p)
p = [547,366]
screen.blit(seven_odds, p)
if s.game.special_odds.position == 7:
p = [598,337]
screen.blit(special_odds, p)
p = [548,336]
screen.blit(seven_odds, p)
if s.game.special_odds.position == 8:
p = [598,308]
screen.blit(special_odds, p)
p = [548,308]
screen.blit(seven_odds, p)
if s.game.special_odds.position == 9:
p = [599,278]
screen.blit(special_odds, p)
p = [548,279]
screen.blit(seven_odds, p)
if s.game.special_odds.position > 0:
if s.game.special_replay_counter.position > 0:
p = [608,732]
screen.blit(collected, p)
if s.game.ball_count.position < 3:
p = [531,731]
screen.blit(collected, p)
if s.game.special_game.position == 2:
p = [598,540]
screen.blit(ball, p)
p = [608,635]
screen.blit(collected, p)
if s.game.special_game.position == 3:
p = [626,540]
screen.blit(ball, p)
p = [608,635]
screen.blit(collected, p)
if s.game.special_game.position == 4:
p = [656,540]
screen.blit(ball, p)
p = [608,635]
screen.blit(collected, p)
if s.game.missed.status == True:
p = [608,684]
screen.blit(collected, p)
if s.game.twin_number.position == 1:
p = [204,739]
screen.blit(ml_arrow, p)
elif s.game.twin_number.position == 2:
p = [236,738]
screen.blit(ml_arrow, p)
elif s.game.twin_number.position == 3:
p = [269,738]
screen.blit(ml_arrow, p)
if s.game.twin_number.position >= 4:
if s.game.twelve.status == True:
p = [300,728]
screen.blit(twin_number, p)
if s.game.eight.status == True:
p = [300,752]
screen.blit(twin_number, p)
if s.game.twin_number.position == 5:
p = [370,739]
screen.blit(ml_arrow, p)
elif s.game.twin_number.position == 6:
p = [400,739]
screen.blit(ml_arrow, p)
elif s.game.twin_number.position == 7:
p = [430,739]
screen.blit(ml_arrow, p)
if s.game.twin_number.position == 8:
if s.game.eight.status == True:
p = [462,730]
screen.blit(twin_number, p)
if s.game.twelve.status == True:
p = [462,752]
screen.blit(twin_number, p)
    # Diamond ladder: one fixed position per bonus step; step 7 uses the "7" art.
    bonus_p = {1: [552,702], 2: [535,686], 3: [536,660], 4: [535,635],
               5: [535,608], 6: [534,584], 7: [546,552], 8: [572,582],
               9: [573,608], 10: [573,634], 11: [574,660], 12: [574,686]}
    if s.game.bonus.position in bonus_p:
        img = diamond_7 if s.game.bonus.position == 7 else diamond
        screen.blit(img, bonus_p[s.game.bonus.position])
pygame.display.update()
def blink(args):
dirty_rects = []
s = args[0]
b = args[1]
sn = args[2]
if b == 0:
if sn == 1:
p = [287,640]
dirty_rects.append(screen.blit(select_now, p))
pygame.display.update(dirty_rects)
else:
dirty_rects.append(screen.blit(bg_gi, (287,640), pygame.Rect(287,640,146,30)))
pygame.display.update(dirty_rects)
b = not b
args = [s,b,sn]
s.delay(name="blink", delay=0.1, handler=blink, param=args)
def line1_animation(args):
dirty_rects = []
s = args[0]
num = args[1]
line = args[2]
if line == 1:
if s.game.line1.position == 0:
dirty_rects.append(screen.blit(columna, (337, 269 - num)))
elif s.game.line1.position == 1:
dirty_rects.append(screen.blit(columna, (337, 318 - num)))
elif s.game.line1.position == 2:
dirty_rects.append(screen.blit(columna, (337, 368 + num)))
elif s.game.line1.position == 3:
dirty_rects.append(screen.blit(columna, (337, 318 + num)))
nc_p = [230,368]
dirty_rects.append(screen.blit(number_card, nc_p))
if (s.game.anti_cheat.status == True):
dirty_rects.append(screen.blit(bg_gi, (224,264), pygame.Rect(224,264,270,408)))
else:
dirty_rects.append(screen.blit(bg_off, (224,264), pygame.Rect(224,264,270,408)))
p = [307,217]
dirty_rects.append(screen.blit(bg_gi, p, pygame.Rect(p[0],p[1],42,57)))
dirty_rects.append(screen.blit(letter1, p))
p = [346,217]
dirty_rects.append(screen.blit(bg_gi, p, pygame.Rect(p[0],p[1],55,57)))
dirty_rects.append(screen.blit(letter2, p))
p = [402,217]
dirty_rects.append(screen.blit(bg_gi, p, pygame.Rect(p[0],p[1],49,57)))
dirty_rects.append(screen.blit(letter3, p))
p = [451,217]
dirty_rects.append(screen.blit(bg_gi, p, pygame.Rect(p[0],p[1],43,57)))
dirty_rects.append(screen.blit(letter4, p))
p = [497,217]
dirty_rects.append(screen.blit(bg_gi, p, pygame.Rect(p[0],p[1],54,57)))
dirty_rects.append(screen.blit(letter5, p))
p = [572,217]
dirty_rects.append(screen.blit(bg_gi, p, pygame.Rect(p[0],p[1],53,57)))
dirty_rects.append(screen.blit(letter6, p))
if s.game.red_odds.position < 5:
p = [307,217]
dirty_rects.append(screen.blit(bg_gi, p, pygame.Rect(p[0],p[1],42,57)))
dirty_rects.append(screen.blit(letter1, p))
dirty_rects.append(screen.blit(red_letter1, p))
if s.game.red_odds.position in [5,6]:
p = [346,217]
dirty_rects.append(screen.blit(bg_gi, p, pygame.Rect(p[0],p[1],55,57)))
dirty_rects.append(screen.blit(letter2, p))
dirty_rects.append(screen.blit(red_letter2, p))
if s.game.red_odds.position == 7:
p = [402,217]
dirty_rects.append(screen.blit(bg_gi, p, pygame.Rect(p[0],p[1],49,57)))
dirty_rects.append(screen.blit(letter3, p))
dirty_rects.append(screen.blit(red_letter3, p))
if s.game.red_odds.position == 8:
p = [451,217]
dirty_rects.append(screen.blit(bg_gi, p, pygame.Rect(p[0],p[1],43,57)))
dirty_rects.append(screen.blit(letter4, p))
dirty_rects.append(screen.blit(red_letter4, p))
if s.game.red_odds.position == 9:
p = [497,217]
dirty_rects.append(screen.blit(bg_gi, p, pygame.Rect(p[0],p[1],54,57)))
dirty_rects.append(screen.blit(letter5, p))
dirty_rects.append(screen.blit(red_letter5, p))
if s.game.red_odds.position == 10:
p = [572,217]
dirty_rects.append(screen.blit(bg_gi, p, pygame.Rect(p[0],p[1],53,57)))
dirty_rects.append(screen.blit(letter6, p))
dirty_rects.append(screen.blit(red_letter6, p))
if s.game.mystic_lines.position >= 4:
p = [335,591]
dirty_rects.append(screen.blit(bg_gi, p, pygame.Rect(p[0],p[1],49,48)))
dirty_rects.append(screen.blit(ml_letter, p))
if s.game.mystic_lines.position >= 7:
p = [262,591]
dirty_rects.append(screen.blit(bg_gi, p, pygame.Rect(p[0],p[1],49,48)))
dirty_rects.append(screen.blit(ml_letter, p))
if s.game.mystic_lines.position == 10:
p = [410,591]
dirty_rects.append(screen.blit(bg_gi, p, pygame.Rect(p[0],p[1],49,48)))
dirty_rects.append(screen.blit(ml_letter, p))
pygame.display.update(dirty_rects)
def line2_animation(args):
dirty_rects = []
s = args[0]
num = args[1]
line = args[2]
if line == 2:
if s.game.line2.position == 0:
dirty_rects.append(screen.blit(columnb2, (233 - num, 369)))
dirty_rects.append(screen.blit(columnb1, (286 + num, 369)))
elif s.game.line2.position == 1:
dirty_rects.append(screen.blit(columnb1, (233 - num, 369)))
dirty_rects.append(screen.blit(columnb2, (286 + num, 369)))
nc_p = [230,368]
dirty_rects.append(screen.blit(number_card, nc_p))
if (s.game.anti_cheat.status == True):
dirty_rects.append(screen.blit(bg_gi, (233,369), pygame.Rect(233,369,270,212)))
else:
dirty_rects.append(screen.blit(bg_off, (233,369), pygame.Rect(233,369,270,212)))
if s.game.mystic_lines.position >= 4:
p = [335,591]
dirty_rects.append(screen.blit(bg_gi, p, pygame.Rect(p[0],p[1],49,48)))
dirty_rects.append(screen.blit(ml_letter, p))
if s.game.mystic_lines.position >= 7:
p = [262,591]
dirty_rects.append(screen.blit(bg_gi, p, pygame.Rect(p[0],p[1],49,48)))
dirty_rects.append(screen.blit(ml_letter, p))
if s.game.mystic_lines.position == 10:
p = [410,591]
dirty_rects.append(screen.blit(bg_gi, p, pygame.Rect(p[0],p[1],49,48)))
dirty_rects.append(screen.blit(ml_letter, p))
pygame.display.update(dirty_rects)
def line3_animation(args):
dirty_rects = []
s = args[0]
num = args[1]
line = args[2]
if line == 3:
if s.game.line3.position == 0:
dirty_rects.append(screen.blit(columnc2, (389 - num, 369)))
dirty_rects.append(screen.blit(columnc1, (440 + num, 369)))
elif s.game.line3.position == 1:
dirty_rects.append(screen.blit(columnc1, (389 - num, 369)))
dirty_rects.append(screen.blit(columnc2, (440 + num, 369)))
nc_p = [230,368]
dirty_rects.append(screen.blit(number_card, nc_p))
if (s.game.anti_cheat.status == True):
dirty_rects.append(screen.blit(bg_gi, (230,369), pygame.Rect(230,369,273,212)))
else:
dirty_rects.append(screen.blit(bg_off, (230,369), pygame.Rect(230,369,273,212)))
if s.game.mystic_lines.position >= 4:
p = [335,591]
dirty_rects.append(screen.blit(bg_gi, p, pygame.Rect(p[0],p[1],49,48)))
dirty_rects.append(screen.blit(ml_letter, p))
if s.game.mystic_lines.position >= 7:
p = [262,591]
dirty_rects.append(screen.blit(bg_gi, p, pygame.Rect(p[0],p[1],49,48)))
dirty_rects.append(screen.blit(ml_letter, p))
if s.game.mystic_lines.position == 10:
p = [410,591]
dirty_rects.append(screen.blit(bg_gi, p, pygame.Rect(p[0],p[1],49,48)))
dirty_rects.append(screen.blit(ml_letter, p))
pygame.display.update(dirty_rects)
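# Animate the Extra Ball lamps: blank the artwork for every Extra Ball step
# not yet reached, then flash the step selected by the animation frame 'num'
# and return.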
def eb_animation(args):
global screen
dirty_rects = []
s = args[0]
num = args[1]
if s.game.extra_ball.position < 1:
dirty_rects.append(screen.blit(bg_gi, (150,1040), pygame.Rect(150,1040,47,31)))
if s.game.extra_ball.position < 2:
dirty_rects.append(screen.blit(bg_gi, (201,1040), pygame.Rect(201,1040,59,34)))
if s.game.extra_ball.position < 3:
dirty_rects.append(screen.blit(bg_gi, (262,1040), pygame.Rect(262,1040,59,34)))
if s.game.extra_ball.position < 4:
dirty_rects.append(screen.blit(bg_gi, (323,1040), pygame.Rect(323,1040,47,31)))
if s.game.extra_ball.position < 5:
dirty_rects.append(screen.blit(bg_gi, (374,1040), pygame.Rect(374,1040,59,34)))
if s.game.extra_ball.position < 6:
dirty_rects.append(screen.blit(bg_gi, (436,1040), pygame.Rect(436,1040,59,34)))
if s.game.extra_ball.position < 7:
dirty_rects.append(screen.blit(bg_gi, (498,1040), pygame.Rect(498,1040,47,31)))
if s.game.extra_ball.position < 8:
dirty_rects.append(screen.blit(bg_gi, (548,1040), pygame.Rect(548,1040,59,34)))
if s.game.extra_ball.position < 9:
dirty_rects.append(screen.blit(bg_gi, (610,1040), pygame.Rect(610,1040,59,34)))
pygame.display.update(dirty_rects)
if num in [0,25,14,49]:
if s.game.extra_ball.position < 1:
p = [150,1040]
dirty_rects.append(screen.blit(eb_number, p))
pygame.display.update(dirty_rects)
return
elif num in [39,1,26,15]:
if s.game.extra_ball.position < 2:
p = [201,1040]
dirty_rects.append(screen.blit(eb, p))
pygame.display.update(dirty_rects)
return
elif num in [3,4,17,28,29,40]:
if s.game.extra_ball.position < 3:
p = [262,1040]
dirty_rects.append(screen.blit(eb, p))
pygame.display.update(dirty_rects)
return
elif num in [5,18,30,43]:
if s.game.extra_ball.position < 4:
p = [323,1040]
dirty_rects.append(screen.blit(eb_number, p))
pygame.display.update(dirty_rects)
return
elif num in [7,8,19,32,33,44]:
if s.game.extra_ball.position < 5:
p = [374,1040]
dirty_rects.append(screen.blit(eb, p))
pygame.display.update(dirty_rects)
return
elif num in [9,10,20,34,35,45]:
if s.game.extra_ball.position < 6:
p = [436,1040]
dirty_rects.append(screen.blit(eb, p))
pygame.display.update(dirty_rects)
return
elif num in [11,21,36,46]:
if s.game.extra_ball.position < 7:
p = [498,1040]
dirty_rects.append(screen.blit(eb_number, p))
pygame.display.update(dirty_rects)
return
elif num in [12,22,37,47]:
if s.game.extra_ball.position < 8:
p = [548,1040]
dirty_rects.append(screen.blit(eb, p))
pygame.display.update(dirty_rects)
return
elif num in [2,6,13,16,23,27,31,38,41,48]:
if s.game.extra_ball.position < 9:
p = [610,1040]
dirty_rects.append(screen.blit(eb, p))
pygame.display.update(dirty_rects)
return
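# Blank the double and odds artwork everywhere except the currently-held
# values, so a following draw pass can flash candidates cleanly.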
def clear_odds(s, num):
global screen
dirty_rects = []
if s.game.double_red.status == False:
dirty_rects.append(screen.blit(bg_gi, (20,610), pygame.Rect(20,610,74,74)))
if s.game.double_yellow.status == False:
dirty_rects.append(screen.blit(bg_gi, (94,610), pygame.Rect(94,610,74,74)))
if s.game.double_green.status == False:
dirty_rects.append(screen.blit(bg_gi, (20,683), pygame.Rect(20,683,74,74)))
if s.game.double_blue.status == False:
dirty_rects.append(screen.blit(bg_gi, (94,683), pygame.Rect(94,683,74,74)))
if s.game.yellow_odds.position != 2:
dirty_rects.append(screen.blit(bg_gi, (230,907), pygame.Rect(230,907,46,61)))
if s.game.yellow_odds.position != 5:
dirty_rects.append(screen.blit(bg_gi, (343,907), pygame.Rect(343,907,46,61)))
if s.game.yellow_odds.position != 6:
dirty_rects.append(screen.blit(bg_gi, (385,907), pygame.Rect(385,907,46,61)))
if s.game.yellow_odds.position != 7:
dirty_rects.append(screen.blit(bg_gi, (436,907), pygame.Rect(436,907,46,61)))
if s.game.yellow_odds.position != 8:
dirty_rects.append(screen.blit(bg_gi, (483,907), pygame.Rect(483,907,46,61)))
if s.game.yellow_odds.position != 9:
dirty_rects.append(screen.blit(bg_gi, (530,907), pygame.Rect(530,907,46,61)))
if s.game.yellow_odds.position != 10:
dirty_rects.append(screen.blit(bg_gi, (578,907), pygame.Rect(578,907,46,61)))
if s.game.red_odds.position != 3:
dirty_rects.append(screen.blit(bg_gi, (267,783), pygame.Rect(267,783,46,61)))
if s.game.red_odds.position != 4:
dirty_rects.append(screen.blit(bg_gi, (305,783), pygame.Rect(305,783,46,61)))
if s.game.red_odds.position != 6:
dirty_rects.append(screen.blit(bg_gi, (385,783), pygame.Rect(385,783,46,61)))
if s.game.red_odds.position != 7:
dirty_rects.append(screen.blit(bg_gi, (436,783), pygame.Rect(436,783,46,61)))
if s.game.red_odds.position != 8:
dirty_rects.append(screen.blit(bg_gi, (483,783), pygame.Rect(483,783,46,61)))
if s.game.red_odds.position != 9:
dirty_rects.append(screen.blit(bg_gi, (530,783), pygame.Rect(530,783,46,61)))
if s.game.red_odds.position != 10:
dirty_rects.append(screen.blit(bg_gi, (578,783), pygame.Rect(578,783,46,61)))
if s.game.blue_odds.position != 2:
dirty_rects.append(screen.blit(bg_gi, (230,973), pygame.Rect(230,973,46,61)))
dirty_rects.append(screen.blit(bg_gi, (230,843), pygame.Rect(230,843,46,61)))
if s.game.blue_odds.position != 5:
dirty_rects.append(screen.blit(bg_gi, (343,973), pygame.Rect(343,973,46,61)))
dirty_rects.append(screen.blit(bg_gi, (343,843), pygame.Rect(343,843,46,61)))
if s.game.blue_odds.position != 7:
dirty_rects.append(screen.blit(bg_gi, (436,973), pygame.Rect(436,973,46,61)))
dirty_rects.append(screen.blit(bg_gi, (436,843), pygame.Rect(436,843,46,61)))
if s.game.blue_odds.position != 8:
dirty_rects.append(screen.blit(bg_gi, (483,973), pygame.Rect(483,973,46,61)))
dirty_rects.append(screen.blit(bg_gi, (483,843), pygame.Rect(483,843,46,61)))
if s.game.blue_odds.position != 9:
dirty_rects.append(screen.blit(bg_gi, (530,973), pygame.Rect(530,973,46,61)))
dirty_rects.append(screen.blit(bg_gi, (530,843), pygame.Rect(530,843,46,61)))
if s.game.blue_odds.position != 10:
dirty_rects.append(screen.blit(bg_gi, (578,973), pygame.Rect(578,973,46,61)))
dirty_rects.append(screen.blit(bg_gi, (578,843), pygame.Rect(578,843,46,61)))
pygame.display.update(dirty_rects)
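# Flash one double or odds value chosen by the animation frame 'num',
# skipping values the player already holds.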
def draw_odds_animation(s, num):
global screen
dirty_rects = []
if num in [7,32]:
if s.game.double_red.status == False:
p = [20,610]
dirty_rects.append(screen.blit(red_double, p))
pygame.display.update(dirty_rects)
return
if num in [15,40]:
if s.game.double_yellow.status == False:
p = [94,608]
dirty_rects.append(screen.blit(yellow_double, p))
pygame.display.update(dirty_rects)
return
if num in [0,25]:
if s.game.double_green.status == False:
p = [20,683]
dirty_rects.append(screen.blit(green_double, p))
pygame.display.update(dirty_rects)
return
if num in [9,34]:
if s.game.double_blue.status == False:
p = [94,683]
dirty_rects.append(screen.blit(blue_double, p))
pygame.display.update(dirty_rects)
return
if num in [22,47]:
if s.game.yellow_odds.position != 2:
p = [230,907]
dirty_rects.append(screen.blit(odds, p))
pygame.display.update(dirty_rects)
return
if num in [13,38]:
if s.game.yellow_odds.position != 5:
p = [343,907]
dirty_rects.append(screen.blit(odds, p))
pygame.display.update(dirty_rects)
return
if num in [20,45]:
if s.game.yellow_odds.position != 6:
p = [385,907]
dirty_rects.append(screen.blit(odds, p))
pygame.display.update(dirty_rects)
return
if num in [20,45]:
if s.game.yellow_odds.position != 7:
p = [436,907]
dirty_rects.append(screen.blit(odds, p))
pygame.display.update(dirty_rects)
return
if num in [12,37]:
if s.game.yellow_odds.position != 8:
p = [483,907]
dirty_rects.append(screen.blit(odds, p))
pygame.display.update(dirty_rects)
return
if num in [7,32]:
if s.game.yellow_odds.position != 9:
p = [530,907]
dirty_rects.append(screen.blit(odds, p))
pygame.display.update(dirty_rects)
return
if num in [21,46]:
if s.game.yellow_odds.position != 10:
p = [578,907]
dirty_rects.append(screen.blit(odds, p))
pygame.display.update(dirty_rects)
return
if num in [2,27]:
if s.game.red_odds.position != 3:
p = [267,783]
dirty_rects.append(screen.blit(odds, p))
pygame.display.update(dirty_rects)
return
if num in [10,35]:
if s.game.red_odds.position != 4:
p = [305,783]
dirty_rects.append(screen.blit(odds, p))
pygame.display.update(dirty_rects)
return
if num in [12,37]:
if s.game.red_odds.position != 6:
p = [385,783]
dirty_rects.append(screen.blit(odds, p))
pygame.display.update(dirty_rects)
return
if num in [6,31]:
if s.game.red_odds.position != 7:
p = [436,783]
dirty_rects.append(screen.blit(odds, p))
pygame.display.update(dirty_rects)
return
if num in [14,39]:
if s.game.red_odds.position != 8:
p = [483,783]
dirty_rects.append(screen.blit(odds, p))
pygame.display.update(dirty_rects)
return
if num in [1,26]:
if s.game.red_odds.position != 9:
p = [530,783]
dirty_rects.append(screen.blit(odds, p))
pygame.display.update(dirty_rects)
return
if num in [20,45]:
if s.game.red_odds.position != 10:
p = [578,783]
dirty_rects.append(screen.blit(odds, p))
pygame.display.update(dirty_rects)
return
if num in [8,33]:
if s.game.blue_odds.position != 2:
p = [230,973]
dirty_rects.append(screen.blit(odds, p))
p = [230,843]
dirty_rects.append(screen.blit(odds, p))
pygame.display.update(dirty_rects)
return
if num in [23,48]:
if s.game.blue_odds.position != 5:
p = [343,973]
dirty_rects.append(screen.blit(odds, p))
p = [343,843]
dirty_rects.append(screen.blit(odds, p))
pygame.display.update(dirty_rects)
return
if num in [3,28]:
if s.game.blue_odds.position != 7:
p = [436,973]
dirty_rects.append(screen.blit(odds, p))
p = [436,843]
dirty_rects.append(screen.blit(odds, p))
            pygame.display.update(dirty_rects)
            return
if num in [18,43]:
if s.game.blue_odds.position != 8:
p = [483,973]
dirty_rects.append(screen.blit(odds, p))
p = [483,843]
dirty_rects.append(screen.blit(odds, p))
pygame.display.update(dirty_rects)
return
if num in [5,30]:
if s.game.blue_odds.position != 9:
p = [530,973]
dirty_rects.append(screen.blit(odds, p))
p = [530,843]
dirty_rects.append(screen.blit(odds, p))
pygame.display.update(dirty_rects)
return
if num in [16,41]:
if s.game.blue_odds.position != 10:
p = [578,973]
dirty_rects.append(screen.blit(odds, p))
p = [578,843]
dirty_rects.append(screen.blit(odds, p))
pygame.display.update(dirty_rects)
return
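# Per-frame callback: erase stale odds artwork, then draw this frame's flash.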
def odds_animation(args):
global screen
dirty_rects = []
s = args[0]
num = args[1]
clear_odds(s, num)
draw_odds_animation(s, num)
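# Blank the feature artwork (selection features, stars, red letters and
# Mystic Lines markers) that is not currently active.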
def clear_features(s, num):
global screen
dirty_rects = []
if s.game.selection_feature.position > 7:
dirty_rects.append(screen.blit(bg_gi, (18,548), pygame.Rect(18,548,148,48)))
if s.game.mystic_lines.position < 4 and s.game.two_red_letter.status == False and s.game.three_red_letter.status == False:
dirty_rects.append(screen.blit(bg_gi, (18,548), pygame.Rect(18,548,148,48)))
if s.game.selection_feature.position not in [7,8]:
dirty_rects.append(screen.blit(bg_gi, (19,416), pygame.Rect(19,416,148,48)))
if s.game.mystic_lines.position < 4 and s.game.two_red_letter.status == False and s.game.three_red_letter.status == False:
dirty_rects.append(screen.blit(bg_gi, (19,416), pygame.Rect(19,416,148,48)))
if s.game.selection_feature.position < 9:
dirty_rects.append(screen.blit(bg_gi, (18,372), pygame.Rect(18,372,148,48)))
if s.game.mystic_lines.position < 4 and s.game.two_red_letter.status == False and s.game.three_red_letter.status == False:
dirty_rects.append(screen.blit(bg_gi, (18,372), pygame.Rect(18,372,148,48)))
if s.game.yellow_star.status == False:
dirty_rects.append(screen.blit(bg_gi, (18,504), pygame.Rect(18,504,148,48)))
if s.game.red_star.status == False:
dirty_rects.append(screen.blit(bg_gi, (18,460), pygame.Rect(18,460,148,48)))
if s.game.two_red_letter.status == False:
dirty_rects.append(screen.blit(bg_gi, (92,220), pygame.Rect(92,220,76,41)))
if s.game.three_red_letter.status == False:
dirty_rects.append(screen.blit(bg_gi, (18,219), pygame.Rect(18,219,76,41)))
if s.game.three_stars.status == False:
dirty_rects.append(screen.blit(bg_gi, (18,334), pygame.Rect(18,334,77,27)))
if s.game.six_stars.status == False:
dirty_rects.append(screen.blit(bg_gi, (92,334), pygame.Rect(92,334,77,27)))
if s.game.mystic_lines.position != 2:
dirty_rects.append(screen.blit(bg_gi, (236,680), pygame.Rect(236,680,29,29)))
if s.game.mystic_lines.position < 4:
dirty_rects.append(screen.blit(bg_gi, (335,591), pygame.Rect(335,591,49,48)))
if s.game.mystic_lines.position != 5:
dirty_rects.append(screen.blit(bg_gi, (334,680), pygame.Rect(334,680,29,29)))
if s.game.mystic_lines.position < 7:
dirty_rects.append(screen.blit(bg_gi, (262,591), pygame.Rect(262,591,49,48)))
if s.game.mystic_lines.position != 9:
dirty_rects.append(screen.blit(bg_gi, (459,680), pygame.Rect(459,680,29,29)))
if s.game.mystic_lines.position < 10:
dirty_rects.append(screen.blit(bg_gi, (410,591), pygame.Rect(410,591,49,48)))
pygame.display.update(dirty_rects)
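# Flash one inactive feature chosen by the animation frame 'num'; the red
# and yellow rollover branches also pulse the matching lamp coils
# (redROLamp / yellowROLamp).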
def draw_feature_animation(s, num):
global screen
dirty_rects = []
if num in [10,35]:
if s.game.selection_feature.position not in [1,2,3,4,5,6] and (s.game.mystic_lines.position < 4 and s.game.two_red_letter.status == False and s.game.three_red_letter.status == False):
p = [18,548]
dirty_rects.append(screen.blit(time, p))
pygame.display.update(dirty_rects)
return
if num in [9,34]:
if s.game.selection_feature.position not in [7,8] and (s.game.mystic_lines.position < 4 and s.game.two_red_letter.status == False and s.game.three_red_letter.status == False):
p = [19,416]
dirty_rects.append(screen.blit(time, p))
pygame.display.update(dirty_rects)
return
if num in [6,31]:
if s.game.selection_feature.position not in [9] and (s.game.mystic_lines.position < 4 and s.game.two_red_letter.status == False and s.game.three_red_letter.status == False):
p = [18,372]
dirty_rects.append(screen.blit(time, p))
pygame.display.update(dirty_rects)
return
if num in [11,36]:
if s.game.red_star.status == False:
p = [18,460]
dirty_rects.append(screen.blit(time, p))
pygame.display.update(dirty_rects)
s.game.coils.redROLamp.pulse(85)
return
if num in [4,29]:
if s.game.yellow_star.status == False:
p = [18,504]
dirty_rects.append(screen.blit(time, p))
pygame.display.update(dirty_rects)
s.game.coils.yellowROLamp.pulse(85)
return
if num in [13,38]:
if s.game.three_red_letter.status == False:
p = [18,219]
dirty_rects.append(screen.blit(three_red, p))
pygame.display.update(dirty_rects)
return
if num in [44,19]:
if s.game.two_red_letter.status == False:
p = [92,220]
dirty_rects.append(screen.blit(two_red, p))
pygame.display.update(dirty_rects)
return
if num in [16,41]:
if s.game.three_stars.status == False:
p = [18,334]
dirty_rects.append(screen.blit(three_stars, p))
pygame.display.update(dirty_rects)
return
if num in [4,29]:
if s.game.six_stars.status == False:
p = [92,334]
dirty_rects.append(screen.blit(six_stars, p))
pygame.display.update(dirty_rects)
return
if num in [5,30]:
if s.game.mystic_lines.position != 2:
p = [236,680]
dirty_rects.append(screen.blit(ml_arrow, p))
pygame.display.update(dirty_rects)
return
if num in [23,48]:
if s.game.mystic_lines.position < 4:
p = [335,591]
dirty_rects.append(screen.blit(ml_letter, p))
pygame.display.update(dirty_rects)
return
if num in [0,25]:
if s.game.mystic_lines.position != 5:
p = [334,680]
dirty_rects.append(screen.blit(ml_arrow, p))
pygame.display.update(dirty_rects)
return
if num in [12,37,22,47]:
if s.game.mystic_lines.position < 7:
p = [262,591]
dirty_rects.append(screen.blit(ml_letter, p))
pygame.display.update(dirty_rects)
return
if num in [9,34]:
if s.game.mystic_lines.position != 9:
p = [459,680]
dirty_rects.append(screen.blit(ml_arrow, p))
pygame.display.update(dirty_rects)
return
if num in [10,35,24,49]:
if s.game.mystic_lines.position < 10:
p = [410,591]
dirty_rects.append(screen.blit(ml_letter, p))
pygame.display.update(dirty_rects)
return
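# Per-frame callback combining the feature clear and draw passes.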
def feature_animation(args):
global screen
dirty_rects = []
s = args[0]
num = args[1]
clear_features(s, num)
draw_feature_animation(s, num)
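# Per-frame callback that animates the odds and the features together.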
def both_animation(args):
global screen
dirty_rects = []
s = args[0]
num = args[1]
clear_features(s, num)
clear_odds(s, num)
draw_odds_animation(s, num)
draw_feature_animation(s, num)
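# Animate the special odds ladder: blank every rung except the one currently
# held, then flash the rung selected by the animation frame 'num'.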
def special_animation(args):
global screen
dirty_rects = []
s = args[0]
num = args[1]
if s.game.special_odds.position != 2:
dirty_rects.append(screen.blit(bg_gi, (599,482), pygame.Rect(599,482,90,30)))
dirty_rects.append(screen.blit(bg_gi, (547,482), pygame.Rect(547,482,42,32)))
if s.game.special_odds.position != 3:
dirty_rects.append(screen.blit(bg_gi, (599,453), pygame.Rect(599,453,90,30)))
dirty_rects.append(screen.blit(bg_gi, (547,452), pygame.Rect(547,452,42,32)))
if s.game.special_odds.position != 4:
dirty_rects.append(screen.blit(bg_gi, (599,424), pygame.Rect(599,424,90,30)))
dirty_rects.append(screen.blit(bg_gi, (547,424), pygame.Rect(547,424,42,32)))
if s.game.special_odds.position != 5:
dirty_rects.append(screen.blit(bg_gi, (599,395), pygame.Rect(599,395,90,30)))
dirty_rects.append(screen.blit(bg_gi, (547,394), pygame.Rect(547,394,42,32)))
if s.game.special_odds.position != 6:
dirty_rects.append(screen.blit(bg_gi, (598,366), pygame.Rect(598,366,90,30)))
dirty_rects.append(screen.blit(bg_gi, (547,366), pygame.Rect(547,366,42,32)))
if s.game.special_odds.position != 7:
dirty_rects.append(screen.blit(bg_gi, (598,337), pygame.Rect(598,337,90,30)))
dirty_rects.append(screen.blit(bg_gi, (548,336), pygame.Rect(548,336,42,32)))
if s.game.special_odds.position != 8:
dirty_rects.append(screen.blit(bg_gi, (598,308), pygame.Rect(598,308,90,30)))
dirty_rects.append(screen.blit(bg_gi, (548,308), pygame.Rect(548,308,42,32)))
if s.game.special_odds.position != 9:
dirty_rects.append(screen.blit(bg_gi, (599,278), pygame.Rect(599,278,90,30)))
dirty_rects.append(screen.blit(bg_gi, (548,279), pygame.Rect(548,279,42,32)))
pygame.display.update(dirty_rects)
if num in [18,19,43,44]:
if s.game.special_odds.position < 2:
p = [599,482]
dirty_rects.append(screen.blit(special_odds, p))
p = [547,482]
dirty_rects.append(screen.blit(seven_odds, p))
pygame.display.update(dirty_rects)
return
if num in [20,21,45,46]:
if s.game.special_odds.position < 3:
p = [599,453]
dirty_rects.append(screen.blit(special_odds, p))
p = [547,452]
dirty_rects.append(screen.blit(seven_odds, p))
pygame.display.update(dirty_rects)
return
if num in [14,15,39,40]:
if s.game.special_odds.position < 4:
p = [599,424]
dirty_rects.append(screen.blit(special_odds, p))
p = [547,424]
dirty_rects.append(screen.blit(seven_odds, p))
pygame.display.update(dirty_rects)
return
if num in [16,17,41,42]:
if s.game.special_odds.position < 5:
p = [599,395]
dirty_rects.append(screen.blit(special_odds, p))
p = [547,394]
dirty_rects.append(screen.blit(seven_odds, p))
pygame.display.update(dirty_rects)
return
if num in [6,7,10,11,31,32,35,36]:
if s.game.special_odds.position < 6:
p = [598,366]
dirty_rects.append(screen.blit(special_odds, p))
p = [547,366]
dirty_rects.append(screen.blit(seven_odds, p))
pygame.display.update(dirty_rects)
return
if num in [4,5,12,13,29,30,37,38]:
if s.game.special_odds.position < 7:
p = [598,337]
dirty_rects.append(screen.blit(special_odds, p))
p = [547,336]
dirty_rects.append(screen.blit(seven_odds, p))
pygame.display.update(dirty_rects)
return
if num in [0,1,2,3,8,9,25,26,27,28,33,34]:
if s.game.special_odds.position < 8:
p = [598,308]
dirty_rects.append(screen.blit(special_odds, p))
p = [547,308]
dirty_rects.append(screen.blit(seven_odds, p))
pygame.display.update(dirty_rects)
return
if num in [22,23,47,48]:
if s.game.special_odds.position < 9:
p = [599,278]
dirty_rects.append(screen.blit(special_odds, p))
p = [548,279]
dirty_rects.append(screen.blit(seven_odds, p))
pygame.display.update(dirty_rects)
return
| gpl-3.0 | -9,146,427,976,417,311,000 | 37.336387 | 191 | 0.552716 | false |
luotao1/Paddle | python/paddle/fluid/tests/unittests/test_ir_memory_optimize_pass.py | 2 | 3516 | # Copyright (c) 2018 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from parallel_executor_test_base import TestParallelExecutorBase, DeviceType
import paddle.fluid as fluid
import paddle.fluid.core as core
import numpy as np
import paddle
import paddle.dataset.mnist as mnist
import unittest
import os
def _feed_data_helper():
img = fluid.layers.data(name='image', shape=[784], dtype='float32')
label = fluid.layers.data(name='label', shape=[1], dtype='int64')
return img, label
def simple_fc_net(use_feed):
assert use_feed
x, y = _feed_data_helper()
hidden_layer = 4
for _ in range(hidden_layer):
x = fluid.layers.fc(input=x, size=20, act='relu')
y_predict = fluid.layers.fc(input=x, size=10, act='softmax')
cost = fluid.layers.cross_entropy(input=y_predict, label=y)
avg_cost = fluid.layers.mean(cost)
return avg_cost
def fc_with_inplace_net(use_feed):
assert use_feed
x, y = _feed_data_helper()
fc = fluid.layers.fc(input=x, size=20, act='relu')
fc = fluid.layers.fc(input=fc, size=10, act='relu')
reshape = fluid.layers.reshape(x=fc, shape=[-1, 2, 5])
reshape = fluid.layers.reshape(x=reshape, shape=[-1, 5, 2])
y_predict = fluid.layers.fc(input=reshape, size=10, act='softmax')
cost = fluid.layers.cross_entropy(input=y_predict, label=y)
avg_cost = fluid.layers.mean(cost)
return avg_cost
class TestMNIST(TestParallelExecutorBase):
def _dummy_data(self):
np.random.seed(5)
img = np.random.random(size=[32, 784]).astype(np.float32)
label = np.ones(shape=[32, 1], dtype='int64')
return img, label
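    # Run the same network twice -- IR memory optimization off, then on --
    # and require the first/last losses to match, i.e. the pass must not
    # change numerical results.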
def _compare_ir_memory_optimize(self, model, use_device):
if use_device == DeviceType.CUDA and not core.is_compiled_with_cuda():
return
img, label = self._dummy_data()
first_loss0, last_loss0 = self.check_network_convergence(
model,
feed_dict={"image": img,
"label": label},
use_device=use_device,
use_ir_memory_optimize=False)
first_loss1, last_loss1 = self.check_network_convergence(
model,
feed_dict={"image": img,
"label": label},
use_device=use_device,
use_ir_memory_optimize=True)
for loss in zip(first_loss0, first_loss1):
self.assertAlmostEqual(loss[0], loss[1], delta=1e-6)
for loss in zip(last_loss0, last_loss1):
self.assertAlmostEqual(loss[0], loss[1], delta=1e-6)
def test_simple_fc_net(self):
self._compare_ir_memory_optimize(simple_fc_net, DeviceType.CPU)
self._compare_ir_memory_optimize(simple_fc_net, DeviceType.CUDA)
def test_fc_with_reshape_net(self):
self._compare_ir_memory_optimize(fc_with_inplace_net, DeviceType.CPU)
self._compare_ir_memory_optimize(fc_with_inplace_net, DeviceType.CUDA)
if __name__ == '__main__':
unittest.main()
| apache-2.0 | 8,543,096,284,069,665,000 | 36.010526 | 78 | 0.655859 | false |
namccart/gnuradio | gr-digital/examples/example_costas.py | 49 | 5316 | #!/usr/bin/env python
#
# Copyright 2011-2013 Free Software Foundation, Inc.
#
# This file is part of GNU Radio
#
# GNU Radio is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 3, or (at your option)
# any later version.
#
# GNU Radio is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with GNU Radio; see the file COPYING. If not, write to
# the Free Software Foundation, Inc., 51 Franklin Street,
# Boston, MA 02110-1301, USA.
#
from gnuradio import gr, digital, filter
from gnuradio import blocks
from gnuradio import channels
from gnuradio import eng_notation
from gnuradio.eng_option import eng_option
from optparse import OptionParser
import sys
try:
import scipy
except ImportError:
print "Error: could not import scipy (http://www.scipy.org/)"
sys.exit(1)
try:
import pylab
except ImportError:
print "Error: could not import pylab (http://matplotlib.sourceforge.net/)"
sys.exit(1)
class example_costas(gr.top_block):
def __init__(self, N, sps, rolloff, ntaps, bw, noise, foffset, toffset, poffset):
gr.top_block.__init__(self)
rrc_taps = filter.firdes.root_raised_cosine(
sps, sps, 1.0, rolloff, ntaps)
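        # Random +/-1 (BPSK) symbols, rotated by the static phase offset.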
data = 2.0*scipy.random.randint(0, 2, N) - 1.0
data = scipy.exp(1j*poffset) * data
self.src = blocks.vector_source_c(data.tolist(), False)
self.rrc = filter.interp_fir_filter_ccf(sps, rrc_taps)
self.chn = channels.channel_model(noise, foffset, toffset)
self.cst = digital.costas_loop_cc(bw, 2)
self.vsnk_src = blocks.vector_sink_c()
self.vsnk_cst = blocks.vector_sink_c()
self.vsnk_frq = blocks.vector_sink_f()
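        # Flowgraph: symbols -> RRC pulse shaping -> channel model -> Costas
        # loop; output 1 of the Costas block carries its frequency estimate.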
self.connect(self.src, self.rrc, self.chn, self.cst, self.vsnk_cst)
self.connect(self.rrc, self.vsnk_src)
self.connect((self.cst,1), self.vsnk_frq)
def main():
parser = OptionParser(option_class=eng_option, conflict_handler="resolve")
parser.add_option("-N", "--nsamples", type="int", default=2000,
help="Set the number of samples to process [default=%default]")
parser.add_option("-S", "--sps", type="int", default=4,
help="Set the samples per symbol [default=%default]")
parser.add_option("-r", "--rolloff", type="eng_float", default=0.35,
help="Set the rolloff factor [default=%default]")
parser.add_option("-W", "--bandwidth", type="eng_float", default=2*scipy.pi/100.0,
help="Set the loop bandwidth [default=%default]")
parser.add_option("-n", "--ntaps", type="int", default=45,
help="Set the number of taps in the filters [default=%default]")
parser.add_option("", "--noise", type="eng_float", default=0.0,
help="Set the simulation noise voltage [default=%default]")
parser.add_option("-f", "--foffset", type="eng_float", default=0.0,
help="Set the simulation's normalized frequency offset (in Hz) [default=%default]")
parser.add_option("-t", "--toffset", type="eng_float", default=1.0,
help="Set the simulation's timing offset [default=%default]")
parser.add_option("-p", "--poffset", type="eng_float", default=0.707,
help="Set the simulation's phase offset [default=%default]")
(options, args) = parser.parse_args ()
# Adjust N for the interpolation by sps
options.nsamples = options.nsamples // options.sps
# Set up the program-under-test
put = example_costas(options.nsamples, options.sps, options.rolloff,
options.ntaps, options.bandwidth, options.noise,
options.foffset, options.toffset, options.poffset)
put.run()
data_src = scipy.array(put.vsnk_src.data())
    # Convert the Costas loop's frequency estimate from rad/sample to normalized Hz
data_frq = scipy.array(put.vsnk_frq.data()) / (2.0*scipy.pi)
    # Prepend three zero samples so the Costas output lines up with the data.
data_cst = scipy.array(3*[0,]+list(put.vsnk_cst.data()))
# Plot the Costas loop's LO frequency
f1 = pylab.figure(1, figsize=(12,10), facecolor='w')
s1 = f1.add_subplot(2,2,1)
s1.plot(data_frq)
s1.set_title("Costas LO")
s1.set_xlabel("Samples")
s1.set_ylabel("Frequency (normalized Hz)")
# Plot the IQ symbols
s3 = f1.add_subplot(2,2,2)
s3.plot(data_src.real, data_src.imag, "o")
s3.plot(data_cst.real, data_cst.imag, "rx")
s3.set_title("IQ")
s3.set_xlabel("Real part")
s3.set_ylabel("Imag part")
s3.set_xlim([-2, 2])
s3.set_ylim([-2, 2])
# Plot the symbols in time
s4 = f1.add_subplot(2,2,3)
s4.set_position([0.125, 0.05, 0.775, 0.4])
s4.plot(data_src.real, "o-")
s4.plot(data_cst.real, "rx-")
s4.set_title("Symbols")
s4.set_xlabel("Samples")
s4.set_ylabel("Real Part of Signals")
pylab.show()
if __name__ == "__main__":
try:
main()
except KeyboardInterrupt:
pass
| gpl-3.0 | -1,067,415,030,627,115,500 | 37.244604 | 105 | 0.637133 | false |
wskplho/fontuley | src/third_party/fontTools/Lib/fontTools/ttLib/__init__.py | 2 | 30232 | """fontTools.ttLib -- a package for dealing with TrueType fonts.
This package offers translators to convert TrueType fonts to Python
objects and vice versa, and additionally from Python to TTX (an XML-based
text format) and vice versa.
Example interactive session:
Python 1.5.2c1 (#43, Mar 9 1999, 13:06:43) [CW PPC w/GUSI w/MSL]
Copyright 1991-1995 Stichting Mathematisch Centrum, Amsterdam
>>> from fontTools import ttLib
>>> tt = ttLib.TTFont("afont.ttf")
>>> tt['maxp'].numGlyphs
242
>>> tt['OS/2'].achVendID
'B&H\000'
>>> tt['head'].unitsPerEm
2048
>>> tt.saveXML("afont.ttx")
Dumping 'LTSH' table...
Dumping 'OS/2' table...
Dumping 'VDMX' table...
Dumping 'cmap' table...
Dumping 'cvt ' table...
Dumping 'fpgm' table...
Dumping 'glyf' table...
Dumping 'hdmx' table...
Dumping 'head' table...
Dumping 'hhea' table...
Dumping 'hmtx' table...
Dumping 'loca' table...
Dumping 'maxp' table...
Dumping 'name' table...
Dumping 'post' table...
Dumping 'prep' table...
>>> tt2 = ttLib.TTFont()
>>> tt2.importXML("afont.ttx")
>>> tt2['maxp'].numGlyphs
242
>>>
"""
from __future__ import print_function, division, absolute_import
from fontTools.misc.py23 import *
import os
import sys
haveMacSupport = 0
if sys.platform == "mac":
haveMacSupport = 1
elif sys.platform == "darwin" and sys.version_info[:3] != (2, 2, 0):
# Python 2.2's Mac support is broken, so don't enable it there.
haveMacSupport = 1
class TTLibError(Exception): pass
class TTFont(object):
"""The main font object. It manages file input and output, and offers
a convenient way of accessing tables.
	Tables will only be decompiled when necessary, i.e. when they're actually
accessed. This means that simple operations can be extremely fast.
"""
def __init__(self, file=None, res_name_or_index=None,
sfntVersion="\000\001\000\000", flavor=None, checkChecksums=False,
verbose=False, recalcBBoxes=True, allowVID=False, ignoreDecompileErrors=False,
recalcTimestamp=True, fontNumber=-1, lazy=False, quiet=False):
"""The constructor can be called with a few different arguments.
When reading a font from disk, 'file' should be either a pathname
pointing to a file, or a readable file object.
		If we're running on a Macintosh, 'res_name_or_index' may be an sfnt
resource name or an sfnt resource index number or zero. The latter
case will cause TTLib to autodetect whether the file is a flat file
or a suitcase. (If it's a suitcase, only the first 'sfnt' resource
will be read!)
The 'checkChecksums' argument is used to specify how sfnt
checksums are treated upon reading a file from disk:
0: don't check (default)
1: check, print warnings if a wrong checksum is found
2: check, raise an exception if a wrong checksum is found.
The TTFont constructor can also be called without a 'file'
argument: this is the way to create a new empty font.
In this case you can optionally supply the 'sfntVersion' argument,
and a 'flavor' which can be None, or 'woff'.
If the recalcBBoxes argument is false, a number of things will *not*
be recalculated upon save/compile:
1) glyph bounding boxes
2) maxp font bounding box
3) hhea min/max values
(1) is needed for certain kinds of CJK fonts (ask Werner Lemberg ;-).
		Additionally, upon importing a TTX file, this option causes glyphs
to be compiled right away. This should reduce memory consumption
greatly, and therefore should have some impact on the time needed
to parse/compile large fonts.
If the recalcTimestamp argument is false, the modified timestamp in the
'head' table will *not* be recalculated upon save/compile.
If the allowVID argument is set to true, then virtual GID's are
supported. Asking for a glyph ID with a glyph name or GID that is not in
the font will return a virtual GID. This is valid for GSUB and cmap
tables. For SING glyphlets, the cmap table is used to specify Unicode
		values for virtual GIDs used in GSUB/GPOS rules. If the GID N is requested
and does not exist in the font, or the glyphname has the form glyphN
and does not exist in the font, then N is used as the virtual GID.
		Else, the first virtual GID is assigned as 0xFFFE - 1; for subsequent new
virtual GIDs, the next is one less than the previous.
If ignoreDecompileErrors is set to True, exceptions raised in
individual tables during decompilation will be ignored, falling
back to the DefaultTable implementation, which simply keeps the
binary data.
If lazy is set to True, many data structures are loaded lazily, upon
access only.
"""
from fontTools.ttLib import sfnt
self.verbose = verbose
self.quiet = quiet
self.lazy = lazy
self.recalcBBoxes = recalcBBoxes
self.recalcTimestamp = recalcTimestamp
self.tables = {}
self.reader = None
		# Permit the user to reference glyphs that are not in the font.
		self.last_vid = 0xFFFE # Can't make it be 0xFFFF, as the world is full of unsigned short integer counters that get incremented after the last seen GID value.
self.reverseVIDDict = {}
self.VIDDict = {}
self.allowVID = allowVID
self.ignoreDecompileErrors = ignoreDecompileErrors
if not file:
self.sfntVersion = sfntVersion
self.flavor = flavor
self.flavorData = None
return
if not hasattr(file, "read"):
# assume file is a string
if haveMacSupport and res_name_or_index is not None:
# on the mac, we deal with sfnt resources as well as flat files
from . import macUtils
if res_name_or_index == 0:
if macUtils.getSFNTResIndices(file):
# get the first available sfnt font.
file = macUtils.SFNTResourceReader(file, 1)
else:
file = open(file, "rb")
else:
file = macUtils.SFNTResourceReader(file, res_name_or_index)
else:
file = open(file, "rb")
else:
pass # assume "file" is a readable file object
self.reader = sfnt.SFNTReader(file, checkChecksums, fontNumber=fontNumber)
self.sfntVersion = self.reader.sfntVersion
self.flavor = self.reader.flavor
self.flavorData = self.reader.flavorData
def close(self):
"""If we still have a reader object, close it."""
if self.reader is not None:
self.reader.close()
def save(self, file, makeSuitcase=False, reorderTables=True):
"""Save the font to disk. Similarly to the constructor,
the 'file' argument can be either a pathname or a writable
file object.
On the Mac, if makeSuitcase is true, a suitcase (resource fork)
		file will be made instead of a flat .ttf file.
"""
from fontTools.ttLib import sfnt
if not hasattr(file, "write"):
closeStream = 1
if os.name == "mac" and makeSuitcase:
from . import macUtils
file = macUtils.SFNTResourceWriter(file, self)
else:
file = open(file, "wb")
if os.name == "mac":
from fontTools.misc.macCreator import setMacCreatorAndType
setMacCreatorAndType(file.name, 'mdos', 'BINA')
else:
# assume "file" is a writable file object
closeStream = 0
tags = list(self.keys())
if "GlyphOrder" in tags:
tags.remove("GlyphOrder")
numTables = len(tags)
if reorderTables:
import tempfile
tmp = tempfile.TemporaryFile(prefix="ttx-fonttools")
else:
tmp = file
writer = sfnt.SFNTWriter(tmp, numTables, self.sfntVersion, self.flavor, self.flavorData)
done = []
for tag in tags:
self._writeTable(tag, writer, done)
writer.close()
if reorderTables:
tmp.flush()
tmp.seek(0)
reorderFontTables(tmp, file)
tmp.close()
if closeStream:
file.close()
def saveXML(self, fileOrPath, progress=None, quiet=False,
tables=None, skipTables=None, splitTables=False, disassembleInstructions=True,
bitmapGlyphDataFormat='raw'):
"""Export the font as TTX (an XML-based text file), or as a series of text
files when splitTables is true. In the latter case, the 'fileOrPath'
argument should be a path to a directory.
The 'tables' argument must either be false (dump all tables) or a
list of tables to dump. The 'skipTables' argument may be a list of tables
to skip, but only when the 'tables' argument is false.
"""
from fontTools import version
from fontTools.misc import xmlWriter
self.disassembleInstructions = disassembleInstructions
self.bitmapGlyphDataFormat = bitmapGlyphDataFormat
if not tables:
tables = list(self.keys())
if "GlyphOrder" not in tables:
tables = ["GlyphOrder"] + tables
if skipTables:
for tag in skipTables:
if tag in tables:
tables.remove(tag)
numTables = len(tables)
if progress:
progress.set(0, numTables)
idlefunc = getattr(progress, "idle", None)
else:
idlefunc = None
writer = xmlWriter.XMLWriter(fileOrPath, idlefunc=idlefunc)
writer.begintag("ttFont", sfntVersion=repr(self.sfntVersion)[1:-1],
ttLibVersion=version)
writer.newline()
if not splitTables:
writer.newline()
else:
# 'fileOrPath' must now be a path
path, ext = os.path.splitext(fileOrPath)
fileNameTemplate = path + ".%s" + ext
for i in range(numTables):
if progress:
progress.set(i)
tag = tables[i]
if splitTables:
tablePath = fileNameTemplate % tagToIdentifier(tag)
tableWriter = xmlWriter.XMLWriter(tablePath, idlefunc=idlefunc)
tableWriter.begintag("ttFont", ttLibVersion=version)
tableWriter.newline()
tableWriter.newline()
writer.simpletag(tagToXML(tag), src=os.path.basename(tablePath))
writer.newline()
else:
tableWriter = writer
self._tableToXML(tableWriter, tag, progress, quiet)
if splitTables:
tableWriter.endtag("ttFont")
tableWriter.newline()
tableWriter.close()
if progress:
progress.set((i + 1))
writer.endtag("ttFont")
writer.newline()
writer.close()
if self.verbose:
debugmsg("Done dumping TTX")
def _tableToXML(self, writer, tag, progress, quiet):
if tag in self:
table = self[tag]
report = "Dumping '%s' table..." % tag
else:
report = "No '%s' table found." % tag
if progress:
progress.setLabel(report)
elif self.verbose:
debugmsg(report)
else:
if not quiet:
print(report)
if tag not in self:
return
xmlTag = tagToXML(tag)
if hasattr(table, "ERROR"):
writer.begintag(xmlTag, ERROR="decompilation error")
else:
writer.begintag(xmlTag)
writer.newline()
if tag in ("glyf", "CFF "):
table.toXML(writer, self, progress)
else:
table.toXML(writer, self)
writer.endtag(xmlTag)
writer.newline()
writer.newline()
def importXML(self, file, progress=None, quiet=False):
"""Import a TTX file (an XML-based text format), so as to recreate
a font object.
"""
if "maxp" in self and "post" in self:
# Make sure the glyph order is loaded, as it otherwise gets
# lost if the XML doesn't contain the glyph order, yet does
# contain the table which was originally used to extract the
# glyph names from (ie. 'post', 'cmap' or 'CFF ').
self.getGlyphOrder()
from fontTools.misc import xmlReader
reader = xmlReader.XMLReader(file, self, progress, quiet)
reader.read()
def isLoaded(self, tag):
"""Return true if the table identified by 'tag' has been
decompiled and loaded into memory."""
return tag in self.tables
def has_key(self, tag):
if self.isLoaded(tag):
return True
elif self.reader and tag in self.reader:
return True
elif tag == "GlyphOrder":
return True
else:
return False
__contains__ = has_key
def keys(self):
keys = list(self.tables.keys())
if self.reader:
for key in list(self.reader.keys()):
if key not in keys:
keys.append(key)
if "GlyphOrder" in keys:
keys.remove("GlyphOrder")
keys = sortedTagList(keys)
return ["GlyphOrder"] + keys
def __len__(self):
return len(list(self.keys()))
def __getitem__(self, tag):
tag = Tag(tag)
try:
return self.tables[tag]
except KeyError:
if tag == "GlyphOrder":
table = GlyphOrder(tag)
self.tables[tag] = table
return table
if self.reader is not None:
import traceback
if self.verbose:
debugmsg("Reading '%s' table from disk" % tag)
data = self.reader[tag]
tableClass = getTableClass(tag)
table = tableClass(tag)
self.tables[tag] = table
if self.verbose:
debugmsg("Decompiling '%s' table" % tag)
try:
table.decompile(data, self)
except:
if not self.ignoreDecompileErrors:
raise
# fall back to DefaultTable, retaining the binary table data
print("An exception occurred during the decompilation of the '%s' table" % tag)
from .tables.DefaultTable import DefaultTable
file = StringIO()
traceback.print_exc(file=file)
table = DefaultTable(tag)
table.ERROR = file.getvalue()
self.tables[tag] = table
table.decompile(data, self)
return table
else:
raise KeyError("'%s' table not found" % tag)
def __setitem__(self, tag, table):
self.tables[Tag(tag)] = table
def __delitem__(self, tag):
if tag not in self:
raise KeyError("'%s' table not found" % tag)
if tag in self.tables:
del self.tables[tag]
if self.reader and tag in self.reader:
del self.reader[tag]
def get(self, tag, default=None):
try:
return self[tag]
except KeyError:
return default
def setGlyphOrder(self, glyphOrder):
self.glyphOrder = glyphOrder
def getGlyphOrder(self):
try:
return self.glyphOrder
except AttributeError:
pass
if 'CFF ' in self:
cff = self['CFF ']
self.glyphOrder = cff.getGlyphOrder()
elif 'post' in self:
# TrueType font
glyphOrder = self['post'].getGlyphOrder()
if glyphOrder is None:
#
# No names found in the 'post' table.
# Try to create glyph names from the unicode cmap (if available)
# in combination with the Adobe Glyph List (AGL).
#
self._getGlyphNamesFromCmap()
else:
self.glyphOrder = glyphOrder
else:
self._getGlyphNamesFromCmap()
return self.glyphOrder
def _getGlyphNamesFromCmap(self):
#
# This is rather convoluted, but then again, it's an interesting problem:
# - we need to use the unicode values found in the cmap table to
# build glyph names (eg. because there is only a minimal post table,
# or none at all).
# - but the cmap parser also needs glyph names to work with...
# So here's what we do:
# - make up glyph names based on glyphID
# - load a temporary cmap table based on those names
# - extract the unicode values, build the "real" glyph names
# - unload the temporary cmap table
#
if self.isLoaded("cmap"):
# Bootstrapping: we're getting called by the cmap parser
# itself. This means self.tables['cmap'] contains a partially
# loaded cmap, making it impossible to get at a unicode
# subtable here. We remove the partially loaded cmap and
# restore it later.
# This only happens if the cmap table is loaded before any
# other table that does f.getGlyphOrder() or f.getGlyphName().
cmapLoading = self.tables['cmap']
del self.tables['cmap']
else:
cmapLoading = None
# Make up glyph names based on glyphID, which will be used by the
# temporary cmap and by the real cmap in case we don't find a unicode
# cmap.
numGlyphs = int(self['maxp'].numGlyphs)
glyphOrder = [None] * numGlyphs
glyphOrder[0] = ".notdef"
for i in range(1, numGlyphs):
glyphOrder[i] = "glyph%.5d" % i
# Set the glyph order, so the cmap parser has something
# to work with (so we don't get called recursively).
self.glyphOrder = glyphOrder
# Get a (new) temporary cmap (based on the just invented names)
tempcmap = self['cmap'].getcmap(3, 1)
if tempcmap is not None:
# we have a unicode cmap
from fontTools import agl
cmap = tempcmap.cmap
# create a reverse cmap dict
reversecmap = {}
for unicode, name in list(cmap.items()):
reversecmap[name] = unicode
allNames = {}
for i in range(numGlyphs):
tempName = glyphOrder[i]
if tempName in reversecmap:
unicode = reversecmap[tempName]
if unicode in agl.UV2AGL:
# get name from the Adobe Glyph List
glyphName = agl.UV2AGL[unicode]
else:
# create uni<CODE> name
glyphName = "uni%04X" % unicode
tempName = glyphName
n = allNames.get(tempName, 0)
if n:
tempName = glyphName + "#" + str(n)
glyphOrder[i] = tempName
allNames[tempName] = n + 1
# Delete the temporary cmap table from the cache, so it can
# be parsed again with the right names.
del self.tables['cmap']
else:
pass # no unicode cmap available, stick with the invented names
self.glyphOrder = glyphOrder
if cmapLoading:
# restore partially loaded cmap, so it can continue loading
# using the proper names.
self.tables['cmap'] = cmapLoading
def getGlyphNames(self):
"""Get a list of glyph names, sorted alphabetically."""
glyphNames = sorted(self.getGlyphOrder()[:])
return glyphNames
def getGlyphNames2(self):
"""Get a list of glyph names, sorted alphabetically,
but not case sensitive.
"""
from fontTools.misc import textTools
return textTools.caselessSort(self.getGlyphOrder())
def getGlyphName(self, glyphID, requireReal=False):
try:
return self.getGlyphOrder()[glyphID]
except IndexError:
if requireReal or not self.allowVID:
# XXX The ??.W8.otf font that ships with OSX uses higher glyphIDs in
# the cmap table than there are glyphs. I don't think it's legal...
return "glyph%.5d" % glyphID
else:
# user intends virtual GID support
try:
glyphName = self.VIDDict[glyphID]
except KeyError:
glyphName ="glyph%.5d" % glyphID
self.last_vid = min(glyphID, self.last_vid )
self.reverseVIDDict[glyphName] = glyphID
self.VIDDict[glyphID] = glyphName
return glyphName
def getGlyphID(self, glyphName, requireReal=False):
if not hasattr(self, "_reverseGlyphOrderDict"):
self._buildReverseGlyphOrderDict()
glyphOrder = self.getGlyphOrder()
d = self._reverseGlyphOrderDict
if glyphName not in d:
if glyphName in glyphOrder:
self._buildReverseGlyphOrderDict()
return self.getGlyphID(glyphName)
else:
if requireReal:
raise KeyError(glyphName)
elif not self.allowVID:
# Handle glyphXXX only
if glyphName[:5] == "glyph":
try:
return int(glyphName[5:])
except (NameError, ValueError):
raise KeyError(glyphName)
else:
# user intends virtual GID support
try:
glyphID = self.reverseVIDDict[glyphName]
except KeyError:
# if name is in glyphXXX format, use the specified name.
if glyphName[:5] == "glyph":
try:
glyphID = int(glyphName[5:])
except (NameError, ValueError):
glyphID = None
if glyphID is None:
glyphID = self.last_vid -1
self.last_vid = glyphID
self.reverseVIDDict[glyphName] = glyphID
self.VIDDict[glyphID] = glyphName
return glyphID
glyphID = d[glyphName]
if glyphName != glyphOrder[glyphID]:
self._buildReverseGlyphOrderDict()
return self.getGlyphID(glyphName)
return glyphID
def getReverseGlyphMap(self, rebuild=False):
if rebuild or not hasattr(self, "_reverseGlyphOrderDict"):
self._buildReverseGlyphOrderDict()
return self._reverseGlyphOrderDict
def _buildReverseGlyphOrderDict(self):
self._reverseGlyphOrderDict = d = {}
glyphOrder = self.getGlyphOrder()
for glyphID in range(len(glyphOrder)):
d[glyphOrder[glyphID]] = glyphID
def _writeTable(self, tag, writer, done):
"""Internal helper function for self.save(). Keeps track of
inter-table dependencies.
"""
if tag in done:
return
tableClass = getTableClass(tag)
for masterTable in tableClass.dependencies:
if masterTable not in done:
if masterTable in self:
self._writeTable(masterTable, writer, done)
else:
done.append(masterTable)
tabledata = self.getTableData(tag)
if self.verbose:
debugmsg("writing '%s' table to disk" % tag)
writer[tag] = tabledata
done.append(tag)
def getTableData(self, tag):
"""Returns raw table data, whether compiled or directly read from disk.
"""
tag = Tag(tag)
if self.isLoaded(tag):
if self.verbose:
debugmsg("compiling '%s' table" % tag)
return self.tables[tag].compile(self)
elif self.reader and tag in self.reader:
if self.verbose:
debugmsg("Reading '%s' table from disk" % tag)
return self.reader[tag]
else:
raise KeyError(tag)
def getGlyphSet(self, preferCFF=True):
"""Return a generic GlyphSet, which is a dict-like object
mapping glyph names to glyph objects. The returned glyph objects
have a .draw() method that supports the Pen protocol, and will
have an attribute named 'width', but only *after* the .draw() method
has been called.
If the font is CFF-based, the outlines will be taken from the 'CFF '
table. Otherwise the outlines will be taken from the 'glyf' table.
If the font contains both a 'CFF ' and a 'glyf' table, you can use
the 'preferCFF' argument to specify which one should be taken.
"""
if preferCFF and "CFF " in self:
return list(self["CFF "].cff.values())[0].CharStrings
if "glyf" in self:
return _TTGlyphSet(self)
if "CFF " in self:
return list(self["CFF "].cff.values())[0].CharStrings
raise TTLibError("Font contains no outlines")
class _TTGlyphSet(object):
"""Generic dict-like GlyphSet class, meant as a TrueType counterpart
to CFF's CharString dict. See TTFont.getGlyphSet().
"""
# This class is distinct from the 'glyf' table itself because we need
# access to the 'hmtx' table, which could cause a dependency problem
# there when reading from XML.
def __init__(self, ttFont):
self._ttFont = ttFont
def keys(self):
return list(self._ttFont["glyf"].keys())
def has_key(self, glyphName):
return glyphName in self._ttFont["glyf"]
__contains__ = has_key
def __getitem__(self, glyphName):
return _TTGlyph(glyphName, self._ttFont)
def get(self, glyphName, default=None):
try:
return self[glyphName]
except KeyError:
return default
class _TTGlyph(object):
"""Wrapper for a TrueType glyph that supports the Pen protocol, meaning
that it has a .draw() method that takes a pen object as its only
argument. Additionally there is a 'width' attribute.
"""
def __init__(self, glyphName, ttFont):
self._glyphName = glyphName
self._ttFont = ttFont
self.width, self.lsb = self._ttFont['hmtx'][self._glyphName]
def draw(self, pen):
"""Draw the glyph onto Pen. See fontTools.pens.basePen for details
how that works.
"""
glyfTable = self._ttFont['glyf']
glyph = glyfTable[self._glyphName]
if hasattr(glyph, "xMin"):
offset = self.lsb - glyph.xMin
else:
offset = 0
if glyph.isComposite():
for component in glyph:
glyphName, transform = component.getComponentInfo()
pen.addComponent(glyphName, transform)
else:
coordinates, endPts, flags = glyph.getCoordinates(glyfTable)
if offset:
coordinates = coordinates + (offset, 0)
start = 0
for end in endPts:
end = end + 1
contour = coordinates[start:end].tolist()
cFlags = flags[start:end].tolist()
start = end
if 1 not in cFlags:
# There is not a single on-curve point on the curve,
# use pen.qCurveTo's special case by specifying None
# as the on-curve point.
contour.append(None)
pen.qCurveTo(*contour)
else:
					# Shuffle the points so that the contour is guaranteed
# to *end* in an on-curve point, which we'll use for
# the moveTo.
firstOnCurve = cFlags.index(1) + 1
contour = contour[firstOnCurve:] + contour[:firstOnCurve]
cFlags = cFlags[firstOnCurve:] + cFlags[:firstOnCurve]
pen.moveTo(contour[-1])
while contour:
nextOnCurve = cFlags.index(1) + 1
if nextOnCurve == 1:
pen.lineTo(contour[0])
else:
pen.qCurveTo(*contour[:nextOnCurve])
contour = contour[nextOnCurve:]
cFlags = cFlags[nextOnCurve:]
pen.closePath()
class GlyphOrder(object):
"""A pseudo table. The glyph order isn't in the font as a separate
table, but it's nice to present it as such in the TTX format.
"""
def __init__(self, tag=None):
pass
def toXML(self, writer, ttFont):
glyphOrder = ttFont.getGlyphOrder()
writer.comment("The 'id' attribute is only for humans; "
"it is ignored when parsed.")
writer.newline()
for i in range(len(glyphOrder)):
glyphName = glyphOrder[i]
writer.simpletag("GlyphID", id=i, name=glyphName)
writer.newline()
def fromXML(self, name, attrs, content, ttFont):
if not hasattr(self, "glyphOrder"):
self.glyphOrder = []
ttFont.setGlyphOrder(self.glyphOrder)
if name == "GlyphID":
self.glyphOrder.append(attrs["name"])
def getTableModule(tag):
"""Fetch the packer/unpacker module for a table.
Return None when no module is found.
"""
from . import tables
pyTag = tagToIdentifier(tag)
try:
__import__("fontTools.ttLib.tables." + pyTag)
except ImportError as err:
# If pyTag is found in the ImportError message,
# means table is not implemented. If it's not
# there, then some other module is missing, don't
# suppress the error.
if str(err).find(pyTag) >= 0:
return None
else:
raise err
else:
return getattr(tables, pyTag)
def getTableClass(tag):
"""Fetch the packer/unpacker class for a table.
Return None when no class is found.
"""
module = getTableModule(tag)
if module is None:
from .tables.DefaultTable import DefaultTable
return DefaultTable
pyTag = tagToIdentifier(tag)
tableClass = getattr(module, "table_" + pyTag)
return tableClass
def getClassTag(klass):
"""Fetch the table tag for a class object."""
name = klass.__name__
assert name[:6] == 'table_'
name = name[6:] # Chop 'table_'
return identifierToTag(name)
def newTable(tag):
"""Return a new instance of a table."""
tableClass = getTableClass(tag)
return tableClass(tag)
def _escapechar(c):
"""Helper function for tagToIdentifier()"""
import re
if re.match("[a-z0-9]", c):
return "_" + c
elif re.match("[A-Z]", c):
return c + "_"
else:
return hex(byteord(c))[2:]
def tagToIdentifier(tag):
"""Convert a table tag to a valid (but UGLY) python identifier,
as well as a filename that's guaranteed to be unique even on a
caseless file system. Each character is mapped to two characters.
Lowercase letters get an underscore before the letter, uppercase
letters get an underscore after the letter. Trailing spaces are
trimmed. Illegal characters are escaped as two hex bytes. If the
result starts with a number (as the result of a hex escape), an
extra underscore is prepended. Examples:
'glyf' -> '_g_l_y_f'
'cvt ' -> '_c_v_t'
'OS/2' -> 'O_S_2f_2'
"""
import re
tag = Tag(tag)
if tag == "GlyphOrder":
return tag
assert len(tag) == 4, "tag should be 4 characters long"
while len(tag) > 1 and tag[-1] == ' ':
tag = tag[:-1]
ident = ""
for c in tag:
ident = ident + _escapechar(c)
if re.match("[0-9]", ident):
ident = "_" + ident
return ident
def identifierToTag(ident):
"""the opposite of tagToIdentifier()"""
if ident == "GlyphOrder":
return ident
if len(ident) % 2 and ident[0] == "_":
ident = ident[1:]
assert not (len(ident) % 2)
tag = ""
for i in range(0, len(ident), 2):
if ident[i] == "_":
tag = tag + ident[i+1]
elif ident[i+1] == "_":
tag = tag + ident[i]
else:
# assume hex
tag = tag + chr(int(ident[i:i+2], 16))
# append trailing spaces
tag = tag + (4 - len(tag)) * ' '
return Tag(tag)
def tagToXML(tag):
"""Similarly to tagToIdentifier(), this converts a TT tag
to a valid XML element name. Since XML element names are
case sensitive, this is a fairly simple/readable translation.
"""
import re
tag = Tag(tag)
if tag == "OS/2":
return "OS_2"
elif tag == "GlyphOrder":
return tag
if re.match("[A-Za-z_][A-Za-z_0-9]* *$", tag):
return tag.strip()
else:
return tagToIdentifier(tag)
def xmlToTag(tag):
"""The opposite of tagToXML()"""
if tag == "OS_2":
return Tag("OS/2")
if len(tag) == 8:
return identifierToTag(tag)
else:
return Tag(tag + " " * (4 - len(tag)))
def debugmsg(msg):
import time
print(msg + time.strftime(" (%H:%M:%S)", time.localtime(time.time())))
# Table order as recommended in the OpenType specification 1.4
TTFTableOrder = ["head", "hhea", "maxp", "OS/2", "hmtx", "LTSH", "VDMX",
"hdmx", "cmap", "fpgm", "prep", "cvt ", "loca", "glyf",
"kern", "name", "post", "gasp", "PCLT"]
OTFTableOrder = ["head", "hhea", "maxp", "OS/2", "name", "cmap", "post",
"CFF "]
def sortedTagList(tagList, tableOrder=None):
"""Return a sorted copy of tagList, sorted according to the OpenType
specification, or according to a custom tableOrder. If given and not
None, tableOrder needs to be a list of tag names.
"""
tagList = sorted(tagList)
if tableOrder is None:
if "DSIG" in tagList:
# DSIG should be last (XXX spec reference?)
tagList.remove("DSIG")
tagList.append("DSIG")
if "CFF " in tagList:
tableOrder = OTFTableOrder
else:
tableOrder = TTFTableOrder
orderedTables = []
for tag in tableOrder:
if tag in tagList:
orderedTables.append(tag)
tagList.remove(tag)
orderedTables.extend(tagList)
return orderedTables
def reorderFontTables(inFile, outFile, tableOrder=None, checkChecksums=False):
"""Rewrite a font file, ordering the tables as recommended by the
OpenType specification 1.4.
"""
from fontTools.ttLib.sfnt import SFNTReader, SFNTWriter
reader = SFNTReader(inFile, checkChecksums=checkChecksums)
writer = SFNTWriter(outFile, len(reader.tables), reader.sfntVersion, reader.flavor, reader.flavorData)
tables = list(reader.keys())
for tag in sortedTagList(tables, tableOrder):
writer[tag] = reader[tag]
writer.close()
def maxPowerOfTwo(x):
"""Return the highest exponent of two, so that
(2 ** exponent) <= x. Return 0 if x is 0.
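	For example, maxPowerOfTwo(12) is 3 (2 ** 3 == 8 <= 12 < 16) and
	maxPowerOfTwo(0) is 0.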
"""
exponent = 0
while x:
x = x >> 1
exponent = exponent + 1
return max(exponent - 1, 0)
def getSearchRange(n, itemSize):
"""Calculate searchRange, entrySelector, rangeShift.
"""
	# These values are stored in the sfnt header so that a consumer can run
	# the optimized binary search described in the OpenType specification.
exponent = maxPowerOfTwo(n)
searchRange = (2 ** exponent) * itemSize
entrySelector = exponent
rangeShift = max(0, n * itemSize - searchRange)
return searchRange, entrySelector, rangeShift
| apache-2.0 | 8,371,049,326,602,620,000 | 29.568251 | 156 | 0.68798 | false |
dracos/QGIS | python/plugins/processing/tools/translation.py | 12 | 3016 | # -*- coding: utf-8 -*-
"""
***************************************************************************
    translation.py
---------------------
Date : July 2015
Copyright : (C) 2015 by Arnaud Morvan
Email : arnaud dot morvan at camptocamp dot com
***************************************************************************
* *
* This program is free software; you can redistribute it and/or modify *
* it under the terms of the GNU General Public License as published by *
* the Free Software Foundation; either version 2 of the License, or *
* (at your option) any later version. *
* *
***************************************************************************
"""
__author__ = 'Arnaud Morvan'
__date__ = 'July 2015'
__copyright__ = '(C) 2015, Arnaud Morvan'
# This will get replaced with a git SHA1 when you do a git archive
__revision__ = '$Format:%H$'
import os
from processing.core.Processing import Processing
from processing.gui.AlgorithmClassification import (
loadClassification, loadDisplayNames, getClassificationEn, getDisplayNameEn)
def updateTranslations():
"""Update processing.algs.translations module.
    Needs the QGIS Python API on the Python path; can be run from the QGIS console. Example:
from processing.tools.translation import updateTranslations
updateTranslations()
"""
loadClassification()
loadDisplayNames()
f = open(os.path.join(os.path.dirname(__file__), '../algs/translations.py'), 'w')
f.write('''# -*- coding: utf-8 -*-
"""
Don't edit this file manually.
Update it from QGIS console:
from processing.tools.translation import updateTranslations
updateTranslations()
"""
from PyQt4.QtCore import QCoreApplication
def translationShadow():
''')
groups = {}
for provider in Processing.providers:
f.write('''
"""{}"""
'''.format(provider.__class__.__name__))
for alg in provider.algs:
display_name = getDisplayNameEn(alg)
f.write(" QCoreApplication.translate(\"{}\", \"{}\")\n"
.format(alg.__class__.__name__,
display_name.replace('"', '\\"')))
            if alg.group not in groups:
                groups[alg.group] = 'AlgorithmClassification'
            group, subgroup = getClassificationEn(alg)
            if group is not None and group not in groups:
                groups[group] = 'AlgorithmClassification'
            if subgroup is not None and subgroup not in groups:
                groups[subgroup] = 'AlgorithmClassification'
f.write('''
"""Groups and subgroups"""
''')
for group, context in groups.iteritems():
f.write(" QCoreApplication.translate(\"{}\", \"{}\")\n"
.format(context,
group.replace('"', '\\"')))
| gpl-2.0 | -9,154,054,789,079,552,000 | 35.337349 | 85 | 0.527188 | false |
redhat-openstack/horizon | openstack_dashboard/dashboards/settings/password/forms.py | 63 | 3043 | # Copyright 2013 Centrin Data Systems Ltd.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from django.conf import settings
from django.forms import ValidationError # noqa
from django import http
from django.utils.translation import ugettext_lazy as _
from django.views.decorators.debug import sensitive_variables # noqa
from horizon import exceptions
from horizon import forms
from horizon import messages
from horizon.utils import functions as utils
from horizon.utils import validators
from openstack_dashboard import api
class PasswordForm(forms.SelfHandlingForm):
current_password = forms.CharField(
label=_("Current password"),
widget=forms.PasswordInput(render_value=False))
new_password = forms.RegexField(
label=_("New password"),
widget=forms.PasswordInput(render_value=False),
regex=validators.password_validator(),
error_messages={'invalid':
validators.password_validator_msg()})
confirm_password = forms.CharField(
label=_("Confirm new password"),
widget=forms.PasswordInput(render_value=False))
no_autocomplete = True
def clean(self):
'''Check to make sure password fields match.'''
data = super(forms.Form, self).clean()
if 'new_password' in data:
if data['new_password'] != data.get('confirm_password', None):
raise ValidationError(_('Passwords do not match.'))
return data
    # We have to protect the entire "data" dict because it contains the
    # current and new password strings.
@sensitive_variables('data')
def handle(self, request, data):
user_is_editable = api.keystone.keystone_can_edit_user()
if user_is_editable:
try:
api.keystone.user_update_own_password(request,
data['current_password'],
data['new_password'])
response = http.HttpResponseRedirect(settings.LOGOUT_URL)
msg = _("Password changed. Please log in again to continue.")
utils.add_logout_reason(request, response, msg)
return response
except Exception:
exceptions.handle(request,
_('Unable to change password.'))
return False
else:
messages.error(request, _('Changing password is not supported.'))
return False
| apache-2.0 | 1,253,453,899,435,859,200 | 40.121622 | 79 | 0.640486 | false |
unclechu/avto-lux161 | avto-lux/app/adm/routes/main.py | 1 | 5434 | # -*- coding: utf-8 -*-
import os, time
import hashlib
import datetime
from warnings import warn
from .helpers import request_except_handler, require_auth
from app.configparser import config
from app.utils import get_json_localization
from app.mixins.auth import AuthMixin
from app.mixins.routes import JsonResponseMixin
from app.models.dbconnect import Session
from app.models.usermodels import User
class AdminMainRoute(JsonResponseMixin):
def get(self, *args):
lang = config('LOCALIZATION')['LANG']
localization = get_json_localization('ADMIN')[lang]
kwrgs = {
'page_title': localization['page_title'],
'lang': lang,
'local': localization,
'is_auth': 1 if self.get_secure_cookie('user') else 0,
'is_debug': 1 if self.application.settings.get('debug') else 0
}
return self.render('admin/layout.jade', **kwrgs)
class AuthHandler(AuthMixin, JsonResponseMixin):
def post(self):
if self.get_secure_cookie('user'):
return self.json_response({
'status': 'success',
'username': self.get_secure_cookie('user').decode('utf-8')
})
session = Session()
try:
usr = (
session
.query(User)
.filter_by(login=self.get_argument('user'))
.one()
)
except Exception as e:
warn('adm/AuthHandler.post(): user not found:\n%s' % e)
return self.json_response({
'status': 'error',
'error_code': 'user_not_found'
})
finally:
session.close()
compared = self.compare_password(
hpasswd=usr.password,
password=self.get_argument('pass')
)
if compared and usr.is_active:
self.set_secure_cookie('user', usr.login)
return self.json_response({
'status': 'success',
'username': usr.login
})
elif not usr.is_active:
return self.json_response({
'status': 'error',
'error_code': 'user_inactive'
})
return self.json_response({
'status': 'error',
'error_code': 'incorrect_password'
})
class LogoutHandler(JsonResponseMixin):
def post(self):
self.clear_all_cookies()
return self.json_response({'status': 'logout'})
class CreateUser(AuthMixin, JsonResponseMixin):
@require_auth
def post(self):
login = self.get_argument('login')
passwd = self.get_argument('password')
session = Session()
try:
olds = [x[0] for x in session.query(User.login).all()]
except Exception as e:
session.close()
warn('adm/CreateUser.post(): cannot get users logins:\n%s' % e)
raise e
		if login == '':
			return self.json_response({
				'status': 'error',
				'error_code': 'incorrect_data'
			})
		elif login in olds:
			return self.json_response({
				'status': 'error',
				'error_code': 'unique_key_exist'
			})
is_active = True
try:
self.get_argument('is_active')
except:
is_active = False
usr = User(
login=login,
password=self.create_password(passwd),
last_login=datetime.datetime.utcnow(),
is_active=is_active
)
try:
session.add(usr)
session.commit()
except Exception as e:
warn('adm/CreateUser.post(): cannot add user:\n%s' % e)
raise e
finally:
session.close()
return self.json_response({'status': 'success'})
class UpdateUser(AuthMixin, JsonResponseMixin):
@require_auth
def post(self):
kwargs = {}
passwrd = self.get_argument('password')
login = self.get_argument('login')
id = self.get_argument('id')
is_active = True
try:
self.get_argument('is_active')
except:
is_active = False
session = Session()
try:
usr = session.query(User).filter_by(id=id).one()
except Exception as e:
session.close()
warn(
'adm/UpdateUser.post(): cannot get user by #%s id:\n%s' %
(str(id), e)
)
raise e
try:
olds = [x[0] for x in session.query(User.login).all()]
except Exception as e:
session.close()
warn('adm/UpdateUser.post(): cannot get users logins:\n%s' % e)
raise e
		if login == '':
			return self.json_response({
				'status': 'error',
				'error_code': 'incorrect_data'
			})
		elif usr.login != login and login in olds:
			return self.json_response({
				'status': 'error',
				'error_code': 'unique_key_exist'
			})
kwargs.update({
'login': login,
'is_active': is_active
})
if passwrd != '':
kwargs.update({'password': self.create_password(passwrd)})
try:
session.query(User).filter_by(id=id).update(kwargs)
session.commit()
except Exception as e:
warn(
'adm/UpdateUser.post(): cannot update user #%s data:\n%s' %
(str(id), e)
)
raise e
finally:
session.close()
return self.json_response({'status': 'success'})
class FileUpload(JsonResponseMixin):
_extension_map = {
'application/octet-stream': '', # without extension
'image/svg+xml': '.svg',
'text/plain': '.txt'
}
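	# Illustrative note (not in the original source): a content type missing
	# from the map above, e.g. 'image/png', falls through to the split('/')
	# branch below and is saved with a '.png' extension.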
@require_auth
@request_except_handler
def post(self):
file_path = config('UPLOAD_FILES_PATH')
hashes = []
for f in self.request.files.items():
_file = f[1][0]
_filename = hashlib.sha512(
str(time.time()).encode('utf-8')
).hexdigest()[0:35]
if _file['content_type'] in self._extension_map:
ext = self._extension_map[_file['content_type']]
else:
ext = '.' + _file['content_type'].split('/')[1]
fname = _filename + ext
f = open(os.path.join(file_path, fname), 'wb')
f.write(_file['body'])
f.close()
hashes.append({'name': fname})
return self.json_response({
'status': 'success',
'files': hashes
})
| agpl-3.0 | -498,916,912,433,025,150 | 20.823293 | 66 | 0.634155 | false |
dpetzold/django | django/contrib/gis/geos/mutable_list.py | 238 | 10705 | # Copyright (c) 2008-2009 Aryeh Leib Taurog, all rights reserved.
# Released under the New BSD license.
"""
This module contains a base type which provides list-style mutations
without specific data storage methods.
See also http://static.aryehleib.com/oldsite/MutableLists.html
Author: Aryeh Leib Taurog.
"""
from functools import total_ordering
from django.utils import six
from django.utils.six.moves import range
@total_ordering
class ListMixin(object):
"""
    A base class which provides a complete list interface.
Derived classes must call ListMixin's __init__() function
and implement the following:
function _get_single_external(self, i):
Return single item with index i for general use.
The index i will always satisfy 0 <= i < len(self).
function _get_single_internal(self, i):
Same as above, but for use within the class [Optional]
    Note that if _get_single_internal and _get_single_external return
different types of objects, _set_list must distinguish
between the two and handle each appropriately.
function _set_list(self, length, items):
Recreate the entire object.
NOTE: items may be a generator which calls _get_single_internal.
Therefore, it is necessary to cache the values in a temporary:
temp = list(items)
before clobbering the original storage.
function _set_single(self, i, value):
Set the single item at index i to value [Optional]
If left undefined, all mutations will result in rebuilding
the object using _set_list.
function __len__(self):
Return the length
int _minlength:
The minimum legal length [Optional]
int _maxlength:
The maximum legal length [Optional]
type or tuple _allowed:
A type or tuple of allowed item types [Optional]
"""
_minlength = 0
_maxlength = None
# ### Python initialization and special list interface methods ###
def __init__(self, *args, **kwargs):
if not hasattr(self, '_get_single_internal'):
self._get_single_internal = self._get_single_external
if not hasattr(self, '_set_single'):
self._set_single = self._set_single_rebuild
self._assign_extended_slice = self._assign_extended_slice_rebuild
super(ListMixin, self).__init__(*args, **kwargs)
def __getitem__(self, index):
"Get the item(s) at the specified index/slice."
if isinstance(index, slice):
return [self._get_single_external(i) for i in range(*index.indices(len(self)))]
else:
index = self._checkindex(index)
return self._get_single_external(index)
def __delitem__(self, index):
"Delete the item(s) at the specified index/slice."
if not isinstance(index, six.integer_types + (slice,)):
raise TypeError("%s is not a legal index" % index)
# calculate new length and dimensions
origLen = len(self)
if isinstance(index, six.integer_types):
index = self._checkindex(index)
indexRange = [index]
else:
indexRange = range(*index.indices(origLen))
newLen = origLen - len(indexRange)
newItems = (self._get_single_internal(i)
for i in range(origLen)
if i not in indexRange)
self._rebuild(newLen, newItems)
def __setitem__(self, index, val):
"Set the item(s) at the specified index/slice."
if isinstance(index, slice):
self._set_slice(index, val)
else:
index = self._checkindex(index)
self._check_allowed((val,))
self._set_single(index, val)
# ### Special methods for arithmetic operations ###
def __add__(self, other):
'add another list-like object'
return self.__class__(list(self) + list(other))
def __radd__(self, other):
'add to another list-like object'
return other.__class__(list(other) + list(self))
def __iadd__(self, other):
'add another list-like object to self'
self.extend(list(other))
return self
def __mul__(self, n):
'multiply'
return self.__class__(list(self) * n)
def __rmul__(self, n):
'multiply'
return self.__class__(list(self) * n)
def __imul__(self, n):
'multiply'
if n <= 0:
del self[:]
else:
cache = list(self)
for i in range(n - 1):
self.extend(cache)
return self
def __eq__(self, other):
olen = len(other)
for i in range(olen):
try:
c = self[i] == other[i]
except IndexError:
# self must be shorter
return False
if not c:
return False
return len(self) == olen
def __lt__(self, other):
olen = len(other)
for i in range(olen):
try:
c = self[i] < other[i]
except IndexError:
# self must be shorter
return True
if c:
return c
elif other[i] < self[i]:
return False
return len(self) < olen
# ### Public list interface Methods ###
# ## Non-mutating ##
def count(self, val):
"Standard list count method"
count = 0
for i in self:
if val == i:
count += 1
return count
def index(self, val):
"Standard list index method"
for i in range(0, len(self)):
if self[i] == val:
return i
raise ValueError('%s not found in object' % str(val))
# ## Mutating ##
def append(self, val):
"Standard list append method"
self[len(self):] = [val]
def extend(self, vals):
"Standard list extend method"
self[len(self):] = vals
def insert(self, index, val):
"Standard list insert method"
if not isinstance(index, six.integer_types):
raise TypeError("%s is not a legal index" % index)
self[index:index] = [val]
def pop(self, index=-1):
"Standard list pop method"
result = self[index]
del self[index]
return result
def remove(self, val):
"Standard list remove method"
del self[self.index(val)]
def reverse(self):
"Standard list reverse method"
self[:] = self[-1::-1]
def sort(self, cmp=None, key=None, reverse=False):
"Standard list sort method"
if key:
temp = [(key(v), v) for v in self]
temp.sort(key=lambda x: x[0], reverse=reverse)
self[:] = [v[1] for v in temp]
else:
temp = list(self)
if cmp is not None:
temp.sort(cmp=cmp, reverse=reverse)
else:
temp.sort(reverse=reverse)
self[:] = temp
# ### Private routines ###
def _rebuild(self, newLen, newItems):
if newLen < self._minlength:
raise ValueError('Must have at least %d items' % self._minlength)
if self._maxlength is not None and newLen > self._maxlength:
raise ValueError('Cannot have more than %d items' % self._maxlength)
self._set_list(newLen, newItems)
def _set_single_rebuild(self, index, value):
self._set_slice(slice(index, index + 1, 1), [value])
def _checkindex(self, index, correct=True):
length = len(self)
if 0 <= index < length:
return index
if correct and -length <= index < 0:
return index + length
raise IndexError('invalid index: %s' % str(index))
def _check_allowed(self, items):
if hasattr(self, '_allowed'):
if False in [isinstance(val, self._allowed) for val in items]:
raise TypeError('Invalid type encountered in the arguments.')
def _set_slice(self, index, values):
"Assign values to a slice of the object"
try:
iter(values)
except TypeError:
raise TypeError('can only assign an iterable to a slice')
self._check_allowed(values)
origLen = len(self)
valueList = list(values)
start, stop, step = index.indices(origLen)
# CAREFUL: index.step and step are not the same!
# step will never be None
if index.step is None:
self._assign_simple_slice(start, stop, valueList)
else:
self._assign_extended_slice(start, stop, step, valueList)
def _assign_extended_slice_rebuild(self, start, stop, step, valueList):
'Assign an extended slice by rebuilding entire list'
indexList = range(start, stop, step)
# extended slice, only allow assigning slice of same size
if len(valueList) != len(indexList):
raise ValueError('attempt to assign sequence of size %d '
'to extended slice of size %d'
% (len(valueList), len(indexList)))
# we're not changing the length of the sequence
newLen = len(self)
newVals = dict(zip(indexList, valueList))
def newItems():
for i in range(newLen):
if i in newVals:
yield newVals[i]
else:
yield self._get_single_internal(i)
self._rebuild(newLen, newItems())
def _assign_extended_slice(self, start, stop, step, valueList):
'Assign an extended slice by re-assigning individual items'
indexList = range(start, stop, step)
# extended slice, only allow assigning slice of same size
if len(valueList) != len(indexList):
raise ValueError('attempt to assign sequence of size %d '
'to extended slice of size %d'
% (len(valueList), len(indexList)))
for i, val in zip(indexList, valueList):
self._set_single(i, val)
def _assign_simple_slice(self, start, stop, valueList):
'Assign a simple slice; Can assign slice of any length'
origLen = len(self)
stop = max(start, stop)
newLen = origLen - stop + start + len(valueList)
def newItems():
for i in range(origLen + 1):
if i == start:
for val in valueList:
yield val
if i < origLen:
if i < start or i >= stop:
yield self._get_single_internal(i)
self._rebuild(newLen, newItems())
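# Illustrative subclass (not part of the original module), assuming only the
# hooks documented in the ListMixin docstring:
#
#     class SimpleList(ListMixin):
#         def __init__(self, items=()):
#             self._items = list(items)
#             super(SimpleList, self).__init__()
#
#         def __len__(self):
#             return len(self._items)
#
#         def _get_single_external(self, i):
#             return self._items[i]
#
#         def _set_list(self, length, items):
#             self._items = list(items)
#
#     s = SimpleList([3, 1, 2])
#     s.sort()
#     s.append(4)   # contents are now [1, 2, 3, 4]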
| bsd-3-clause | -5,861,360,032,018,099,000 | 31.938462 | 91 | 0.564783 | false |
tbabej/astropy | astropy/visualization/wcsaxes/tests/test_frame.py | 2 | 5298 | # Licensed under a 3-clause BSD style license - see LICENSE.rst
import numpy as np
import matplotlib.pyplot as plt
from ....wcs import WCS
from ....tests.helper import pytest, remote_data
from .. import WCSAxes
from ..frame import BaseFrame
from ....tests.image_tests import IMAGE_REFERENCE_DIR
from .test_images import BaseImageTests
class HexagonalFrame(BaseFrame):
spine_names = 'abcdef'
def update_spines(self):
xmin, xmax = self.parent_axes.get_xlim()
ymin, ymax = self.parent_axes.get_ylim()
ymid = 0.5 * (ymin + ymax)
xmid1 = (xmin + xmax) / 4.
xmid2 = (xmin + xmax) * 3. / 4.
self['a'].data = np.array(([xmid1, ymin], [xmid2, ymin]))
self['b'].data = np.array(([xmid2, ymin], [xmax, ymid]))
self['c'].data = np.array(([xmax, ymid], [xmid2, ymax]))
self['d'].data = np.array(([xmid2, ymax], [xmid1, ymax]))
self['e'].data = np.array(([xmid1, ymax], [xmin, ymid]))
self['f'].data = np.array(([xmin, ymid], [xmid1, ymin]))
class TestFrame(BaseImageTests):
@remote_data(source='astropy')
@pytest.mark.mpl_image_compare(baseline_dir=IMAGE_REFERENCE_DIR,
filename='custom_frame.png',
tolerance=1.5)
def test_custom_frame(self):
wcs = WCS(self.msx_header)
fig = plt.figure(figsize=(4, 4))
ax = WCSAxes(fig, [0.15, 0.15, 0.7, 0.7],
wcs=wcs,
frame_class=HexagonalFrame)
fig.add_axes(ax)
ax.coords.grid(color='white')
im = ax.imshow(np.ones((149, 149)), vmin=0., vmax=2.,
origin='lower', cmap=plt.cm.gist_heat)
minpad = {}
minpad['a'] = minpad['d'] = 1
minpad['b'] = minpad['c'] = minpad['e'] = minpad['f'] = 2.75
ax.coords['glon'].set_axislabel("Longitude", minpad=minpad)
ax.coords['glon'].set_axislabel_position('ad')
ax.coords['glat'].set_axislabel("Latitude", minpad=minpad)
ax.coords['glat'].set_axislabel_position('bcef')
ax.coords['glon'].set_ticklabel_position('ad')
ax.coords['glat'].set_ticklabel_position('bcef')
# Set limits so that no labels overlap
ax.set_xlim(5.5, 100.5)
ax.set_ylim(5.5, 110.5)
# Clip the image to the frame
im.set_clip_path(ax.coords.frame.patch)
return fig
@remote_data(source='astropy')
@pytest.mark.mpl_image_compare(baseline_dir=IMAGE_REFERENCE_DIR,
filename='update_clip_path_rectangular.png',
tolerance=1.5)
def test_update_clip_path_rectangular(self, tmpdir):
fig = plt.figure()
ax = WCSAxes(fig, [0.1, 0.1, 0.8, 0.8], aspect='equal')
fig.add_axes(ax)
ax.set_xlim(0., 2.)
ax.set_ylim(0., 2.)
# Force drawing, which freezes the clip path returned by WCSAxes
fig.savefig(tmpdir.join('nothing').strpath)
ax.imshow(np.zeros((12, 4)))
ax.set_xlim(-0.5, 3.5)
ax.set_ylim(-0.5, 11.5)
return fig
@remote_data(source='astropy')
@pytest.mark.mpl_image_compare(baseline_dir=IMAGE_REFERENCE_DIR,
filename='update_clip_path_nonrectangular.png',
tolerance=1.5)
def test_update_clip_path_nonrectangular(self, tmpdir):
fig = plt.figure()
ax = WCSAxes(fig, [0.1, 0.1, 0.8, 0.8], aspect='equal',
frame_class=HexagonalFrame)
fig.add_axes(ax)
ax.set_xlim(0., 2.)
ax.set_ylim(0., 2.)
# Force drawing, which freezes the clip path returned by WCSAxes
fig.savefig(tmpdir.join('nothing').strpath)
ax.imshow(np.zeros((12, 4)))
ax.set_xlim(-0.5, 3.5)
ax.set_ylim(-0.5, 11.5)
return fig
@remote_data(source='astropy')
@pytest.mark.mpl_image_compare(baseline_dir=IMAGE_REFERENCE_DIR,
filename='update_clip_path_change_wcs.png',
tolerance=1.5)
def test_update_clip_path_change_wcs(self, tmpdir):
# When WCS is changed, a new frame is created, so we need to make sure
# that the path is carried over to the new frame.
fig = plt.figure()
ax = WCSAxes(fig, [0.1, 0.1, 0.8, 0.8], aspect='equal')
fig.add_axes(ax)
ax.set_xlim(0., 2.)
ax.set_ylim(0., 2.)
# Force drawing, which freezes the clip path returned by WCSAxes
fig.savefig(tmpdir.join('nothing').strpath)
ax.reset_wcs()
ax.imshow(np.zeros((12, 4)))
ax.set_xlim(-0.5, 3.5)
ax.set_ylim(-0.5, 11.5)
return fig
def test_copy_frame_properties_change_wcs(self):
# When WCS is changed, a new frame is created, so we need to make sure
# that the color and linewidth are transferred over
fig = plt.figure()
ax = WCSAxes(fig, [0.1, 0.1, 0.8, 0.8])
fig.add_axes(ax)
ax.coords.frame.set_linewidth(5)
ax.coords.frame.set_color('purple')
ax.reset_wcs()
assert ax.coords.frame.get_linewidth() == 5
assert ax.coords.frame.get_color() == 'purple'
| bsd-3-clause | 2,828,958,993,210,332,000 | 29.982456 | 82 | 0.556625 | false |
krzysztof/invenio-pidrelations | invenio_pidrelations/contrib/versioning.py | 1 | 6972 | # -*- coding: utf-8 -*-
#
# This file is part of Invenio.
# Copyright (C) 2014, 2015, 2016 CERN.
#
# Invenio is free software; you can redistribute it
# and/or modify it under the terms of the GNU General Public License as
# published by the Free Software Foundation; either version 2 of the
# License, or (at your option) any later version.
#
# Invenio is distributed in the hope that it will be
# useful, but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Invenio; if not, write to the
# Free Software Foundation, Inc., 59 Temple Place, Suite 330, Boston,
# MA 02111-1307, USA.
#
# In applying this license, CERN does not
# waive the privileges and immunities granted to it by virtue of its status
# as an Intergovernmental Organization or submit itself to any jurisdiction.
"""API for PID relations concepts."""
from __future__ import absolute_import, print_function
from flask import Blueprint
from invenio_db import db
from invenio_pidstore.models import PersistentIdentifier, PIDStatus
from ..api import PIDConceptOrdered
from ..models import PIDRelation
from ..utils import resolve_relation_type_config
class PIDVersioning(PIDConceptOrdered):
"""API for PID versioning relations.
- Adds automatic redirection handling for Parent-LastChild
    - Sets stricter method signatures, e.g. 'index' is a mandatory parameter
      when calling 'insert'.
"""
def __init__(self, child=None, parent=None, draft_deposit=None,
draft_record=None, relation=None):
"""Create a PID versioning API."""
self.relation_type = resolve_relation_type_config('version').id
if relation is not None:
if relation.relation_type != self.relation_type:
raise ValueError("Provided PID relation ({0}) is not a "
"version relation.".format(relation))
super(PIDVersioning, self).__init__(relation=relation)
else:
super(PIDVersioning, self).__init__(
child=child, parent=parent, relation_type=self.relation_type,
relation=relation)
if self.child:
self.relation = PIDRelation.query.filter(
PIDRelation.child_id == self.child.id,
PIDRelation.relation_type == self.relation_type,
).one_or_none()
def insert_child(self, child, index=-1):
"""Insert child into versioning scheme.
        The parameter 'index' has to be an integer.
"""
if index is None:
raise ValueError(
"Incorrect value for child index: {0}".format(index))
with db.session.begin_nested():
super(PIDVersioning, self).insert_child(child, index=index)
self.parent.redirect(child)
def remove_child(self, child):
"""Remove a child from a versioning scheme.
Extends the base method call with always reordering after removal and
adding a redirection from the parent to the last child.
"""
with db.session.begin_nested():
super(PIDVersioning, self).remove_child(child, reorder=True)
if self.last_child is not None:
self.parent.redirect(self.last_child)
def create_parent(self, pid_value, status=PIDStatus.REGISTERED,
redirect=True):
"""Create a parent PID from a child and create a new PID versioning."""
if self.has_parents:
raise Exception("Parent already exists for this child.")
self.parent = PersistentIdentifier.create(
self.child.pid_type, pid_value,
object_type=self.child.object_type,
status=status)
self.relation = PIDRelation.create(
self.parent, self.child, self.relation_type, 0)
if redirect:
self.parent.redirect(self.child)
@property
def exists(self):
"""Check if the PID Versioning exists."""
return self.parent is not None
@property
def last_child(self):
"""
        Get the latest PID as pointed to by the Head PID.
        If 'pid' is a Head PID, return the latest of its children.
        If 'pid' is a Version PID, return the latest of its siblings.
        Return None for non-versioned PIDs.
"""
return self.get_children(ordered=False,
pid_status=PIDStatus.REGISTERED).filter(
PIDRelation.index.isnot(None)).order_by(
PIDRelation.index.desc()).first()
@property
def draft_child(self):
"""Get the last non-registered child."""
return self.get_children(ordered=False).filter(
PIDRelation.index.isnot(None),
PersistentIdentifier.status == PIDStatus.RESERVED).order_by(
PIDRelation.index.desc()).one_or_none()
@property
def draft_child_deposit(self):
"""
Get the deposit of the draft child.
Return `None` if no new-version deposit exists.
"""
from invenio_pidrelations.contrib.records import RecordDraft
if self.draft_child:
return RecordDraft.get_draft(self.draft_child)
else:
return None
def insert_draft_child(self, child):
"""Insert a draft child to versioning."""
if not self.draft_child:
with db.session.begin_nested():
super(PIDVersioning, self).insert_child(child, index=-1)
else:
raise Exception(
"Draft child already exists for this relation: {0}".format(
self.draft_child))
def remove_draft_child(self):
"""Remove the draft child from versioning."""
if self.draft_child:
with db.session.begin_nested():
super(PIDVersioning, self).remove_child(self.draft_child,
reorder=True)
def update_redirect(self):
"""Update the parent redirect to the current last child."""
if self.last_child:
if self.parent.status == PIDStatus.RESERVED:
self.parent.register()
self.parent.redirect(self.last_child)
@property
def children(self):
"""Children of the parent."""
return self.get_children(pid_status=PIDStatus.REGISTERED, ordered=True)
versioning_blueprint = Blueprint(
'invenio_pidrelations.versioning',
__name__,
template_folder='templates'
)
@versioning_blueprint.app_template_filter()
def to_versioning_api(pid, child=True):
"""Get PIDVersioning object."""
return PIDVersioning(
child=pid if child else None,
parent=pid if not child else None
)
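# Illustrative template usage (an assumption, not from the original source):
# ``{{ pid | to_versioning_api }}`` yields a PIDVersioning treating ``pid``
# as a child, while ``{{ pid | to_versioning_api(child=False) }}`` treats it
# as the parent.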
__all__ = (
'PIDVersioning',
'versioning_blueprint'
)
| gpl-2.0 | 1,267,976,114,181,516,500 | 35.3125 | 79 | 0.62708 | false |
CasparLi/calibre | src/calibre/ebooks/compression/tcr.py | 24 | 5143 | # -*- coding: utf-8 -*-
__license__ = 'GPL 3'
__copyright__ = '2009, John Schember <[email protected]>'
__docformat__ = 'restructuredtext en'
import re
class TCRCompressor(object):
'''
    TCR compression takes the form header+code_dict+coded_text.
    The header is always "!!8-Bit!!". The code dict is a list of 256 strings,
    each stored as a 1-byte length followed by the string itself. Each
    position in the list corresponds to a code found in the file. The coded
    text is a string of character values. For instance, the character Q
    represents the value 81, which corresponds to the string at position 81
    in the code list.
'''
def _reset(self):
# List of indexes in the codes list that are empty and can hold new codes
self.unused_codes = set()
self.coded_txt = ''
# Generate initial codes from text.
        # Each index in this list is the code that represents the string
        # stored at that position.
self.codes = []
def _combine_codes(self):
'''
        Combine two codes that always appear as a pair into a single code.
The intent is to create more unused codes.
'''
possible_codes = []
a_code = set(re.findall('(?msu).', self.coded_txt))
for code in a_code:
single_code = set(re.findall('(?msu)%s.' % re.escape(code), self.coded_txt))
if len(single_code) == 1:
possible_codes.append(single_code.pop())
for code in possible_codes:
self.coded_txt = self.coded_txt.replace(code, code[0])
self.codes[ord(code[0])] = '%s%s' % (self.codes[ord(code[0])], self.codes[ord(code[1])])
def _free_unused_codes(self):
'''
        Look for codes that do not appear in the coded text and add them to
the list of free codes.
'''
for i in xrange(256):
if i not in self.unused_codes:
if chr(i) not in self.coded_txt:
self.unused_codes.add(i)
def _new_codes(self):
'''
Create new codes from codes that occur in pairs often.
'''
possible_new_codes = list(set(re.findall('(?msu)..', self.coded_txt)))
new_codes_count = []
for c in possible_new_codes:
count = self.coded_txt.count(c)
# Less than 3 occurrences will not produce any size reduction.
if count > 2:
new_codes_count.append((c, count))
# Arrange the codes in order of least to most occurring.
possible_new_codes = [x[0] for x in sorted(new_codes_count, key=lambda c: c[1])]
return possible_new_codes
def compress(self, txt):
self._reset()
self.codes = list(set(re.findall('(?msu).', txt)))
# Replace the text with their corresponding code
for c in txt:
self.coded_txt += chr(self.codes.index(c))
# Zero the unused codes and record which are unused.
for i in range(len(self.codes), 256):
self.codes.append('')
self.unused_codes.add(i)
self._combine_codes()
possible_codes = self._new_codes()
while possible_codes and self.unused_codes:
while possible_codes and self.unused_codes:
unused_code = self.unused_codes.pop()
# Take the last possible codes and split it into individual
# codes. The last possible code is the most often occurring.
code1, code2 = possible_codes.pop()
self.codes[unused_code] = '%s%s' % (self.codes[ord(code1)], self.codes[ord(code2)])
self.coded_txt = self.coded_txt.replace('%s%s' % (code1, code2), chr(unused_code))
self._combine_codes()
self._free_unused_codes()
possible_codes = self._new_codes()
self._free_unused_codes()
# Generate the code dictionary.
code_dict = []
for i in xrange(0, 256):
if i in self.unused_codes:
code_dict.append(chr(0))
else:
code_dict.append(chr(len(self.codes[i])) + self.codes[i])
# Join the identifier with the dictionary and coded text.
return '!!8-Bit!!'+''.join(code_dict)+self.coded_txt
def decompress(stream):
txt = []
stream.seek(0)
if stream.read(9) != '!!8-Bit!!':
raise ValueError('File %s contains an invalid TCR header.' % stream.name)
# Codes that the file contents are broken down into.
entries = []
for i in xrange(256):
entry_len = ord(stream.read(1))
entries.append(stream.read(entry_len))
# Map the values in the file to locations in the string list.
entry_loc = stream.read(1)
while entry_loc != '': # EOF
txt.append(entries[ord(entry_loc)])
entry_loc = stream.read(1)
return ''.join(txt)
def compress(txt):
t = TCRCompressor()
return t.compress(txt)
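# Illustrative round-trip (not part of the original module; assumes the
# Python 2 byte-string semantics used above):
#
#     from cStringIO import StringIO
#     packed = compress('hello hello hello hello')
#     assert packed.startswith('!!8-Bit!!')
#     assert decompress(StringIO(packed)) == 'hello hello hello hello'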
| gpl-3.0 | 8,195,233,368,721,441,000 | 36.268116 | 100 | 0.563873 | false |
ruslanloman/nova | nova/tests/unit/api/openstack/compute/contrib/test_hosts.py | 25 | 18124 | # Copyright (c) 2011 OpenStack Foundation
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import testtools
import webob.exc
from nova.api.openstack.compute.contrib import hosts as os_hosts_v2
from nova.api.openstack.compute.plugins.v3 import hosts as os_hosts_v21
from nova.compute import power_state
from nova.compute import vm_states
from nova import context as context_maker
from nova import db
from nova import exception
from nova import test
from nova.tests.unit.api.openstack import fakes
from nova.tests.unit import fake_hosts
def stub_service_get_all(context, disabled=None):
return fake_hosts.SERVICES_LIST
def stub_service_get_by_host_and_binary(context, host_name, binary):
for service in stub_service_get_all(context):
if service['host'] == host_name and service['binary'] == binary:
return service
def stub_set_host_enabled(context, host_name, enabled):
"""Simulates three possible behaviours for VM drivers or compute
drivers when enabling or disabling a host.
    'enabled' means new instances can go to this host;
    'disabled' means they can't.
"""
results = {True: "enabled", False: "disabled"}
if host_name == "notimplemented":
# The vm driver for this host doesn't support this feature
raise NotImplementedError()
elif host_name == "dummydest":
# The host does not exist
raise exception.ComputeHostNotFound(host=host_name)
elif host_name == "service_not_available":
# The service is not available
raise exception.ComputeServiceUnavailable(host=host_name)
elif host_name == "host_c2":
# Simulate a failure
return results[not enabled]
else:
# Do the right thing
return results[enabled]
def stub_set_host_maintenance(context, host_name, mode):
# We'll simulate success and failure by assuming
# that 'host_c1' always succeeds, and 'host_c2'
# always fails
results = {True: "on_maintenance", False: "off_maintenance"}
if host_name == "notimplemented":
# The vm driver for this host doesn't support this feature
raise NotImplementedError()
elif host_name == "dummydest":
# The host does not exist
raise exception.ComputeHostNotFound(host=host_name)
elif host_name == "service_not_available":
# The service is not available
raise exception.ComputeServiceUnavailable(host=host_name)
elif host_name == "host_c2":
# Simulate a failure
return results[not mode]
else:
# Do the right thing
return results[mode]
def stub_host_power_action(context, host_name, action):
if host_name == "notimplemented":
raise NotImplementedError()
elif host_name == "dummydest":
# The host does not exist
raise exception.ComputeHostNotFound(host=host_name)
elif host_name == "service_not_available":
# The service is not available
raise exception.ComputeServiceUnavailable(host=host_name)
return action
def _create_instance(**kwargs):
"""Create a test instance."""
ctxt = context_maker.get_admin_context()
return db.instance_create(ctxt, _create_instance_dict(**kwargs))
def _create_instance_dict(**kwargs):
"""Create a dictionary for a test instance."""
inst = {}
inst['image_ref'] = 'cedef40a-ed67-4d10-800e-17455edce175'
inst['reservation_id'] = 'r-fakeres'
inst['user_id'] = kwargs.get('user_id', 'admin')
inst['project_id'] = kwargs.get('project_id', 'fake')
inst['instance_type_id'] = '1'
if 'host' in kwargs:
inst['host'] = kwargs.get('host')
inst['vcpus'] = kwargs.get('vcpus', 1)
inst['memory_mb'] = kwargs.get('memory_mb', 20)
inst['root_gb'] = kwargs.get('root_gb', 30)
inst['ephemeral_gb'] = kwargs.get('ephemeral_gb', 30)
inst['vm_state'] = kwargs.get('vm_state', vm_states.ACTIVE)
inst['power_state'] = kwargs.get('power_state', power_state.RUNNING)
inst['task_state'] = kwargs.get('task_state', None)
inst['availability_zone'] = kwargs.get('availability_zone', None)
inst['ami_launch_index'] = 0
inst['launched_on'] = kwargs.get('launched_on', 'dummy')
return inst
class FakeRequestWithNovaZone(object):
environ = {"nova.context": context_maker.get_admin_context()}
GET = {"zone": "nova"}
class HostTestCaseV21(test.TestCase):
"""Test Case for hosts."""
validation_ex = exception.ValidationError
Controller = os_hosts_v21.HostController
policy_ex = exception.PolicyNotAuthorized
def _setup_stubs(self):
# Pretend we have fake_hosts.HOST_LIST in the DB
self.stubs.Set(db, 'service_get_all',
stub_service_get_all)
# Only hosts in our fake DB exist
self.stubs.Set(db, 'service_get_by_host_and_binary',
stub_service_get_by_host_and_binary)
# 'host_c1' always succeeds, and 'host_c2'
self.stubs.Set(self.hosts_api, 'set_host_enabled',
stub_set_host_enabled)
# 'host_c1' always succeeds, and 'host_c2'
self.stubs.Set(self.hosts_api, 'set_host_maintenance',
stub_set_host_maintenance)
self.stubs.Set(self.hosts_api, 'host_power_action',
stub_host_power_action)
def setUp(self):
super(HostTestCaseV21, self).setUp()
self.controller = self.Controller()
self.hosts_api = self.controller.api
self.req = fakes.HTTPRequest.blank('', use_admin_context=True)
self._setup_stubs()
def _test_host_update(self, host, key, val, expected_value):
body = {key: val}
result = self.controller.update(self.req, host, body=body)
self.assertEqual(result[key], expected_value)
def test_list_hosts(self):
"""Verify that the compute hosts are returned."""
result = self.controller.index(self.req)
self.assertIn('hosts', result)
hosts = result['hosts']
self.assertEqual(fake_hosts.HOST_LIST, hosts)
def test_disable_host(self):
self._test_host_update('host_c1', 'status', 'disable', 'disabled')
self._test_host_update('host_c2', 'status', 'disable', 'enabled')
def test_enable_host(self):
self._test_host_update('host_c1', 'status', 'enable', 'enabled')
self._test_host_update('host_c2', 'status', 'enable', 'disabled')
def test_enable_maintenance(self):
self._test_host_update('host_c1', 'maintenance_mode',
'enable', 'on_maintenance')
def test_disable_maintenance(self):
self._test_host_update('host_c1', 'maintenance_mode',
'disable', 'off_maintenance')
def _test_host_update_notimpl(self, key, val):
def stub_service_get_all_notimpl(self, req):
return [{'host': 'notimplemented', 'topic': None,
'availability_zone': None}]
self.stubs.Set(db, 'service_get_all',
stub_service_get_all_notimpl)
body = {key: val}
self.assertRaises(webob.exc.HTTPNotImplemented,
self.controller.update,
self.req, 'notimplemented', body=body)
def test_disable_host_notimpl(self):
self._test_host_update_notimpl('status', 'disable')
def test_enable_maintenance_notimpl(self):
self._test_host_update_notimpl('maintenance_mode', 'enable')
def test_host_startup(self):
result = self.controller.startup(self.req, "host_c1")
self.assertEqual(result["power_action"], "startup")
def test_host_shutdown(self):
result = self.controller.shutdown(self.req, "host_c1")
self.assertEqual(result["power_action"], "shutdown")
def test_host_reboot(self):
result = self.controller.reboot(self.req, "host_c1")
self.assertEqual(result["power_action"], "reboot")
def _test_host_power_action_notimpl(self, method):
self.assertRaises(webob.exc.HTTPNotImplemented,
method, self.req, "notimplemented")
def test_host_startup_notimpl(self):
self._test_host_power_action_notimpl(self.controller.startup)
def test_host_shutdown_notimpl(self):
self._test_host_power_action_notimpl(self.controller.shutdown)
def test_host_reboot_notimpl(self):
self._test_host_power_action_notimpl(self.controller.reboot)
def test_host_status_bad_host(self):
# A host given as an argument does not exist.
self.req.environ["nova.context"].is_admin = True
dest = 'dummydest'
with testtools.ExpectedException(webob.exc.HTTPNotFound,
".*%s.*" % dest):
self.controller.update(self.req, dest, body={'status': 'enable'})
def test_host_maintenance_bad_host(self):
# A host given as an argument does not exist.
self.req.environ["nova.context"].is_admin = True
dest = 'dummydest'
with testtools.ExpectedException(webob.exc.HTTPNotFound,
".*%s.*" % dest):
self.controller.update(self.req, dest,
body={'maintenance_mode': 'enable'})
def test_host_power_action_bad_host(self):
# A host given as an argument does not exist.
self.req.environ["nova.context"].is_admin = True
dest = 'dummydest'
with testtools.ExpectedException(webob.exc.HTTPNotFound,
".*%s.*" % dest):
self.controller.reboot(self.req, dest)
def test_host_status_bad_status(self):
        # The service on the given host is not available.
self.req.environ["nova.context"].is_admin = True
dest = 'service_not_available'
with testtools.ExpectedException(webob.exc.HTTPBadRequest,
".*%s.*" % dest):
self.controller.update(self.req, dest, body={'status': 'enable'})
def test_host_maintenance_bad_status(self):
        # The service on the given host is not available.
self.req.environ["nova.context"].is_admin = True
dest = 'service_not_available'
with testtools.ExpectedException(webob.exc.HTTPBadRequest,
".*%s.*" % dest):
self.controller.update(self.req, dest,
body={'maintenance_mode': 'enable'})
def test_host_power_action_bad_status(self):
        # The service on the given host is not available.
self.req.environ["nova.context"].is_admin = True
dest = 'service_not_available'
with testtools.ExpectedException(webob.exc.HTTPBadRequest,
".*%s.*" % dest):
self.controller.reboot(self.req, dest)
def test_bad_status_value(self):
bad_body = {"status": "bad"}
self.assertRaises(self.validation_ex, self.controller.update,
self.req, "host_c1", body=bad_body)
bad_body2 = {"status": "disablabc"}
self.assertRaises(self.validation_ex, self.controller.update,
self.req, "host_c1", body=bad_body2)
def test_bad_update_key(self):
bad_body = {"crazy": "bad"}
self.assertRaises(self.validation_ex, self.controller.update,
self.req, "host_c1", body=bad_body)
def test_bad_update_key_and_correct_update_key(self):
bad_body = {"status": "disable", "crazy": "bad"}
self.assertRaises(self.validation_ex, self.controller.update,
self.req, "host_c1", body=bad_body)
def test_good_update_keys(self):
body = {"status": "disable", "maintenance_mode": "enable"}
result = self.controller.update(self.req, 'host_c1', body=body)
self.assertEqual(result["host"], "host_c1")
self.assertEqual(result["status"], "disabled")
self.assertEqual(result["maintenance_mode"], "on_maintenance")
def test_show_host_not_exist(self):
# A host given as an argument does not exist.
self.req.environ["nova.context"].is_admin = True
dest = 'dummydest'
with testtools.ExpectedException(webob.exc.HTTPNotFound,
".*%s.*" % dest):
self.controller.show(self.req, dest)
def _create_compute_service(self):
"""Create compute-manager(ComputeNode and Service record)."""
ctxt = self.req.environ["nova.context"]
dic = {'host': 'dummy', 'binary': 'nova-compute', 'topic': 'compute',
'report_count': 0}
s_ref = db.service_create(ctxt, dic)
dic = {'service_id': s_ref['id'],
'host': s_ref['host'],
'vcpus': 16, 'memory_mb': 32, 'local_gb': 100,
'vcpus_used': 16, 'memory_mb_used': 32, 'local_gb_used': 10,
'hypervisor_type': 'qemu', 'hypervisor_version': 12003,
'cpu_info': '', 'stats': ''}
db.compute_node_create(ctxt, dic)
return db.service_get(ctxt, s_ref['id'])
def test_show_no_project(self):
"""No instances are running on the given host."""
ctxt = context_maker.get_admin_context()
s_ref = self._create_compute_service()
result = self.controller.show(self.req, s_ref['host'])
proj = ['(total)', '(used_now)', '(used_max)']
column = ['host', 'project', 'cpu', 'memory_mb', 'disk_gb']
self.assertEqual(len(result['host']), 3)
for resource in result['host']:
self.assertIn(resource['resource']['project'], proj)
self.assertEqual(len(resource['resource']), 5)
self.assertEqual(set(column), set(resource['resource'].keys()))
db.service_destroy(ctxt, s_ref['id'])
def test_show_works_correctly(self):
"""show() works correctly as expected."""
ctxt = context_maker.get_admin_context()
s_ref = self._create_compute_service()
i_ref1 = _create_instance(project_id='p-01', host=s_ref['host'])
i_ref2 = _create_instance(project_id='p-02', vcpus=3,
host=s_ref['host'])
result = self.controller.show(self.req, s_ref['host'])
proj = ['(total)', '(used_now)', '(used_max)', 'p-01', 'p-02']
column = ['host', 'project', 'cpu', 'memory_mb', 'disk_gb']
self.assertEqual(len(result['host']), 5)
for resource in result['host']:
self.assertIn(resource['resource']['project'], proj)
self.assertEqual(len(resource['resource']), 5)
self.assertEqual(set(column), set(resource['resource'].keys()))
db.service_destroy(ctxt, s_ref['id'])
db.instance_destroy(ctxt, i_ref1['uuid'])
db.instance_destroy(ctxt, i_ref2['uuid'])
def test_list_hosts_with_zone(self):
result = self.controller.index(FakeRequestWithNovaZone())
self.assertIn('hosts', result)
hosts = result['hosts']
self.assertEqual(fake_hosts.HOST_LIST_NOVA_ZONE, hosts)
class HostTestCaseV20(HostTestCaseV21):
validation_ex = webob.exc.HTTPBadRequest
policy_ex = webob.exc.HTTPForbidden
Controller = os_hosts_v2.HostController
def test_list_hosts_with_non_admin(self):
self.assertRaises(exception.AdminRequired,
self.controller.index, fakes.HTTPRequest.blank(''))
def test_host_maintenance_with_non_admin(self):
self.assertRaises(exception.AdminRequired,
self.controller.update, fakes.HTTPRequest.blank(''),
'host_c1', {'maintenance_mode': 'enable'})
def test_startup_with_non_admin(self):
self.assertRaises(exception.AdminRequired,
self.controller.startup, fakes.HTTPRequest.blank(''),
'host_c1')
def test_reboot_with_non_admin(self):
self.assertRaises(exception.AdminRequired,
self.controller.reboot, fakes.HTTPRequest.blank(''),
'host_c1')
def test_shutdown_with_non_admin(self):
self.assertRaises(exception.AdminRequired,
self.controller.shutdown,
fakes.HTTPRequest.blank(''),
'host_c1')
def test_show_non_admin(self):
self.assertRaises(exception.AdminRequired,
self.controller.show,
fakes.HTTPRequest.blank(''),
1)
class HostsPolicyEnforcementV21(test.NoDBTestCase):
def setUp(self):
super(HostsPolicyEnforcementV21, self).setUp()
self.controller = os_hosts_v21.HostController()
self.req = fakes.HTTPRequest.blank('')
def test_index_policy_failed(self):
rule_name = "os_compute_api:os-hosts"
self.policy.set_rules({rule_name: "project_id:non_fake"})
exc = self.assertRaises(
exception.PolicyNotAuthorized,
self.controller.index, self.req)
self.assertEqual(
"Policy doesn't allow %s to be performed." % rule_name,
exc.format_message())
def test_show_policy_failed(self):
rule_name = "os_compute_api:os-hosts"
self.policy.set_rules({rule_name: "project_id:non_fake"})
exc = self.assertRaises(
exception.PolicyNotAuthorized,
self.controller.show, self.req, 1)
self.assertEqual(
"Policy doesn't allow %s to be performed." % rule_name,
exc.format_message())
| apache-2.0 | -6,765,995,968,625,189,000 | 40.190909 | 79 | 0.609247 | false |
eusi/MissionPlanerHM | Lib/site-packages/numpy/oldnumeric/typeconv.py | 101 | 1622 | __all__ = ['oldtype2dtype', 'convtypecode', 'convtypecode2', 'oldtypecodes']
import numpy as np
oldtype2dtype = {'1': np.dtype(np.byte),
's': np.dtype(np.short),
# 'i': np.dtype(np.intc),
# 'l': np.dtype(int),
# 'b': np.dtype(np.ubyte),
'w': np.dtype(np.ushort),
'u': np.dtype(np.uintc),
# 'f': np.dtype(np.single),
# 'd': np.dtype(float),
# 'F': np.dtype(np.csingle),
# 'D': np.dtype(complex),
# 'O': np.dtype(object),
# 'c': np.dtype('c'),
None: np.dtype(int)
}
# converts typecode=None to int
def convtypecode(typecode, dtype=None):
if dtype is None:
try:
return oldtype2dtype[typecode]
except:
return np.dtype(typecode)
else:
return dtype
# If both typecode and dtype are None, return None.
def convtypecode2(typecode, dtype=None):
if dtype is None:
if typecode is None:
return None
else:
try:
return oldtype2dtype[typecode]
except:
return np.dtype(typecode)
else:
return dtype
_changedtypes = {'B': 'b',
'b': '1',
'h': 's',
'H': 'w',
'I': 'u'}
class _oldtypecodes(dict):
def __getitem__(self, obj):
char = np.dtype(obj).char
try:
return _changedtypes[char]
except KeyError:
return char
oldtypecodes = _oldtypecodes()
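# Example (illustrative, not part of the original module):
# >>> oldtypecodes[np.ubyte]    # np.dtype(np.ubyte).char is 'B'
# 'b'
# >>> oldtypecodes[np.float64]  # unchanged codes pass through
# 'd'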
| gpl-3.0 | 5,873,648,182,143,087,000 | 26.033333 | 76 | 0.467941 | false |
jaredkipe/mongo-connector | mongo_connector/doc_managers/formatters.py | 1 | 5486 | import base64
import datetime
import re
from uuid import UUID
from math import isnan, isinf
import logging
LOG = logging.getLogger(__name__)
import bson
import bson.json_util
from mongo_connector.compat import PY3
if PY3:
long = int
unicode = str
RE_TYPE = type(re.compile(""))
try:
from bson.regex import Regex
RE_TYPES = (RE_TYPE, Regex)
except ImportError:
RE_TYPES = (RE_TYPE,)
class DocumentFormatter(object):
"""Interface for classes that can transform documents to conform to
external drivers' expectations.
"""
def transform_value(self, value):
"""Transform a leaf-node in a document.
This method may be overridden to provide custom handling for specific
types of values.
"""
raise NotImplementedError
def transform_element(self, key, value):
"""Transform a single key-value pair within a document.
This method may be overridden to provide custom handling for specific
types of values. This method should return an iterator over the
resulting key-value pairs.
"""
raise NotImplementedError
def format_document(self, document):
"""Format a document in preparation to be sent to an external driver."""
raise NotImplementedError
class DefaultDocumentFormatter(DocumentFormatter):
"""Basic DocumentFormatter that preserves numbers, base64-encodes binary,
and stringifies everything else.
"""
def transform_value(self, value):
# This is largely taken from bson.json_util.default, though not the same
# so we don't modify the structure of the document
if isinstance(value, dict):
return self.format_document(value)
elif isinstance(value, list):
return [self.transform_value(v) for v in value]
if isinstance(value, RE_TYPES):
flags = ""
if value.flags & re.IGNORECASE:
flags += "i"
if value.flags & re.LOCALE:
flags += "l"
if value.flags & re.MULTILINE:
flags += "m"
if value.flags & re.DOTALL:
flags += "s"
if value.flags & re.UNICODE:
flags += "u"
if value.flags & re.VERBOSE:
flags += "x"
pattern = value.pattern
# quasi-JavaScript notation (may include non-standard flags)
return '/%s/%s' % (pattern, flags)
elif (isinstance(value, bson.Binary) or
(PY3 and isinstance(value, bytes))):
# Just include body of binary data without subtype
return base64.b64encode(value).decode()
elif isinstance(value, UUID):
return value.hex
elif isinstance(value, (int, long, float)):
if isnan(value):
raise ValueError("nan")
elif isinf(value):
raise ValueError("inf")
return value
elif isinstance(value, datetime.datetime):
return value
elif value is None:
return value
# Default
return unicode(value)
def transform_element(self, key, value):
try:
new_value = self.transform_value(value)
yield key, new_value
except ValueError as e:
LOG.warn("Invalid value for key: {} as {}".format(key, e))
def format_document(self, document):
def _kernel(doc):
for key in doc:
value = doc[key]
for new_k, new_v in self.transform_element(key, value):
yield new_k, new_v
return dict(_kernel(document))
class DocumentFlattener(DefaultDocumentFormatter):
"""Formatter that completely flattens documents and unwinds arrays:
An example:
{"a": 2,
"b": {
"c": {
"d": 5
}
},
"e": [6, 7, 8]
}
becomes:
{"a": 2, "b.c.d": 5, "e.0": 6, "e.1": 7, "e.2": 8}
"""
def transform_element(self, key, value):
if isinstance(value, list):
for li, lv in enumerate(value):
for inner_k, inner_v in self.transform_element(
"%s.%s" % (key, li), lv):
yield inner_k, inner_v
elif isinstance(value, dict):
formatted = self.format_document(value)
for doc_key in formatted:
yield "%s.%s" % (key, doc_key), formatted[doc_key]
else:
# We assume that transform_value will return a 'flat' value,
# not a list or dict
yield key, self.transform_value(value)
def format_document(self, document):
def flatten(doc, path):
top_level = (len(path) == 0)
if not top_level:
path_string = ".".join(path)
for k in doc:
v = doc[k]
if isinstance(v, dict):
path.append(k)
for inner_k, inner_v in flatten(v, path):
yield inner_k, inner_v
path.pop()
else:
transformed = self.transform_element(k, v)
for new_k, new_v in transformed:
if top_level:
yield new_k, new_v
else:
yield "%s.%s" % (path_string, new_k), new_v
return dict(flatten(document, []))
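# Illustrative usage (not part of the original module):
#
#     DocumentFlattener().format_document({"a": {"b": [1, 2]}})
#     # -> {"a.b.0": 1, "a.b.1": 2}
#
#     DefaultDocumentFormatter().format_document({"a": {"b": [1, 2]}})
#     # -> {"a": {"b": [1, 2]}}  (structure kept, leaves transformed)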
| apache-2.0 | 3,470,819,923,407,805,000 | 30.895349 | 80 | 0.545024 | false |
jtyuan/racetrack | src/arch/x86/isa/insts/general_purpose/string/compare_strings.py | 91 | 3952 | # Copyright (c) 2007-2008 The Hewlett-Packard Development Company
# All rights reserved.
#
# The license below extends only to copyright in the software and shall
# not be construed as granting a license to any other intellectual
# property including but not limited to intellectual property relating
# to a hardware implementation of the functionality of the software
# licensed hereunder. You may use the software subject to the license
# terms below provided that you ensure that this notice is replicated
# unmodified and in its entirety in all distributions of the software,
# modified or unmodified, in source code or in binary form.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met: redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer;
# redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution;
# neither the name of the copyright holders nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
# Authors: Gabe Black
microcode = '''
def macroop CMPS_M_M {
    # Find the constant we need to either add to or subtract from rsi and rdi
ruflag t0, 10
movi t3, t3, dsz, flags=(CEZF,), dataSize=asz
subi t4, t0, dsz, dataSize=asz
mov t3, t3, t4, flags=(nCEZF,), dataSize=asz
ld t1, seg, [1, t0, rsi]
ld t2, es, [1, t0, rdi]
sub t0, t1, t2, flags=(OF, SF, ZF, AF, PF, CF)
add rdi, rdi, t3, dataSize=asz
add rsi, rsi, t3, dataSize=asz
};
#
# Versions which have the rep prefix. These could benefit from some loop
# unrolling.
#
def macroop CMPS_E_M_M {
and t0, rcx, rcx, flags=(EZF,), dataSize=asz
br label("end"), flags=(CEZF,)
    # Find the constant we need to either add to or subtract from rsi and rdi
ruflag t0, 10
movi t3, t3, dsz, flags=(CEZF,), dataSize=asz
subi t4, t0, dsz, dataSize=asz
mov t3, t3, t4, flags=(nCEZF,), dataSize=asz
topOfLoop:
ld t1, seg, [1, t0, rsi]
ld t2, es, [1, t0, rdi]
sub t0, t1, t2, flags=(OF, SF, ZF, AF, PF, CF)
subi rcx, rcx, 1, flags=(EZF,), dataSize=asz
add rdi, rdi, t3, dataSize=asz
add rsi, rsi, t3, dataSize=asz
br label("topOfLoop"), flags=(CSTRZnEZF,)
end:
fault "NoFault"
};
def macroop CMPS_N_M_M {
and t0, rcx, rcx, flags=(EZF,), dataSize=asz
br label("end"), flags=(CEZF,)
    # Find the constant we need to either add to or subtract from rsi and rdi
ruflag t0, 10
movi t3, t3, dsz, flags=(CEZF,), dataSize=asz
subi t4, t0, dsz, dataSize=asz
mov t3, t3, t4, flags=(nCEZF,), dataSize=asz
topOfLoop:
ld t1, seg, [1, t0, rsi]
ld t2, es, [1, t0, rdi]
sub t0, t1, t2, flags=(OF, SF, ZF, AF, PF, CF)
subi rcx, rcx, 1, flags=(EZF,), dataSize=asz
add rdi, rdi, t3, dataSize=asz
add rsi, rsi, t3, dataSize=asz
br label("topOfLoop"), flags=(CSTRnZnEZF,)
end:
fault "NoFault"
};
'''
| bsd-3-clause | -7,625,089,208,390,721,000 | 37 | 72 | 0.708755 | false |
jamesgk/robofab | Scripts/RoboFabIntro/intro_GeneratingFonts.py | 8 | 1526 | #FLM: RoboFab Intro, Generating Fonts
#
#
# demo generating fonts with robofab
#
#
# Generating fonts with RoboFab is super easy! Let's have a look.
# (you will need to have a font open in FontLab)
from robofab.world import CurrentFont
import os
# A little function for making folders. we'll need it later.
def makeFolder(path):
#if the path doesn't exist, make it!
if not os.path.exists(path):
os.makedirs(path)
# We need to have a font open for this demo to work
font = CurrentFont()
# This will tell us what folder the font is in
fontPath = os.path.dirname(font.path)
# We'll put the fonts into a folder called "FabFonts" next to the .vfb file
macPath = os.path.join(fontPath, 'FabFonts', 'ForMac')
pcPath = os.path.join(fontPath, 'FabFonts', 'ForPC')
bothPath = os.path.join(fontPath, 'FabFonts', 'ForBoth')
# Now, we'll use that little function we made earlier to make the folders
makeFolder(macPath)
makeFolder(pcPath)
makeFolder(bothPath)
# A dict of all the font types we want to output
fontTypes = { 'mac' : ['mactype1', 'macttf', 'macttdfont'],
'pc' : ['pctype1', 'pcmm'],
'both' : ['otfcff', 'otfttf']
}
# Finally, let's generate the fonts!
for macType in fontTypes['mac']:
print "generating %s..."%macType
font.generate(macType, macPath)
for pcType in fontTypes['pc']:
print "generating %s..."%pcType
font.generate(pcType, pcPath)
for bothType in fontTypes['both']:
print "generating %s..."%bothType
font.generate(bothType, bothPath)
print 'Done!'
# Wow! Could it be any easier than that?
| bsd-3-clause | -981,752,005,894,061,400 | 26.745455 | 73 | 0.714941 | false |
matthiasdiener/spack | var/spack/repos/builtin/packages/dealii-parameter-gui/package.py | 1 | 1726 | ##############################################################################
# Copyright (c) 2013-2017, Lawrence Livermore National Security, LLC.
# Produced at the Lawrence Livermore National Laboratory.
#
# This file is part of Spack.
# Created by Todd Gamblin, [email protected], All rights reserved.
# LLNL-CODE-647188
#
# For details, see https://github.com/spack/spack
# Please also see the NOTICE and LICENSE files for our notice and the LGPL.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License (as
# published by the Free Software Foundation) version 2.1, February 1999.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
# conditions of the GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
from spack import *
class DealiiParameterGui(CMakePackage):
"""A qt based graphical user interface for editing deal.II .prm parameter
files."""
homepage = "https://github.com/dealii/parameter_gui"
url = "https://github.com/dealii/parameter_gui"
version('develop', git='https://github.com/dealii/parameter_gui.git', branch='master')
depends_on('qt')
def setup_environment(self, spack_env, run_env):
run_env.set('PARAMETER_GUI_DIR', self.prefix)
| lgpl-2.1 | 8,911,789,122,302,930,000 | 42.15 | 90 | 0.674392 | false |
symbolicdata/code | src/sdeval/classes/templates/comp/SOL_R_poly_sys/Z3/template_sol.py | 1 | 2437 | """
This is the template for extracting the solution to the computation problem of
finding real solutions of a polynomial system of equations over RR from the
output of the solver Z3.
.. moduleauthor:: Albert Heinle <[email protected]>
"""
import xml.dom.minidom as dom
import re
#--------------------------------------------------
#---------------The template-----------------------
#--------------------------------------------------
def extractSolution(outpString):
"""
    This function extracts the real-solvability answer for a polynomial system
    as computed by Z3, i.e. it parses the output of the executable code that
    was generated by the template in this folder for a given instance.
    It returns a string representation of the solution in XML format.
The XML-string will be given as follows::
<SOL_R_poly_sys_SOL>
<satisfiable>0 or 1, depending on true or not</satisfiable>
</SOL_R_poly_sys_SOL>
If there is no solution given, or something is wrong with the given string,
a ValueError is raised.
:param outpString: The String that was returned by the Z3-execution
:type outpString: str
:returns: XML-Representation of the solution.
:rtype: str
:raises: ValueError
"""
if (type(outpString) != str):
raise ValueError("Wrong Type of argument. String type expected.")
solBeginStr = "=====Solution Begin====="
solEndStr = "=====Solution End====="
solBeginPos = outpString.index(solBeginStr) + len(solBeginStr)
solEndStrPos = outpString.index(solEndStr)
solStr = outpString[solBeginPos:solEndStrPos].strip()
if (solStr == "" and (solStr!="sat" or solStr!="unsat")):
raise ValueError("Output is empty.")
#From here on, we can assume that we are dealing with a valid
#string.
#Construction of the XML-Document
result = dom.Document()
result.appendChild(result.createElement("SOL_R_poly_sys_SOL"))
tempNode = result.firstChild
    #Adding the satisfiability result
tempNodeSolutions = tempNode.appendChild(result.createElement("satisfiable"))
if solStr == "sat":
tempNodeSolutions.appendChild(result.createTextNode("1"))
else:
tempNodeSolutions.appendChild(result.createTextNode("0"))
return result.toprettyxml(" ")
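# A minimal usage sketch (the output string here is hypothetical):
#
#   out = "... =====Solution Begin=====\nsat\n=====Solution End===== ..."
#   print(extractSolution(out))
#
# prints an XML document whose <satisfiable> node contains "1".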
#--------------------------------------------------
#----------------Help Functions--------------------
#--------------------------------------------------
| gpl-3.0 | 3,482,664,729,385,816,000 | 37.078125 | 89 | 0.619614 | false |
Pointedstick/ReplicatorG | skein_engines/skeinforge-31/fabmetheus_utilities/miscellaneous/nophead/layers.py | 23 | 2549 | from vector3 import Vector3
import Image, ImageDraw
def bounding_cube(layers):
min_x = 999999
min_y = 999999
min_z = 999999
max_x = -999999
max_y = -999999
max_z = -999999
for layer in layers:
for thread in layer:
for point in thread:
if point.x > max_x:
max_x = point.x
if point.y > max_y:
max_y = point.y
if point.z > max_z:
max_z = point.z
if point.x < min_x:
min_x = point.x
if point.y < min_y:
min_y = point.y
if point.z < min_z:
min_z = point.z
return Vector3(min_x, min_y, min_z), Vector3(max_x, max_y, max_z)
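# (Illustrative: bounding_cube returns the (min, max) corners of all thread
# points as a pair of Vector3s.)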
def make_images(layers):
palette = []
for i in xrange(256):
#resistor colour codes
if i == 1:
palette.extend((134, 100, 57)) # brown
elif i == 2:
palette.extend((255, 0, 0)) # red
elif i == 3:
palette.extend((218, 90, 35)) # orange
elif i == 4:
palette.extend((255, 255, 0)) # yellow
elif i == 5:
palette.extend(( 0, 255, 0)) # green
elif i == 6:
palette.extend(( 0, 0, 255)) # blue
elif i == 7:
palette.extend((255, 0, 255)) # purple
else:
palette.extend((i, i, i)) # shades of grey
cube = bounding_cube(layers)
scale = 10
x0 = int(cube[0].x) - 1
y0 = int(cube[0].y) - 1
width = int(round(cube[1].x - x0) + 1) * scale
height = int(round(cube[1].y - y0) + 1) * scale
last_pos = None
images = []
for layer in layers:
image = Image.new('P', (width, height), 255)
image.putpalette(palette)
draw = ImageDraw.Draw(image)
segment = 0
for thread in layer:
if last_pos != None:
draw.line(((( last_pos.x - x0) * scale, height - ( last_pos.y - y0) * scale),
((thread[0].x - x0) * scale, height - (thread[0].y - y0) * scale)), fill = 128)
last_pos = thread[0].copy()
for point in thread[1:]:
draw.line((((last_pos.x - x0) * scale, height - (last_pos.y - y0) * scale),
( (point.x - x0) * scale, height - (point.y - y0) * scale)), fill = segment % 8)
last_pos = point.copy()
segment = segment + 1
images.append(image)
return images
| gpl-2.0 | -5,658,058,831,387,576,000 | 34.402778 | 112 | 0.459396 | false |
wenhuizhang/neutron | neutron/tests/api/test_metering_extensions.py | 47 | 6880 | # Copyright (C) 2014 eNovance SAS <[email protected]>
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from oslo_log import log as logging
from tempest_lib.common.utils import data_utils
from neutron.tests.api import base
from neutron.tests.tempest import test
LOG = logging.getLogger(__name__)
class MeteringTestJSON(base.BaseAdminNetworkTest):
"""
Tests the following operations in the Neutron API using the REST client for
Neutron:
List, Show, Create, Delete Metering labels
List, Show, Create, Delete Metering labels rules
"""
@classmethod
def resource_setup(cls):
super(MeteringTestJSON, cls).resource_setup()
if not test.is_extension_enabled('metering', 'network'):
msg = "metering extension not enabled."
raise cls.skipException(msg)
description = "metering label created by tempest"
name = data_utils.rand_name("metering-label")
cls.metering_label = cls.create_metering_label(name, description)
remote_ip_prefix = ("10.0.0.0/24" if cls._ip_version == 4
else "fd02::/64")
direction = "ingress"
cls.metering_label_rule = cls.create_metering_label_rule(
remote_ip_prefix, direction,
metering_label_id=cls.metering_label['id'])
def _delete_metering_label(self, metering_label_id):
# Deletes a label and verifies if it is deleted or not
self.admin_client.delete_metering_label(metering_label_id)
# Asserting that the label is not found in list after deletion
labels = self.admin_client.list_metering_labels(id=metering_label_id)
self.assertEqual(len(labels['metering_labels']), 0)
def _delete_metering_label_rule(self, metering_label_rule_id):
# Deletes a rule and verifies if it is deleted or not
self.admin_client.delete_metering_label_rule(
metering_label_rule_id)
# Asserting that the rule is not found in list after deletion
rules = (self.admin_client.list_metering_label_rules(
id=metering_label_rule_id))
self.assertEqual(len(rules['metering_label_rules']), 0)
@test.attr(type='smoke')
@test.idempotent_id('e2fb2f8c-45bf-429a-9f17-171c70444612')
def test_list_metering_labels(self):
# Verify label filtering
body = self.admin_client.list_metering_labels(id=33)
metering_labels = body['metering_labels']
self.assertEqual(0, len(metering_labels))
@test.attr(type='smoke')
@test.idempotent_id('ec8e15ff-95d0-433b-b8a6-b466bddb1e50')
def test_create_delete_metering_label_with_filters(self):
# Creates a label
name = data_utils.rand_name('metering-label-')
description = "label created by tempest"
body = self.admin_client.create_metering_label(name=name,
description=description)
metering_label = body['metering_label']
self.addCleanup(self._delete_metering_label,
metering_label['id'])
# Assert whether created labels are found in labels list or fail
# if created labels are not found in labels list
labels = (self.admin_client.list_metering_labels(
id=metering_label['id']))
self.assertEqual(len(labels['metering_labels']), 1)
@test.attr(type='smoke')
@test.idempotent_id('30abb445-0eea-472e-bd02-8649f54a5968')
def test_show_metering_label(self):
# Verifies the details of a label
body = self.admin_client.show_metering_label(self.metering_label['id'])
metering_label = body['metering_label']
self.assertEqual(self.metering_label['id'], metering_label['id'])
self.assertEqual(self.metering_label['tenant_id'],
metering_label['tenant_id'])
self.assertEqual(self.metering_label['name'], metering_label['name'])
self.assertEqual(self.metering_label['description'],
metering_label['description'])
@test.attr(type='smoke')
@test.idempotent_id('cc832399-6681-493b-9d79-0202831a1281')
def test_list_metering_label_rules(self):
# Verify rule filtering
body = self.admin_client.list_metering_label_rules(id=33)
metering_label_rules = body['metering_label_rules']
self.assertEqual(0, len(metering_label_rules))
@test.attr(type='smoke')
@test.idempotent_id('f4d547cd-3aee-408f-bf36-454f8825e045')
def test_create_delete_metering_label_rule_with_filters(self):
# Creates a rule
remote_ip_prefix = ("10.0.1.0/24" if self._ip_version == 4
else "fd03::/64")
body = (self.admin_client.create_metering_label_rule(
remote_ip_prefix=remote_ip_prefix,
direction="ingress",
metering_label_id=self.metering_label['id']))
metering_label_rule = body['metering_label_rule']
self.addCleanup(self._delete_metering_label_rule,
metering_label_rule['id'])
# Assert whether created rules are found in rules list or fail
# if created rules are not found in rules list
rules = (self.admin_client.list_metering_label_rules(
id=metering_label_rule['id']))
self.assertEqual(len(rules['metering_label_rules']), 1)
@test.attr(type='smoke')
@test.idempotent_id('b7354489-96ea-41f3-9452-bace120fb4a7')
def test_show_metering_label_rule(self):
# Verifies the details of a rule
body = (self.admin_client.show_metering_label_rule(
self.metering_label_rule['id']))
metering_label_rule = body['metering_label_rule']
self.assertEqual(self.metering_label_rule['id'],
metering_label_rule['id'])
self.assertEqual(self.metering_label_rule['remote_ip_prefix'],
metering_label_rule['remote_ip_prefix'])
self.assertEqual(self.metering_label_rule['direction'],
metering_label_rule['direction'])
self.assertEqual(self.metering_label_rule['metering_label_id'],
metering_label_rule['metering_label_id'])
self.assertFalse(metering_label_rule['excluded'])
class MeteringIpV6TestJSON(MeteringTestJSON):
_ip_version = 6
| apache-2.0 | 4,943,056,178,554,805,000 | 44.562914 | 79 | 0.64593 | false |
nickanderson/ansible | lib/ansible/color.py | 134 | 2388 | # (c) 2012-2014, Michael DeHaan <[email protected]>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
import sys
import constants
ANSIBLE_COLOR=True
if constants.ANSIBLE_NOCOLOR:
ANSIBLE_COLOR=False
elif not hasattr(sys.stdout, 'isatty') or not sys.stdout.isatty():
ANSIBLE_COLOR=False
else:
try:
import curses
curses.setupterm()
if curses.tigetnum('colors') < 0:
ANSIBLE_COLOR=False
except ImportError:
# curses library was not found
pass
except curses.error:
# curses returns an error (e.g. could not find terminal)
ANSIBLE_COLOR=False
if constants.ANSIBLE_FORCE_COLOR:
ANSIBLE_COLOR=True
# --- begin "pretty"
#
# pretty - A miniature library that provides a Python print and stdout
# wrapper that makes colored terminal text easier to use (e.g. without
# having to mess around with ANSI escape sequences). This code is public
# domain - there is no license except that you must leave this header.
#
# Copyright (C) 2008 Brian Nez <thedude at bri1 dot com>
#
# http://nezzen.net/2008/06/23/colored-text-in-python-using-ansi-escape-sequences/
codeCodes = {
'black': '0;30', 'bright gray': '0;37',
'blue': '0;34', 'white': '1;37',
'green': '0;32', 'bright blue': '1;34',
'cyan': '0;36', 'bright green': '1;32',
'red': '0;31', 'bright cyan': '1;36',
'purple': '0;35', 'bright red': '1;31',
'yellow': '0;33', 'bright purple': '1;35',
'dark gray': '1;30', 'bright yellow': '1;33',
'normal': '0'
}
def stringc(text, color):
"""String in color."""
if ANSIBLE_COLOR:
return "\033["+codeCodes[color]+"m"+text+"\033[0m"
else:
return text
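# Example (illustrative): stringc("ok", "green") returns
# "\033[0;32mok\033[0m" when color is enabled, and "ok" unchanged otherwise.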
# --- end "pretty"
| gpl-3.0 | 5,381,599,817,891,818,000 | 31.27027 | 82 | 0.649497 | false |
mconstantin/watchdog | tools/bootstrap.py | 8 | 10448 | #
#
# Copyright (c) 2006 Zope Foundation and Contributors.
# All Rights Reserved.
#
# This software is subject to the provisions of the Zope Public License,
# Version 2.1 (ZPL). A copy of the ZPL should accompany this distribution.
# THIS SOFTWARE IS PROVIDED "AS IS" AND ANY AND ALL EXPRESS OR IMPLIED
# WARRANTIES ARE DISCLAIMED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED
# WARRANTIES OF TITLE, MERCHANTABILITY, AGAINST INFRINGEMENT, AND FITNESS
# FOR A PARTICULAR PURPOSE.
#
#
"""Bootstrap a buildout-based project
Simply run this script in a directory containing a buildout.cfg.
The script accepts buildout command-line options, so you can
use the -c option to specify an alternate configuration file.
"""
import os
import sys
import shutil
import tempfile
try:
import urllib.request as urllib2
except ImportError:
import urllib2
import subprocess
from optparse import OptionParser
if sys.platform == 'win32':
def quote(c):
if ' ' in c:
return '"%s"' % c # work around spawn lamosity on windows
else:
return c
else:
quote = str
# See zc.buildout.easy_install._has_broken_dash_S for motivation and comments.
stdout, stderr = subprocess.Popen(
[sys.executable, '-S', '-c',
'try:\n'
' import pickle\n'
'except ImportError:\n'
' print(1)\n'
'else:\n'
' print(0)\n'],
stdout=subprocess.PIPE, stderr=subprocess.PIPE).communicate()
has_broken_dash_S = bool(int(stdout.strip()))
# In order to be more robust in the face of system Pythons, we want to
# run without site-packages loaded. This is somewhat tricky, in
# particular because Python 2.6's distutils imports site, so starting
# with the -S flag is not sufficient. However, we'll start with that:
if not has_broken_dash_S and 'site' in sys.modules:
# We will restart with python -S.
args = sys.argv[:]
args[0:0] = [sys.executable, '-S']
args = list(map(quote, args))
os.execv(sys.executable, args)
# Now we are running with -S. We'll get the clean sys.path, import site
# because distutils will do it later, and then reset the path and clean
# out any namespace packages from site-packages that might have been
# loaded by .pth files.
clean_path = sys.path[:]
import site
sys.path[:] = clean_path
for k, v in list(sys.modules.items()):
if k in ('setuptools', 'pkg_resources') or (
hasattr(v, '__path__') and
len(v.__path__) == 1 and
not os.path.exists(os.path.join(v.__path__[0], '__init__.py'))):
# This is a namespace package. Remove it.
sys.modules.pop(k)
is_jython = sys.platform.startswith('java')
setuptools_source = 'http://peak.telecommunity.com/dist/ez_setup.py'
distribute_source = 'http://python-distribute.org/distribute_setup.py'
# parsing arguments
def normalize_to_url(option, opt_str, value, parser):
if value:
if '://' not in value: # It doesn't smell like a URL.
value = 'file://%s' % (
urllib2.pathname2url(
os.path.abspath(os.path.expanduser(value))),)
if opt_str == '--download-base' and not value.endswith('/'):
# Download base needs a trailing slash to make the world happy.
value += '/'
else:
value = None
name = opt_str[2:].replace('-', '_')
setattr(parser.values, name, value)
usage = '''\
[DESIRED PYTHON FOR BUILDOUT] bootstrap.py [options]
Bootstraps a buildout-based project.
Simply run this script in a directory containing a buildout.cfg, using the
Python that you want bin/buildout to use.
Note that by using --setup-source and --download-base to point to
local resources, you can keep this script from going over the network.
'''
parser = OptionParser(usage=usage)
parser.add_option("-v", "--version", dest="version",
help="use a specific zc.buildout version")
parser.add_option("--setup-version", dest="setup_version",
help="The version of setuptools or distribute to use.")
parser.add_option("-d", "--distribute",
action="store_true", dest="use_distribute",
default=sys.version_info[0] >= 3,
help="Use Distribute rather than Setuptools.")
parser.add_option("--setup-source", action="callback", dest="setup_source",
callback=normalize_to_url, nargs=1, type="string",
help=("Specify a URL or file location for the setup file. "
"If you use Setuptools, this will default to " +
setuptools_source + "; if you use Distribute, this "
"will default to " + distribute_source + "."))
parser.add_option("--download-base", action="callback", dest="download_base",
callback=normalize_to_url, nargs=1, type="string",
help=("Specify a URL or directory for downloading "
"zc.buildout and either Setuptools or Distribute. "
"Defaults to PyPI."))
parser.add_option("--eggs",
help=("Specify a directory for storing eggs. Defaults to "
"a temporary directory that is deleted when the "
"bootstrap script completes."))
parser.add_option("-t", "--accept-buildout-test-releases",
dest='accept_buildout_test_releases',
action="store_true",
default=sys.version_info[0] > 2,
help=("Normally, if you do not specify a --version, the "
"bootstrap script and buildout gets the newest "
"*final* versions of zc.buildout and its recipes and "
"extensions for you. If you use this flag, "
"bootstrap and buildout will get the newest releases "
"even if they are alphas or betas."))
parser.add_option("-c", None, action="store", dest="config_file",
help=("Specify the path to the buildout configuration "
"file to be used."))
options, args = parser.parse_args()
# if -c was provided, we push it back into args for buildout's main function
if options.config_file is not None:
args += ['-c', options.config_file]
if options.eggs:
eggs_dir = os.path.abspath(os.path.expanduser(options.eggs))
else:
eggs_dir = tempfile.mkdtemp()
if options.setup_source is None:
if options.use_distribute:
options.setup_source = distribute_source
else:
options.setup_source = setuptools_source
if options.accept_buildout_test_releases:
args.append('buildout:accept-buildout-test-releases=true')
args.append('bootstrap')
try:
import pkg_resources
import setuptools # A flag. Sometimes pkg_resources is installed alone.
if not hasattr(pkg_resources, '_distribute'):
raise ImportError
except ImportError:
ez_code = urllib2.urlopen(
options.setup_source).read().replace('\r\n'.encode(), '\n'.encode())
ez = {}
exec(ez_code, ez)
setup_args = dict(to_dir=eggs_dir, download_delay=0)
if options.download_base:
setup_args['download_base'] = options.download_base
if options.setup_version:
setup_args['version'] = options.setup_version
if options.use_distribute:
setup_args['no_fake'] = True
ez['use_setuptools'](**setup_args)
if 'pkg_resources' in sys.modules:
if sys.version_info[0] >= 3:
import imp
reload_ = imp.reload
else:
reload_ = reload
reload_(sys.modules['pkg_resources'])
import pkg_resources
# This does not (always?) update the default working set. We will
# do it.
for path in sys.path:
if path not in pkg_resources.working_set.entries:
pkg_resources.working_set.add_entry(path)
cmd = [quote(sys.executable),
'-c',
quote('from setuptools.command.easy_install import main; main()'),
'-mqNxd',
quote(eggs_dir)]
if not has_broken_dash_S:
cmd.insert(1, '-S')
find_links = options.download_base
if not find_links:
find_links = os.environ.get('bootstrap-testing-find-links')
if find_links:
cmd.extend(['-f', quote(find_links)])
if options.use_distribute:
setup_requirement = 'distribute'
else:
setup_requirement = 'setuptools'
ws = pkg_resources.working_set
setup_requirement_path = ws.find(
pkg_resources.Requirement.parse(setup_requirement)).location
env = dict(
os.environ,
PYTHONPATH=setup_requirement_path)
requirement = 'zc.buildout'
version = options.version
if version is None and not options.accept_buildout_test_releases:
# Figure out the most recent final version of zc.buildout.
import setuptools.package_index
_final_parts = '*final-', '*final'
def _final_version(parsed_version):
for part in parsed_version:
if (part[:1] == '*') and (part not in _final_parts):
return False
return True
index = setuptools.package_index.PackageIndex(
search_path=[setup_requirement_path])
if find_links:
index.add_find_links((find_links,))
req = pkg_resources.Requirement.parse(requirement)
if index.obtain(req) is not None:
best = []
bestv = None
for dist in index[req.project_name]:
distv = dist.parsed_version
if _final_version(distv):
if bestv is None or distv > bestv:
best = [dist]
bestv = distv
elif distv == bestv:
best.append(dist)
if best:
best.sort()
version = best[-1].version
if version:
requirement = '=='.join((requirement, version))
cmd.append(requirement)
if is_jython:
import subprocess
exitcode = subprocess.Popen(cmd, env=env).wait()
else: # Windows prefers this, apparently; otherwise we would prefer subprocess
exitcode = os.spawnle(*([os.P_WAIT, sys.executable] + cmd + [env]))
if exitcode != 0:
sys.stdout.flush()
sys.stderr.flush()
print("An error occurred when trying to install zc.buildout. "
"Look above this message for any errors that "
"were output by easy_install.")
sys.exit(exitcode)
ws.add_entry(eggs_dir)
ws.require(requirement)
import zc.buildout.buildout
zc.buildout.buildout.main(args)
if not options.eggs: # clean up temporary egg directory
shutil.rmtree(eggs_dir)
| apache-2.0 | 2,246,748,672,473,414,400 | 35.659649 | 79 | 0.632753 | false |
MartijnBraam/CouchPotatoServer | libs/pyasn1/codec/cer/decoder.py | 261 | 1230 | # CER decoder
from pyasn1.type import univ
from pyasn1.codec.ber import decoder
from pyasn1.compat.octets import oct2int
from pyasn1 import error
class BooleanDecoder(decoder.AbstractSimpleDecoder):
protoComponent = univ.Boolean(0)
def valueDecoder(self, fullSubstrate, substrate, asn1Spec, tagSet, length,
state, decodeFun, substrateFun):
head, tail = substrate[:length], substrate[length:]
if not head:
raise error.PyAsn1Error('Empty substrate')
byte = oct2int(head[0])
# CER/DER specifies encoding of TRUE as 0xFF and FALSE as 0x0, while
# BER allows any non-zero value as TRUE; cf. sections 8.2.2. and 11.1
# in http://www.itu.int/ITU-T/studygroups/com17/languages/X.690-0207.pdf
if byte == 0xff:
value = 1
elif byte == 0x00:
value = 0
else:
raise error.PyAsn1Error('Boolean CER violation: %s' % byte)
return self._createComponent(asn1Spec, tagSet, value), tail
tagMap = decoder.tagMap.copy()
tagMap.update({
univ.Boolean.tagSet: BooleanDecoder()
})
typeMap = decoder.typeMap
class Decoder(decoder.Decoder): pass
decode = Decoder(tagMap, decoder.typeMap)
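# Typical usage (illustrative): value, rest = decode(cer_encoded_bytes)
# returns the decoded ASN.1 object plus any trailing octets.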
| gpl-3.0 | -3,743,252,936,946,358,000 | 34.142857 | 80 | 0.664228 | false |
AugurProject/sidecoin | contrib/spendfrom/spendfrom.py | 2 | 10094 | #!/usr/bin/env python
#
# Use the raw transactions API to spend sidecoins received on particular addresses,
# and send any change back to that same address.
#
# Example usage:
# spendfrom.py # Lists available funds
# spendfrom.py --from=ADDRESS --to=ADDRESS --amount=11.00
#
# Assumes it will talk to a sidecoind or Sidecoin-Qt running
# on localhost.
#
# Depends on jsonrpc
#
from decimal import *
import getpass
import math
import os
import os.path
import platform
import sys
import time
from jsonrpc import ServiceProxy, json
BASE_FEE=Decimal("0.001")
def check_json_precision():
"""Make sure json library being used does not lose precision converting BTC values"""
n = Decimal("20000000.00000003")
satoshis = int(json.loads(json.dumps(float(n)))*1.0e8)
if satoshis != 2000000000000003:
raise RuntimeError("JSON encode/decode loses precision")
def determine_db_dir():
"""Return the default location of the sidecoin data directory"""
if platform.system() == "Darwin":
return os.path.expanduser("~/Library/Application Support/Sidecoin/")
elif platform.system() == "Windows":
return os.path.join(os.environ['APPDATA'], "Sidecoin")
return os.path.expanduser("~/.sidecoin")
def read_sidecoin_config(dbdir):
"""Read the sidecoin.conf file from dbdir, returns dictionary of settings"""
from ConfigParser import SafeConfigParser
class FakeSecHead(object):
def __init__(self, fp):
self.fp = fp
self.sechead = '[all]\n'
def readline(self):
if self.sechead:
try: return self.sechead
finally: self.sechead = None
else:
s = self.fp.readline()
if s.find('#') != -1:
s = s[0:s.find('#')].strip() +"\n"
return s
config_parser = SafeConfigParser()
config_parser.readfp(FakeSecHead(open(os.path.join(dbdir, "sidecoin.conf"))))
return dict(config_parser.items("all"))
def connect_JSON(config):
"""Connect to a sidecoin JSON-RPC server"""
testnet = config.get('testnet', '0')
testnet = (int(testnet) > 0) # 0/1 in config file, convert to True/False
if not 'rpcport' in config:
config['rpcport'] = 18332 if testnet else 8332
connect = "http://%s:%[email protected]:%s"%(config['rpcuser'], config['rpcpassword'], config['rpcport'])
try:
result = ServiceProxy(connect)
# ServiceProxy is lazy-connect, so send an RPC command mostly to catch connection errors,
# but also make sure the sidecoind we're talking to is/isn't testnet:
if result.getmininginfo()['testnet'] != testnet:
sys.stderr.write("RPC server at "+connect+" testnet setting mismatch\n")
sys.exit(1)
return result
except:
sys.stderr.write("Error connecting to RPC server at "+connect+"\n")
sys.exit(1)
def unlock_wallet(sidecoind):
info = sidecoind.getinfo()
if 'unlocked_until' not in info:
return True # wallet is not encrypted
t = int(info['unlocked_until'])
if t <= time.time():
try:
passphrase = getpass.getpass("Wallet is locked; enter passphrase: ")
sidecoind.walletpassphrase(passphrase, 5)
except:
sys.stderr.write("Wrong passphrase\n")
info = sidecoind.getinfo()
return int(info['unlocked_until']) > time.time()
def list_available(sidecoind):
address_summary = dict()
address_to_account = dict()
for info in sidecoind.listreceivedbyaddress(0):
address_to_account[info["address"]] = info["account"]
unspent = sidecoind.listunspent(0)
for output in unspent:
# listunspent doesn't give addresses, so:
rawtx = sidecoind.getrawtransaction(output['txid'], 1)
vout = rawtx["vout"][output['vout']]
pk = vout["scriptPubKey"]
# This code only deals with ordinary pay-to-sidecoin-address
# or pay-to-script-hash outputs right now; anything exotic is ignored.
if pk["type"] != "pubkeyhash" and pk["type"] != "scripthash":
continue
address = pk["addresses"][0]
if address in address_summary:
address_summary[address]["total"] += vout["value"]
address_summary[address]["outputs"].append(output)
else:
address_summary[address] = {
"total" : vout["value"],
"outputs" : [output],
"account" : address_to_account.get(address, "")
}
return address_summary
def select_coins(needed, inputs):
# Feel free to improve this, this is good enough for my simple needs:
outputs = []
have = Decimal("0.0")
n = 0
while have < needed and n < len(inputs):
outputs.append({ "txid":inputs[n]["txid"], "vout":inputs[n]["vout"]})
have += inputs[n]["amount"]
n += 1
return (outputs, have-needed)
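# (Illustrative: with needed = 1.5 and two inputs worth 1.0 each, select_coins
# returns both outputs and a change amount of 0.5.)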
def create_tx(sidecoind, fromaddresses, toaddress, amount, fee):
all_coins = list_available(sidecoind)
total_available = Decimal("0.0")
needed = amount+fee
potential_inputs = []
for addr in fromaddresses:
if addr not in all_coins:
continue
potential_inputs.extend(all_coins[addr]["outputs"])
total_available += all_coins[addr]["total"]
if total_available < needed:
sys.stderr.write("Error, only %f BTC available, need %f\n"%(total_available, needed));
sys.exit(1)
#
# Note:
# Python's json/jsonrpc modules have inconsistent support for Decimal numbers.
# Instead of wrestling with getting json.dumps() (used by jsonrpc) to encode
# Decimals, I'm casting amounts to float before sending them to sidecoind.
#
outputs = { toaddress : float(amount) }
(inputs, change_amount) = select_coins(needed, potential_inputs)
if change_amount > BASE_FEE: # don't bother with zero or tiny change
change_address = fromaddresses[-1]
if change_address in outputs:
outputs[change_address] += float(change_amount)
else:
outputs[change_address] = float(change_amount)
rawtx = sidecoind.createrawtransaction(inputs, outputs)
signed_rawtx = sidecoind.signrawtransaction(rawtx)
if not signed_rawtx["complete"]:
sys.stderr.write("signrawtransaction failed\n")
sys.exit(1)
txdata = signed_rawtx["hex"]
return txdata
def compute_amount_in(sidecoind, txinfo):
result = Decimal("0.0")
for vin in txinfo['vin']:
in_info = sidecoind.getrawtransaction(vin['txid'], 1)
vout = in_info['vout'][vin['vout']]
result = result + vout['value']
return result
def compute_amount_out(txinfo):
result = Decimal("0.0")
for vout in txinfo['vout']:
result = result + vout['value']
return result
def sanity_test_fee(sidecoind, txdata_hex, max_fee):
class FeeError(RuntimeError):
pass
try:
        txinfo = sidecoind.decoderawtransaction(txdata_hex)
        total_in = compute_amount_in(sidecoind, txinfo)
        total_out = compute_amount_out(txinfo)
        fee = total_in - total_out  # needed by the size-based checks below
        if fee > max_fee:
            raise FeeError("Rejecting transaction, unreasonable fee of "+str(fee))
tx_size = len(txdata_hex)/2
kb = tx_size/1000 # integer division rounds down
if kb > 1 and fee < BASE_FEE:
raise FeeError("Rejecting no-fee transaction, larger than 1000 bytes")
if total_in < 0.01 and fee < BASE_FEE:
raise FeeError("Rejecting no-fee, tiny-amount transaction")
# Exercise for the reader: compute transaction priority, and
# warn if this is a very-low-priority transaction
except FeeError as err:
sys.stderr.write((str(err)+"\n"))
sys.exit(1)
def main():
import optparse
parser = optparse.OptionParser(usage="%prog [options]")
parser.add_option("--from", dest="fromaddresses", default=None,
help="addresses to get sidecoins from")
parser.add_option("--to", dest="to", default=None,
help="address to get send sidecoins to")
parser.add_option("--amount", dest="amount", default=None,
help="amount to send")
parser.add_option("--fee", dest="fee", default="0.0",
help="fee to include")
parser.add_option("--datadir", dest="datadir", default=determine_db_dir(),
help="location of sidecoin.conf file with RPC username/password (default: %default)")
parser.add_option("--testnet", dest="testnet", default=False, action="store_true",
help="Use the test network")
parser.add_option("--dry_run", dest="dry_run", default=False, action="store_true",
help="Don't broadcast the transaction, just create and print the transaction data")
(options, args) = parser.parse_args()
check_json_precision()
config = read_sidecoin_config(options.datadir)
if options.testnet: config['testnet'] = True
sidecoind = connect_JSON(config)
if options.amount is None:
address_summary = list_available(sidecoind)
for address,info in address_summary.iteritems():
n_transactions = len(info['outputs'])
if n_transactions > 1:
print("%s %.8f %s (%d transactions)"%(address, info['total'], info['account'], n_transactions))
else:
print("%s %.8f %s"%(address, info['total'], info['account']))
else:
fee = Decimal(options.fee)
amount = Decimal(options.amount)
while unlock_wallet(sidecoind) == False:
pass # Keep asking for passphrase until they get it right
txdata = create_tx(sidecoind, options.fromaddresses.split(","), options.to, amount, fee)
sanity_test_fee(sidecoind, txdata, amount*Decimal("0.01"))
if options.dry_run:
print(txdata)
else:
txid = sidecoind.sendrawtransaction(txdata)
print(txid)
if __name__ == '__main__':
main()
| mit | -1,145,647,441,317,409,400 | 36.805243 | 111 | 0.621359 | false |
Permutatrix/servo | tests/wpt/mozilla/tests/webgl/conformance-2.0.0/py/tex_image_test_generator.py | 27 | 9132 | #!/usr/bin/env python
# Copyright (c) 2015 The Khronos Group Inc.
#
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and/or associated documentation files (the
# "Materials"), to deal in the Materials without restriction, including
# without limitation the rights to use, copy, modify, merge, publish,
# distribute, sublicense, and/or sell copies of the Materials, and to
# permit persons to whom the Materials are furnished to do so, subject to
# the following conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Materials.
#
# THE MATERIALS ARE PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
# EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
# MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
# IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
# CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
# TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
# MATERIALS OR THE USE OR OTHER DEALINGS IN THE MATERIALS.
"""
Generator for tex-2d* and tex-3d* tests.
This file needs to be run in its folder.
"""
import os
import os.path
import sys
_LICENSE = """<!--
Copyright (c) 2015 The Khronos Group Inc.
Permission is hereby granted, free of charge, to any person obtaining a
copy of this software and/or associated documentation files (the
"Materials"), to deal in the Materials without restriction, including
without limitation the rights to use, copy, modify, merge, publish,
distribute, sublicense, and/or sell copies of the Materials, and to
permit persons to whom the Materials are furnished to do so, subject to
the following conditions:
The above copyright notice and this permission notice shall be included
in all copies or substantial portions of the Materials.
THE MATERIALS ARE PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF
MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT.
IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY
CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT,
TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE
MATERIALS OR THE USE OR OTHER DEALINGS IN THE MATERIALS.
-->
"""
_DO_NOT_EDIT_WARNING = """<!--
This file is auto-generated from py/tex_image_test_generator.py
DO NOT EDIT!
-->
"""
_ELEMENT_TYPES = [
'canvas',
'canvas-sub-rectangle',
'image',
'image-data',
'svg-image',
'video',
'webgl-canvas',
'image-bitmap-from-image-data',
'image-bitmap-from-image',
'image-bitmap-from-video',
'image-bitmap-from-canvas',
'image-bitmap-from-blob',
'image-bitmap-from-image-bitmap'
]
_FORMATS_TYPES_WEBGL1 = [
{'internal_format': 'RGB', 'format': 'RGB', 'type': 'UNSIGNED_BYTE' },
{'internal_format': 'RGB', 'format': 'RGB', 'type': 'UNSIGNED_SHORT_5_6_5' },
{'internal_format': 'RGBA', 'format': 'RGBA', 'type': 'UNSIGNED_BYTE' },
{'internal_format': 'RGBA', 'format': 'RGBA', 'type': 'UNSIGNED_SHORT_4_4_4_4' },
{'internal_format': 'RGBA', 'format': 'RGBA', 'type': 'UNSIGNED_SHORT_5_5_5_1' },
]
_FORMATS_TYPES_WEBGL2 = [
{'internal_format': 'R8', 'format': 'RED', 'type': 'UNSIGNED_BYTE' },
{'internal_format': 'R16F', 'format': 'RED', 'type': 'HALF_FLOAT' },
{'internal_format': 'R16F', 'format': 'RED', 'type': 'FLOAT' },
{'internal_format': 'R32F', 'format': 'RED', 'type': 'FLOAT' },
{'internal_format': 'R8UI', 'format': 'RED_INTEGER', 'type': 'UNSIGNED_BYTE' },
{'internal_format': 'RG8', 'format': 'RG', 'type': 'UNSIGNED_BYTE' },
{'internal_format': 'RG16F', 'format': 'RG', 'type': 'HALF_FLOAT' },
{'internal_format': 'RG16F', 'format': 'RG', 'type': 'FLOAT' },
{'internal_format': 'RG32F', 'format': 'RG', 'type': 'FLOAT' },
{'internal_format': 'RG8UI', 'format': 'RG_INTEGER', 'type': 'UNSIGNED_BYTE' },
{'internal_format': 'RGB8', 'format': 'RGB', 'type': 'UNSIGNED_BYTE' },
{'internal_format': 'SRGB8', 'format': 'RGB', 'type': 'UNSIGNED_BYTE' },
{'internal_format': 'RGB565', 'format': 'RGB', 'type': 'UNSIGNED_BYTE' },
{'internal_format': 'RGB565', 'format': 'RGB', 'type': 'UNSIGNED_SHORT_5_6_5' },
{'internal_format': 'R11F_G11F_B10F', 'format': 'RGB', 'type': 'UNSIGNED_INT_10F_11F_11F_REV' },
{'internal_format': 'R11F_G11F_B10F', 'format': 'RGB', 'type': 'HALF_FLOAT' },
{'internal_format': 'R11F_G11F_B10F', 'format': 'RGB', 'type': 'FLOAT' },
{'internal_format': 'RGB9_E5', 'format': 'RGB', 'type': 'HALF_FLOAT' },
{'internal_format': 'RGB9_E5', 'format': 'RGB', 'type': 'FLOAT' },
{'internal_format': 'RGB16F', 'format': 'RGB', 'type': 'HALF_FLOAT' },
{'internal_format': 'RGB16F', 'format': 'RGB', 'type': 'FLOAT' },
{'internal_format': 'RGB32F', 'format': 'RGB', 'type': 'FLOAT' },
{'internal_format': 'RGB8UI', 'format': 'RGB_INTEGER', 'type': 'UNSIGNED_BYTE' },
{'internal_format': 'RGBA8', 'format': 'RGBA', 'type': 'UNSIGNED_BYTE' },
{'internal_format': 'SRGB8_ALPHA8', 'format': 'RGBA', 'type': 'UNSIGNED_BYTE' },
{'internal_format': 'RGB5_A1', 'format': 'RGBA', 'type': 'UNSIGNED_BYTE' },
{'internal_format': 'RGB5_A1', 'format': 'RGBA', 'type': 'UNSIGNED_SHORT_5_5_5_1' },
{'internal_format': 'RGBA4', 'format': 'RGBA', 'type': 'UNSIGNED_BYTE' },
{'internal_format': 'RGBA4', 'format': 'RGBA', 'type': 'UNSIGNED_SHORT_4_4_4_4' },
{'internal_format': 'RGBA16F', 'format': 'RGBA', 'type': 'HALF_FLOAT' },
{'internal_format': 'RGBA16F', 'format': 'RGBA', 'type': 'FLOAT' },
{'internal_format': 'RGBA32F', 'format': 'RGBA', 'type': 'FLOAT' },
{'internal_format': 'RGBA8UI', 'format': 'RGBA_INTEGER', 'type': 'UNSIGNED_BYTE' },
]
def GenerateFilename(dimension, element_type, internal_format, format, type):
"""Generate test filename."""
filename = ("tex-" + dimension + "d-" +
internal_format + "-" + format + "-" + type + ".html")
return filename.lower()
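# (For example, GenerateFilename('2', 'image', 'RGBA', 'RGBA', 'UNSIGNED_BYTE')
# returns "tex-2d-rgba-rgba-unsigned_byte.html"; element_type only selects the
# output directory, not the filename itself.)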
def WriteTest(filename, dimension, element_type, internal_format, format, type, default_context_version):
"""Write one test."""
file = open(filename, "wb")
file.write(_LICENSE)
file.write(_DO_NOT_EDIT_WARNING)
code = """
<!DOCTYPE html>
<html>
<head>
<meta charset="utf-8">
<link rel="stylesheet" href="../../../resources/js-test-style.css"/>
<script src="../../../js/js-test-pre.js"></script>
<script src="../../../js/webgl-test-utils.js"></script>
<script src="../../../js/tests/tex-image-and-sub-image-utils.js"></script>"""
if element_type == 'image-bitmap-from-image-data' or element_type == 'image-bitmap-from-image' or \
element_type == 'image-bitmap-from-video' or element_type == 'image-bitmap-from-canvas' or \
element_type == 'image-bitmap-from-blob' or element_type == 'image-bitmap-from-image-bitmap':
code += """
<script src="../../../js/tests/tex-image-and-sub-image-with-image-bitmap-utils.js"></script>"""
code += """
<script src="../../../js/tests/tex-image-and-sub-image-%(dimension)sd-with-%(element_type)s.js"></script>
</head>
<body>"""
if element_type == 'image-data':
code += """
<canvas id="texcanvas" width="2" height="2"></canvas>"""
code += """
<canvas id="example" width="32" height="32"></canvas>"""
code += """
<div id="description"></div>
<div id="console"></div>
<script>
"use strict";
function testPrologue(gl) {
return true;
}
generateTest("%(internal_format)s", "%(format)s", "%(type)s", testPrologue, "../../../resources/", %(default_context_version)s)();
</script>
</body>
</html>
"""
file.write(code % {
'dimension': dimension,
'element_type': element_type,
'internal_format': internal_format,
'format': format,
'type': type,
'default_context_version': default_context_version,
})
file.close()
def GenerateTests(test_dir, test_cases, dimension, default_context_version):
test_dir_template = test_dir + '/%s'
for element_type in _ELEMENT_TYPES:
os.chdir(test_dir_template % element_type.replace('-', '_'))
if dimension == '3':
# Assume we write 2D tests first.
index_file = open("00_test_list.txt", "ab")
else:
index_file = open("00_test_list.txt", "wb")
for tex_info in test_cases:
internal_format = tex_info['internal_format']
format = tex_info['format']
type = tex_info['type']
filename = GenerateFilename(dimension, element_type, internal_format, format, type)
index_file.write(filename)
index_file.write('\n')
WriteTest(filename, dimension, element_type, internal_format, format, type, default_context_version)
index_file.close();
def main(argv):
"""This is the main function."""
py_dir = os.path.dirname(os.path.realpath(__file__))
GenerateTests(os.path.realpath(py_dir + '/../conformance/textures'), _FORMATS_TYPES_WEBGL1, '2', '1')
GenerateTests(os.path.realpath(py_dir + '/../conformance2/textures'), _FORMATS_TYPES_WEBGL2, '2', '2')
GenerateTests(os.path.realpath(py_dir + '/../conformance2/textures'), _FORMATS_TYPES_WEBGL2, '3', '2')
if __name__ == '__main__':
sys.exit(main(sys.argv[1:]))
| mpl-2.0 | -5,448,339,121,345,196,000 | 41.672897 | 130 | 0.656921 | false |
JohnOrlando/gnuradio-bitshark | gnuradio-core/src/python/gnuradio/blks2impl/dqpsk.py | 9 | 14686 | #
# Copyright 2005,2006,2007,2009 Free Software Foundation, Inc.
#
# This file is part of GNU Radio
#
# GNU Radio is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 3, or (at your option)
# any later version.
#
# GNU Radio is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with GNU Radio; see the file COPYING. If not, write to
# the Free Software Foundation, Inc., 51 Franklin Street,
# Boston, MA 02110-1301, USA.
#
# See gnuradio-examples/python/digital for examples
"""
differential QPSK modulation and demodulation.
"""
from gnuradio import gr, gru, modulation_utils
from math import pi, sqrt
import psk
import cmath
from pprint import pprint
# default values (used in __init__ and add_options)
_def_samples_per_symbol = 2
_def_excess_bw = 0.35
_def_gray_code = True
_def_verbose = False
_def_log = False
_def_costas_alpha = 0.15
_def_gain_mu = None
_def_mu = 0.5
_def_omega_relative_limit = 0.005
# /////////////////////////////////////////////////////////////////////////////
# DQPSK modulator
# /////////////////////////////////////////////////////////////////////////////
class dqpsk_mod(gr.hier_block2):
def __init__(self,
samples_per_symbol=_def_samples_per_symbol,
excess_bw=_def_excess_bw,
gray_code=_def_gray_code,
verbose=_def_verbose,
log=_def_log):
"""
        Hierarchical block for RRC-filtered DQPSK modulation.
The input is a byte stream (unsigned char) and the
output is the complex modulated signal at baseband.
@param samples_per_symbol: samples per symbol >= 2
@type samples_per_symbol: integer
@param excess_bw: Root-raised cosine filter excess bandwidth
@type excess_bw: float
@param gray_code: Tell modulator to Gray code the bits
@type gray_code: bool
@param verbose: Print information about modulator?
@type verbose: bool
        @param log: Print modulation data to files?
        @type log: bool
"""
gr.hier_block2.__init__(self, "dqpsk_mod",
gr.io_signature(1, 1, gr.sizeof_char), # Input signature
gr.io_signature(1, 1, gr.sizeof_gr_complex)) # Output signature
self._samples_per_symbol = samples_per_symbol
self._excess_bw = excess_bw
self._gray_code = gray_code
if not isinstance(samples_per_symbol, int) or samples_per_symbol < 2:
raise TypeError, ("sbp must be an integer >= 2, is %d" % samples_per_symbol)
ntaps = 11 * samples_per_symbol
arity = pow(2,self.bits_per_symbol())
# turn bytes into k-bit vectors
self.bytes2chunks = \
gr.packed_to_unpacked_bb(self.bits_per_symbol(), gr.GR_MSB_FIRST)
if self._gray_code:
self.symbol_mapper = gr.map_bb(psk.binary_to_gray[arity])
else:
self.symbol_mapper = gr.map_bb(psk.binary_to_ungray[arity])
self.diffenc = gr.diff_encoder_bb(arity)
rot = .707 + .707j
rotated_const = map(lambda pt: pt * rot, psk.constellation[arity])
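        # (0.707+0.707j is approximately e^(j*pi/4): every constellation point
        # is rotated by 45 degrees before pulse shaping.)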
self.chunks2symbols = gr.chunks_to_symbols_bc(rotated_const)
# pulse shaping filter
self.rrc_taps = gr.firdes.root_raised_cosine(
self._samples_per_symbol, # gain (sps since we're interpolating by sps)
self._samples_per_symbol, # sampling rate
1.0, # symbol rate
self._excess_bw, # excess bandwidth (roll-off factor)
ntaps)
self.rrc_filter = gr.interp_fir_filter_ccf(self._samples_per_symbol, self.rrc_taps)
if verbose:
self._print_verbage()
if log:
self._setup_logging()
# Connect & Initialize base class
self.connect(self, self.bytes2chunks, self.symbol_mapper, self.diffenc,
self.chunks2symbols, self.rrc_filter, self)
def samples_per_symbol(self):
return self._samples_per_symbol
def bits_per_symbol(self=None): # staticmethod that's also callable on an instance
return 2
bits_per_symbol = staticmethod(bits_per_symbol) # make it a static method. RTFM
def _print_verbage(self):
print "\nModulator:"
print "bits per symbol: %d" % self.bits_per_symbol()
print "Gray code: %s" % self._gray_code
print "RRS roll-off factor: %f" % self._excess_bw
def _setup_logging(self):
print "Modulation logging turned on."
self.connect(self.bytes2chunks,
gr.file_sink(gr.sizeof_char, "tx_bytes2chunks.dat"))
self.connect(self.symbol_mapper,
gr.file_sink(gr.sizeof_char, "tx_graycoder.dat"))
self.connect(self.diffenc,
gr.file_sink(gr.sizeof_char, "tx_diffenc.dat"))
self.connect(self.chunks2symbols,
gr.file_sink(gr.sizeof_gr_complex, "tx_chunks2symbols.dat"))
self.connect(self.rrc_filter,
gr.file_sink(gr.sizeof_gr_complex, "tx_rrc_filter.dat"))
def add_options(parser):
"""
Adds QPSK modulation-specific options to the standard parser
"""
parser.add_option("", "--excess-bw", type="float", default=_def_excess_bw,
help="set RRC excess bandwith factor [default=%default] (PSK)")
parser.add_option("", "--no-gray-code", dest="gray_code",
action="store_false", default=_def_gray_code,
help="disable gray coding on modulated bits (PSK)")
add_options=staticmethod(add_options)
def extract_kwargs_from_options(options):
"""
Given command line options, create dictionary suitable for passing to __init__
"""
return modulation_utils.extract_kwargs_from_options(dqpsk_mod.__init__,
('self',), options)
extract_kwargs_from_options=staticmethod(extract_kwargs_from_options)
# /////////////////////////////////////////////////////////////////////////////
# DQPSK demodulator
#
# Differentially coherent detection of differentially encoded qpsk
# /////////////////////////////////////////////////////////////////////////////
class dqpsk_demod(gr.hier_block2):
def __init__(self,
samples_per_symbol=_def_samples_per_symbol,
excess_bw=_def_excess_bw,
costas_alpha=_def_costas_alpha,
gain_mu=_def_gain_mu,
mu=_def_mu,
omega_relative_limit=_def_omega_relative_limit,
gray_code=_def_gray_code,
verbose=_def_verbose,
log=_def_log):
"""
Hierarchical block for RRC-filtered DQPSK demodulation
The input is the complex modulated signal at baseband.
The output is a stream of bits packed 1 bit per byte (LSB)
@param samples_per_symbol: samples per symbol >= 2
@type samples_per_symbol: float
@param excess_bw: Root-raised cosine filter excess bandwidth
@type excess_bw: float
@param costas_alpha: loop filter gain
@type costas_alphas: float
@param gain_mu: for M&M block
@type gain_mu: float
@param mu: for M&M block
@type mu: float
@param omega_relative_limit: for M&M block
@type omega_relative_limit: float
@param gray_code: Tell modulator to Gray code the bits
@type gray_code: bool
@param verbose: Print information about modulator?
@type verbose: bool
        @param log: Print modulation data to files?
        @type log: bool
"""
gr.hier_block2.__init__(self, "dqpsk_demod",
gr.io_signature(1, 1, gr.sizeof_gr_complex), # Input signature
gr.io_signature(1, 1, gr.sizeof_char)) # Output signature
self._samples_per_symbol = samples_per_symbol
self._excess_bw = excess_bw
self._costas_alpha = costas_alpha
self._mm_gain_mu = gain_mu
self._mm_mu = mu
self._mm_omega_relative_limit = omega_relative_limit
self._gray_code = gray_code
if samples_per_symbol < 2:
raise TypeError, "sbp must be >= 2, is %d" % samples_per_symbol
arity = pow(2,self.bits_per_symbol())
# Automatic gain control
scale = (1.0/16384.0)
self.pre_scaler = gr.multiply_const_cc(scale) # scale the signal from full-range to +-1
#self.agc = gr.agc2_cc(0.6e-1, 1e-3, 1, 1, 100)
self.agc = gr.feedforward_agc_cc(16, 2.0)
# RRC data filter
ntaps = 11 * samples_per_symbol
self.rrc_taps = gr.firdes.root_raised_cosine(
1.0, # gain
self._samples_per_symbol, # sampling rate
1.0, # symbol rate
self._excess_bw, # excess bandwidth (roll-off factor)
ntaps)
self.rrc_filter=gr.interp_fir_filter_ccf(1, self.rrc_taps)
if not self._mm_gain_mu:
sbs_to_mm = {2: 0.050, 3: 0.075, 4: 0.11, 5: 0.125, 6: 0.15, 7: 0.15}
self._mm_gain_mu = sbs_to_mm[samples_per_symbol]
self._mm_omega = self._samples_per_symbol
self._mm_gain_omega = .25 * self._mm_gain_mu * self._mm_gain_mu
self._costas_beta = 0.25 * self._costas_alpha * self._costas_alpha
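        # (beta = alpha^2 / 4 is the usual critically damped choice for this
        # second-order carrier recovery loop.)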
fmin = -0.25
fmax = 0.25
self.receiver=gr.mpsk_receiver_cc(arity, pi/4.0,
self._costas_alpha, self._costas_beta,
fmin, fmax,
self._mm_mu, self._mm_gain_mu,
self._mm_omega, self._mm_gain_omega,
self._mm_omega_relative_limit)
# Perform Differential decoding on the constellation
self.diffdec = gr.diff_phasor_cc()
# find closest constellation point
rot = 1
rotated_const = map(lambda pt: pt * rot, psk.constellation[arity])
self.slicer = gr.constellation_decoder_cb(rotated_const, range(arity))
if self._gray_code:
self.symbol_mapper = gr.map_bb(psk.gray_to_binary[arity])
else:
self.symbol_mapper = gr.map_bb(psk.ungray_to_binary[arity])
# unpack the k bit vector into a stream of bits
self.unpack = gr.unpack_k_bits_bb(self.bits_per_symbol())
if verbose:
self._print_verbage()
if log:
self._setup_logging()
# Connect & Initialize base class
self.connect(self, self.pre_scaler, self.agc, self.rrc_filter, self.receiver,
self.diffdec, self.slicer, self.symbol_mapper, self.unpack, self)
def samples_per_symbol(self):
return self._samples_per_symbol
def bits_per_symbol(self=None): # staticmethod that's also callable on an instance
return 2
bits_per_symbol = staticmethod(bits_per_symbol) # make it a static method. RTFM
def _print_verbage(self):
print "\nDemodulator:"
print "bits per symbol: %d" % self.bits_per_symbol()
print "Gray code: %s" % self._gray_code
print "RRC roll-off factor: %.2f" % self._excess_bw
print "Costas Loop alpha: %.2e" % self._costas_alpha
print "Costas Loop beta: %.2e" % self._costas_beta
print "M&M mu: %.2f" % self._mm_mu
print "M&M mu gain: %.2e" % self._mm_gain_mu
print "M&M omega: %.2f" % self._mm_omega
print "M&M omega gain: %.2e" % self._mm_gain_omega
print "M&M omega limit: %.2f" % self._mm_omega_relative_limit
def _setup_logging(self):
print "Modulation logging turned on."
self.connect(self.pre_scaler,
gr.file_sink(gr.sizeof_gr_complex, "rx_prescaler.dat"))
self.connect(self.agc,
gr.file_sink(gr.sizeof_gr_complex, "rx_agc.dat"))
self.connect(self.rrc_filter,
gr.file_sink(gr.sizeof_gr_complex, "rx_rrc_filter.dat"))
self.connect(self.receiver,
gr.file_sink(gr.sizeof_gr_complex, "rx_receiver.dat"))
self.connect(self.diffdec,
gr.file_sink(gr.sizeof_gr_complex, "rx_diffdec.dat"))
self.connect(self.slicer,
gr.file_sink(gr.sizeof_char, "rx_slicer.dat"))
self.connect(self.symbol_mapper,
gr.file_sink(gr.sizeof_char, "rx_gray_decoder.dat"))
self.connect(self.unpack,
gr.file_sink(gr.sizeof_char, "rx_unpack.dat"))
def add_options(parser):
"""
Adds modulation-specific options to the standard parser
"""
parser.add_option("", "--excess-bw", type="float", default=_def_excess_bw,
help="set RRC excess bandwith factor [default=%default] (PSK)")
parser.add_option("", "--no-gray-code", dest="gray_code",
action="store_false", default=_def_gray_code,
help="disable gray coding on modulated bits (PSK)")
parser.add_option("", "--costas-alpha", type="float", default=_def_costas_alpha,
help="set Costas loop alpha value [default=%default] (PSK)")
parser.add_option("", "--gain-mu", type="float", default=_def_gain_mu,
help="set M&M symbol sync loop gain mu value [default=%default] (PSK)")
parser.add_option("", "--mu", type="float", default=_def_mu,
help="set M&M symbol sync loop mu value [default=%default] (PSK)")
add_options=staticmethod(add_options)
def extract_kwargs_from_options(options):
"""
Given command line options, create dictionary suitable for passing to __init__
"""
return modulation_utils.extract_kwargs_from_options(
dqpsk_demod.__init__, ('self',), options)
extract_kwargs_from_options=staticmethod(extract_kwargs_from_options)
#
# Add these to the mod/demod registry
#
modulation_utils.add_type_1_mod('dqpsk', dqpsk_mod)
modulation_utils.add_type_1_demod('dqpsk', dqpsk_demod)
| gpl-3.0 | 4,722,334,653,047,052,000 | 39.4573 | 97 | 0.57422 | false |
bollu/sandhi | modules/gr36/docs/sphinx/hieroglyph/nodes.py | 25 | 7930 | __author__ = 'Robert Smallshire'
class Node(object):
def __init__(self, indent=None, lines=None, parent=None):
if indent is not None:
self.indent = indent
else:
self.indent = 0
if lines is not None:
self.lines = lines
else:
self.lines = []
self._parent = parent
self.children = []
parent = property(lambda self: self._parent)
def add_child(self, child):
assert(child.parent is self)
self.children.append(child)
def __repr__(self):
return "Node(" + repr(self.indent) + ", " + repr(self.lines) + ", children=" + repr(self.children) + ")"
def render_rst(self, *args, **kwargs):
result = []
prefix = ' ' * self.indent
result.extend(prefix + line for line in self.lines)
for child in self.children:
result.extend(child.render_rst())
return result
class Arg(Node):
def __init__(self, indent, child_indent, name):
super(Arg, self).__init__(indent)
self.child_indent = child_indent
self.name = name
self.type = None
def __repr__(self):
return "Arg(" + repr(self.name) + ", " + repr(self.type) + ", children=" + repr(self.children) + ")"
def render_rst(self, *args, **kwargs):
result = []
indent = ' ' * self.indent
# Render the param description
description = []
for child in self.children:
child_lines = child.render_rst()
description.extend(child_lines)
dedent = self.child_indent - self.indent
name = self.name.replace('*', r'\*')
first_description = description[0].lstrip() if len(description) else ''
if not first_description:
# TODO: Emit a warning about a missing argument description
pass
result.append("{indent}:param {name}: {first_description}".format(indent=indent, name=name,
first_description=first_description))
dedented_body = [line[dedent:] for line in description[1:]]
result.extend(dedented_body)
# If a type was specified render the type
if self.type is not None:
result.append("{indent}:type {name}: {type}".format(indent=indent, name=self.name, type=self.type))
result.append('')
ensure_terminal_blank(result)
return result
class Raises(Node):
def __init__(self, indent=None):
super(Raises, self).__init__(indent=indent)
def __repr__(self):
return "Raises(" + repr(self.indent) + ", children=" + repr(self.children) + ")"
def render_rst(self, *args, **kwargs):
result = []
indent = ' ' * self.indent
result.append(indent + ':raises:')
for child in self.children:
result.extend(child.render_rst(only_child=len(self.children) == 1))
ensure_terminal_blank(result)
return result
class Except(Node):
def __init__(self, indent, type):
super(Except, self).__init__(indent=indent)
#self.child_indent = child_indent
self.type = type
def __repr__(self):
return "Except(" + repr(self.type) + ", children=" + repr(self.children) + ")"
def render_rst(self, only_child=False, *args, **kwargs):
result = []
indent = ' ' * self.indent
# Render the param description
description = []
for child in self.children:
child_lines = child.render_rst()
description.extend(child_lines)
#dedent = self.child_indent - self.indent
bullet = '* ' if not only_child else ''
first_description = description[0].lstrip() if len(description) else ''
result.append("{indent}{bullet}{type} - {first_description}".format(indent=indent,
bullet=bullet, type=self.type,
first_description=first_description))
#dedented_body = [' ' * len(bullet) + line[dedent:] for line in description[1:]]
#result.extend(dedented_body)
result.extend(description[1:])
ensure_terminal_blank(result)
return result
class Returns(Node):
def __init__(self, indent):
super(Returns, self).__init__(indent=indent)
self.title = 'Returns'
self.line = ''
def __repr__(self):
return "Returns(" + str(self.indent) + ", children=" + str(self.children) + ")"
def render_rst(self, *args, **kwargs):
result = []
indent = ' ' * self.indent
# Render the param description
description = [self.line] if self.line else []
for child in self.children:
child_lines = child.render_rst()
description.extend(child_lines)
self.render_title(description, indent, result)
result.extend(description[1:])
ensure_terminal_blank(result)
return result
def render_title(self, description, indent, result):
result.append(
"{indent}:returns: {first_description}".format(indent=indent,
first_description=description[0].lstrip()))
class Warning(Node):
def __init__(self, indent):
        super(Warning, self).__init__(indent=indent)
        self.line = ''  # render_rst below reads self.line, as Note does
def __repr__(self):
return "Warning(" + repr(self.indent) + ", children=" + str(self.children) + ")"
def render_rst(self, *args, **kwargs):
# TODO: Factor out the commonality between this and Note below
result = []
indent = ' ' * self.indent
# Render the param description
description = [self.line] if self.line else []
for child in self.children:
child_lines = child.render_rst()
description.extend(child_lines)
# Fix the indent on the first line
if len(description) > 1 and len(description[1].strip()) != 0:
body_indent = len(description[1]) - len(description[1].strip())
else:
body_indent = self.indent + 4
if len(description) > 0:
description[0] = ' ' * body_indent + description[0]
result.append(indent + ".. warning::")
result.append(indent + '')
result.extend(description)
ensure_terminal_blank(result)
return result
class Note(Node):
def __init__(self, indent):
super(Note, self).__init__(indent=indent)
self.line = ''
def __repr__(self):
return "Note(" + repr(self.indent) + ", children=" + str(self.children) + ")"
def render_rst(self, *args, **kwargs):
# TODO: Factor out the commonality between this and Warning above
result = []
indent = ' ' * self.indent
# Render the param description
description = [self.line] if self.line else []
for child in self.children:
child_lines = child.render_rst()
description.extend(child_lines)
# Fix the indent on the first line
if len(description) > 1 and len(description[1].strip()) != 0:
body_indent = len(description[1]) - len(description[1].strip())
else:
body_indent = self.indent + 4
if len(description) > 0:
description[0] = ' ' * body_indent + description[0]
result.append(indent + ".. note::")
result.append(indent + '')
result.extend(description)
ensure_terminal_blank(result)
return result
def ensure_terminal_blank(result):
'''If the description didn't end with a blank line add one here.'''
if len(result) > 0:
if len(result[-1].strip()) != 0:
result.append('')
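if __name__ == '__main__':
    # Minimal usage sketch (assumed entry point, not in the original
    # module): build a tiny docstring tree by hand and render it to
    # reStructuredText lines.
    arg = Arg(4, 8, 'timeout')
    arg.type = 'int'
    body = Node(8, ['Seconds to wait before giving up.'], parent=arg)
    arg.add_child(body)
    for line in arg.render_rst():
        print(line)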
| gpl-3.0 | 849,964,797,497,600,600 | 27.700375 | 112 | 0.549054 | false |
magicrub/mavlink | pymavlink/generator/lib/genxmlif/xmlifBase.py | 82 | 5371 | #
# genxmlif, Release 0.9.0
# file: xmlifbase.py
#
# XML interface base classes
#
# history:
# 2005-04-25 rl created
# 2006-08-18 rl some methods for XML schema validation support added
# 2007-05-25 rl performance optimization (caching) added, bugfixes for XPath handling
# 2007-07-04 rl complete re-design, API classes moved to xmlifApi.py
#
# Copyright (c) 2005-2008 by Roland Leuthe. All rights reserved.
#
# --------------------------------------------------------------------
# The generic XML interface is
#
# Copyright (c) 2005-2008 by Roland Leuthe
#
# By obtaining, using, and/or copying this software and/or its
# associated documentation, you agree that you have read, understood,
# and will comply with the following terms and conditions:
#
# Permission to use, copy, modify, and distribute this software and
# its associated documentation for any purpose and without fee is
# hereby granted, provided that the above copyright notice appears in
# all copies, and that both that copyright notice and this permission
# notice appear in supporting documentation, and that the name of
# the author not be used in advertising or publicity
# pertaining to distribution of the software without specific, written
# prior permission.
#
# THE AUTHOR DISCLAIMS ALL WARRANTIES WITH REGARD
# TO THIS SOFTWARE, INCLUDING ALL IMPLIED WARRANTIES OF MERCHANT-
# ABILITY AND FITNESS. IN NO EVENT SHALL THE AUTHOR
# BE LIABLE FOR ANY SPECIAL, INDIRECT OR CONSEQUENTIAL DAMAGES OR ANY
# DAMAGES WHATSOEVER RESULTING FROM LOSS OF USE, DATA OR PROFITS,
# WHETHER IN AN ACTION OF CONTRACT, NEGLIGENCE OR OTHER TORTIOUS
# ACTION, ARISING OUT OF OR IN CONNECTION WITH THE USE OR PERFORMANCE
# OF THIS SOFTWARE.
# --------------------------------------------------------------------
__author__ = "Roland Leuthe <[email protected]>"
__date__ = "28 July 2008"
__version__ = "0.9"
from xml.dom import XML_NAMESPACE, XMLNS_NAMESPACE
from xmlifUtils import NsNameTupleFactory, convertToAbsUrl
########################################
# XmlIf builder extension base class
# All not implemented methods have to be overloaded by the derived class!!
#
class XmlIfBuilderExtensionBase:
"""XmlIf builder extension base class.
This class provides additional data (e.g. line numbers or caches)
for an element node which are stored in the element node object during parsing.
"""
def __init__ (self, filePath, absUrl, treeWrapper, elementWrapperClass):
"""Constructor for this class
Input parameter:
filePath: contains the file path of the corresponding XML file
absUrl: contains the absolute URL of the corresponding XML file
"""
self.filePath = filePath
self.absUrl = absUrl
self.baseUrlStack = [absUrl, ]
self.treeWrapper = treeWrapper
self.elementWrapperClass = elementWrapperClass
def startElementHandler (self, curNode, startLineNumber, curNs, attributes=[]):
"""Called by the XML parser at creation of an element node.
Input parameter:
curNode: current element node
startLineNumber: first line number of the element tag in XML file
curNs: namespaces visible for this element node
attributes: list of attributes and their values for this element node
                        (same sequence as in the XML file)
"""
elementWrapper = self.elementWrapperClass(curNode, self.treeWrapper, curNs, initAttrSeq=0)
elementWrapper.baseUrl = self.__getBaseUrl(elementWrapper)
elementWrapper.absUrl = self.absUrl
elementWrapper.filePath = self.filePath
elementWrapper.startLineNumber = startLineNumber
elementWrapper.curNs.extend ([("xml", XML_NAMESPACE), ("xmlns", XMLNS_NAMESPACE)])
if attributes != []:
for i in range (0, len(attributes), 2):
elementWrapper.attributeSequence.append(attributes[i])
else:
attrList = elementWrapper.getAttributeDict().keys()
attrList.sort()
elementWrapper.attributeSequence.extend (attrList)
self.baseUrlStack.insert (0, elementWrapper.baseUrl)
def endElementHandler (self, curNode, endLineNumber):
"""Called by the XML parser after creation of an element node.
Input parameter:
curNode: current element node
endLineNumber: last line number of the element tag in XML file
"""
curNode.xmlIfExtElementWrapper.endLineNumber = endLineNumber
self.baseUrlStack.pop (0)
def __getBaseUrl (self, elementWrapper):
"""Retrieve base URL for the given element node.
Input parameter:
elementWrapper: wrapper of current element node
"""
nsNameBaseAttr = NsNameTupleFactory ((XML_NAMESPACE, "base"))
if elementWrapper.hasAttribute(nsNameBaseAttr):
return convertToAbsUrl (elementWrapper.getAttribute(nsNameBaseAttr), self.baseUrlStack[0])
else:
return self.baseUrlStack[0]
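# Illustrative sketch (not part of the original module; the concrete
# parser wrapper normally drives these calls, and MyBuilderExtension is a
# hypothetical subclass): a builder extension receives start/end events
# while the document is parsed, pushing and popping the base-URL stack.
#
# builder = MyBuilderExtension(filePath, absUrl, treeWrapper,
#                              elementWrapperClass)
# builder.startElementHandler(node, startLineNumber=12, curNs=[])
# ... child elements are handled here ...
# builder.endElementHandler(node, endLineNumber=20)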
| lgpl-3.0 | -4,694,894,173,539,647,000 | 39.315385 | 111 | 0.638987 | false |
scascketta/LostNumber | LostNumber/process_msg.py | 1 | 1758 | from twilio.rest import TwilioRestClient
from twilio import TwilioRestException
from redis import StrictRedis
import time
import details
account_sid = details.twilio_account_sid
auth_token = details.twilio_auth_token
client = TwilioRestClient(account_sid, auth_token)
twilio_number = details.twilio_num
r = StrictRedis(host=details.redis_addr, port=details.redis_port)
def start_convo(num, body):
"""
Starts a convo by selecting random partner, adding
relevant index data, and queuing a msg.
"""
r.srem('available_nums', num)
dest = r.srandmember('available_nums')
state = r.smembers(num + ":state").pop()
body = "(1/10 - " + state + ") " + body
r.incr('total_count')
send_msg(dest, body)
pipe = r.pipeline()
pipe.srem('available_nums', dest)
pipe.sadd('in_conversation', num)
pipe.sadd('in_conversation', dest)
pipe.sadd(num, dest)
pipe.sadd(dest, num)
pipe.hset(num + ":" + dest, 'count', '1')
pipe.hset(dest + ":" + num, 'count', '1')
pipe.execute()
def add_msg(dest, body):
r.rpush('message_queue', dest + ":" + body)
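# Illustrative note (not part of the original module): queue items are
# encoded as "<destination>:<body>", e.g.
#   "+15551234567:(2/10 - TX) hello"
# process_queue() below splits on the first ':' to recover both fields.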
def process_queue():
"""Pops msgs from the msg queue and dispatches them."""
raw = r.lpop('message_queue')
if raw:
mark = raw.find(':')
dest = raw[:mark]
body = raw[mark + 1:]
send_msg(dest, body)
def send_msg(dest, body):
    # Trim the message body to 160 chars to prevent malicious attempts to
    # send long messages that incur excessive fees.
body = body[:160]
try:
client.sms.messages.create(body=body, to=dest, from_=twilio_number)
except TwilioRestException as e:
print repr(e)
if __name__ == '__main__':
while True:
process_queue()
time.sleep(0.5) | mit | -5,437,500,191,339,109,000 | 25.253731 | 75 | 0.632537 | false |
franck-talbart/codelet_tuning_infrastructure | src/cti_hapi/alias.py | 1 | 5818 | #************************************************************************
# Codelet Tuning Infrastructure
# Copyright (C) 2010-2015 Intel Corporation, CEA, GENCI, and UVSQ
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#************************************************************************
# Authors: Franck Talbart, Mathieu Bordet, Nicolas Petit
""" Alias module provides facilities to manage
the aliases
"""
import cti
import database_manager, database
#------------------------------------------------------------------------
def get_data_alias(uid):
""" get an alias from a data UID. It uses the database so
    this function is faster than the core library
Args:
uid: the UID
Returns:
the alias, None if there is no alias
"""
if str(uid) == "":
print "Error: no UID given in get_data_alias."
exit(1)
db = database.Database()
result = database_manager.search({'NAME':["entry_uid"], 'TYPE':"=", 'VAL':uid},
db,
fields=["alias"])
alias = None
for r in result:
alias = r[0]
return alias
#------------------------------------------------------------------------
def get_plugin_alias(uid):
""" get an alias from a plugin UID.
Args:
uid: the UID
Returns:
the alias, None if there is no alias
"""
if str(uid) == "":
print "Error: no UID given in get_plugin_alias."
exit(1)
alias = cti.cti_plugin_alias_plugin_get_key(uid)
return alias
#------------------------------------------------------------------------
def get_data_uid(alias):
""" get an UID from an alias.
Args:
alias: the alias
Returns:
        a set of matching CTI_UIDs (empty if there is no match)
"""
alias = format_alias(alias)
if alias == "":
print "Error: no alias given in get_data_uid."
exit(1)
db = database.Database()
r = database_manager.search_uids({'NAME':["alias"], 'TYPE':"LIKE", 'VAL':alias},
db)
result = set()
for uid in r:
uid = cti.CTI_UID(str(uid))
result.add(uid)
return result
#------------------------------------------------------------------------
def get_plugin_uid(alias):
""" get an UID from a plugin.
Args:
alias: the alias
Returns:
the UID, None if it fails
"""
alias = format_alias(alias)
uid = cti.cti_plugin_alias_plugin_get_value(alias)
if uid is None:
return None
return cti.CTI_UID(str(uid))
#------------------------------------------------------------------------
def get_repository_uid(alias):
""" get an UID from a plugin.
Args:
alias: the alias
Returns:
the UID, None if it fails
"""
alias = format_alias(alias)
uid = cti.cti_plugin_alias_repository_get_value(alias)
if uid is None:
return None
return cti.CTI_UID(str(uid))
#------------------------------------------------------------------------
def set_data_alias(uid, alias):
""" Create an alias for data.
Args:
uid: CTI_UID
alias: an alias for given uid
Return 1 if it succeeds, 0 otherwise
"""
alias = format_alias(alias)
if get_data_uid(alias):
return 0
if get_plugin_uid(alias) is not None:
return 0
cti.cti_plugin_alias_data_set_value(alias, uid)
db = database.Database()
if database_manager.update("entry_info",
{"alias": alias},
{
'NAME':["entry_uid"],
'TYPE':"=",
'VAL':str(uid)
},
db) is False:
return 0
return 1
#------------------------------------------------------------------------
def set_plugin_alias(uid, alias, dir = None):
""" Create an alias for data.
Args:
uid: CTI_UID
alias: an alias for given uid
"""
alias = format_alias(alias)
if get_data_uid(alias):
return 0
if get_plugin_uid(alias) is not None:
return 0
cti.cti_plugin_alias_plugin_rm_value(uid)
cti.cti_plugin_alias_plugin_set_value(alias, uid, dir)
return 1
#------------------------------------------------------------------------
def set_repository_alias(uid, alias):
""" Create an alias for a repository.
Args:
uid: CTI_UID
alias: an alias for given uid
"""
alias = format_alias(alias)
if alias in ["common", "temp", "all"]:
return 0
if get_repository_uid(alias) is not None:
return 0
cti.cti_plugin_alias_repository_rm_value(uid)
cti.cti_plugin_alias_repository_set_value(alias, uid)
return 1
#------------------------------------------------------------------------
def format_alias(alias):
return str(alias).strip().lower().replace(" ", "_")
#------------------------------------------------------------------------
| gpl-3.0 | 7,386,282,911,155,074,000 | 26.837321 | 84 | 0.471812 | false |
tonybaloney/st2 | st2common/tests/unit/test_policies.py | 6 | 2603 | # Licensed to the StackStorm, Inc ('StackStorm') under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from st2common.persistence.policy import PolicyType, Policy
from st2common.policies import ResourcePolicyApplicator, get_driver
from st2tests import DbTestCase
from st2tests.fixturesloader import FixturesLoader
__all__ = [
'PolicyTestCase'
]
PACK = 'generic'
TEST_FIXTURES = {
'runners': [
'testrunner1.yaml'
],
'actions': [
'action1.yaml'
],
'policytypes': [
'fake_policy_type_1.yaml',
'fake_policy_type_2.yaml'
],
'policies': [
'policy_1.yaml',
'policy_2.yaml'
]
}
class PolicyTestCase(DbTestCase):
@classmethod
def setUpClass(cls):
super(PolicyTestCase, cls).setUpClass()
loader = FixturesLoader()
loader.save_fixtures_to_db(fixtures_pack=PACK,
fixtures_dict=TEST_FIXTURES)
def test_get_by_ref(self):
policy_db = Policy.get_by_ref('wolfpack.action-1.concurrency')
self.assertIsNotNone(policy_db)
self.assertEqual(policy_db.pack, 'wolfpack')
self.assertEqual(policy_db.name, 'action-1.concurrency')
policy_type_db = PolicyType.get_by_ref(policy_db.policy_type)
self.assertIsNotNone(policy_type_db)
self.assertEqual(policy_type_db.resource_type, 'action')
self.assertEqual(policy_type_db.name, 'concurrency')
def test_get_driver(self):
policy_db = Policy.get_by_ref('wolfpack.action-1.concurrency')
policy = get_driver(policy_db.ref, policy_db.policy_type, **policy_db.parameters)
self.assertIsInstance(policy, ResourcePolicyApplicator)
self.assertEqual(policy._policy_ref, policy_db.ref)
self.assertEqual(policy._policy_type, policy_db.policy_type)
self.assertTrue(hasattr(policy, 'threshold'))
self.assertEqual(policy.threshold, 3)
| apache-2.0 | -3,590,325,799,336,666,600 | 35.152778 | 89 | 0.689589 | false |
saurabhjn76/sympy | sympy/physics/mechanics/lagrange.py | 45 | 18428 | from __future__ import print_function, division
__all__ = ['LagrangesMethod']
from sympy import diff, zeros, Matrix, eye, sympify
from sympy.physics.vector import dynamicsymbols, ReferenceFrame
from sympy.physics.mechanics.functions import (find_dynamicsymbols, msubs,
_f_list_parser)
from sympy.physics.mechanics.linearize import Linearizer
from sympy.utilities import default_sort_key
from sympy.utilities.exceptions import SymPyDeprecationWarning
from sympy.utilities.iterables import iterable
class LagrangesMethod(object):
"""Lagrange's method object.
This object generates the equations of motion in a two step procedure. The
first step involves the initialization of LagrangesMethod by supplying the
Lagrangian and the generalized coordinates, at the bare minimum. If there
are any constraint equations, they can be supplied as keyword arguments.
The Lagrange multipliers are automatically generated and are equal in
number to the constraint equations. Similarly any non-conservative forces
can be supplied in an iterable (as described below and also shown in the
example) along with a ReferenceFrame. This is also discussed further in the
__init__ method.
Attributes
==========
q, u : Matrix
Matrices of the generalized coordinates and speeds
forcelist : iterable
Iterable of (Point, vector) or (ReferenceFrame, vector) tuples
describing the forces on the system.
mass_matrix : Matrix
The system's mass matrix
forcing : Matrix
The system's forcing vector
mass_matrix_full : Matrix
The "mass matrix" for the qdot's, qdoubledot's, and the
lagrange multipliers (lam)
forcing_full : Matrix
The forcing vector for the qdot's, qdoubledot's and
lagrange multipliers (lam)
Examples
========
This is a simple example for a one degree of freedom translational
spring-mass-damper.
In this example, we first need to do the kinematics.
This involves creating generalized coordinates and their derivatives.
Then we create a point and set its velocity in a frame.
>>> from sympy.physics.mechanics import LagrangesMethod, Lagrangian
>>> from sympy.physics.mechanics import ReferenceFrame, Particle, Point
>>> from sympy.physics.mechanics import dynamicsymbols, kinetic_energy
>>> from sympy import symbols
>>> q = dynamicsymbols('q')
>>> qd = dynamicsymbols('q', 1)
>>> m, k, b = symbols('m k b')
>>> N = ReferenceFrame('N')
>>> P = Point('P')
>>> P.set_vel(N, qd * N.x)
We need to then prepare the information as required by LagrangesMethod to
generate equations of motion.
First we create the Particle, which has a point attached to it.
Following this the lagrangian is created from the kinetic and potential
energies.
Then, an iterable of nonconservative forces/torques must be constructed,
where each item is a (Point, Vector) or (ReferenceFrame, Vector) tuple,
with the Vectors representing the nonconservative forces or torques.
>>> Pa = Particle('Pa', P, m)
>>> Pa.potential_energy = k * q**2 / 2.0
>>> L = Lagrangian(N, Pa)
>>> fl = [(P, -b * qd * N.x)]
Finally we can generate the equations of motion.
First we create the LagrangesMethod object. To do this one must supply
the Lagrangian, and the generalized coordinates. The constraint equations,
the forcelist, and the inertial frame may also be provided, if relevant.
Next we generate Lagrange's equations of motion, such that:
Lagrange's equations of motion = 0.
We have the equations of motion at this point.
>>> l = LagrangesMethod(L, [q], forcelist = fl, frame = N)
>>> print(l.form_lagranges_equations())
Matrix([[b*Derivative(q(t), t) + 1.0*k*q(t) + m*Derivative(q(t), t, t)]])
We can also solve for the states using the 'rhs' method.
>>> print(l.rhs())
Matrix([[Derivative(q(t), t)], [(-b*Derivative(q(t), t) - 1.0*k*q(t))/m]])
Please refer to the docstrings on each method for more details.
"""
def __init__(self, Lagrangian, qs, coneqs=None, forcelist=None,
frame=None, hol_coneqs=None, nonhol_coneqs=None):
"""Supply the following for the initialization of LagrangesMethod
Lagrangian : Sympifyable
qs: array_like
The generalized coordinates
hol_coneqs: array_like, optional
The holonomic constraint equations
nonhol_coneqs: array_like, optional
The nonholonomic constraint equations
forcelist : iterable, optional
Takes an iterable of (Point, Vector) or (ReferenceFrame, Vector)
tuples which represent the force at a point or torque on a frame.
This feature is primarily to account for the nonconservative forces
and/or moments.
frame : ReferenceFrame, optional
Supply the inertial frame. This is used to determine the
generalized forces due to non-conservative forces.
"""
self._L = Matrix([sympify(Lagrangian)])
self.eom = None
self._m_cd = Matrix() # Mass Matrix of differentiated coneqs
self._m_d = Matrix() # Mass Matrix of dynamic equations
self._f_cd = Matrix() # Forcing part of the diff coneqs
self._f_d = Matrix() # Forcing part of the dynamic equations
        self.lam_coeffs = Matrix() # The coefficients of the multipliers
forcelist = forcelist if forcelist else []
if not iterable(forcelist):
raise TypeError('Force pairs must be supplied in an iterable.')
self._forcelist = forcelist
if frame and not isinstance(frame, ReferenceFrame):
raise TypeError('frame must be a valid ReferenceFrame')
self.inertial = frame
self.lam_vec = Matrix()
self._term1 = Matrix()
self._term2 = Matrix()
self._term3 = Matrix()
self._term4 = Matrix()
# Creating the qs, qdots and qdoubledots
if not iterable(qs):
raise TypeError('Generalized coordinates must be an iterable')
self._q = Matrix(qs)
self._qdots = self.q.diff(dynamicsymbols._t)
self._qdoubledots = self._qdots.diff(dynamicsymbols._t)
# Deal with constraint equations
if coneqs:
SymPyDeprecationWarning("The `coneqs` kwarg is deprecated in "
"favor of `hol_coneqs` and `nonhol_coneqs`. Please "
"update your code").warn()
self.coneqs = coneqs
else:
mat_build = lambda x: Matrix(x) if x else Matrix()
hol_coneqs = mat_build(hol_coneqs)
nonhol_coneqs = mat_build(nonhol_coneqs)
self.coneqs = Matrix([hol_coneqs.diff(dynamicsymbols._t),
nonhol_coneqs])
self._hol_coneqs = hol_coneqs
def form_lagranges_equations(self):
"""Method to form Lagrange's equations of motion.
Returns a vector of equations of motion using Lagrange's equations of
the second kind.
"""
qds = self._qdots
qdd_zero = dict((i, 0) for i in self._qdoubledots)
n = len(self.q)
# Internally we represent the EOM as four terms:
# EOM = term1 - term2 - term3 - term4 = 0
# First term
self._term1 = self._L.jacobian(qds)
self._term1 = self._term1.diff(dynamicsymbols._t).T
# Second term
self._term2 = self._L.jacobian(self.q).T
# Third term
if self.coneqs:
coneqs = self.coneqs
m = len(coneqs)
# Creating the multipliers
self.lam_vec = Matrix(dynamicsymbols('lam1:' + str(m + 1)))
self.lam_coeffs = -coneqs.jacobian(qds)
self._term3 = self.lam_coeffs.T * self.lam_vec
            # Extracting the coefficients of the qdds from the diff coneqs
diffconeqs = coneqs.diff(dynamicsymbols._t)
self._m_cd = diffconeqs.jacobian(self._qdoubledots)
# The remaining terms i.e. the 'forcing' terms in diff coneqs
self._f_cd = -diffconeqs.subs(qdd_zero)
else:
self._term3 = zeros(n, 1)
# Fourth term
if self.forcelist:
N = self.inertial
self._term4 = zeros(n, 1)
for i, qd in enumerate(qds):
flist = zip(*_f_list_parser(self.forcelist, N))
self._term4[i] = sum(v.diff(qd, N) & f for (v, f) in flist)
else:
self._term4 = zeros(n, 1)
# Form the dynamic mass and forcing matrices
without_lam = self._term1 - self._term2 - self._term4
self._m_d = without_lam.jacobian(self._qdoubledots)
self._f_d = -without_lam.subs(qdd_zero)
# Form the EOM
self.eom = without_lam - self._term3
return self.eom
@property
def mass_matrix(self):
"""Returns the mass matrix, which is augmented by the Lagrange
multipliers, if necessary.
If the system is described by 'n' generalized coordinates and there are
no constraint equations then an n X n matrix is returned.
If there are 'n' generalized coordinates and 'm' constraint equations
have been supplied during initialization then an n X (n+m) matrix is
        returned. The last m columns contain the
coefficients of the Lagrange multipliers.
"""
if self.eom is None:
raise ValueError('Need to compute the equations of motion first')
if self.coneqs:
return (self._m_d).row_join(self.lam_coeffs.T)
else:
return self._m_d
@property
def mass_matrix_full(self):
"""Augments the coefficients of qdots to the mass_matrix."""
if self.eom is None:
raise ValueError('Need to compute the equations of motion first')
n = len(self.q)
m = len(self.coneqs)
row1 = eye(n).row_join(zeros(n, n + m))
row2 = zeros(n, n).row_join(self.mass_matrix)
if self.coneqs:
row3 = zeros(m, n).row_join(self._m_cd).row_join(zeros(m, m))
return row1.col_join(row2).col_join(row3)
else:
return row1.col_join(row2)
@property
def forcing(self):
"""Returns the forcing vector from 'lagranges_equations' method."""
if self.eom is None:
raise ValueError('Need to compute the equations of motion first')
return self._f_d
@property
def forcing_full(self):
"""Augments qdots to the forcing vector above."""
if self.eom is None:
raise ValueError('Need to compute the equations of motion first')
if self.coneqs:
return self._qdots.col_join(self.forcing).col_join(self._f_cd)
else:
return self._qdots.col_join(self.forcing)
def to_linearizer(self, q_ind=None, qd_ind=None, q_dep=None, qd_dep=None):
"""Returns an instance of the Linearizer class, initiated from the
data in the LagrangesMethod class. This may be more desirable than using
the linearize class method, as the Linearizer object will allow more
efficient recalculation (i.e. about varying operating points).
Parameters
==========
q_ind, qd_ind : array_like, optional
The independent generalized coordinates and speeds.
q_dep, qd_dep : array_like, optional
The dependent generalized coordinates and speeds.
"""
# Compose vectors
t = dynamicsymbols._t
q = self.q
u = self._qdots
ud = u.diff(t)
# Get vector of lagrange multipliers
lams = self.lam_vec
mat_build = lambda x: Matrix(x) if x else Matrix()
q_i = mat_build(q_ind)
q_d = mat_build(q_dep)
u_i = mat_build(qd_ind)
u_d = mat_build(qd_dep)
# Compose general form equations
f_c = self._hol_coneqs
f_v = self.coneqs
f_a = f_v.diff(t)
f_0 = u
f_1 = -u
f_2 = self._term1
f_3 = -(self._term2 + self._term4)
f_4 = -self._term3
# Check that there are an appropriate number of independent and
# dependent coordinates
if len(q_d) != len(f_c) or len(u_d) != len(f_v):
raise ValueError(("Must supply {:} dependent coordinates, and " +
"{:} dependent speeds").format(len(f_c), len(f_v)))
if set(Matrix([q_i, q_d])) != set(q):
raise ValueError("Must partition q into q_ind and q_dep, with " +
"no extra or missing symbols.")
if set(Matrix([u_i, u_d])) != set(u):
raise ValueError("Must partition qd into qd_ind and qd_dep, " +
"with no extra or missing symbols.")
# Find all other dynamic symbols, forming the forcing vector r.
# Sort r to make it canonical.
insyms = set(Matrix([q, u, ud, lams]))
r = list(find_dynamicsymbols(f_3, insyms))
r.sort(key=default_sort_key)
# Check for any derivatives of variables in r that are also found in r.
for i in r:
if diff(i, dynamicsymbols._t) in r:
raise ValueError('Cannot have derivatives of specified \
quantities when linearizing forcing terms.')
return Linearizer(f_0, f_1, f_2, f_3, f_4, f_c, f_v, f_a, q, u, q_i,
q_d, u_i, u_d, r, lams)
def linearize(self, q_ind=None, qd_ind=None, q_dep=None, qd_dep=None,
**kwargs):
"""Linearize the equations of motion about a symbolic operating point.
If kwarg A_and_B is False (default), returns M, A, B, r for the
linearized form, M*[q', u']^T = A*[q_ind, u_ind]^T + B*r.
If kwarg A_and_B is True, returns A, B, r for the linearized form
dx = A*x + B*r, where x = [q_ind, u_ind]^T. Note that this is
computationally intensive if there are many symbolic parameters. For
this reason, it may be more desirable to use the default A_and_B=False,
returning M, A, and B. Values may then be substituted in to these
matrices, and the state space form found as
A = P.T*M.inv()*A, B = P.T*M.inv()*B, where P = Linearizer.perm_mat.
In both cases, r is found as all dynamicsymbols in the equations of
motion that are not part of q, u, q', or u'. They are sorted in
canonical form.
The operating points may be also entered using the ``op_point`` kwarg.
        This takes a dictionary of {symbol: value}, or an iterable of such
        dictionaries. The values may be numeric or symbolic. The more values
you can specify beforehand, the faster this computation will run.
For more documentation, please see the ``Linearizer`` class."""
linearizer = self.to_linearizer(q_ind, qd_ind, q_dep, qd_dep)
result = linearizer.linearize(**kwargs)
return result + (linearizer.r,)
def solve_multipliers(self, op_point=None, sol_type='dict'):
"""Solves for the values of the lagrange multipliers symbolically at
the specified operating point
Parameters
==========
op_point : dict or iterable of dicts, optional
Point at which to solve at. The operating point is specified as
a dictionary or iterable of dictionaries of {symbol: value}. The
value may be numeric or symbolic itself.
sol_type : str, optional
Solution return type. Valid options are:
- 'dict': A dict of {symbol : value} (default)
- 'Matrix': An ordered column matrix of the solution
"""
# Determine number of multipliers
k = len(self.lam_vec)
if k == 0:
raise ValueError("System has no lagrange multipliers to solve for.")
# Compose dict of operating conditions
if isinstance(op_point, dict):
op_point_dict = op_point
elif iterable(op_point):
op_point_dict = {}
for op in op_point:
op_point_dict.update(op)
elif op_point is None:
op_point_dict = {}
else:
raise TypeError("op_point must be either a dictionary or an "
"iterable of dictionaries.")
# Compose the system to be solved
mass_matrix = self.mass_matrix.col_join((-self.lam_coeffs.row_join(
zeros(k, k))))
force_matrix = self.forcing.col_join(self._f_cd)
# Sub in the operating point
mass_matrix = msubs(mass_matrix, op_point_dict)
force_matrix = msubs(force_matrix, op_point_dict)
# Solve for the multipliers
sol_list = mass_matrix.LUsolve(-force_matrix)[-k:]
if sol_type == 'dict':
return dict(zip(self.lam_vec, sol_list))
elif sol_type == 'Matrix':
return Matrix(sol_list)
else:
raise ValueError("Unknown sol_type {:}.".format(sol_type))
def rhs(self, inv_method=None, **kwargs):
"""Returns equations that can be solved numerically
Parameters
==========
inv_method : str
The specific sympy inverse matrix calculation method to use. For a
list of valid methods, see
:meth:`~sympy.matrices.matrices.MatrixBase.inv`
"""
if 'method' in kwargs:
# The method kwarg is deprecated in favor of inv_method.
SymPyDeprecationWarning(feature="method kwarg",
useinstead="inv_method kwarg",
deprecated_since_version="0.7.6").warn()
# For now accept both
inv_method = kwargs['method']
if inv_method is None:
self._rhs = self.mass_matrix_full.LUsolve(self.forcing_full)
else:
self._rhs = (self.mass_matrix_full.inv(inv_method,
try_block_diag=True) * self.forcing_full)
return self._rhs
@property
def q(self):
return self._q
@property
def u(self):
return self._qdots
@property
def forcelist(self):
return self._forcelist
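# Illustrative sketch (not part of sympy itself; all names below are
# assumptions): declaring a holonomic constraint ties two coordinates
# together, form_lagranges_equations() then introduces one multiplier
# lam1, and solve_multipliers() can eliminate it at an operating point.
#
# q1, q2 = dynamicsymbols('q1 q2')
# L = Lagrangian built from the system's kinetic and potential energies
# lm = LagrangesMethod(L, [q1, q2], hol_coneqs=[q1 - q2])
# lm.form_lagranges_equations()
# lm.solve_multipliers(sol_type='Matrix')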
| bsd-3-clause | 487,214,146,612,534,400 | 38.715517 | 82 | 0.605003 | false |
qiqjiao/study | jsoncpp/tags/svn-release-0.6.0-rc2/doxybuild.py | 44 | 6791 | """Script to generate doxygen documentation.
"""
import re
import os
import os.path
import sys
import shutil
from devtools import tarball
def find_program(*filenames):
"""find a program in folders path_lst, and sets env[var]
@param filenames: a list of possible names of the program to search for
@return: the full path of the filename if found, or '' if filename could not be found
"""
paths = os.environ.get('PATH', '').split(os.pathsep)
suffixes = ('win32' in sys.platform ) and '.exe .com .bat .cmd' or ''
for filename in filenames:
        for name in [filename+ext for ext in suffixes.split()] + [filename]:  # also try the bare name
for directory in paths:
full_path = os.path.join(directory, name)
if os.path.isfile(full_path):
return full_path
return ''
def do_subst_in_file(targetfile, sourcefile, dict):
"""Replace all instances of the keys of dict with their values.
For example, if dict is {'%VERSION%': '1.2345', '%BASE%': 'MyProg'},
then all instances of %VERSION% in the file will be replaced with 1.2345 etc.
"""
try:
f = open(sourcefile, 'rb')
contents = f.read()
f.close()
except:
print "Can't read source file %s"%sourcefile
raise
for (k,v) in dict.items():
v = v.replace('\\','\\\\')
contents = re.sub(k, v, contents)
try:
f = open(targetfile, 'wb')
f.write(contents)
f.close()
except:
print "Can't write target file %s"%targetfile
raise
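# Example (illustrative, not part of the original script): expanding the
# version placeholder in a Doxygen template.
#
# do_subst_in_file('doc/doxyfile', 'doc/doxyfile.in',
#                  {'%JSONCPP_VERSION%': '0.6.0'})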
def run_doxygen(doxygen_path, config_file, working_dir, is_silent):
config_file = os.path.abspath( config_file )
doxygen_path = doxygen_path
old_cwd = os.getcwd()
try:
os.chdir( working_dir )
cmd = [doxygen_path, config_file]
print 'Running:', ' '.join( cmd )
try:
import subprocess
except:
if os.system( ' '.join( cmd ) ) != 0:
print 'Documentation generation failed'
return False
else:
if is_silent:
process = subprocess.Popen( cmd, stdout=subprocess.PIPE, stderr=subprocess.STDOUT )
else:
process = subprocess.Popen( cmd )
stdout, _ = process.communicate()
if process.returncode:
print 'Documentation generation failed:'
print stdout
return False
return True
finally:
os.chdir( old_cwd )
def build_doc( options, make_release=False ):
if make_release:
options.make_tarball = True
options.with_dot = True
options.with_html_help = True
options.with_uml_look = True
options.open = False
options.silent = True
version = open('version','rt').read().strip()
output_dir = 'dist/doxygen' # relative to doc/doxyfile location.
if not os.path.isdir( output_dir ):
os.makedirs( output_dir )
top_dir = os.path.abspath( '.' )
html_output_dirname = 'jsoncpp-api-html-' + version
tarball_path = os.path.join( 'dist', html_output_dirname + '.tar.gz' )
warning_log_path = os.path.join( output_dir, '../jsoncpp-doxygen-warning.log' )
html_output_path = os.path.join( output_dir, html_output_dirname )
def yesno( bool ):
return bool and 'YES' or 'NO'
subst_keys = {
'%JSONCPP_VERSION%': version,
'%DOC_TOPDIR%': '',
'%TOPDIR%': top_dir,
'%HTML_OUTPUT%': os.path.join( '..', output_dir, html_output_dirname ),
'%HAVE_DOT%': yesno(options.with_dot),
'%DOT_PATH%': os.path.split(options.dot_path)[0],
'%HTML_HELP%': yesno(options.with_html_help),
'%UML_LOOK%': yesno(options.with_uml_look),
'%WARNING_LOG_PATH%': os.path.join( '..', warning_log_path )
}
if os.path.isdir( output_dir ):
print 'Deleting directory:', output_dir
shutil.rmtree( output_dir )
if not os.path.isdir( output_dir ):
os.makedirs( output_dir )
do_subst_in_file( 'doc/doxyfile', 'doc/doxyfile.in', subst_keys )
ok = run_doxygen( options.doxygen_path, 'doc/doxyfile', 'doc', is_silent=options.silent )
if not options.silent:
print open(warning_log_path, 'rb').read()
index_path = os.path.abspath(os.path.join(subst_keys['%HTML_OUTPUT%'], 'index.html'))
print 'Generated documentation can be found in:'
print index_path
if options.open:
import webbrowser
webbrowser.open( 'file://' + index_path )
if options.make_tarball:
print 'Generating doc tarball to', tarball_path
tarball_sources = [
output_dir,
'README.txt',
'LICENSE',
'NEWS.txt',
'version'
]
tarball_basedir = os.path.join( output_dir, html_output_dirname )
tarball.make_tarball( tarball_path, tarball_sources, tarball_basedir, html_output_dirname )
return tarball_path, html_output_dirname
def main():
usage = """%prog
    Generates doxygen documentation in dist/doxygen.
    Optionally makes a tarball of the documentation in dist/.
Must be started in the project top directory.
"""
from optparse import OptionParser
parser = OptionParser(usage=usage)
parser.allow_interspersed_args = False
parser.add_option('--with-dot', dest="with_dot", action='store_true', default=False,
help="""Enable usage of DOT to generate collaboration diagram""")
parser.add_option('--dot', dest="dot_path", action='store', default=find_program('dot'),
help="""Path to GraphViz dot tool. Must be full qualified path. [Default: %default]""")
parser.add_option('--doxygen', dest="doxygen_path", action='store', default=find_program('doxygen'),
help="""Path to Doxygen tool. [Default: %default]""")
parser.add_option('--with-html-help', dest="with_html_help", action='store_true', default=False,
help="""Enable generation of Microsoft HTML HELP""")
parser.add_option('--no-uml-look', dest="with_uml_look", action='store_false', default=True,
help="""Generates DOT graph without UML look [Default: False]""")
parser.add_option('--open', dest="open", action='store_true', default=False,
help="""Open the HTML index in the web browser after generation""")
parser.add_option('--tarball', dest="make_tarball", action='store_true', default=False,
help="""Generates a tarball of the documentation in dist/ directory""")
parser.add_option('-s', '--silent', dest="silent", action='store_true', default=False,
help="""Hides doxygen output""")
parser.enable_interspersed_args()
options, args = parser.parse_args()
build_doc( options )
if __name__ == '__main__':
main()
| lgpl-3.0 | -2,220,701,322,822,282,000 | 39.183432 | 104 | 0.607716 | false |
danielvdende/incubator-airflow | airflow/contrib/operators/bigquery_to_gcs.py | 7 | 4491 | # -*- coding: utf-8 -*-
#
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
from airflow.contrib.hooks.bigquery_hook import BigQueryHook
from airflow.models import BaseOperator
from airflow.utils.decorators import apply_defaults
class BigQueryToCloudStorageOperator(BaseOperator):
"""
Transfers a BigQuery table to a Google Cloud Storage bucket.
.. seealso::
For more details about these parameters:
https://cloud.google.com/bigquery/docs/reference/v2/jobs
:param source_project_dataset_table: The dotted
(<project>.|<project>:)<dataset>.<table> BigQuery table to use as the source
data. If <project> is not included, project will be the project
defined in the connection json. (templated)
:type source_project_dataset_table: string
:param destination_cloud_storage_uris: The destination Google Cloud
Storage URI (e.g. gs://some-bucket/some-file.txt). (templated) Follows
convention defined here:
https://cloud.google.com/bigquery/exporting-data-from-bigquery#exportingmultiple
:type destination_cloud_storage_uris: list
:param compression: Type of compression to use.
:type compression: string
:param export_format: File format to export.
    :type export_format: string
:param field_delimiter: The delimiter to use when extracting to a CSV.
:type field_delimiter: string
:param print_header: Whether to print a header for a CSV file extract.
:type print_header: boolean
:param bigquery_conn_id: reference to a specific BigQuery hook.
:type bigquery_conn_id: string
:param delegate_to: The account to impersonate, if any.
For this to work, the service account making the request must have domain-wide
delegation enabled.
:type delegate_to: string
:param labels: a dictionary containing labels for the job/query,
passed to BigQuery
:type labels: dict
"""
template_fields = ('source_project_dataset_table',
'destination_cloud_storage_uris', 'labels')
template_ext = ('.sql',)
ui_color = '#e4e6f0'
@apply_defaults
def __init__(self,
source_project_dataset_table,
destination_cloud_storage_uris,
compression='NONE',
export_format='CSV',
field_delimiter=',',
print_header=True,
bigquery_conn_id='bigquery_default',
delegate_to=None,
labels=None,
*args,
**kwargs):
super(BigQueryToCloudStorageOperator, self).__init__(*args, **kwargs)
self.source_project_dataset_table = source_project_dataset_table
self.destination_cloud_storage_uris = destination_cloud_storage_uris
self.compression = compression
self.export_format = export_format
self.field_delimiter = field_delimiter
self.print_header = print_header
self.bigquery_conn_id = bigquery_conn_id
self.delegate_to = delegate_to
self.labels = labels
def execute(self, context):
self.log.info('Executing extract of %s into: %s',
self.source_project_dataset_table,
self.destination_cloud_storage_uris)
hook = BigQueryHook(bigquery_conn_id=self.bigquery_conn_id,
delegate_to=self.delegate_to)
conn = hook.get_conn()
cursor = conn.cursor()
cursor.run_extract(
self.source_project_dataset_table,
self.destination_cloud_storage_uris,
self.compression,
self.export_format,
self.field_delimiter,
self.print_header,
self.labels)
| apache-2.0 | 7,774,648,981,716,407,000 | 41.771429 | 88 | 0.661545 | false |
escapewindow/python-scriptharness | scriptharness/exceptions.py | 2 | 2349 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
"""Scriptharness exceptions.
These exceptions are written with several things in mind:
#. the exceptions should be unicode-capable in python 2.7 (py3 gets that
for free),
#. the exceptions should differentiate between user-facing exceptions and
developer-facing exceptions, and
#. ScriptHarnessFatal should exit the script.
There may be more exceptions in the future, to further differentiate between
errors.
"""
from __future__ import absolute_import, division, print_function, \
unicode_literals
from scriptharness.unicode import to_unicode
import six
@six.python_2_unicode_compatible
class ScriptHarnessBaseException(Exception):
"""All scriptharness exceptions should inherit this exception.
However, in most cases you probably want to catch ScriptHarnessException
instead.
"""
def __str__(self):
"""This method will become __unicode__() in py2 via the
@six.python_2_unicode_compatible decorator.
"""
if six.PY3:
string = super(ScriptHarnessBaseException, self).__str__()
else:
string = super(ScriptHarnessBaseException, self).message
string = to_unicode(string, 'utf-8')
return string
class ScriptHarnessException(ScriptHarnessBaseException):
"""There is a problem in how scriptharness is being called.
All developer-facing exceptions should inherit this class.
If you want to catch all developer-facing scriptharness exceptions,
catch ScriptHarnessException.
"""
class ScriptHarnessTimeout(ScriptHarnessException):
"""There was a timeout while running scriptharness.
"""
class ScriptHarnessError(ScriptHarnessBaseException):
"""User-facing exception.
Scriptharness has detected an error in the running process.
Since this exception is not designed to always exit, it's best to
catch these and deal with the error.
"""
class ScriptHarnessFatal(SystemExit, ScriptHarnessBaseException):
"""User-facing exception.
Scriptharness has detected a fatal failure in the running process.
This exception should result in program termination; using try/except may
result in unexpected or dangerous behavior.
"""
def __str__(self):
return ScriptHarnessBaseException.__str__(self)
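if __name__ == '__main__':
    # Illustrative sketch (assumed entry point, not in the original
    # module): user-facing errors are meant to be caught and handled,
    # while ScriptHarnessFatal terminates the script via SystemExit.
    try:
        raise ScriptHarnessError(u'recoverable problem')
    except ScriptHarnessError as exc:
        print(exc)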
| mpl-2.0 | 1,329,770,349,764,972,800 | 31.178082 | 77 | 0.717327 | false |
bollu/vispy | examples/demo/gloo/brain.py | 18 | 4553 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
# vispy: gallery 2
# Copyright (c) 2015, Vispy Development Team.
# Distributed under the (new) BSD License. See LICENSE.txt for more info.
"""
3D brain mesh viewer.
"""
from timeit import default_timer
import numpy as np
from vispy import gloo
from vispy import app
from vispy.util.transforms import perspective, translate, rotate
from vispy.io import load_data_file
brain = np.load(load_data_file('brain/brain.npz', force_download='2014-09-04'))
data = brain['vertex_buffer']
faces = brain['index_buffer']
VERT_SHADER = """
#version 120
uniform mat4 u_model;
uniform mat4 u_view;
uniform mat4 u_projection;
uniform vec4 u_color;
attribute vec3 a_position;
attribute vec3 a_normal;
attribute vec4 a_color;
varying vec3 v_position;
varying vec3 v_normal;
varying vec4 v_color;
void main()
{
v_normal = a_normal;
v_position = a_position;
v_color = a_color * u_color;
gl_Position = u_projection * u_view * u_model * vec4(a_position,1.0);
}
"""
FRAG_SHADER = """
#version 120
uniform mat4 u_model;
uniform mat4 u_view;
uniform mat4 u_normal;
uniform vec3 u_light_intensity;
uniform vec3 u_light_position;
varying vec3 v_position;
varying vec3 v_normal;
varying vec4 v_color;
void main()
{
// Calculate normal in world coordinates
vec3 normal = normalize(u_normal * vec4(v_normal,1.0)).xyz;
// Calculate the location of this fragment (pixel) in world coordinates
vec3 position = vec3(u_view*u_model * vec4(v_position, 1));
// Calculate the vector from this pixels surface to the light source
vec3 surfaceToLight = u_light_position - position;
// Calculate the cosine of the angle of incidence (brightness)
float brightness = dot(normal, surfaceToLight) /
(length(surfaceToLight) * length(normal));
brightness = max(min(brightness,1.0),0.0);
// Calculate final color of the pixel, based on:
// 1. The angle of incidence: brightness
// 2. The color/intensities of the light: light.intensities
// 3. The texture and texture coord: texture(tex, fragTexCoord)
// Specular lighting.
vec3 surfaceToCamera = vec3(0.0, 0.0, 1.0) - position;
vec3 K = normalize(normalize(surfaceToLight) + normalize(surfaceToCamera));
float specular = clamp(pow(abs(dot(normal, K)), 40.), 0.0, 1.0);
gl_FragColor = v_color * brightness * vec4(u_light_intensity, 1);
}
"""
class Canvas(app.Canvas):
def __init__(self):
app.Canvas.__init__(self, keys='interactive')
self.size = 800, 600
self.program = gloo.Program(VERT_SHADER, FRAG_SHADER)
self.theta, self.phi = -80, 180
self.translate = 3
self.faces = gloo.IndexBuffer(faces)
self.program.bind(gloo.VertexBuffer(data))
self.program['u_color'] = 1, 1, 1, 1
self.program['u_light_position'] = (1., 1., 1.)
self.program['u_light_intensity'] = (1., 1., 1.)
self.apply_zoom()
gloo.set_state(blend=False, depth_test=True, polygon_offset_fill=True)
self._t0 = default_timer()
self._timer = app.Timer('auto', connect=self.on_timer, start=True)
self.update_matrices()
def update_matrices(self):
self.view = translate((0, 0, -self.translate))
self.model = np.dot(rotate(self.theta, (1, 0, 0)),
rotate(self.phi, (0, 1, 0)))
self.projection = np.eye(4, dtype=np.float32)
self.program['u_model'] = self.model
self.program['u_view'] = self.view
self.program['u_normal'] = np.linalg.inv(np.dot(self.view,
self.model)).T
def on_timer(self, event):
elapsed = default_timer() - self._t0
self.phi = 180 + elapsed * 50.
self.update_matrices()
self.update()
def on_resize(self, event):
self.apply_zoom()
def on_mouse_wheel(self, event):
self.translate += -event.delta[1]/5.
self.translate = max(2, self.translate)
self.update_matrices()
self.update()
def on_draw(self, event):
gloo.clear()
self.program.draw('triangles', indices=self.faces)
def apply_zoom(self):
gloo.set_viewport(0, 0, self.physical_size[0], self.physical_size[1])
self.projection = perspective(45.0, self.size[0] /
float(self.size[1]), 1.0, 20.0)
self.program['u_projection'] = self.projection
if __name__ == '__main__':
c = Canvas()
c.show()
app.run()
| bsd-3-clause | 4,354,226,615,360,536,600 | 28.374194 | 79 | 0.627059 | false |
jhuapl-boss/intern | intern/service/cv/project.py | 1 | 4848 | # Copyright 2020 The Johns Hopkins University Applied Physics Laboratory
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from intern.service.cv.service import CloudVolumeService
from intern.service.cv.metadata import MetadataService
from intern.resource.cv.resource import CloudVolumeResource
from cloudvolume import CloudVolume, Vec
class ProjectService(CloudVolumeService):
"""
ProjectService for cloudvolume service.
"""
def __init__(self, protocol, cloudpath):
"""
Constructor.
Args:
protocol (str) : protocol to use. Currently supports 'local', 'gs', and 's3'
cloudpath (str) : in the form of "$BUCKET/../$DATASET/$LAYER"
"""
CloudVolumeService.__init__(self)
self.protocol = protocol
self.cloudpath = cloudpath
def cloudvolume(self, mip, info, parallel, cache, **kwargs):
"""
Creates cloud-volume resource
Args:
mip (int): which mip layer to access
info (dict) : json-encodable dictionary of layer parameters. Necessary for creating a
new cloudvolume instance.
            parallel (int or bool, default 1): Number of extra processes to launch; 1 means only
use the main process. If parallel is True use the number of CPUs
returned by multiprocessing.cpu_count(). When parallel > 1, shared
memory (Linux) or emulated shared memory via files (other platforms)
is used by the underlying download.
            cache (bool or str): Store downloads and uploads in a cache on disk
and preferentially read from it before redownloading.
- False: no caching will occur.
- True: cache will be located in a standard location.
- non-empty string: cache is located at this file path
kwargs: optional arguments (https://github.com/seung-lab/cloud-volume#cloudvolume-constructor)
Returns:
            CloudVolumeResource object
"""
return CloudVolumeResource(
self.protocol,
self.cloudpath,
mip=mip,
info=info,
parallel=parallel,
cache=cache,
**kwargs
)
def create_new_info(
self,
num_channels,
layer_type,
data_type,
resolution,
volume_size,
voxel_offset=(0, 0, 0),
encoding="raw",
chunk_size=(64, 64, 64),
mesh=None,
skeletons=None,
compressed_segmentation_block_size=(8, 8, 8),
max_mip=0,
factor=(2, 2, 1),
):
"""
Creates the info JSON necessary for a new cloudvolume resource.
Args:
Required:
num_channels: (int) 1 for grayscale, 3 for RGB
layer_type: (str) typically "image" or "segmentation"
data_type: (str) e.g. "uint8", "uint16", "uint32", "float32"
resolution: int (x,y,z), x,y,z voxel dimensions in nanometers
volume_size: int (x,y,z), extent of dataset in cartesian space from voxel_offset
Optional:
voxel_offset: int (x,y,z), beginning of dataset in positive cartesian space
encoding: (str) "raw" for binaries like numpy arrays, "jpeg"
mesh: (str) name of mesh directory, typically "mesh"
skeletons: (str) name of skeletons directory, typically "skeletons"
chunk_size: int (x,y,z), dimensions of each downloadable 3D image chunk in voxels
compressed_segmentation_block_size: (x,y,z) dimensions of each compressed sub-block
(only used when encoding is 'compressed_segmentation')
max_mip: (int), the maximum mip level id.
factor: (tuple), the downsampling factor for each mip level
Returns: dict representing a single mip level that's JSON encodable
"""
return CloudVolume.create_new_info(
num_channels,
layer_type,
data_type,
encoding,
resolution,
voxel_offset,
volume_size,
mesh,
skeletons,
chunk_size,
compressed_segmentation_block_size,
max_mip,
factor,
)
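    # Example (illustrative values, not defaults): building the info dict
    # for a small uint8 image layer with 4x4x40 nm voxels.
    #
    # svc = ProjectService('gs', 'my-bucket/my-dataset/my-layer')
    # info = svc.create_new_info(num_channels=1, layer_type='image',
    #                            data_type='uint8', resolution=[4, 4, 40],
    #                            volume_size=[1024, 1024, 64])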
| apache-2.0 | 1,367,523,014,614,587,600 | 38.096774 | 106 | 0.600248 | false |
sonaht/ansible | lib/ansible/module_utils/aws/core.py | 50 | 5402 | #
# Copyright 2017 Michael De La Rue | Ansible
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
"""This module adds shared support for generic Amazon AWS modules
**This code is not yet ready for use in user modules. As of 2017**
**and through to 2018, the interface is likely to change**
**aggressively as the exact correct interface for ansible AWS modules**
**is identified. In particular, until this notice goes away or is**
**changed, methods may disappear from the interface. Please don't**
**publish modules using this except directly to the main Ansible**
**development repository.**
In order to use this module, include it as part of a custom
module as shown below.
from ansible.module_utils.aws.core import AnsibleAWSModule
module = AnsibleAWSModule(argument_spec=dictionary, supports_check_mode=boolean
mutually_exclusive=list1, required_together=list2)
The 'AnsibleAWSModule' module provides similar, but more restricted,
interfaces to the normal Ansible module. It also includes the
additional methods for connecting to AWS using the standard module arguments
try:
m.aws_connect(resource='lambda') # - get an AWS connection.
except Exception:
m.fail_json_aws(Exception, msg="trying to connect") # - take an exception and make a decent failure
"""
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils._text import to_native
from ansible.module_utils.ec2 import HAS_BOTO3, camel_dict_to_snake_dict, ec2_argument_spec
import traceback
# We will also export HAS_BOTO3 so end user modules can use it.
__all__ = ('AnsibleAWSModule', 'HAS_BOTO3',)
class AnsibleAWSModule(object):
"""An ansible module class for AWS modules
AnsibleAWSModule provides an a class for building modules which
connect to Amazon Web Services. The interface is currently more
restricted than the basic module class with the aim that later the
basic module class can be reduced. If you find that any key
feature is missing please contact the author/Ansible AWS team
(available on #ansible-aws on IRC) to request the additional
features needed.
"""
default_settings = {
"default_args": True,
"check_boto3": True,
"auto_retry": True,
"module_class": AnsibleModule
}
def __init__(self, **kwargs):
local_settings = {}
for key in AnsibleAWSModule.default_settings:
try:
local_settings[key] = kwargs.pop(key)
except KeyError:
local_settings[key] = AnsibleAWSModule.default_settings[key]
self.settings = local_settings
if local_settings["default_args"]:
# ec2_argument_spec contains the region so we use that; there's a patch coming which
# will add it to aws_argument_spec so if that's accepted then later we should change
# over
argument_spec_full = ec2_argument_spec()
try:
argument_spec_full.update(kwargs["argument_spec"])
except (TypeError, NameError):
pass
kwargs["argument_spec"] = argument_spec_full
self._module = local_settings["module_class"](**kwargs)  # honor an overridden module_class
if local_settings["check_boto3"] and not HAS_BOTO3:
self._module.fail_json(
msg='Python modules "botocore" or "boto3" are missing, please install both')
self.check_mode = self._module.check_mode
@property
def params(self):
return self._module.params
def exit_json(self, *args, **kwargs):
return self._module.exit_json(*args, **kwargs)
def fail_json(self, *args, **kwargs):
return self._module.fail_json(*args, **kwargs)
def fail_json_aws(self, exception, msg=None):
"""call fail_json with processed exception
function for converting exceptions thrown by AWS SDK modules,
botocore, boto3 and boto, into nice error messages.
"""
last_traceback = traceback.format_exc()
# to_native is trusted to handle exceptions that str() could
# convert to text.
try:
except_msg = to_native(exception.message)
except AttributeError:
except_msg = to_native(exception)
if msg is not None:
message = '{0}: {1}'.format(msg, except_msg)
else:
message = except_msg
try:
response = exception.response
except AttributeError:
response = None
if response is None:
self._module.fail_json(msg=message, exception=last_traceback)
else:
self._module.fail_json(msg=message, exception=last_traceback,
**camel_dict_to_snake_dict(response))
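# --- Illustrative usage (added; not part of the original file) --------------
def _example_error_handling(module, client):
    """A minimal sketch, assuming `module` is an AnsibleAWSModule instance and
    `client` is a hypothetical boto3 client used by the module; the
    describe_things call below is invented purely for illustration."""
    try:
        return client.describe_things()
    except Exception as e:  # broad catch shown only for illustration
        module.fail_json_aws(e, msg="trying to describe things")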
| gpl-3.0 | -5,422,023,208,306,754,000 | 36.776224 | 105 | 0.669011 | false |
aurofable/medhack-server | venv/lib/python2.7/encodings/uu_codec.py | 383 | 3738 | """ Python 'uu_codec' Codec - UU content transfer encoding
Unlike most of the other codecs which target Unicode, this codec
will return Python string objects for both encode and decode.
Written by Marc-Andre Lemburg ([email protected]). Some details were
adapted from uu.py which was written by Lance Ellinghouse and
modified by Jack Jansen and Fredrik Lundh.
"""
import codecs, binascii
### Codec APIs
def uu_encode(input,errors='strict',filename='<data>',mode=0666):
""" Encodes the object input and returns a tuple (output
object, length consumed).
errors defines the error handling to apply. It defaults to
'strict' handling which is the only currently supported
error handling for this codec.
"""
assert errors == 'strict'
from cStringIO import StringIO
from binascii import b2a_uu
# using str() because of cStringIO's undesired Unicode behavior.
infile = StringIO(str(input))
outfile = StringIO()
read = infile.read
write = outfile.write
# Encode
write('begin %o %s\n' % (mode & 0777, filename))
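# b2a_uu accepts at most 45 bytes of input per call; each 45-byte chunk
# becomes exactly one uuencoded output line (length byte + data + newline).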
chunk = read(45)
while chunk:
write(b2a_uu(chunk))
chunk = read(45)
write(' \nend\n')
return (outfile.getvalue(), len(input))
def uu_decode(input,errors='strict'):
""" Decodes the object input and returns a tuple (output
object, length consumed).
input must be an object which provides the bf_getreadbuf
buffer slot. Python strings, buffer objects and memory
mapped files are examples of objects providing this slot.
errors defines the error handling to apply. It defaults to
'strict' handling which is the only currently supported
error handling for this codec.
Note: filename and file mode information in the input data is
ignored.
"""
assert errors == 'strict'
from cStringIO import StringIO
from binascii import a2b_uu
infile = StringIO(str(input))
outfile = StringIO()
readline = infile.readline
write = outfile.write
# Find start of encoded data
while 1:
s = readline()
if not s:
raise ValueError, 'Missing "begin" line in input data'
if s[:5] == 'begin':
break
# Decode
while 1:
s = readline()
if not s or \
s == 'end\n':
break
try:
data = a2b_uu(s)
except binascii.Error, v:
# Workaround for broken uuencoders by /Fredrik Lundh
nbytes = (((ord(s[0])-32) & 63) * 4 + 5) / 3
data = a2b_uu(s[:nbytes])
#sys.stderr.write("Warning: %s\n" % str(v))
write(data)
if not s:
raise ValueError, 'Truncated input data'
return (outfile.getvalue(), len(input))
class Codec(codecs.Codec):
def encode(self,input,errors='strict'):
return uu_encode(input,errors)
def decode(self,input,errors='strict'):
return uu_decode(input,errors)
class IncrementalEncoder(codecs.IncrementalEncoder):
def encode(self, input, final=False):
return uu_encode(input, self.errors)[0]
class IncrementalDecoder(codecs.IncrementalDecoder):
def decode(self, input, final=False):
return uu_decode(input, self.errors)[0]
class StreamWriter(Codec,codecs.StreamWriter):
pass
class StreamReader(Codec,codecs.StreamReader):
pass
### encodings module API
def getregentry():
return codecs.CodecInfo(
name='uu',
encode=uu_encode,
decode=uu_decode,
incrementalencoder=IncrementalEncoder,
incrementaldecoder=IncrementalDecoder,
streamreader=StreamReader,
streamwriter=StreamWriter,
)
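### Usage sketch (added for illustration; not part of the original codec) ###
def _roundtrip_demo(payload='hello world\n'):
    # Encode with the module's own entry points, then decode and confirm the
    # payload survives the round trip.
    encoded, consumed = uu_encode(payload)
    decoded, _ = uu_decode(encoded)
    assert consumed == len(payload) and decoded == payload
    return encoded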
| mit | 1,159,593,495,492,654,600 | 27.976744 | 76 | 0.641787 | false |
mcgachey/edx-platform | setup.py | 11 | 3320 | """
Setup script for the Open edX package.
"""
from setuptools import setup
setup(
name="Open edX",
version="0.5",
install_requires=["setuptools"],
requires=[],
# NOTE: These are not the names we should be installing. This tree should
# be reorganized to be a more conventional Python tree.
packages=[
"openedx.core.djangoapps.course_groups",
"openedx.core.djangoapps.credit",
"openedx.core.djangoapps.user_api",
"lms",
"cms",
],
entry_points={
"openedx.course_tab": [
"ccx = lms.djangoapps.ccx.plugins:CcxCourseTab",
"courseware = lms.djangoapps.courseware.tabs:CoursewareTab",
"course_info = lms.djangoapps.courseware.tabs:CourseInfoTab",
"discussion = lms.djangoapps.django_comment_client.forum.views:DiscussionTab",
"edxnotes = lms.djangoapps.edxnotes.plugins:EdxNotesTab",
"external_discussion = lms.djangoapps.courseware.tabs:ExternalDiscussionCourseTab",
"external_link = lms.djangoapps.courseware.tabs:ExternalLinkCourseTab",
"html_textbooks = lms.djangoapps.courseware.tabs:HtmlTextbookTabs",
"instructor = lms.djangoapps.instructor.views.instructor_dashboard:InstructorDashboardTab",
"notes = lms.djangoapps.notes.views:NotesTab",
"pdf_textbooks = lms.djangoapps.courseware.tabs:PDFTextbookTabs",
"progress = lms.djangoapps.courseware.tabs:ProgressTab",
"static_tab = xmodule.tabs:StaticTab",
"syllabus = lms.djangoapps.courseware.tabs:SyllabusTab",
"teams = lms.djangoapps.teams.plugins:TeamsTab",
"textbooks = lms.djangoapps.courseware.tabs:TextbookTabs",
"wiki = lms.djangoapps.course_wiki.tab:WikiTab",
# ORA 1 tabs (deprecated)
"peer_grading = lms.djangoapps.open_ended_grading.views:PeerGradingTab",
"staff_grading = lms.djangoapps.open_ended_grading.views:StaffGradingTab",
"open_ended = lms.djangoapps.open_ended_grading.views:OpenEndedGradingTab",
],
"openedx.user_partition_scheme": [
"random = openedx.core.djangoapps.user_api.partition_schemes:RandomUserPartitionScheme",
"cohort = openedx.core.djangoapps.course_groups.partition_scheme:CohortPartitionScheme",
"verification = openedx.core.djangoapps.credit.partition_schemes:VerificationPartitionScheme",
],
"openedx.block_structure_transformer": [
"library_content = lms.djangoapps.course_blocks.transformers.library_content:ContentLibraryTransformer",
"split_test = lms.djangoapps.course_blocks.transformers.split_test:SplitTestTransformer",
"start_date = lms.djangoapps.course_blocks.transformers.start_date:StartDateTransformer",
"user_partitions = lms.djangoapps.course_blocks.transformers.user_partitions:UserPartitionTransformer",
"visibility = lms.djangoapps.course_blocks.transformers.visibility:VisibilityTransformer",
"course_blocks_api = lms.djangoapps.course_api.blocks.transformers.blocks_api:BlocksAPITransformer",
"proctored_exam = lms.djangoapps.course_api.blocks.transformers.proctored_exam:ProctoredExamTransformer",
],
}
)
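# --- Illustrative discovery sketch (added; not part of the original file) ---
# The entry points declared above are ordinary setuptools plugins; once the
# package is installed they can be enumerated at runtime:
def _list_course_tab_plugins():
    from pkg_resources import iter_entry_points
    return {ep.name: str(ep) for ep in iter_entry_points('openedx.course_tab')}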
| agpl-3.0 | 433,515,252,515,675,200 | 53.42623 | 117 | 0.68253 | false |
roberthawdon/toonbot | subprocesses/post-queue.py | 1 | 6346 | #!/usr/bin/env python
# Toon Bot - Poster Bot Subprocess
#
# _____ U ___ u U ___ u _ _ ____ U ___ u _____
# |_ " _| \/"_ \/ \/"_ \/ | \ |"| U | __")u \/"_ \/|_ " _|
# | | | | | | | | | |<| \| |> \| _ \/ | | | | | |
# /| |\.-,_| |_| |.-,_| |_| |U| |\ |u | |_) |.-,_| |_| | /| |\
# u |_|U \_)-\___/ \_)-\___/ |_| \_| |____/ \_)-\___/ u |_|U
# _// \\_ \\ \\ || \\,-. _|| \\_ \\ _// \\_
# (__) (__) (__) (__) (_") (_/ (__) (__) (__) (__) (__)
#
# Providing 5 minute breaks since 2016
#
# By Robert Hawdon - https://robertianhawdon.me.uk/
import sys
from argparse import ArgumentParser
import MySQLdb
import random
import time
import urllib
import urllib2
import yaml
import json
import os
import os.path
from tendo import singleton
me = singleton.SingleInstance()
script_dirpath = os.path.dirname(os.path.join(os.getcwd(), __file__))
class PosterBot(object):
def __init__(self, config):
# set the config object
self.config = config
# set slack token
self.token = config.get('SLACK_TOKEN')
# set mysql details
self.mysqlserver = config.get('MYSQL_SERVER')
self.mysqluser = config.get('MYSQL_USER')
self.mysqlpass = config.get('MYSQL_PASS')
self.mysqldb = config.get('MYSQL_DB')
# self.postcolor = config.get('POST_COLOR')
# self.posttextcolor = config.get('POST_TEXT_COLOR')
self.process_queue()
def process_queue(self):
#try:
conn = MySQLdb.Connection(self.mysqlserver, self.mysqluser, self.mysqlpass, self.mysqldb)
curs = conn.cursor()
conn.set_character_set('utf8')
curs.execute('SET NAMES utf8;')
curs.execute('SET CHARACTER SET utf8;')
curs.execute('SET character_set_connection=utf8;')
cmd = "SELECT value FROM tbl_system WHERE name = 'postcolor'"
curs.execute(cmd)
result = curs.fetchall()
for color in result:
defaultpostcolor = color[0]
cmd = "SELECT value FROM tbl_system WHERE name = 'posttextcolor'"
curs.execute(cmd)
result = curs.fetchall()
for color in result:
defaultposttextcolor = color[0]
cmd = "SELECT Q.ID, Q.slackuser, Q.displayname, Q.comichash, Q.flags, U.dmid FROM tbl_queue Q LEFT JOIN tbl_users U ON U.slackuser = Q.slackuser WHERE Q.sent = 0"
curs.execute(cmd)
result = curs.fetchall()
for items in result:
id = items[0]
slackuser = items[1]
displayname = items[2]
comichash = items[3]
flags = items[4]
dmid = items[5]
cmd = "SELECT ID FROM tbl_users WHERE slackuser = %s"
curs.execute(cmd, [slackuser])
result = curs.fetchall()
for users in result:
userid = users[0]
cmd = "SELECT name, value FROM tbl_preferences WHERE userID = %s"
curs.execute(cmd, [userid])
result = curs.fetchall()
prefname = []
prefvalue = []
for preferences in result:
prefname.append(preferences[0])
prefvalue.append(preferences[1])
if 'postcolor' in prefname:
postcolor = prefvalue[prefname.index("postcolor")]
else:
postcolor = defaultpostcolor
if 'posttextcolor' in prefname:
posttextcolor = prefvalue[prefname.index("posttextcolor")]
else:
posttextcolor = defaultposttextcolor
cmd = "SELECT image, pageurl, title, text FROM tbl_comic_data WHERE comichash = %s"
curs.execute(cmd, ([comichash]))
result2 = curs.fetchall()
for comic in result2:
image = comic[0]
pageurl = comic[1]
title = comic[2]
if title:
utitle = title.decode("utf-8")
title = utitle.encode("ascii", "ignore")
text = comic[3]
if text:
utext = text.decode("utf-8")
text = utext.encode("ascii", "ignore")
if title is None:
title = displayname
if text is not None:
body = [{"title": title,"title_link": pageurl,"author_name": displayname,"image_url": image,"color": "#" + postcolor}, {"text": text, "color": "#" + posttextcolor}]
else:
body = [{"title": title,"title_link": pageurl,"author_name": displayname,"image_url": image,"color": "#" + postcolor}]
data = body
#print json.dumps(data)
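# Slack's chat.postMessage API expects the attachments payload as a
# URL-encoded JSON string when it is passed in the query string.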
attachment = urllib.quote(str(json.dumps(data)))
url = "https://slack.com/api/chat.postMessage?token=" + self.token + "&channel=" + dmid + "&attachments=" + attachment + "&as_user=true"
req = urllib2.Request(url)
response = urllib2.urlopen(req)
# print response.read()
# reset per-comic fields so values don't leak into the next loop iteration
image = None
pageurl = None
title = None
text = None
jsonres = json.load(response)
if jsonres["ok"] is True:
cmd = "UPDATE tbl_queue SET sent = 1 WHERE ID = %s"
curs.execute(cmd, ([id]))
conn.commit()
else:
errormessage = jsonres["error"]
cmd = "UPDATE tbl_queue SET flags = 1, errormessage = %s WHERE ID = %s"
curs.execute(cmd, (errormessage, id))
cmd = "INSERT INTO tbl_queue_errors (errormessage, queueID) VALUES (%s, %s)"
curs.execute(cmd, (errormessage, id))
conn.commit()
time.sleep(1)
def parse_args():
parser = ArgumentParser()
parser.add_argument(
'-c',
'--config',
help='Full path to config file.',
metavar='path'
)
return parser.parse_args()
# load args with config path
args = parse_args()
config = yaml.load(open(args.config or script_dirpath + '/../toonbot.conf', 'r'))
PosterBot(config)
| gpl-3.0 | -4,305,297,769,392,593,400 | 37.695122 | 184 | 0.499842 | false |
jitka/weblate | weblate/trans/tests/__init__.py | 2 | 1838 | # -*- coding: utf-8 -*-
#
# Copyright © 2012 - 2016 Michal Čihař <[email protected]>
#
# This file is part of Weblate <https://weblate.org/>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
import tempfile
import shutil
from weblate import appsettings
class OverrideSettings(object):
"""
Makes a context manager that can also be used as a decorator.
"""
TEMP_DIR = 0x12346578
def __init__(self, **values):
self._values = values
self._backup = {}
self._tempdir = None
def __enter__(self):
for name, value in self._values.items():
self._backup[name] = getattr(appsettings, name)
if value == self.TEMP_DIR:
self._tempdir = tempfile.mkdtemp()
setattr(appsettings, name, self._tempdir)
else:
setattr(appsettings, name, value)
return self
def __exit__(self, *args, **kwds):
for name in self._values.keys():
setattr(appsettings, name, self._backup[name])
if self._tempdir is not None:
shutil.rmtree(self._tempdir)
def __call__(self, func):
def wrapper(*args, **kwds):
with self:
return func(*args, **kwds)
return wrapper
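# --- Illustrative usage (added; not part of the original file) ---------------
# OverrideSettings works as a context manager and as a decorator; the setting
# names below are hypothetical:
#
#   with OverrideSettings(DATA_DIR=OverrideSettings.TEMP_DIR):
#       ...  # appsettings.DATA_DIR points at a throwaway temp dir here
#
#   @OverrideSettings(OFFLINE_MODE=True)
#   def test_something():
#       ...  # appsettings.OFFLINE_MODE is True for the duration of the call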
| gpl-3.0 | -4,171,743,853,807,584,000 | 30.101695 | 71 | 0.632153 | false |
ChrisTruncer/EyeWitness | Python/modules/reporting.py | 1 | 17673 | import os
import sys
import urllib.parse
try:
from fuzzywuzzy import fuzz
except ImportError:
print('[*] fuzzywuzzy not found.')
print('[*] Please run the script in the setup directory!')
sys.exit()
def process_group(
data, group, toc, toc_table, page_num, section,
sectionid, html):
"""Retreives a group from the full data, and creates toc stuff
Args:
data (List): Full set of data containing all hosts
group (String): String representing group to process
toc (String): HTML for Table of Contents
toc_table (String): HTML for Table in ToC
page_num (int): Page number we're on in the report
section (String): Display name of the group
sectionid (String): Unique ID for ToC navigation
html (String): HTML for current page of report
Returns:
List: Elements for category sorted and grouped
String: HTML representing ToC
String: HTML representing ToC Table
String: HTML representing current report page
"""
group_data = sorted([x for x in data if x.category == group], key=lambda k: str(k.page_title))
grouped_elements = []
if len(group_data) == 0:
return grouped_elements, toc, toc_table, html
if page_num == 0:
toc += ("<li><a href=\"report.html#{0}\">{1} (Page 1)</a></li>").format(
sectionid, section)
else:
toc += ("<li><a href=\"report_page{0}.html#{1}\">{2} (Page {0})</a></li>").format(
str(page_num+1), sectionid, section)
html += "<h2 id=\"{0}\">{1}</h2>".format(sectionid, section)
unknowns = [x for x in group_data if x.page_title == 'Unknown']
group_data = [x for x in group_data if x.page_title != 'Unknown']
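# Cluster the remaining pages by fuzzy title similarity: repeatedly pop a
# seed page, pull in every page whose token-sorted title is at least 70%
# similar, and emit that cluster before moving on to the next seed.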
while len(group_data) > 0:
test_element = group_data.pop(0)
temp = [x for x in group_data if fuzz.token_sort_ratio(
test_element.page_title, x.page_title) >= 70]
temp.append(test_element)
temp = sorted(temp, key=lambda k: k.page_title)
grouped_elements.extend(temp)
group_data = [x for x in group_data if fuzz.token_sort_ratio(
test_element.page_title, x.page_title) < 70]
grouped_elements.extend(unknowns)
toc_table += ("<tr><td>{0}</td><td>{1}</td></tr>").format(
    section, str(len(grouped_elements)))
return grouped_elements, toc, toc_table, html
def sort_data_and_write(cli_parsed, data):
"""Writes out reports for HTTP objects
Args:
cli_parsed (TYPE): CLI Options
data (TYPE): Full set of data
"""
# We'll be using this number for our table of contents
total_results = len(data)
categories = [('highval', 'High Value Targets', 'highval'),
('dirlist', 'Directory Listings', 'dirlist'),
(None, 'Uncategorized', 'uncat'),
('cms', 'Content Management System (CMS)', 'cms'),
('idrac', 'IDRAC/ILo/Management Interfaces', 'idrac'),
('nas', 'Network Attached Storage (NAS)', 'nas'),
('construction', 'Under Construction', 'construction'),
('netdev', 'Network Devices', 'netdev'),
('voip', 'Voice/Video over IP (VoIP)', 'voip'),
('unauth', '401/403 Unauthorized', 'unauth'),
('notfound', '404 Not Found', 'notfound'),
('crap', 'Splash Pages', 'crap'),
('printer', 'Printers', 'printer'),
('successfulLogin', 'Successful Logins', 'successfulLogin'),
('identifiedLogin', 'Identified Logins', 'identifiedLogin'),
('infrastructure', 'Infrastructure', 'infrastructure'),
('redirector', 'Redirecting Pages', 'redirector'),
('badhost', 'Invalid Hostname', 'badhost'),
('inerror', 'Internal Error', 'inerror'),
('badreq', 'Bad Request', 'badreq'),
('serviceunavailable', 'Service Unavailable', 'serviceunavailable'),
]
if total_results == 0:
return
# Initialize stuff we need
pages = []
toc = create_report_toc_head(cli_parsed.date, cli_parsed.time)
toc_table = "<table class=\"table\">"
web_index_head = create_web_index_head(cli_parsed.date, cli_parsed.time)
table_head = create_table_head()
counter = 1
csv_request_data = "Protocol,Port,Domain,Request Status,Screenshot Path, Source Path"
# Generate and write a CSV log of all requests
for json_request in data:
url = urllib.parse.urlparse(json_request._remote_system)
# Determine protocol
csv_request_data += "\n" + url.scheme + ","
if url.port is not None:
csv_request_data += str(url.port) + ","
elif url.scheme == 'http':
csv_request_data += "80,"
elif url.scheme == 'https':
csv_request_data += "443,"
try:
csv_request_data += url.hostname + ","
except TypeError:
print("Error when accessing a target's hostname (it's not existent)")
print("Possible bad url (improperly formatted) in the URL list.")
print("Fix your list and re-try. Killing EyeWitness....")
sys.exit(1)
if json_request._error_state == None:
csv_request_data += "Successful,"
else:
csv_request_data += json_request._error_state + ","
csv_request_data += json_request._screenshot_path + ","
csv_request_data += json_request._source_path
with open(os.path.join(cli_parsed.d, 'Requests.csv'), 'a') as f:
f.write(csv_request_data)
# Pre-filter error entries
def key_lambda(k):
if k.error_state is None:
k.error_state = str(k.error_state)
if k.page_title is None:
k.page_title = str(k.page_title)
return (k.error_state, k.page_title)
errors = sorted([x for x in data if (x is not None) and (x.error_state is not None)],
key=key_lambda)
data[:] = [x for x in data if x.error_state is None]
data = sorted(data, key=lambda k: str(k.page_title))
html = u""
# Loop over our categories and populate HTML
for cat in categories:
grouped, toc, toc_table, html = process_group(
data, cat[0], toc, toc_table, len(pages), cat[1], cat[2], html)
if len(grouped) > 0:
html += table_head
pcount = 0
for obj in grouped:
pcount += 1
html += obj.create_table_html()
if (counter % cli_parsed.results == 0) or (counter == total_results - 1):
html = (web_index_head + "EW_REPLACEME" + html +
"</table><br>")
pages.append(html)
html = u""
if pcount < len(grouped):
html += table_head
counter += 1
if len(grouped) > 0 and (counter - 1) % cli_parsed.results != 0:
html += "</table><br>"
# Add our errors here (at the very very end)
if len(errors) > 0:
html += '<h2>Errors</h2>'
html += table_head
for obj in errors:
html += obj.create_table_html()
if (counter % cli_parsed.results == 0) or (counter == (total_results)):
html = (web_index_head + "EW_REPLACEME" + html +
"</table><br>")
pages.append(html)
html = u"" + table_head
counter += 1
# Close out any stuff thats hanging
toc += "</ul>"
toc_table += "<tr><td>Errors</td><td>{0}</td></tr>".format(
str(len(errors)))
toc_table += "<tr><th>Total</th><td>{0}</td></tr>".format(total_results)
toc_table += "</table>"
if (html != u"") and (counter - total_results != 0):
html = (web_index_head + "EW_REPLACEME" + html +
"</table><br>")
pages.append(html)
toc = "<center>{0}<br><br>{1}<br><br></center>".format(toc, toc_table)
if len(pages) == 1:
with open(os.path.join(cli_parsed.d, 'report.html'), 'a') as f:
f.write(toc)
f.write(pages[0].replace('EW_REPLACEME', ''))
f.write("</body>\n</html>")
else:
num_pages = len(pages) + 1
bottom_text = "\n<center><br>"
bottom_text += ("<a href=\"report.html\"> Page 1</a>")
skip_last_dummy = False
# Generate our header/footer data here
for i in range(2, num_pages):
badd_page = "</center>EW_REPLACEME<table border=\"1\">\n <tr>\n <th>Web Request Info</th>\n <th>Web Screenshot</th>\n </tr></table><br>"
if badd_page in pages[i-1]:
skip_last_dummy = True
pass
else:
bottom_text += ("<a href=\"report_page{0}.html\"> Page {0}</a>").format(str(i))
bottom_text += "</center>\n"
top_text = bottom_text
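# The same page-navigation links are rendered at both the top and the
# bottom of every report page.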
# Generate our next/previous page buttons
if skip_last_dummy:
amount = len(pages) - 1
else:
amount = len(pages)
for i in range(0, amount):
headfoot = "<h3>Page {0}</h3>".format(str(i+1))
headfoot += "<center>"
if i == 0:
headfoot += ("<a href=\"report_page2.html\" id=\"next\"> Next Page "
"</a></center>")
elif i == amount - 1:
if i == 1:
headfoot += ("<a href=\"report.html\" id=\"previous\"> Previous Page "
"</a></center>")
else:
headfoot += ("<a href=\"report_page{0}.html\" id=\"previous\"> Previous Page "
"</a></center>").format(str(i))
elif i == 1:
headfoot += ("<a href=\"report.html\" id=\"previous\">Previous Page</a> "
"<a href=\"report_page{0}.html\" id=\"next\"> Next Page"
"</a></center>").format(str(i+2))
else:
headfoot += ("<a href=\"report_page{0}.html\" id=\"previous\">Previous Page</a>"
" <a href=\"report_page{1}.html\" id=\"next\"> Next Page"
"</a></center>").format(str(i), str(i+2))
# Finalize our pages by replacing placeholder stuff and writing out
# the headers/footers
pages[i] = pages[i].replace(
'EW_REPLACEME', headfoot + top_text) + bottom_text + '<br>' + headfoot + '</body></html>'
# Write out our report to disk!
if len(pages) == 0:
return
with open(os.path.join(cli_parsed.d, 'report.html'), 'a') as f:
f.write(toc)
f.write(pages[0])
write_out = len(pages)
for i in range(2, write_out + 1):
bad_page = "<table border=\"1\">\n <tr>\n <th>Web Request Info</th>\n <th>Web Screenshot</th>\n </tr></table><br>\n<center><br><a "
badd_page2 = "</center>EW_REPLACEME<table border=\"1\">\n <tr>\n <th>Web Request Info</th>\n <th>Web Screenshot</th>\n </tr></table><br>"
if (bad_page in pages[i-1]) or (badd_page2 in pages[i-1]):
pass
else:
with open(os.path.join(cli_parsed.d, 'report_page{0}.html'.format(str(i))), 'w') as f:
f.write(pages[i - 1])
def create_web_index_head(date, time):
"""Creates the header for a http report
Args:
date (String): Date of report start
time (String): Time of report start
Returns:
String: HTTP Report Start html
"""
return ("""<html>
<head>
<link rel=\"stylesheet\" href=\"https://maxcdn.bootstrapcdn.com/bootstrap/3.3.7/css/bootstrap.min.css\" type=\"text/css\"/>
<title>EyeWitness Report</title>
<script src="jquery-1.11.3.min.js"></script>
<script type="text/javascript">
function toggleUA(id, url){{
idi = "." + id;
$(idi).toggle();
change = document.getElementById(id);
if (change.innerHTML.indexOf("expand") > -1){{
change.innerHTML = "Click to collapse User Agents for " + url;
}}else{{
change.innerHTML = "Click to expand User Agents for " + url;
}}
}}
document.onkeydown = function(event){{
event = event || window.event;
switch (event.keyCode){{
case 37:
leftArrow();
break;
case 39:
rightArrow();
break;
}}
}};
function leftArrow(){{
$('#previous')[0].click();
}};
function rightArrow(){{
$('#next')[0].click();
}};
</script>
</head>
<body>
<center>
<center>Report Generated on {0} at {1}</center>""").format(date, time)
def search_index_head():
return ("""<html>
<head>
<link rel=\"stylesheet\" href=\"https://maxcdn.bootstrapcdn.com/bootstrap/3.3.7/css/bootstrap.min.css\" type=\"text/css\"/>
<title>EyeWitness Report</title>
<script src="jquery-1.11.3.min.js"></script>
<script type="text/javascript">
function toggleUA(id, url){
    idi = "." + id;
    $(idi).toggle();
    change = document.getElementById(id);
    if (change.innerHTML.indexOf("expand") > -1){
        change.innerHTML = "Click to collapse User Agents for " + url;
    }else{
        change.innerHTML = "Click to expand User Agents for " + url;
    }
}
</script>
</head>
<body>
<center>
""")
def create_table_head():
return ("""<table border=\"1\">
<tr>
<th>Web Request Info</th>
<th>Web Screenshot</th>
</tr>""")
def create_report_toc_head(date, time):
return ("""<html>
<head>
<title>EyeWitness Report Table of Contents</title>
</head>
<h2>Table of Contents</h2>""")
def search_report(cli_parsed, data, search_term):
pages = []
web_index_head = search_index_head()
table_head = create_table_head()
counter = 1
data[:] = [x for x in data if x.error_state is None]
data = sorted(data, key=lambda k: k.page_title)
html = u""
# Add our errors here (at the very very end)
html += '<h2>Results for {0}</h2>'.format(search_term)
html += table_head
for obj in data:
html += obj.create_table_html()
if counter % cli_parsed.results == 0:
html = (web_index_head + "EW_REPLACEME" + html +
"</table><br>")
pages.append(html)
html = u"" + table_head
counter += 1
if html != u"":
html = (web_index_head + html + "</table><br>")
pages.append(html)
if len(pages) == 1:
with open(os.path.join(cli_parsed.d, 'search.html'), 'a') as f:
f.write(pages[0].replace('EW_REPLACEME', ''))
f.write("</body>\n</html>")
else:
num_pages = len(pages) + 1
bottom_text = "\n<center><br>"
bottom_text += ("<a href=\"search.html\"> Page 1</a>")
# Generate our header/footer data here
for i in range(2, num_pages):
bottom_text += ("<a href=\"search_page{0}.html\"> Page {0}</a>").format(
str(i))
bottom_text += "</center>\n"
top_text = bottom_text
# Generate our next/previous page buttons
for i in range(0, len(pages)):
headfoot = "<center>"
if i == 0:
headfoot += ("<a href=\"search_page2.html\"> Next Page "
"</a></center>")
elif i == len(pages) - 1:
if i == 1:
headfoot += ("<a href=\"search.html\"> Previous Page "
"</a></center>")
else:
headfoot += ("<a href=\"search_page{0}.html\"> Previous Page "
"</a></center>").format(str(i))
elif i == 1:
headfoot += ("<a href=\"search.html\">Previous Page</a> "
"<a href=\"search_page{0}.html\"> Next Page"
"</a></center>").format(str(i+2))
else:
headfoot += ("<a href=\"search_page{0}.html\">Previous Page</a>"
" <a href=\"search_page{1}.html\"> Next Page"
"</a></center>").format(str(i), str(i+2))
# Finalize our pages by replacing placeholder stuff and writing out
# the headers/footers
pages[i] = pages[i].replace(
'EW_REPLACEME', headfoot + top_text) + bottom_text + '<br>' + headfoot + '</body></html>'
# Write out our report to disk!
if len(pages) == 0:
return
with open(os.path.join(cli_parsed.d, 'search.html'), 'a', encoding='utf-8') as f:
    f.write(pages[0])
for i in range(2, len(pages) + 1):
    with open(os.path.join(cli_parsed.d, 'search_page{0}.html'.format(str(i))), 'w', encoding='utf-8') as f:
        f.write(pages[i - 1])
| gpl-3.0 | 2,801,810,865,289,462,000 | 39.349315 | 177 | 0.508572 | false |