code | repo_name | path | language | license | size
stringlengths 3-1.05M | stringlengths 5-104 | stringlengths 4-251 | stringclasses 1 value | stringclasses 15 values | int64 3-1.05M
---|---|---|---|---|---|
##
## This file is part of the libsigrokdecode project.
##
## Copyright (C) 2014 Uwe Hermann <[email protected]>
##
## This program is free software; you can redistribute it and/or modify
## it under the terms of the GNU General Public License as published by
## the Free Software Foundation; either version 2 of the License, or
## (at your option) any later version.
##
## This program is distributed in the hope that it will be useful,
## but WITHOUT ANY WARRANTY; without even the implied warranty of
## MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
## GNU General Public License for more details.
##
## You should have received a copy of the GNU General Public License
## along with this program; if not, write to the Free Software
## Foundation, Inc., 51 Franklin St, Fifth Floor, Boston, MA 02110-1301 USA
##
'''
RC-5 is a biphase/manchester based infrared remote control protocol.
'''
from .pd import *
| salberin/libsigrokdecode | decoders/ir_rc5/__init__.py | Python | gpl-3.0 | 931 |
import tempfile
from pymol import cmd
cmd.show_as("cartoon", "experimental_structure")
cmd.show_as("cartoon", "predicted_structure")
rmsd = cmd.align('predicted_structure', 'experimental_structure')
cmd.bg_color('white')
handler, output_file = tempfile.mkstemp(prefix='alignment', suffix='.png')
cmd.png(output_file, ray=1)
print(rmsd[0])
print(output_file)
cmd.quit()
| mchelem/cref2 | cref/utils/pymolbench.py | Python | mit | 370 |
#coding:utf-8
'''
@author: shibkov
'''
from actions import SlavePack
| barsgroup/objectpack | src/objectpack/slave_object_pack/__init__.py | Python | mit | 70 |
# Copyright (C) 2010-2019 The ESPResSo project
#
# This file is part of ESPResSo.
#
# ESPResSo is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# ESPResSo is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
"""
Visualize lattice-Boltzmann boundary nodes.
"""
import espressomd
import espressomd.lb
import espressomd.shapes
import espressomd.lbboundaries
from espressomd import visualization_opengl
required_features = ["LB_BOUNDARIES"]
espressomd.assert_features(required_features)
system = espressomd.System(box_l=[10.0, 10.0, 5.0])
system.set_random_state_PRNG()
system.time_step = 0.01
system.cell_system.skin = 0.4
lb_fluid = espressomd.lb.LBFluid(
agrid=1.0, dens=1.0, visc=1.0, tau=0.01, ext_force_density=[0, 0, 0.15])
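# the constant force density along +z drives the fluid through the cylindrical
# channel defined by the boundary below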
system.actors.add(lb_fluid)
cylinder_shape = espressomd.shapes.Cylinder(
center=[5.0, 5.0, 5.0],
axis=[0, 0, 1],
direction=-1,
radius=4.0,
length=20.0)
cylinder_boundary = espressomd.lbboundaries.LBBoundary(shape=cylinder_shape)
system.lbboundaries.add(cylinder_boundary)
visualizer = visualization_opengl.openGLLive(
system,
background_color=[1, 1, 1],
camera_position=[5, 5, 25],
LB_draw_boundaries=True,
LB_draw_nodes=True,
LB_draw_node_boundaries=True)
visualizer.run(1)
| psci2195/espresso-ffans | samples/visualization_lbboundaries.py | Python | gpl-3.0 | 1,759 |
"""
FileCatalogHandler is a simple Replica and Metadata Catalog service
in the DIRAC framework
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
__RCSID__ = "$Id$"
import six
from six import StringIO
import csv
import os
from DIRAC.Core.DISET.RequestHandler import RequestHandler, getServiceOption
from DIRAC import S_OK, S_ERROR
from DIRAC.FrameworkSystem.Client.MonitoringClient import gMonitor
from DIRAC.DataManagementSystem.DB.FileCatalogDB import FileCatalogDB
class FileCataloghandlerMixin(object):
"""
A simple Replica and Metadata Catalog service.
"""
@classmethod
def initializeHandler(cls, serviceInfo):
""" Handler initialization """
dbLocation = getServiceOption(serviceInfo, 'Database', 'DataManagement/FileCatalogDB')
cls.fileCatalogDB = FileCatalogDB(dbLocation)
databaseConfig = {}
# Obtain the plugins to be used for DB interaction
cls.log.info("Initializing with FileCatalog with following managers:")
defaultManagers = {'UserGroupManager': 'UserAndGroupManagerDB',
'SEManager': 'SEManagerDB',
'SecurityManager': 'NoSecurityManager',
'DirectoryManager': 'DirectoryLevelTree',
'FileManager': 'FileManager',
'DirectoryMetadata': 'DirectoryMetadata',
'FileMetadata': 'FileMetadata',
'DatasetManager': 'DatasetManager'}
for configKey in sorted(defaultManagers.keys()):
defaultValue = defaultManagers[configKey]
configValue = getServiceOption(serviceInfo, configKey, defaultValue)
cls.log.info("%-20s : %-20s" % (str(configKey), str(configValue)))
databaseConfig[configKey] = configValue
# Obtain some general configuration of the database
cls.log.info("Initializing the FileCatalog with the following configuration:")
defaultConfig = {'UniqueGUID': False,
'GlobalReadAccess': True,
'LFNPFNConvention': 'Strong',
'ResolvePFN': True,
'DefaultUmask': 0o775,
'ValidFileStatus': ['AprioriGood', 'Trash', 'Removing', 'Probing'],
'ValidReplicaStatus': ['AprioriGood', 'Trash', 'Removing', 'Probing'],
'VisibleFileStatus': ['AprioriGood'],
'VisibleReplicaStatus': ['AprioriGood']}
for configKey in sorted(defaultConfig.keys()):
defaultValue = defaultConfig[configKey]
configValue = getServiceOption(serviceInfo, configKey, defaultValue)
cls.log.info("%-20s : %-20s" % (str(configKey), str(configValue)))
databaseConfig[configKey] = configValue
res = cls.fileCatalogDB.setConfig(databaseConfig)
gMonitor.registerActivity("AddFile", "Amount of addFile calls",
"FileCatalogHandler", "calls/min", gMonitor.OP_SUM)
gMonitor.registerActivity("AddFileSuccessful", "Files successfully added",
"FileCatalogHandler", "files/min", gMonitor.OP_SUM)
gMonitor.registerActivity("AddFileFailed", "Files failed to add",
"FileCatalogHandler", "files/min", gMonitor.OP_SUM)
gMonitor.registerActivity("RemoveFile", "Amount of removeFile calls",
"FileCatalogHandler", "calls/min", gMonitor.OP_SUM)
gMonitor.registerActivity("RemoveFileSuccessful", "Files successfully removed",
"FileCatalogHandler", "files/min", gMonitor.OP_SUM)
gMonitor.registerActivity("RemoveFileFailed", "Files failed to remove",
"FileCatalogHandler", "files/min", gMonitor.OP_SUM)
gMonitor.registerActivity("AddReplica", "Amount of addReplica calls",
"FileCatalogHandler", "calls/min", gMonitor.OP_SUM)
gMonitor.registerActivity("AddReplicaSuccessful", "Replicas successfully added",
"FileCatalogHandler", "replicas/min", gMonitor.OP_SUM)
gMonitor.registerActivity("AddReplicaFailed", "Replicas failed to add",
"FileCatalogHandler", "replicas/min", gMonitor.OP_SUM)
gMonitor.registerActivity("RemoveReplica", "Amount of removeReplica calls",
"FileCatalogHandler", "calls/min", gMonitor.OP_SUM)
gMonitor.registerActivity("RemoveReplicaSuccessful", "Replicas successfully removed",
"FileCatalogHandler", "replicas/min", gMonitor.OP_SUM)
gMonitor.registerActivity("RemoveReplicaFailed", "Replicas failed to remove",
"FileCatalogHandler", "replicas/min", gMonitor.OP_SUM)
gMonitor.registerActivity("ListDirectory", "Amount of listDirectory calls",
"FileCatalogHandler", "calls/min", gMonitor.OP_SUM)
return res
########################################################################
# Path operations (not updated)
#
types_changePathOwner = [[list, dict] + list(six.string_types)]
def export_changePathOwner(self, lfns, recursive=False):
""" Get replica info for the given list of LFNs
"""
return self.fileCatalogDB.changePathOwner(lfns, self.getRemoteCredentials(), recursive)
types_changePathGroup = [[list, dict] + list(six.string_types)]
def export_changePathGroup(self, lfns, recursive=False):
""" Get replica info for the given list of LFNs
"""
return self.fileCatalogDB.changePathGroup(lfns, self.getRemoteCredentials(), recursive)
types_changePathMode = [[list, dict] + list(six.string_types)]
def export_changePathMode(self, lfns, recursive=False):
""" Get replica info for the given list of LFNs
"""
return self.fileCatalogDB.changePathMode(lfns, self.getRemoteCredentials(), recursive)
########################################################################
# ACL Operations
#
types_getPathPermissions = [[list, dict] + list(six.string_types)]
def export_getPathPermissions(self, lfns):
""" Determine the ACL information for a supplied path
"""
return self.fileCatalogDB.getPathPermissions(lfns, self.getRemoteCredentials())
types_hasAccess = [[six.string_types, dict], [six.string_types, list, dict]]
def export_hasAccess(self, paths, opType):
""" Determine if the given op can be performed on the paths
The OpType is all the operations exported
The reason for the param types is backward compatibility. Between v6r14 and v6r15,
the signature of hasAccess has changed, and the two parameters were swapped.
"""
# The signature of v6r15 is (dict, str)
# The signature of v6r14 is (str, [dict, str, list])
# We swap the two params if the first attribute is a string
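# For illustration (hypothetical LFN): a v6r15 client calls
#   hasAccess({'/some/lfn.txt': True}, 'addFile')
# while a v6r14 client calls
#   hasAccess('addFile', ['/some/lfn.txt'])
# and the swap below normalizes both to (paths, opType)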
if isinstance(paths, six.string_types):
paths, opType = opType, paths
return self.fileCatalogDB.hasAccess(opType, paths, self.getRemoteCredentials())
###################################################################
#
# isOK
#
types_isOK = []
@classmethod
def export_isOK(cls):
""" returns S_OK if DB is connected
"""
if cls.fileCatalogDB and cls.fileCatalogDB._connected:
return S_OK()
return S_ERROR('Server not connected to DB')
###################################################################
#
# User/Group write operations
#
types_addUser = [six.string_types]
def export_addUser(self, userName):
""" Add a new user to the File Catalog """
return self.fileCatalogDB.addUser(userName, self.getRemoteCredentials())
types_deleteUser = [six.string_types]
def export_deleteUser(self, userName):
""" Delete user from the File Catalog """
return self.fileCatalogDB.deleteUser(userName, self.getRemoteCredentials())
types_addGroup = [six.string_types]
def export_addGroup(self, groupName):
""" Add a new group to the File Catalog """
return self.fileCatalogDB.addGroup(groupName, self.getRemoteCredentials())
types_deleteGroup = [six.string_types]
def export_deleteGroup(self, groupName):
""" Delete group from the File Catalog """
return self.fileCatalogDB.deleteGroup(groupName, self.getRemoteCredentials())
###################################################################
#
# User/Group read operations
#
types_getUsers = []
def export_getUsers(self):
""" Get all the users defined in the File Catalog """
return self.fileCatalogDB.getUsers(self.getRemoteCredentials())
types_getGroups = []
def export_getGroups(self):
""" Get all the groups defined in the File Catalog """
return self.fileCatalogDB.getGroups(self.getRemoteCredentials())
########################################################################
#
# Path read operations
#
types_exists = [[list, dict] + list(six.string_types)]
def export_exists(self, lfns):
""" Check whether the supplied paths exists """
return self.fileCatalogDB.exists(lfns, self.getRemoteCredentials())
########################################################################
#
# File write operations
#
types_addFile = [[list, dict] + list(six.string_types)]
def export_addFile(self, lfns):
""" Register supplied files """
gMonitor.addMark("AddFile", 1)
res = self.fileCatalogDB.addFile(lfns, self.getRemoteCredentials())
if res['OK']:
gMonitor.addMark("AddFileSuccessful", len(res.get('Value', {}).get('Successful', [])))
gMonitor.addMark("AddFileFailed", len(res.get('Value', {}).get('Failed', [])))
return res
types_removeFile = [[list, dict] + list(six.string_types)]
def export_removeFile(self, lfns):
""" Remove the supplied lfns """
gMonitor.addMark("RemoveFile", 1)
res = self.fileCatalogDB.removeFile(lfns, self.getRemoteCredentials())
if res['OK']:
gMonitor.addMark("RemoveFileSuccessful", len(res.get('Value', {}).get('Successful', [])))
gMonitor.addMark("RemoveFileFailed", len(res.get('Value', {}).get('Failed', [])))
return res
types_setFileStatus = [dict]
def export_setFileStatus(self, lfns):
""" Remove the supplied lfns """
return self.fileCatalogDB.setFileStatus(lfns, self.getRemoteCredentials())
types_addReplica = [[list, dict] + list(six.string_types)]
def export_addReplica(self, lfns):
""" Register supplied replicas """
gMonitor.addMark("AddReplica", 1)
res = self.fileCatalogDB.addReplica(lfns, self.getRemoteCredentials())
if res['OK']:
gMonitor.addMark("AddReplicaSuccessful", len(res.get('Value', {}).get('Successful', [])))
gMonitor.addMark("AddReplicaFailed", len(res.get('Value', {}).get('Failed', [])))
return res
types_removeReplica = [[list, dict] + list(six.string_types)]
def export_removeReplica(self, lfns):
""" Remove the supplied replicas """
gMonitor.addMark("RemoveReplica", 1)
res = self.fileCatalogDB.removeReplica(lfns, self.getRemoteCredentials())
if res['OK']:
gMonitor.addMark("RemoveReplicaSuccessful", len(res.get('Value', {}).get('Successful', [])))
gMonitor.addMark("RemoveReplicaFailed", len(res.get('Value', {}).get('Failed', [])))
return res
types_setReplicaStatus = [[list, dict] + list(six.string_types)]
def export_setReplicaStatus(self, lfns):
""" Set the status for the supplied replicas """
return self.fileCatalogDB.setReplicaStatus(lfns, self.getRemoteCredentials())
types_setReplicaHost = [[list, dict] + list(six.string_types)]
def export_setReplicaHost(self, lfns):
""" Change the registered SE for the supplied replicas """
return self.fileCatalogDB.setReplicaHost(lfns, self.getRemoteCredentials())
types_addFileAncestors = [dict]
def export_addFileAncestors(self, lfns):
""" Add file ancestor information for the given list of LFNs """
return self.fileCatalogDB.addFileAncestors(lfns, self.getRemoteCredentials())
########################################################################
#
# File read operations
#
types_isFile = [[list, dict] + list(six.string_types)]
def export_isFile(self, lfns):
""" Check whether the supplied lfns are files """
return self.fileCatalogDB.isFile(lfns, self.getRemoteCredentials())
types_getFileSize = [[list, dict] + list(six.string_types)]
def export_getFileSize(self, lfns):
""" Get the size associated to supplied lfns """
return self.fileCatalogDB.getFileSize(lfns, self.getRemoteCredentials())
types_getFileMetadata = [[list, dict] + list(six.string_types)]
def export_getFileMetadata(self, lfns):
""" Get the metadata associated to supplied lfns """
return self.fileCatalogDB.getFileMetadata(lfns, self.getRemoteCredentials())
types_getReplicas = [[list, dict] + list(six.string_types), bool]
def export_getReplicas(self, lfns, allStatus=False):
""" Get replicas for supplied lfns """
return self.fileCatalogDB.getReplicas(lfns, allStatus, self.getRemoteCredentials())
types_getReplicaStatus = [[list, dict] + list(six.string_types)]
def export_getReplicaStatus(self, lfns):
""" Get the status for the supplied replicas """
return self.fileCatalogDB.getReplicaStatus(lfns, self.getRemoteCredentials())
types_getFileAncestors = [[list, dict], (list,) + six.integer_types]
def export_getFileAncestors(self, lfns, depths):
""" Get the status for the supplied replicas """
dList = depths
if not isinstance(dList, list):
dList = [depths]
lfnDict = dict.fromkeys(lfns, True)
return self.fileCatalogDB.getFileAncestors(lfnDict, dList, self.getRemoteCredentials())
types_getFileDescendents = [[list, dict], (list,) + six.integer_types]
def export_getFileDescendents(self, lfns, depths):
""" Get the status for the supplied replicas """
dList = depths
if not isinstance(dList, list):
dList = [depths]
lfnDict = dict.fromkeys(lfns, True)
return self.fileCatalogDB.getFileDescendents(lfnDict, dList, self.getRemoteCredentials())
types_getLFNForGUID = [[list, dict] + list(six.string_types)]
def export_getLFNForGUID(self, guids):
"""Get the matching lfns for given guids"""
return self.fileCatalogDB.getLFNForGUID(guids, self.getRemoteCredentials())
########################################################################
#
# Directory write operations
#
types_createDirectory = [[list, dict] + list(six.string_types)]
def export_createDirectory(self, lfns):
""" Create the supplied directories """
return self.fileCatalogDB.createDirectory(lfns, self.getRemoteCredentials())
types_removeDirectory = [[list, dict] + list(six.string_types)]
def export_removeDirectory(self, lfns):
""" Remove the supplied directories """
return self.fileCatalogDB.removeDirectory(lfns, self.getRemoteCredentials())
########################################################################
#
# Directory read operations
#
types_listDirectory = [[list, dict] + list(six.string_types), bool]
def export_listDirectory(self, lfns, verbose):
""" List the contents of supplied directories """
gMonitor.addMark('ListDirectory', 1)
return self.fileCatalogDB.listDirectory(lfns, self.getRemoteCredentials(), verbose=verbose)
types_isDirectory = [[list, dict] + list(six.string_types)]
def export_isDirectory(self, lfns):
""" Determine whether supplied path is a directory """
return self.fileCatalogDB.isDirectory(lfns, self.getRemoteCredentials())
types_getDirectoryMetadata = [[list, dict] + list(six.string_types)]
def export_getDirectoryMetadata(self, lfns):
""" Get the size of the supplied directory """
return self.fileCatalogDB.getDirectoryMetadata(lfns, self.getRemoteCredentials())
types_getDirectorySize = [[list, dict] + list(six.string_types)]
def export_getDirectorySize(self, lfns, longOut=False, fromFiles=False, recursiveSum=True):
""" Get the size of the supplied directory """
return self.fileCatalogDB.getDirectorySize(lfns, longOut, fromFiles, recursiveSum, self.getRemoteCredentials())
types_getDirectoryReplicas = [[list, dict] + list(six.string_types), bool]
def export_getDirectoryReplicas(self, lfns, allStatus=False):
""" Get replicas for files in the supplied directory """
return self.fileCatalogDB.getDirectoryReplicas(lfns, allStatus, self.getRemoteCredentials())
########################################################################
#
# Administrative database operations
#
types_getCatalogCounters = []
def export_getCatalogCounters(self):
""" Get the number of registered directories, files and replicas in various tables """
return self.fileCatalogDB.getCatalogCounters(self.getRemoteCredentials())
types_rebuildDirectoryUsage = []
def export_rebuildDirectoryUsage(self):
""" Rebuild DirectoryUsage table from scratch """
return self.fileCatalogDB.rebuildDirectoryUsage()
types_repairCatalog = []
def export_repairCatalog(self):
""" Repair the catalog inconsistencies """
return self.fileCatalogDB.repairCatalog(self.getRemoteCredentials())
########################################################################
# Metadata Catalog Operations
#
types_addMetadataField = [six.string_types, six.string_types, six.string_types]
def export_addMetadataField(self, fieldName, fieldType, metaType='-d'):
""" Add a new metadata field of the given type
"""
if metaType.lower() == "-d":
return self.fileCatalogDB.dmeta.addMetadataField(
fieldName, fieldType, self.getRemoteCredentials())
elif metaType.lower() == "-f":
return self.fileCatalogDB.fmeta.addMetadataField(
fieldName, fieldType, self.getRemoteCredentials())
else:
return S_ERROR('Unknown metadata type %s' % metaType)
types_deleteMetadataField = [six.string_types]
def export_deleteMetadataField(self, fieldName):
""" Delete the metadata field
"""
result = self.fileCatalogDB.dmeta.deleteMetadataField(fieldName, self.getRemoteCredentials())
error = ''
if not result['OK']:
error = result['Message']
result = self.fileCatalogDB.fmeta.deleteMetadataField(fieldName, self.getRemoteCredentials())
if not result['OK']:
if error:
result["Message"] = error + "; " + result["Message"]
return result
types_getMetadataFields = []
def export_getMetadataFields(self):
""" Get all the metadata fields
"""
resultDir = self.fileCatalogDB.dmeta.getMetadataFields(self.getRemoteCredentials())
if not resultDir['OK']:
return resultDir
resultFile = self.fileCatalogDB.fmeta.getFileMetadataFields(self.getRemoteCredentials())
if not resultFile['OK']:
return resultFile
return S_OK({'DirectoryMetaFields': resultDir['Value'],
'FileMetaFields': resultFile['Value']})
types_setMetadata = [six.string_types, dict]
def export_setMetadata(self, path, metadatadict):
""" Set metadata parameter for the given path
"""
return self.fileCatalogDB.setMetadata(path, metadatadict, self.getRemoteCredentials())
types_setMetadataBulk = [dict]
def export_setMetadataBulk(self, pathMetadataDict):
""" Set metadata parameter for the given path
"""
return self.fileCatalogDB.setMetadataBulk(pathMetadataDict, self.getRemoteCredentials())
types_removeMetadata = [dict]
def export_removeMetadata(self, pathMetadataDict):
""" Remove the specified metadata for the given path
"""
return self.fileCatalogDB.removeMetadata(pathMetadataDict, self.getRemoteCredentials())
types_getDirectoryUserMetadata = [six.string_types]
def export_getDirectoryUserMetadata(self, path):
""" Get all the metadata valid for the given directory path
"""
return self.fileCatalogDB.dmeta.getDirectoryMetadata(path, self.getRemoteCredentials())
types_getFileUserMetadata = [six.string_types]
def export_getFileUserMetadata(self, path):
""" Get all the metadata valid for the given file
"""
return self.fileCatalogDB.fmeta.getFileUserMetadata(path, self.getRemoteCredentials())
types_findDirectoriesByMetadata = [dict]
def export_findDirectoriesByMetadata(self, metaDict, path='/'):
""" Find all the directories satisfying the given metadata set
"""
return self.fileCatalogDB.dmeta.findDirectoriesByMetadata(
metaDict, path, self.getRemoteCredentials())
types_findFilesByMetadata = [dict, six.string_types]
def export_findFilesByMetadata(self, metaDict, path='/'):
""" Find all the files satisfying the given metadata set
"""
result = self.fileCatalogDB.fmeta.findFilesByMetadata(metaDict, path, self.getRemoteCredentials())
if not result['OK']:
return result
lfns = list(result['Value'].values())
return S_OK(lfns)
types_getReplicasByMetadata = [dict, six.string_types, bool]
def export_getReplicasByMetadata(self, metaDict, path='/', allStatus=False):
""" Find all the files satisfying the given metadata set
"""
return self.fileCatalogDB.fileManager.getReplicasByMetadata(metaDict,
path,
allStatus,
self.getRemoteCredentials())
types_findFilesByMetadataDetailed = [dict, six.string_types]
def export_findFilesByMetadataDetailed(self, metaDict, path='/'):
""" Find all the files satisfying the given metadata set
"""
result = self.fileCatalogDB.fmeta.findFilesByMetadata(metaDict, path, self.getRemoteCredentials())
if not result['OK'] or not result['Value']:
return result
lfns = list(result['Value'].values())
return self.fileCatalogDB.getFileDetails(lfns, self.getRemoteCredentials())
types_findFilesByMetadataWeb = [dict, six.string_types, six.integer_types, six.integer_types]
def export_findFilesByMetadataWeb(self, metaDict, path, startItem, maxItems):
""" Find files satisfying the given metadata set
"""
result = self.fileCatalogDB.dmeta.findFileIDsByMetadata(
metaDict, path, self.getRemoteCredentials(), startItem, maxItems)
if not result['OK'] or not result['Value']:
return result
fileIDs = result['Value']
totalRecords = result['TotalRecords']
result = self.fileCatalogDB.fileManager._getFileLFNs(fileIDs)
if not result['OK']:
return result
lfnsResultList = list(result['Value']['Successful'].values())
resultDetails = self.fileCatalogDB.getFileDetails(lfnsResultList, self.getRemoteCredentials())
if not resultDetails['OK']:
return resultDetails
result = S_OK({"TotalRecords": totalRecords, "Records": resultDetails['Value']})
return result
def findFilesByMetadataWeb(self, metaDict, path, startItem, maxItems):
""" Find all the files satisfying the given metadata set
"""
result = self.fileCatalogDB.fmeta.findFilesByMetadata(metaDict, path, self.getRemoteCredentials())
if not result['OK'] or not result['Value']:
return result
lfns = []
for directory in result['Value']:
for fname in result['Value'][directory]:
lfns.append(os.path.join(directory, fname))
start = startItem
totalRecords = len(lfns)
if start > totalRecords:
return S_ERROR('Requested files out of existing range')
end = start + maxItems
if end > totalRecords:
end = totalRecords
lfnsResultList = lfns[start:end]
resultDetails = self.fileCatalogDB.getFileDetails(lfnsResultList, self.getRemoteCredentials())
if not resultDetails['OK']:
return resultDetails
result = S_OK({"TotalRecords": totalRecords, "Records": resultDetails['Value']})
return result
types_getCompatibleMetadata = [dict, six.string_types]
def export_getCompatibleMetadata(self, metaDict, path='/'):
""" Get metadata values compatible with the given metadata subset
"""
return self.fileCatalogDB.dmeta.getCompatibleMetadata(metaDict, path, self.getRemoteCredentials())
types_addMetadataSet = [six.string_types, dict]
def export_addMetadataSet(self, setName, setDict):
""" Add a new metadata set
"""
return self.fileCatalogDB.dmeta.addMetadataSet(setName, setDict, self.getRemoteCredentials())
types_getMetadataSet = [six.string_types, bool]
def export_getMetadataSet(self, setName, expandFlag):
""" Add a new metadata set
"""
return self.fileCatalogDB.dmeta.getMetadataSet(setName, expandFlag, self.getRemoteCredentials())
#########################################################################################
#
# Dataset manipulation methods
#
types_addDataset = [dict]
def export_addDataset(self, datasets):
""" Add a new dynamic dataset defined by its meta query
"""
return self.fileCatalogDB.datasetManager.addDataset(datasets, self.getRemoteCredentials())
types_addDatasetAnnotation = [dict]
def export_addDatasetAnnotation(self, datasetDict):
""" Add annotation to an already created dataset
"""
return self.fileCatalogDB.datasetManager.addDatasetAnnotation(
datasetDict, self.getRemoteCredentials())
types_removeDataset = [dict]
def export_removeDataset(self, datasets):
""" Check the given dynamic dataset for changes since its definition
"""
return self.fileCatalogDB.datasetManager.removeDataset(datasets, self.getRemoteCredentials())
types_checkDataset = [dict]
def export_checkDataset(self, datasets):
""" Check the given dynamic dataset for changes since its definition
"""
return self.fileCatalogDB.datasetManager.checkDataset(datasets, self.getRemoteCredentials())
types_updateDataset = [dict]
def export_updateDataset(self, datasets):
""" Update the given dynamic dataset for changes since its definition
"""
return self.fileCatalogDB.datasetManager.updateDataset(datasets, self.getRemoteCredentials())
types_getDatasets = [dict]
def export_getDatasets(self, datasets):
""" Get parameters of the given dynamic dataset as they are stored in the database
"""
return self.fileCatalogDB.datasetManager.getDatasets(datasets, self.getRemoteCredentials())
types_getDatasetParameters = [dict]
def export_getDatasetParameters(self, datasets):
""" Get parameters of the given dynamic dataset as they are stored in the database
"""
return self.fileCatalogDB.datasetManager.getDatasetParameters(datasets, self.getRemoteCredentials())
types_getDatasetAnnotation = [dict]
def export_getDatasetAnnotation(self, datasets):
""" Get annotation of the given datasets
"""
return self.fileCatalogDB.datasetManager.getDatasetAnnotation(datasets, self.getRemoteCredentials())
types_freezeDataset = [dict]
def export_freezeDataset(self, datasets):
""" Freeze the contents of the dataset making it effectively static
"""
return self.fileCatalogDB.datasetManager.freezeDataset(datasets, self.getRemoteCredentials())
types_releaseDataset = [dict]
def export_releaseDataset(self, datasets):
""" Release the contents of the frozen dataset allowing changes in its contents
"""
return self.fileCatalogDB.datasetManager.releaseDataset(datasets, self.getRemoteCredentials())
types_getDatasetFiles = [dict]
def export_getDatasetFiles(self, datasets):
""" Get lfns in the given dataset
"""
return self.fileCatalogDB.datasetManager.getDatasetFiles(datasets, self.getRemoteCredentials())
def getSEDump(self, seName):
"""
Return all the files at a given SE, together with checksum and size
:param seName: name of the StorageElement
:returns: list of tuples (lfn, checksum, size)
"""
return self.fileCatalogDB.getSEDump(seName)['Value']
class FileCatalogHandler(FileCataloghandlerMixin, RequestHandler):
def transfer_toClient(self, seName, token, fileHelper):
""" This method used to transfer the SEDump to the client,
formated as CSV with '|' separation
:param seName: name of the se to dump
:returns: the result of the FileHelper
"""
retVal = self.getSEDump(seName)
# create the buffer outside the try block so the finally clause can always close it
csvOutput = StringIO()
try:
writer = csv.writer(csvOutput, delimiter='|')
for lfn in retVal:
writer.writerow(lfn)
csvOutput.seek(0)
ret = fileHelper.DataSourceToNetwork(csvOutput)
return ret
except Exception as e:
self.log.exception("Exception while sending seDump", repr(e))
return S_ERROR("Exception while sending seDump: %s" % repr(e))
finally:
csvOutput.close()
| yujikato/DIRAC | src/DIRAC/DataManagementSystem/Service/FileCatalogHandler.py | Python | gpl-3.0 | 28,703 |
import random
import logging
from twisted.internet import defer, reactor
from zope.interface import implements
from lbrynet import interfaces
from lbrynet import conf
from lbrynet.core.client.ClientProtocol import ClientProtocolFactory
from lbrynet.core.Error import InsufficientFundsError
from lbrynet.core import utils
log = logging.getLogger(__name__)
class PeerConnectionHandler(object):
def __init__(self, request_creators, factory):
self.request_creators = request_creators
self.factory = factory
self.connection = None
class ConnectionManager(object):
implements(interfaces.IConnectionManager)
MANAGE_CALL_INTERVAL_SEC = 5
TCP_CONNECT_TIMEOUT = 15
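# manage() re-schedules itself every MANAGE_CALL_INTERVAL_SEC seconds while the
# manager is running; outgoing TCP connection attempts are abandoned after
# TCP_CONNECT_TIMEOUT seconds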
def __init__(self, downloader, rate_limiter,
primary_request_creators, secondary_request_creators):
self.seek_head_blob_first = conf.settings['seek_head_blob_first']
self.max_connections_per_stream = conf.settings['max_connections_per_stream']
self.downloader = downloader
self.rate_limiter = rate_limiter
self._primary_request_creators = primary_request_creators
self._secondary_request_creators = secondary_request_creators
self._peer_connections = {} # {Peer: PeerConnectionHandler}
self._connections_closing = {} # {Peer: deferred (fired when the connection is closed)}
self._next_manage_call = None
# a deferred that gets fired when a _manage call is set
self._manage_deferred = None
self.stopped = True
log.info("%s initialized", self._get_log_name())
# this identifies what the connection manager is for,
# used for logging purposes only
def _get_log_name(self):
out = 'Connection Manager Unknown'
if hasattr(self.downloader, 'stream_name'):
out = 'Connection Manager '+self.downloader.stream_name
elif hasattr(self.downloader, 'blob_hash'):
out = 'Connection Manager '+self.downloader.blob_hash
return out
def _start(self):
self.stopped = False
if self._next_manage_call is not None and self._next_manage_call.active() is True:
self._next_manage_call.cancel()
def start(self):
log.debug("%s starting", self._get_log_name())
self._start()
self._next_manage_call = utils.call_later(0, self.manage)
return defer.succeed(True)
@defer.inlineCallbacks
def stop(self):
log.debug("%s stopping", self._get_log_name())
self.stopped = True
# wait for the current manage call to finish
if self._manage_deferred:
yield self._manage_deferred
# in case we stopped between manage calls, cancel the next one
if self._next_manage_call and self._next_manage_call.active():
self._next_manage_call.cancel()
self._next_manage_call = None
yield self._close_peers()
def num_peer_connections(self):
return len(self._peer_connections)
def _close_peers(self):
def disconnect_peer(p):
d = defer.Deferred()
self._connections_closing[p] = d
self._peer_connections[p].connection.disconnect()
if p in self._peer_connections:
del self._peer_connections[p]
return d
def close_connection(p):
log.debug("%s Abruptly closing a connection to %s due to downloading being paused",
self._get_log_name(), p)
if self._peer_connections[p].factory.p is not None:
d = self._peer_connections[p].factory.p.cancel_requests()
else:
d = defer.succeed(True)
d.addBoth(lambda _: disconnect_peer(p))
return d
closing_deferreds = [close_connection(peer) for peer in self._peer_connections.keys()]
return defer.DeferredList(closing_deferreds)
@defer.inlineCallbacks
def get_next_request(self, peer, protocol):
log.debug("%s Trying to get the next request for peer %s", self._get_log_name(), peer)
if peer not in self._peer_connections or self.stopped is True:
log.debug("%s The peer %s has already been told to shut down.",
self._get_log_name(), peer)
defer.returnValue(False)
requests = yield self._send_primary_requests(peer, protocol)
have_request = any(r[1] for r in requests if r[0] is True)
if have_request:
yield self._send_secondary_requests(peer, protocol)
defer.returnValue(have_request)
def _send_primary_requests(self, peer, protocol):
def handle_error(err):
err.trap(InsufficientFundsError)
self.downloader.insufficient_funds(err)
return False
def check_if_request_sent(request_sent, request_creator):
if peer not in self._peer_connections:
# This can happen if the connection is told to close
return False
if request_sent is False:
if request_creator in self._peer_connections[peer].request_creators:
self._peer_connections[peer].request_creators.remove(request_creator)
else:
if request_creator not in self._peer_connections[peer].request_creators:
self._peer_connections[peer].request_creators.append(request_creator)
return request_sent
ds = []
for p_r_c in self._primary_request_creators:
d = p_r_c.send_next_request(peer, protocol)
d.addErrback(handle_error)
d.addCallback(check_if_request_sent, p_r_c)
ds.append(d)
return defer.DeferredList(ds, fireOnOneErrback=True)
def _send_secondary_requests(self, peer, protocol):
ds = [
s_r_c.send_next_request(peer, protocol)
for s_r_c in self._secondary_request_creators
]
return defer.DeferredList(ds)
@defer.inlineCallbacks
def manage(self, schedule_next_call=True):
self._manage_deferred = defer.Deferred()
if len(self._peer_connections) < self.max_connections_per_stream:
log.debug("%s have %d connections, looking for %d",
self._get_log_name(), len(self._peer_connections),
self.max_connections_per_stream)
peers = yield self._get_new_peers()
for peer in peers:
self._connect_to_peer(peer)
self._manage_deferred.callback(None)
self._manage_deferred = None
if not self.stopped and schedule_next_call:
self._next_manage_call = utils.call_later(self.MANAGE_CALL_INTERVAL_SEC, self.manage)
def return_shuffled_peers_not_connected_to(self, peers, new_conns_needed):
out = [peer for peer in peers if peer not in self._peer_connections]
random.shuffle(out)
return out[0:new_conns_needed]
@defer.inlineCallbacks
def _get_new_peers(self):
new_conns_needed = self.max_connections_per_stream - len(self._peer_connections)
if new_conns_needed < 1:
defer.returnValue([])
# we always get the peer from the first request creator
# must be a type BlobRequester...
request_creator = self._primary_request_creators[0]
log.debug("%s Trying to get a new peer to connect to", self._get_log_name())
# find peers for the head blob if configured to do so
if self.seek_head_blob_first:
peers = yield request_creator.get_new_peers_for_head_blob()
peers = self.return_shuffled_peers_not_connected_to(peers, new_conns_needed)
else:
peers = []
# we didn't find any new peers on the head blob,
# we have to look for the first unavailable blob
if not peers:
peers = yield request_creator.get_new_peers_for_next_unavailable()
peers = self.return_shuffled_peers_not_connected_to(peers, new_conns_needed)
log.debug("%s Got a list of peers to choose from: %s",
self._get_log_name(), peers)
log.debug("%s Current connections: %s",
self._get_log_name(), self._peer_connections.keys())
log.debug("%s List of connection states: %s", self._get_log_name(),
[p_c_h.connection.state for p_c_h in self._peer_connections.values()])
defer.returnValue(peers)
def _connect_to_peer(self, peer):
if self.stopped:
return
log.debug("%s Trying to connect to %s", self._get_log_name(), peer)
factory = ClientProtocolFactory(peer, self.rate_limiter, self)
factory.connection_was_made_deferred.addCallback(
lambda c_was_made: self._peer_disconnected(c_was_made, peer))
self._peer_connections[peer] = PeerConnectionHandler(self._primary_request_creators[:],
factory)
connection = reactor.connectTCP(peer.host, peer.port, factory,
timeout=self.TCP_CONNECT_TIMEOUT)
self._peer_connections[peer].connection = connection
def _peer_disconnected(self, connection_was_made, peer):
log.debug("%s protocol disconnected for %s",
self._get_log_name(), peer)
if peer in self._peer_connections:
del self._peer_connections[peer]
if peer in self._connections_closing:
d = self._connections_closing[peer]
del self._connections_closing[peer]
d.callback(True)
return connection_was_made
| zestyr/lbry | lbrynet/core/client/ConnectionManager.py | Python | mit | 9,657 |
## {{{ http://code.activestate.com/recipes/426406/ (r1)
from ConfigParser import SafeConfigParser
import logging, os
clog = logging.getLogger("Configuration")
#def getVersion():
# return 1
class Configuration:
def __init__ (self, fileName):
self.__checkForFile__(fileName)
cp = SafeConfigParser()
cp.read(fileName)
self.__parser = cp
self.fileName = fileName
def __getattr__ (self, name):
if name in self.__parser.sections():
return Section(name, self.__parser)
else:
return None
def __str__ (self):
p = self.__parser
result = []
result.append('<Configuration from %s>' % self.fileName)
for s in p.sections():
result.append('[%s]' % s)
for o in p.options(s):
result.append('%s=%s' % (o, p.get(s, o)))
return '\n'.join(result)
def __checkForFile__ (self,fileName):
try:
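# opening the file and immediately closing it just verifies that it exists and is readable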
with open(fileName) as f: pass
except IOError as e:
clog.error('file not found %s ' % fileName)
raise IOError('file not found %s ' % fileName)
def setValue(self, section, name, value):
self.__parser.set(section, name, value)
with open(self.fileName, 'w') as configfile: # Save change
self.__parser.write(configfile)
class Section:
def __init__ (self, name, parser):
self.name = name
self.__parser = parser
def __getattr__ (self, name):
return self.__parser.get(self.name, name)
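# Illustrative read (values are hypothetical): if homePi.ini contains
# [log]
# logFile = /var/log/homePi.log
# then Configuration('homePi.ini').log.logFile returns '/var/log/homePi.log'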
#def __setattr__(self, name, value):
# self.__parser.set(self.name, name, value)
# Test
if __name__ == '__main__':
configFile = os.path.join(os.path.dirname(os.path.abspath(__file__)), 'homePi.ini')
c = Configuration(configFile)
# print c.log.logFile, c.log.updateFolder
c.setValue("main","lastUpdate","2")
# An extra: print the configuration object itself
print c
## end of http://code.activestate.com/recipes/426406/ }}}
| leeclarke/homePi | src/python/Configuration.py | Python | gpl-3.0 | 2,038 |
import json
__author__ = 'lauft'
class CheckJson():
"""
check whether a composer JSON file meets certain requirements
"""
def __init__(self, path):
self.failed = False
try:
# use a context manager so the file is closed even when parsing fails
with open(path) as composer:
self.json = json.load(composer)
except (IOError, ValueError):
print 'error when opening or parsing "' + path + '"'
self.failed = True
def does_contain_member(self, member):
if self.failed:
return False
if member not in self.json:
print 'no key "' + member + '"'
return False
return True
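# Minimal usage sketch (the file name and key below are illustrative, not part
# of the original module):
if __name__ == '__main__':
checker = CheckJson('composer.json')
if checker.does_contain_member('require'):
print 'composer.json declares a "require" section'
else:
print 'composer.json is missing "require" or could not be read'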
| lauft/pyCheck | pycheck/checkjson.py | Python | mit | 637 |
# coding: utf8
# Copyright 2015-2018 Vincent Jacques <[email protected]>
from ActionTree import *
from . import *
class MultiThreadedExecutionTestCase(ActionTreeTestCase):
def test_many_dependencies(self):
# a
# /|\
# / | \
# b c d
a = self._action("a")
barrier = self._barrier(3)
b = self._action("b", barrier=barrier, end_event=True)
c = self._action("c", barrier=barrier, end_event=True)
d = self._action("d", barrier=barrier, end_event=True)
a.add_dependency(b)
a.add_dependency(c)
a.add_dependency(d)
execute(a, cpu_cores=3)
self.assertEventsEqual("bcd BCD a")
# @todo Understand why this failed on Travis (https://travis-ci.org/jacquev6/ActionTree/jobs/464953921#L675)
# def test_many_dependencies_with_default_cpu_cores(self):
# # a
# # /|\
# # / | \
# # b c d
# a = self._action("a")
# barrier = self._barrier(3)
# b = self._action("b", barrier=barrier, end_event=True)
# c = self._action("c", barrier=barrier, end_event=True)
# d = self._action("d", barrier=barrier, end_event=True)
# a.add_dependency(b)
# a.add_dependency(c)
# a.add_dependency(d)
# execute(a, cpu_cores=None)
# self.assertEventsEqual("bcd BCD a")
def test_deep_dependencies(self):
# a
# |
# b
# |
# c
# |
# d
# |
# e
# |
# f
a = self._action("a")
b = self._action("b", end_event=True)
c = self._action("c", end_event=True)
d = self._action("d", end_event=True)
e = self._action("e", end_event=True)
f = self._action("f", end_event=True)
a.add_dependency(b)
b.add_dependency(c)
c.add_dependency(d)
d.add_dependency(e)
e.add_dependency(f)
execute(a, cpu_cores=3)
self.assertEventsEqual("f F e E d D c C b B a")
def test_diamond_dependencies(self):
# a
# / \
# b c
# \ /
# d
a = self._action("a")
barrier = self._barrier(2)
b = self._action("b", barrier=barrier, end_event=True)
c = self._action("c", barrier=barrier, end_event=True)
d = self._action("d", end_event=True)
a.add_dependency(b)
a.add_dependency(c)
b.add_dependency(d)
c.add_dependency(d)
execute(a, cpu_cores=3)
self.assertEventsEqual("d D bc BC a")
def test_half_diamond_dependency(self):
# a
# /|
# b |
# \|
# d
a = self._action("a")
b = self._action("b", end_event=True)
d = self._action("d", end_event=True)
a.add_dependency(b)
a.add_dependency(d)
b.add_dependency(d)
execute(a, cpu_cores=3)
self.assertEventsEqual("d D b B a")
def test_two_deep_branches(self):
# a
# / \
# b c
# | |
# d e
a = self._action("a")
barrier1 = self._barrier(2)
b = self._action("b", barrier=barrier1, end_event=True)
c = self._action("c", barrier=barrier1, end_event=True)
barrier2 = self._barrier(2)
d = self._action("d", barrier=barrier2, end_event=True)
e = self._action("e", barrier=barrier2, end_event=True)
a.add_dependency(b)
a.add_dependency(c)
b.add_dependency(d)
c.add_dependency(e)
execute(a, cpu_cores=3)
self.assertEventsEqual("de DEbc BC a")
| jacquev6/ActionTree | ActionTree/tests/multi_threaded_execution.py | Python | mit | 3,724 |
#!/usr/bin/env python
# coding=utf-8
import commands
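# NOTE: the commands module is Python 2 only; it was removed in Python 3
# (subprocess provides the equivalent there)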
def run_cmd(cmd):
(status, output) = commands.getstatusoutput(cmd)
if int(status) != 0:
print 'error'
exit(1)
return status, output
def ping_baidu():
cmd = 'ping baidu.com'
status,output=run_cmd(cmd)
print status,output
if __name__=='__main__':
"""
print '-main-'
status,output = run_cmd('ls')
print status
outputlist = output.split('\n')
for r in outputlist:
print 'line:',r
"""
ping_baidu()
| zhaochl/bash-utils | mac_wifi/wifi_check.py | Python | apache-2.0 | 566 |
# Copyright 2012 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS-IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Virtual file system for managing files locally or in the cloud."""
__author__ = 'Pavel Simakov ([email protected])'
import datetime
import os
import re
import sys
import threading
import unittest
from config import ConfigProperty
from counters import PerfCounter
from entities import BaseEntity
from entities import put as entities_put
import jinja2
from common import caching
from common import jinja_utils
from google.appengine.api import namespace_manager
from google.appengine.ext import db
# all caches must have limits
MAX_GLOBAL_CACHE_SIZE_BYTES = 16 * 1024 * 1024
# max size of each item; no point in storing images for example
MAX_GLOBAL_CACHE_ITEM_SIZE_BYTES = 256 * 1024
# The maximum number of bytes stored per VFS cache shard.
_MAX_VFS_SHARD_SIZE = 1000 * 1000
# Max number of shards for a single VFS cached file.
_MAX_VFS_NUM_SHARDS = 4
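# Taken together, a single cached file is split into at most _MAX_VFS_NUM_SHARDS
# shards of _MAX_VFS_SHARD_SIZE bytes each, i.e. roughly 4 MB of content.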
# Global memcache controls.
CAN_USE_VFS_IN_PROCESS_CACHE = ConfigProperty(
'gcb_can_use_vfs_in_process_cache', bool, (
'Whether or not to cache content objects. For production this value '
'should be on to enable maximum performance. For development this '
'value should be off so you can see your changes to course content '
'instantaneously.'), default_value=True, label='Content caching')
class AbstractFileSystem(object):
"""A generic file system interface that forwards to an implementation."""
def __init__(self, impl):
self._impl = impl
self._readonly = False
@property
def impl(self):
return self._impl
@classmethod
def normpath(cls, path):
"""Make Windows and Linux filenames to have the same separator '/'."""
# Replace '\' into '/' and force Unicode.
if not path:
return path
return u'' + path.replace('\\', '/')
def begin_readonly(self):
"""Activates caching of resources and prevents mutations."""
self._assert_not_readonly()
self._readonly = True
def end_readonly(self):
"""Deactivates caching of resources and enables mutations."""
if not self._readonly:
raise Exception('Not readonly.')
self._readonly = False
@property
def is_readonly(self):
return self._readonly
def _assert_not_readonly(self):
if self._readonly:
raise Exception(
'Unable to execute requested operation while readonly.')
def isfile(self, filename):
"""Checks if file exists, similar to os.path.isfile(...)."""
return self._impl.isfile(filename)
def open(self, filename):
"""Returns a stream with the file content, similar to open(...)."""
return self._impl.get(filename)
def get(self, filename):
"""Returns bytes with the file content, but no metadata."""
return self.open(filename).read()
def put(self, filename, stream, **kwargs):
"""Replaces the contents of the file with the bytes in the stream."""
self._assert_not_readonly()
self._impl.put(filename, stream, **kwargs)
def delete(self, filename):
"""Deletes a file and metadata associated with it."""
self._assert_not_readonly()
self._impl.delete(filename)
def list(self, dir_name, include_inherited=False):
"""Lists all files in a directory."""
return self._impl.list(dir_name, include_inherited)
def get_jinja_environ(self, dir_names, autoescape=True):
"""Configures jinja environment loaders for this file system."""
return self._impl.get_jinja_environ(dir_names, autoescape=autoescape)
def is_read_write(self):
return self._impl.is_read_write()
def is_draft(self, stream):
if not hasattr(stream, 'metadata'):
return False
if not stream.metadata:
return False
return stream.metadata.is_draft
class LocalReadOnlyFileSystem(object):
"""A read-only file system serving only local files."""
def __init__(self, logical_home_folder=None, physical_home_folder=None):
"""Creates a new instance of the disk-backed read-only file system.
Args:
logical_home_folder: A logical home dir of all files (/a/b/c/...).
physical_home_folder: A physical location on the file system (/x/y).
Returns:
A new instance of the object.
"""
self._logical_home_folder = AbstractFileSystem.normpath(
logical_home_folder)
self._physical_home_folder = AbstractFileSystem.normpath(
physical_home_folder)
def _logical_to_physical(self, filename):
filename = AbstractFileSystem.normpath(filename)
if not (self._logical_home_folder and self._physical_home_folder):
return filename
filename = os.path.join(
self._physical_home_folder,
os.path.relpath(filename, self._logical_home_folder))
return AbstractFileSystem.normpath(filename)
def _physical_to_logical(self, filename):
filename = AbstractFileSystem.normpath(filename)
if not (self._logical_home_folder and self._physical_home_folder):
return filename
filename = os.path.join(
self._logical_home_folder,
os.path.relpath(filename, self._physical_home_folder))
return AbstractFileSystem.normpath(filename)
def isfile(self, filename):
return os.path.isfile(self._logical_to_physical(filename))
def get(self, filename):
if not self.isfile(filename):
return None
return open(self._logical_to_physical(filename), 'rb')
def put(self, unused_filename, unused_stream):
raise Exception('Not implemented.')
def delete(self, unused_filename):
raise Exception('Not implemented.')
# Need argument to be named exactly 'include_inherited' to match
# keyword-parameter names from derived/related classes.
# pylint: disable=unused-argument
def list(self, root_dir, include_inherited=False):
"""Lists all files in a directory."""
files = []
for dirname, unused_dirnames, filenames in os.walk(
self._logical_to_physical(root_dir)):
for filename in filenames:
files.append(
self._physical_to_logical(os.path.join(dirname, filename)))
return sorted(files)
def get_jinja_environ(self, dir_names, autoescape=True):
"""Configure the environment for Jinja templates."""
physical_dir_names = []
for dir_name in dir_names:
physical_dir_names.append(self._logical_to_physical(dir_name))
return jinja_utils.create_jinja_environment(
loader=jinja2.FileSystemLoader(physical_dir_names),
autoescape=autoescape)
def is_read_write(self):
return False
class FileMetadataEntity(BaseEntity):
"""An entity to represent a file metadata; absolute file name is a key."""
# TODO(psimakov): do we need 'version' to support concurrent updates
# TODO(psimakov): can we put 'data' here and still have fast isfile/list?
created_on = db.DateTimeProperty(auto_now_add=True, indexed=False)
updated_on = db.DateTimeProperty(indexed=True)
# Draft file is just as any other file. It's up to the consumer of the file
# to decide whether to treat draft differently (not to serve it to the
# public, for example). This class does not care and just stores the bit.
is_draft = db.BooleanProperty(indexed=False)
size = db.IntegerProperty(indexed=False)
class FileDataEntity(BaseEntity):
"""An entity to represent file content; absolute file name is a key."""
data = db.BlobProperty()
class FileStreamWrapped(object):
"""A class that wraps a file stream, but adds extra attributes to it."""
def __init__(self, metadata, data):
self._metadata = metadata
self._data = data
def read(self):
"""Emulates stream.read(). Returns all bytes and emulates EOF."""
data = self._data
self._data = ''
return data
@property
def metadata(self):
return self._metadata
class StringStream(object):
"""A wrapper to pose a string as a UTF-8 byte stream."""
def __init__(self, text):
self._data = unicode.encode(text, 'utf-8')
def read(self):
"""Emulates stream.read(). Returns all bytes and emulates EOF."""
data = self._data
self._data = ''
return data
def string_to_stream(text):
return StringStream(text)
def stream_to_string(stream):
return stream.read().decode('utf-8')
class VirtualFileSystemTemplateLoader(jinja2.BaseLoader):
"""Loader of jinja2 templates from a virtual file system."""
def __init__(self, fs, logical_home_folder, dir_names):
self._fs = fs
self._logical_home_folder = AbstractFileSystem.normpath(
logical_home_folder)
self._dir_names = []
if dir_names:
for dir_name in dir_names:
self._dir_names.append(AbstractFileSystem.normpath(dir_name))
def get_source(self, unused_environment, template):
for dir_name in self._dir_names:
filename = AbstractFileSystem.normpath(
os.path.join(dir_name, template))
stream = self._fs.open(filename)
if stream:
return stream.read().decode('utf-8'), filename, True
raise jinja2.TemplateNotFound(template)
def list_templates(self):
all_templates = []
for dir_name in self._dir_names:
all_templates += self._fs.list(dir_name)
return all_templates
class ProcessScopedVfsCache(caching.ProcessScopedSingleton):
"""This class holds in-process global cache of VFS objects."""
@classmethod
def get_vfs_cache_len(cls):
# pylint: disable=protected-access
return len(ProcessScopedVfsCache.instance()._cache.items.keys())
@classmethod
def get_vfs_cache_size(cls):
# pylint: disable=protected-access
return ProcessScopedVfsCache.instance()._cache.total_size
def __init__(self):
self._cache = caching.LRUCache(
max_size_bytes=MAX_GLOBAL_CACHE_SIZE_BYTES,
max_item_size_bytes=MAX_GLOBAL_CACHE_ITEM_SIZE_BYTES)
self._cache.get_entry_size = self._get_entry_size
def _get_entry_size(self, key, value):
return sys.getsizeof(key) + value.getsizeof() if value else 0
@property
def cache(self):
return self._cache
VFS_CACHE_LEN = PerfCounter(
'gcb-models-VfsCacheConnection-cache-len',
'A total number of items in vfs cache.')
VFS_CACHE_SIZE_BYTES = PerfCounter(
'gcb-models-VfsCacheConnection-cache-bytes',
'A total size of items in vfs cache in bytes.')
VFS_CACHE_LEN.poll_value = ProcessScopedVfsCache.get_vfs_cache_len
VFS_CACHE_SIZE_BYTES.poll_value = ProcessScopedVfsCache.get_vfs_cache_size
class CacheFileEntry(caching.AbstractCacheEntry):
"""Cache entry representing a file."""
def __init__(self, filename, metadata, body):
self.filename = filename
self.metadata = metadata
self.body = body
self.created_on = datetime.datetime.utcnow()
def getsizeof(self):
return (
sys.getsizeof(self.filename) +
sys.getsizeof(self.metadata) +
sys.getsizeof(self.body) +
sys.getsizeof(self.created_on))
def is_up_to_date(self, key, update):
metadata = update
if not self.metadata and not metadata:
return True
if self.metadata and metadata:
return (
metadata.updated_on == self.metadata.updated_on and
metadata.is_draft == self.metadata.is_draft)
return False
def updated_on(self):
return self.metadata.updated_on
@classmethod
def externalize(cls, key, entry):
return FileStreamWrapped(entry.metadata, entry.body)
@classmethod
def internalize(cls, key, metadata, data):
if metadata and data:
return CacheFileEntry(key, metadata, data)
return None
class VfsCacheConnection(caching.AbstractCacheConnection):
PERSISTENT_ENTITY = FileMetadataEntity
CACHE_ENTRY = CacheFileEntry
@classmethod
def init_counters(cls):
super(VfsCacheConnection, cls).init_counters()
cls.CACHE_NO_METADATA = PerfCounter(
'gcb-models-VfsCacheConnection-cache-no-metadata',
'A number of times an object was requested, but was not found and '
'had no metadata.')
cls.CACHE_INHERITED = PerfCounter(
'gcb-models-VfsCacheConnection-cache-inherited',
'A number of times an object was obtained from the inherited vfs.')
@classmethod
def is_enabled(cls):
return CAN_USE_VFS_IN_PROCESS_CACHE.value
def __init__(self, namespace):
super(VfsCacheConnection, self).__init__(namespace)
self.cache = ProcessScopedVfsCache.instance().cache
VfsCacheConnection.init_counters()
class DatastoreBackedFileSystem(object):
"""A read-write file system backed by a datastore."""
@classmethod
def make_key(cls, filename):
return 'vfs:dsbfs:%s' % filename
def __init__(
self, ns, logical_home_folder,
inherits_from=None, inheritable_folders=None):
"""Creates a new instance of the datastore-backed file system.
Args:
ns: A datastore namespace to use for storing all data and metadata.
logical_home_folder: A logical home dir of all files (/a/b/c/...).
inherits_from: A file system to use for the inheritance.
inheritable_folders: A list of folders that support inheritance.
Returns:
A new instance of the object.
Raises:
Exception: if invalid inherits_from is given.
"""
if inherits_from and not isinstance(
inherits_from, LocalReadOnlyFileSystem):
raise Exception('Can only inherit from LocalReadOnlyFileSystem.')
self._ns = ns
self._logical_home_folder = AbstractFileSystem.normpath(
logical_home_folder)
self._inherits_from = inherits_from
self._inheritable_folders = []
self._cache = threading.local()
if inheritable_folders:
for folder in inheritable_folders:
self._inheritable_folders.append(AbstractFileSystem.normpath(
folder))
def __getstate__(self):
"""Remove transient members that can't survive pickling."""
# TODO(psimakov): we need to properly pickle app_context so vfs is not
# being serialized at all
state = self.__dict__.copy()
if '_cache' in state:
del state['_cache']
return state
def __setstate__(self, state_dict):
"""Set persistent members and re-initialize transient members."""
self.__dict__ = state_dict
self._cache = threading.local()
def __getattribute__(self, name):
attr = object.__getattribute__(self, name)
# Don't intercept access to private methods and attributes.
if name.startswith('_'):
return attr
# Do intercept all methods.
if hasattr(attr, '__call__'):
def newfunc(*args, **kwargs):
"""Set proper namespace for each method call."""
old_namespace = namespace_manager.get_namespace()
try:
namespace_manager.set_namespace(self._ns)
if not hasattr(self._cache, 'connection'):
self._cache.connection = (
VfsCacheConnection.new_connection(self.ns))
return attr(*args, **kwargs)
finally:
namespace_manager.set_namespace(old_namespace)
return newfunc
# Don't intercept access to non-method attributes.
return attr
@property
def ns(self):
return self._ns
@property
def cache(self):
return self._cache.connection
def _logical_to_physical(self, filename):
filename = AbstractFileSystem.normpath(filename)
# For now we only support '/' as a physical folder name.
if self._logical_home_folder == '/':
return filename
if not filename.startswith(self._logical_home_folder):
raise Exception(
'Expected path \'%s\' to start with a prefix \'%s\'.' % (
filename, self._logical_home_folder))
rel_path = filename[len(self._logical_home_folder):]
if not rel_path.startswith('/'):
rel_path = '/%s' % rel_path
return rel_path
def physical_to_logical(self, filename):
"""Converts an internal filename to and external filename."""
# This class receives and stores absolute file names. The logical
# filename is the external file name. The physical filename is an
        # internal filename. This function does the conversions.
# Let's say you want to store a file named '/assets/img/foo.png'.
# This would be a physical filename in the VFS. But the put() operation
# expects an absolute filename from the root of the app installation,
# i.e. something like '/dev/apps/coursebuilder/assets/img/foo.png',
# which is called a logical filename. This is a legacy expectation from
# the days the course was defined as files on the file system.
#
# This function will do the conversion you need.
return self._physical_to_logical(filename)
def _physical_to_logical(self, filename):
filename = AbstractFileSystem.normpath(filename)
# For now we only support '/' as a physical folder name.
if filename and not filename.startswith('/'):
filename = '/' + filename
if self._logical_home_folder == '/':
return filename
return '%s%s' % (self._logical_home_folder, filename)
def _can_inherit(self, filename):
"""Checks if a file can be inherited from a parent file system."""
for prefix in self._inheritable_folders:
if filename.startswith(prefix):
return True
return False
def get(self, afilename):
return self.open(afilename)
def open(self, afilename):
"""Gets a file from a datastore. Raw bytes stream, no encodings."""
filename = self._logical_to_physical(afilename)
found, stream = self.cache.get(filename)
if found and stream:
return stream
if not found:
metadata = FileMetadataEntity.get_by_key_name(filename)
if metadata:
keys = self._generate_file_key_names(filename, metadata.size)
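                # Large files are stored across several FileDataEntity rows
                # (see _generate_file_key_names); fetch every shard by key and
                # join them back into one raw byte string before caching.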
data_shards = []
for data_entity in FileDataEntity.get_by_key_name(keys):
data_shards.append(data_entity.data)
data = ''.join(data_shards)
if data:
# TODO: Note that this will ask the cache to accept
# potentially very large items. The caching strategy both
# for in-memory and Memcache should be revisited to
# determine how best to address chunking strategies.
self.cache.put(filename, metadata, data)
return FileStreamWrapped(metadata, data)
        # Cache the (None, None) pair so the next time we are asked for this
        # key we fall right into the inherited section without trying to load
        # the metadata/data from the datastore; if a new object with this
        # key is added in the datastore, we will see it in the update list.
VfsCacheConnection.CACHE_NO_METADATA.inc()
self.cache.put(filename, None, None)
result = None
if self._inherits_from and self._can_inherit(filename):
result = self._inherits_from.get(afilename)
if result:
VfsCacheConnection.CACHE_INHERITED.inc()
return FileStreamWrapped(None, result.read())
VfsCacheConnection.CACHE_NOT_FOUND.inc()
return None
def put(self, filename, stream, is_draft=False, metadata_only=False):
"""Puts a file stream to a database. Raw bytes stream, no encodings."""
if stream: # Must be outside the transactional operation
content = stream.read()
else:
content = stream
self._transactional_put(filename, content, is_draft, metadata_only)
@db.transactional(xg=True)
    def _transactional_put(
            self, filename, content, is_draft=False, metadata_only=False):
        self.non_transactional_put(
            filename, content, is_draft=is_draft, metadata_only=metadata_only)
@classmethod
def _generate_file_key_names(cls, filename, size):
"""Generate names for key(s) for DB entities holding file data.
Files may be larger than 1M, the AppEngine limit. To work around
that, just store more entities. Names of additional entities beyond
the first are of the form "<filename>:shard:<number>". This naming
scheme is "in-band", in the sense that it is possible that a user
could try to name a file with this format. However, that format is
unusual enough that prohibiting it in incoming file names is both
simple and very unlikely to cause users undue distress.
Args:
filename: The base name of the file.
size: The size of the file, in bytes.
Returns:
A list of database entity keys. Files smaller than
_MAX_VFS_SHARD_SIZE are stored in one entity named by the
'filename' parameter. If larger, sufficient additional names of the
            form <filename>:shard:0, <filename>:shard:1, ... <filename>:shard:N are added.
"""
if re.search(':shard:[0-9]+$', filename):
raise ValueError(
'Files may not end with ":shard:NNN"; this pattern is '
'reserved for internal use. Filename "%s" violates this. ' %
filename)
if size > _MAX_VFS_SHARD_SIZE * _MAX_VFS_NUM_SHARDS:
raise ValueError(
'Cannot store file "%s"; its size of %d bytes is larger than '
'the maximum supported size of %d.' % (
filename, size, _MAX_VFS_SHARD_SIZE * _MAX_VFS_NUM_SHARDS))
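        # Illustration (assuming a shard size of roughly 1 MB): a 2.5 MB file
        # 'foo.png' maps to ['foo.png', 'foo.png:shard:0', 'foo.png:shard:1'];
        # the base key holds the first chunk and each shard key one more chunk.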
key_names = [filename]
for segment_id in range(size // _MAX_VFS_SHARD_SIZE):
key_names.append('%s:shard:%d' % (filename, segment_id))
return key_names
def non_transactional_put(
self, filename, content, is_draft=False, metadata_only=False):
"""Non-transactional put; use only when transactions are impossible."""
filename = self._logical_to_physical(filename)
metadata = FileMetadataEntity.get_by_key_name(filename)
if not metadata:
metadata = FileMetadataEntity(key_name=filename)
metadata.updated_on = datetime.datetime.utcnow()
metadata.is_draft = is_draft
if not metadata_only:
# We operate with raw bytes. The consumer must deal with encoding.
metadata.size = len(content)
            # Chunk the data into entities based on max entity size limits
# imposed by AppEngine
key_names = self._generate_file_key_names(filename, metadata.size)
shard_entities = []
for index, key_name in enumerate(key_names):
data = FileDataEntity(key_name=key_name)
start_offset = index * _MAX_VFS_SHARD_SIZE
end_offset = (index + 1) * _MAX_VFS_SHARD_SIZE
data.data = content[start_offset:end_offset]
shard_entities.append(data)
entities_put(shard_entities)
metadata.put()
self.cache.delete(filename)
def put_multi_async(self, filedata_list):
"""Initiate an async put of the given files.
This method initiates an asynchronous put of a list of file data
(presented as pairs of the form (filename, data_source)). It is not
transactional, and does not block, and instead immediately returns a
callback function. When this function is called it will block until
the puts are confirmed to have completed. For maximum efficiency it's
advisable to defer calling the callback until all other request handling
has completed, but in any event, it MUST be called before the request
handler can exit successfully.
Args:
filedata_list: list. A list of tuples. The first entry of each
tuple is the file name, the second is a filelike object holding
the file data.
Returns:
callable. Returns a wait-and-finalize function. This function must
            be called at some point before the request handler exits, in order
to confirm that the puts have succeeded.
"""
filename_list = []
data_list = []
metadata_list = []
for filename, stream in filedata_list:
filename = self._logical_to_physical(filename)
filename_list.append(filename)
metadata = FileMetadataEntity.get_by_key_name(filename)
if not metadata:
metadata = FileMetadataEntity(key_name=filename)
metadata_list.append(metadata)
metadata.updated_on = datetime.datetime.utcnow()
# We operate with raw bytes. The consumer must deal with encoding.
raw_bytes = stream.read()
metadata.size = len(raw_bytes)
data = FileDataEntity(key_name=filename)
data_list.append(data)
data.data = raw_bytes
# we do call delete here; so this instance will not increment EVICT
# counter value, but the DELETE value; other instance will not
# record DELETE, but EVICT when they query for updates
self.cache.delete(filename)
data_future = db.put_async(data_list)
metadata_future = db.put_async(metadata_list)
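        # db.put_async only starts the datastore RPCs; the writes are not
        # confirmed until check_success() is called on both futures inside
        # wait_and_finalize below.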
def wait_and_finalize():
data_future.check_success()
metadata_future.check_success()
return wait_and_finalize
@db.transactional(xg=True)
def delete(self, filename):
filename = self._logical_to_physical(filename)
metadata = FileMetadataEntity.get_by_key_name(filename)
if metadata:
metadata.delete()
data = FileDataEntity(key_name=filename)
if data:
data.delete()
self.cache.delete(filename)
def isfile(self, afilename):
"""Checks file existence by looking up the datastore row."""
filename = self._logical_to_physical(afilename)
metadata = FileMetadataEntity.get_by_key_name(filename)
if metadata:
return True
result = False
if self._inherits_from and self._can_inherit(filename):
result = self._inherits_from.isfile(afilename)
return result
def list(self, dir_name, include_inherited=False):
"""Lists all files in a directory by using datastore query.
Args:
dir_name: string. Directory to list contents of.
include_inherited: boolean. If True, includes all inheritable files
from the parent filesystem.
Returns:
List of string. Lexicographically-sorted unique filenames
recursively found in dir_name.
"""
dir_name = self._logical_to_physical(dir_name)
result = set()
keys = FileMetadataEntity.all(keys_only=True)
for key in keys.fetch(1000):
filename = key.name()
if filename.startswith(dir_name):
result.add(self._physical_to_logical(filename))
if include_inherited and self._inherits_from:
for inheritable_folder in self._inheritable_folders:
logical_folder = self._physical_to_logical(inheritable_folder)
result.update(set(self._inherits_from.list(
logical_folder,
include_inherited)))
return sorted(list(result))
def get_jinja_environ(self, dir_names, autoescape=True):
return jinja_utils.create_jinja_environment(
loader=VirtualFileSystemTemplateLoader(
self, self._logical_home_folder, dir_names),
autoescape=autoescape)
def is_read_write(self):
return True
class VfsTests(unittest.TestCase):
def test_pickling(self):
import pickle
pickle.dumps(caching.NoopCacheConnection())
pickle.dumps(caching.AbstractCacheConnection(None))
pickle.dumps(caching.AbstractCacheEntry())
pickle.dumps(CacheFileEntry('foo.bar', 'file metadata', 'file data'))
pickle.dumps(DatastoreBackedFileSystem('/', 'ns_test'))
with self.assertRaises(TypeError):
pickle.dumps(VfsCacheConnection('ns_test'))
def _setup_cache_with_one_entry(self, is_draft=True, updated_on=None):
ProcessScopedVfsCache.clear_all()
conn = VfsCacheConnection('ns_test')
meta = FileMetadataEntity()
meta.is_draft = is_draft
meta.updated_on = updated_on
conn.put('sample.txt', meta, 'file data')
found, stream = conn.get('sample.txt')
self.assertTrue(found)
self.assertEquals(stream.metadata.is_draft, meta.is_draft)
return conn
def test_expire(self):
conn = self._setup_cache_with_one_entry()
entry = conn.cache.items.get(conn.make_key('ns_test', 'sample.txt'))
self.assertTrue(entry)
entry.created_on = datetime.datetime.utcnow() - datetime.timedelta(
0, CacheFileEntry.CACHE_ENTRY_TTL_SEC + 1)
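        # Back-date the entry past its TTL so the next get() treats it as
        # expired instead of returning the cached stream.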
old_expire_count = VfsCacheConnection.CACHE_EXPIRE.value
found, stream = conn.get('sample.txt')
self.assertFalse(found)
self.assertEquals(stream, None)
self.assertEquals(
VfsCacheConnection.CACHE_EXPIRE.value - old_expire_count, 1)
def test_updates_with_no_changes_dont_evict(self):
class _Key(object):
def name(self):
return 'sample.txt'
def _key():
return _Key()
for is_draft, updated_on in [
(True, None), (True, datetime.datetime.utcnow()),
(False, None), (False, datetime.datetime.utcnow())]:
conn = self._setup_cache_with_one_entry(
is_draft=is_draft, updated_on=updated_on)
_, stream = conn.get('sample.txt')
meta = FileMetadataEntity()
meta.key = _key
meta.is_draft = stream.metadata.is_draft
meta.updated_on = stream.metadata.updated_on
updates = {'sample.txt': meta}
old_expire_count = VfsCacheConnection.CACHE_EVICT.value
conn.apply_updates(updates)
found, _ = conn.get('sample.txt')
self.assertTrue(found)
self.assertEquals(
VfsCacheConnection.CACHE_EVICT.value - old_expire_count, 0)
def test_empty_updates_dont_evict(self):
conn = self._setup_cache_with_one_entry()
updates = {}
old_expire_count = VfsCacheConnection.CACHE_EVICT.value
conn.apply_updates(updates)
found, _ = conn.get('sample.txt')
self.assertTrue(found)
self.assertEquals(
VfsCacheConnection.CACHE_EVICT.value - old_expire_count, 0)
def test_updates_with_changes_do_evict(self):
class _Key(object):
def name(self):
return 'sample.txt'
def _key():
return _Key()
def set_is_draft(meta, value):
meta.is_draft = value
def set_updated_on(meta, value):
meta.updated_on = value
conn = self._setup_cache_with_one_entry()
mutations = [
(lambda meta: set_is_draft(meta, False)),
(lambda meta: set_updated_on(meta, datetime.datetime.utcnow()))]
for mutation in mutations:
meta = FileMetadataEntity()
meta.key = _key
mutation(meta)
updates = {'sample.txt': meta}
conn.apply_updates(updates)
found, _ = conn.get('sample.txt')
self.assertFalse(found)
def test_apply_updates_expires_entries(self):
conn = self._setup_cache_with_one_entry()
entry = conn.cache.items.get(conn.make_key('ns_test', 'sample.txt'))
self.assertTrue(entry)
entry.created_on = datetime.datetime.utcnow() - datetime.timedelta(
0, CacheFileEntry.CACHE_ENTRY_TTL_SEC + 1)
updates = {}
conn.apply_updates(updates)
old_expire_count = VfsCacheConnection.CACHE_EXPIRE.value
found, stream = conn.get('sample.txt')
self.assertFalse(found)
self.assertEquals(stream, None)
self.assertEquals(
VfsCacheConnection.CACHE_EXPIRE.value - old_expire_count, 1)
def test_no_metadata_and_no_data_is_evicted(self):
ProcessScopedVfsCache.clear_all()
conn = VfsCacheConnection('ns_test')
conn.put('sample.txt', None, None)
meta = FileMetadataEntity()
meta.key = 'sample/txt'
updates = {'sample.txt': meta}
conn.apply_updates(updates)
found, stream = conn.get('sample.txt')
self.assertFalse(found)
self.assertEquals(stream, None)
def test_metadata_but_no_data_is_evicted(self):
ProcessScopedVfsCache.clear_all()
conn = VfsCacheConnection('ns_test')
meta = FileMetadataEntity()
meta.is_draft = True
meta.updated_on = datetime.datetime.utcnow()
conn.put('sample.txt', meta, None)
meta = FileMetadataEntity()
meta.key = 'sample/txt'
updates = {'sample.txt': meta}
conn.apply_updates(updates)
found, stream = conn.get('sample.txt')
self.assertFalse(found)
self.assertEquals(stream, None)
def run_all_unit_tests():
"""Runs all unit tests in this module."""
suites_list = []
for test_class in [VfsTests]:
suite = unittest.TestLoader().loadTestsFromTestCase(test_class)
suites_list.append(suite)
result = unittest.TextTestRunner().run(unittest.TestSuite(suites_list))
if not result.wasSuccessful() or result.errors:
raise Exception(result)
if __name__ == '__main__':
run_all_unit_tests()
| ehiller/CourseBuilderV19-TeacherDashboard | models/vfs.py | Python | apache-2.0 | 35,365 |
"""Python implementation of the InvenSense MPU-6050 Gyroscope / Accelerometer libray
Original inspiration:
MrTijn/Tijndagamer https://github.com/Tijndagamer/mpu6050
Jrowberg https://github.com/jrowberg/i2cdevlib/tree/master/Arduino/MPU6050
InvenSense https://invensense.com
Released under the MIT License
Copyright 2016
"""
import smbus
from utils_3d import *
import time
import json
import os
DEBUG = False
class MPU6050(object):
"""Main MPU6050 Class
Including support for Accelerometer and Gyro readout, Digital Low Pass Filter (DLPF)
and Digital Motion Processor (DMP)"""
# Global Variables
GRAVITIY_MS2 = 9.80665
address = None
bus = None
# Power Management
PWR_MGMT_1 = 0x6B
PWR_MGMT_2 = 0x6C
# Clock Select
CLK_SEL_0 = 0
CLK_SEL_XGYRO = 1
CLK_SEL_YGYRO = 2
CLK_SEL_ZGYRO = 3
CLK_SEL_EXT_32K = 4
CLK_SEL_EXT_19K = 5
CLK_SEL_PLL = CLK_SEL_XGYRO # not sure which axis we should use
# Sensor config
INV_X_GYRO = 0x40
INV_Y_GYRO = 0x20
INV_Z_GYRO = 0x10
INV_XYZ_GYRO = (INV_X_GYRO | INV_Y_GYRO | INV_Z_GYRO)
INV_XYZ_ACCEL = 0x08
INV_XYZ_COMPASS = 0x01
INV_WXYZ_QUAT = 0x100
    # Sleep & Cycle modes
SLEEP_MODE = 0b01000000
CYCLE_MODE = 0b00100000
# Sample Rate Division
SMPLRT_DIV = 0x19
# Config
CONFIG = 0x1A
FIFO_EN = 0x23
RA_INT_PIN_CFG = 0x37
RA_WHO_AM_I = 0x75
# sample rate division
RA_RATE_DIV = 0x19
BIT_MOT_INT_EN = 0x40
BITS_FSR = 0x18
BITS_LPF = 0x07
BITS_HPF = 0x07
BITS_CLK = 0x07
BIT_RESET = 0x80
BIT_SLEEP = 0x40
# interrupt stuff
BIT_LATCH_EN = 0x20
BIT_ANY_RD_CLR = 0x10
BIT_BYPASS_EN = 0x02
BITS_WOM_EN = 0xC0
BIT_ACTL = 0x80
BIT_AUX_IF_EN = 0x20
# interrupt mode Data Ready / other mode is DMP
BIT_DATA_RDY_EN = 0x01
# low power mode settings
BIT_LPA_CYCLE = 0x20
BIT_STBY_XA = 0x20
BIT_STBY_YA = 0x10
BIT_STBY_ZA = 0x08
BIT_STBY_XG = 0x04
BIT_STBY_YG = 0x02
BIT_STBY_ZG = 0x01
BIT_STBY_XYZA = (BIT_STBY_XA | BIT_STBY_YA | BIT_STBY_ZA)
BIT_STBY_XYZG = (BIT_STBY_XG | BIT_STBY_YG | BIT_STBY_ZG)
    # low power mode wake up frequencies
INV_LPA_1_25HZ = 0x00
INV_LPA_5HZ = 0x01
INV_LPA_20HZ = 0x02
INV_LPA_40HZ = 0x03
class X(object):
"""Class for the X axis
Contains variable specific to the X axis (Accelerometer or Gyro)"""
GYRO_OUT0 = 0x43
GYRO_OUT1 = 0x44
OFFS_TC = 0x00 # bits 6-1
OFFS_MASK = 0b01111110
ACCEL_OUT0 = 0x3B
ACCEL_OUT1 = 0x3C
OFFS_H = 0x06
OFFS_L = 0x07
SELF_TEST_SEL_BIT = 0b10000000
SELF_TEST_REG_H = 0x0D
SELF_TEST_REG_L = 0x10
SELF_TEST_A_4_2 = 0b11100000
SELF_TEST_A_1_0 = 0b00110000
SELF_TEST_G_MASK = 0b00011111
FINE_GAIN = 0x03
class Y(object):
"""Class for the Y axis
Contains variable specific to the Y axis (Accelerometer or Gyro)"""
GYRO_OUT0 = 0x45
GYRO_OUT1 = 0x46
OFFS_TC = 0x01 # bits 6-1
OFFS_MASK = 0b01111110
ACCEL_OUT0 = 0x3D
ACCEL_OUT1 = 0x3E
OFFS_H = 0x08
OFFS_L = 0x09
SELF_TEST_SEL_BIT = 0b01000000
SELF_TEST_REG_H = 0x0E
SELF_TEST_REG_L = 0x10
SELF_TEST_A_4_2 = 0b11100000
SELF_TEST_A_1_0 = 0b00001100
SELF_TEST_G_MASK = 0b00011111
FINE_GAIN = 0x04
class Z(object):
"""Class for the Z axis
Contains variable specific to the Z axis (Accelerometer or Gyro)"""
GYRO_OUT0 = 0x47
GYRO_OUT1 = 0x48
OFFS_TC = 0x02 # bits 6-1
OFFS_MASK = 0b01111110
ACCEL_OUT0 = 0x3F
ACCEL_OUT1 = 0x40
OFFS_H = 0x0A
OFFS_L = 0x0B
SELF_TEST_SEL_BIT = 0b00100000
SELF_TEST_REG_H = 0x0F
SELF_TEST_REG_L = 0x10
SELF_TEST_A_4_2 = 0b11100000
SELF_TEST_A_1_0 = 0b00000011
SELF_TEST_G_MASK = 0b00011111
FINE_GAIN = 0x05
class I2CClass(object):
"""I2C helper class
Wraps some of the I2C read / write functionalities of smbus to easier access throughout"""
# Slave device
RA_I2C_MST_CTRL = 0x24
RA_I2C_SLV0_ADDR = 0x25
RA_I2C_SLV0_REG = 0x26
RA_I2C_SLV0_CTRL = 0x27
RA_I2C_SLV1_ADDR = 0x28
RA_I2C_SLV1_REG = 0x29
RA_I2C_SLV1_CTRL = 0x2A
RA_I2C_SLV2_ADDR = 0x2B
RA_I2C_SLV2_REG = 0x2C
RA_I2C_SLV2_CTRL = 0x2D
RA_I2C_SLV3_ADDR = 0x2E
RA_I2C_SLV3_REG = 0x2F
RA_I2C_SLV3_CTRL = 0x30
RA_I2C_SLV4_ADDR = 0x31
RA_I2C_SLV4_REG = 0x32
RA_I2C_SLV4_DO = 0x33
RA_I2C_SLV4_CTRL = 0x34
RA_I2C_SLV4_DI = 0x35
RA_I2C_MST_STATUS = 0x36
# define MPU6050_I2C_SLV_RW_BIT = 7
# define MPU6050_I2C_SLV_ADDR_BIT = 6
# define MPU6050_I2C_SLV_ADDR_LENGTH= 7
# define MPU6050_I2C_SLV_EN_BIT = 7
# define MPU6050_I2C_SLV_BYTE_SW_BIT= 6
# define MPU6050_I2C_SLV_REG_DIS_BIT= 5
# define MPU6050_I2C_SLV_GRP_BIT = 4
# define MPU6050_I2C_SLV_LEN_BIT = 3
# define MPU6050_I2C_SLV_LEN_LENGTH = 4
# define MPU6050_I2C_SLV4_RW_BIT = 7
# define MPU6050_I2C_SLV4_ADDR_BIT = 6
# define MPU6050_I2C_SLV4_ADDR_LENGTH = 7
# define MPU6050_I2C_SLV4_EN_BIT = 7
# define MPU6050_I2C_SLV4_INT_EN_BIT = 6
# define MPU6050_I2C_SLV4_REG_DIS_BIT = 5
# define MPU6050_I2C_SLV4_MST_DLY_BIT = 4
# define MPU6050_I2C_SLV4_MST_DLY_LENGTH= 5
# define MPU6050_MST_PASS_THROUGH_BIT = 7
# define MPU6050_MST_I2C_SLV4_DONE_BIT = 6
# define MPU6050_MST_I2C_LOST_ARB_BIT = 5
# define MPU6050_MST_I2C_SLV4_NACK_BIT = 4
# define MPU6050_MST_I2C_SLV3_NACK_BIT = 3
# define MPU6050_MST_I2C_SLV2_NACK_BIT = 2
# define MPU6050_MST_I2C_SLV1_NACK_BIT = 1
# define MPU6050_MST_I2C_SLV0_NACK_BIT = 0
def __init__(self, mpu):
self.mpu = mpu
self.bus = mpu.bus
self.address = mpu.address
def read_byte(self, register):
"""Read a single byte from a register
:param register: byte -- the register to read from
:return: byte -- the byte read from register
"""
return self.bus.read_byte_data(self.address, register)
def write_byte(self, register, value):
"""Write a single byte to a register
:param register: byte -- the register to write to
:param value: byte -- the byte to write
:return:
"""
self.bus.write_byte_data(self.address, register, value)
def read_word(self, register):
"""Reads two bytes starting at register.
:param register -- the register to start reading from.
:return: word -- Returns the combined 2 bytes read from the register.
"""
high = self.bus.read_byte_data(self.address, register)
low = self.bus.read_byte_data(self.address, register + 1)
value = (high << 8) + low
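            # The registers hold signed 16-bit values in two's complement, so
            # readings with the sign bit set (>= 0x8000) map to negative numbers.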
if value >= 0x8000:
return value - 65536
else:
return value
def write_word(self, register, value):
"""Write 2 bytes starting at register
:param register: byte -- the register to start writing at
:param value: word -- the word combining the 2 bytes to write starting at register
:return:
"""
low = value & 0x00FF
high = value >> 8
self.bus.write_byte_data(self.address, register, high)
self.bus.write_byte_data(self.address, register + 1, low)
def read_bytes(self, cmd, length):
"""Reads data from I2C bus
:param cmd: byte -- the control command to send to the I2C device. It is often a register address.
:param length: int -- number of bytes to read from I2C bus
:return: list -- array of bytes read
"""
return self.bus.read_i2c_block_data(self.address, cmd, length)
def write_bytes(self, cmd, length, data):
"""Writes data to I2C bus
:param cmd: byte -- the control command to send to the I2C device. It is often a register address.
            :param length: int -- the number of bytes to send (can be omitted since data is an array and its length is known)
:param data: list -- the array of bytes to write to the I2C bus
:return:
"""
self.bus.write_i2c_block_data(self.address, cmd, data)
def set_slave_address(self, slave, addr):
"""Set I2C slave device address on the bus
:param slave: byte -- slave device id on the bus
:param addr: byte -- address of the slave device
:return:
"""
if slave > 3:
return
self.bus.write_byte_data(self.address, self.RA_I2C_SLV0_ADDR + slave * 3, addr)
def set_master_mode(self, enable):
# if enable:
# self.bus.write_byte_data(self.address, self.RA_I2C_MST_CTRL, )
# else:
return
def reset_master(self):
return
class TemperatureClass:
"""Temperature sensor Class"""
TEMP_OUT0 = 0x41
TEMP_OUT1 = 0x42
CELSIUS = 0
KELVIN = 1
FAHRENHEIT = 2
def __init__(self, mpu):
self.mpu = mpu
self.i2c = self.mpu.i2c
self._unit = self.CELSIUS
def get_value(self, unit=None):
"""Get temperature from internal sensor in the specified unit
(default to set unit or degrees Celsius if not set)
:param unit: int -- 0: Celsius, 1: Kelvin, 2: Fahrenheit. Default: Celsius or unit set with set_unit()
:return: float -- temperature in specified unit
"""
if unit is None:
unit = self._unit
raw_temp = self.i2c.read_word(self.TEMP_OUT0)
            # Get the actual temperature using the formula given in the
            # MPU-6050 Register Map and Descriptions revision 4.2, page 30
            actual_temp = (raw_temp / 340.0) + 36.53  # in Celsius
if unit == self.CELSIUS:
return actual_temp
elif unit == self.KELVIN:
return actual_temp + 273.15
elif unit == self.FAHRENHEIT:
return actual_temp * 1.8 + 32
def set_unit(self, unit=CELSIUS):
"""Set temperature unit to use
            :param unit: 0: CELSIUS, 1: KELVIN, 2: FAHRENHEIT
:return:
"""
self._unit = unit
@property
def value(self):
"""Temperature value in specified unit
:return: float -- temperature
"""
return self.get_value()
class GyroClass(object):
"""Gyroscope Class"""
GYRO_CONFIG = 0x1B
GYRO_SCALE_MODIFIER_250DEG = 131.0
GYRO_SCALE_MODIFIER_500DEG = 65.5
GYRO_SCALE_MODIFIER_1000DEG = 32.8
GYRO_SCALE_MODIFIER_2000DEG = 16.4
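        # Sensitivity in LSB per deg/s for each full scale range: a signed
        # 16-bit reading spans 32768 counts, so the +/-250 deg/s range gives
        # 32768 / 250 ~= 131 LSB per deg/s, halving as the range doubles.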
GYRO_RANGE_250DEG = 0x00
GYRO_RANGE_500DEG = 0x08
GYRO_RANGE_1000DEG = 0x10
GYRO_RANGE_2000DEG = 0x18
def __init__(self, mpu):
self.mpu = mpu
self.i2c = self.mpu.i2c
self._scale_modifier = self.GYRO_SCALE_MODIFIER_250DEG
self._range_raw = self.GYRO_RANGE_250DEG
class Axis:
def __init__(self, gyro, axis, name):
self._name = name
self.gyro = gyro
self.mpu = self.gyro.mpu
self.i2c = self.gyro.mpu.i2c
self.axis = axis
def get_value(self, raw=False):
"""Get Gyroscope Axis value, either raw or in deg/sec (dps)
:param raw: bool -- raw values are returned if True
:return: float -- Gyro Axis value
"""
val = self.i2c.read_word(self.axis.GYRO_OUT0)
# gyro_range = self.gyro.range_raw
gyro_scale_modifier = self.gyro.scale_modifier
# if gyro_range == self.gyro.GYRO_RANGE_250DEG:
# gyro_scale_modifier = self.gyro.GYRO_SCALE_MODIFIER_250DEG
# elif gyro_range == self.gyro.GYRO_RANGE_500DEG:
# gyro_scale_modifier = self.gyro.GYRO_SCALE_MODIFIER_500DEG
# elif gyro_range == self.gyro.GYRO_RANGE_1000DEG:
# gyro_scale_modifier = self.gyro.GYRO_SCALE_MODIFIER_1000DEG
# elif gyro_range == self.gyro.GYRO_RANGE_2000DEG:
# gyro_scale_modifier = self.gyro.GYRO_SCALE_MODIFIER_2000DEG
# else:
# if DEBUG: print("Unknown range - gyro_scale_modifier set to self.GYRO_SCALE_MODIFIER_250DEG")
# gyro_scale_modifier = self.gyro.GYRO_SCALE_MODIFIER_250DEG
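                # Dividing the raw 16-bit reading by the sensitivity of the
                # currently configured range converts it to deg/s.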
if raw:
return val
else:
return val / gyro_scale_modifier
def get_offset(self):
return (self.i2c.read_byte(self.axis.OFFS_TC) & self.axis.OFFS_MASK) >> 1
def set_offset(self, offset):
o = self.i2c.read_byte(self.axis.OFFS_TC) & ~self.axis.OFFS_MASK
o |= offset << 1
self.i2c.write_byte(self.axis.OFFS_TC, o)
def set_self_test_mode(self, state=None):
if state is None:
# save state
state = self.i2c.read_byte(self.gyro.GYRO_CONFIG)
# set to self-test mode
self_test_mask = self.axis.SELF_TEST_SEL_BIT | self.gyro.GYRO_RANGE_250DEG
self.i2c.write_byte(self.gyro.GYRO_CONFIG, self_test_mask)
return state
else:
# reset register to previous state
self.i2c.write_byte(self.gyro.GYRO_CONFIG, state)
def get_self_test_value(self):
gyro_state = self.set_self_test_mode()
# read ST registers
st = self.i2c.read_byte(self.axis.SELF_TEST_REG_H)
self_test = (st & self.axis.SELF_TEST_G_MASK)
# reset register to previous state
self.set_self_test_mode(gyro_state)
return self_test
def get_factory_trim_value(self):
self_test = self.get_self_test_value()
return 25 * 131 * pow(1.046, self_test - 1) if self_test != 0 else 0
@property
def value(self):
return self.get_value()
@property
def offset(self):
return self.get_offset()
@property
def name(self):
return self._name
def get_range(self, raw=False):
"""Get Gyro Full Scale Range (FSR) in raw format or deg/sec (dps)
:param raw: bool -- raw values are returned if True
:return: int -- Gyro Full Scale Range
"""
            # FS_SEL occupies bits 4:3 of GYRO_CONFIG, so mask with 0b00011000.
            raw_data = self.i2c.read_byte(self.GYRO_CONFIG) & 0x18
if raw is True:
return raw_data
elif raw is False:
if raw_data == self.GYRO_RANGE_250DEG:
return 250
elif raw_data == self.GYRO_RANGE_500DEG:
return 500
elif raw_data == self.GYRO_RANGE_1000DEG:
return 1000
elif raw_data == self.GYRO_RANGE_2000DEG:
return 2000
else:
return -1
def set_range(self, value):
"""Sets the range of the gyroscope to 'value'.
:param value: int -- Gyro Full Scale Range to set: on of GYRO_RANGE_250DEG, GYRO_RANGE_500DEG, GYRO_RANGE_1000DEG or GYRO_RANGE_2000DEG
:return:
"""
if value == self.GYRO_RANGE_250DEG:
self._scale_modifier = self.GYRO_SCALE_MODIFIER_250DEG
elif value == self.GYRO_RANGE_500DEG:
self._scale_modifier = self.GYRO_SCALE_MODIFIER_500DEG
elif value == self.GYRO_RANGE_1000DEG:
self._scale_modifier = self.GYRO_SCALE_MODIFIER_1000DEG
elif value == self.GYRO_RANGE_2000DEG:
self._scale_modifier = self.GYRO_SCALE_MODIFIER_2000DEG
else:
raise ValueError("Set range: not within the possible values")
# First change it to 0x00 to make sure we write the correct value later
self.i2c.write_byte(self.GYRO_CONFIG, 0x00)
# Write the new range to the ACCEL_CONFIG register
self.i2c.write_byte(self.GYRO_CONFIG, value)
@property
def scale_modifier(self):
return self._scale_modifier
@property
def x(self):
return self.Axis(self, self.mpu.X, "Gyro X")
@property
def y(self):
return self.Axis(self, self.mpu.Y, "Gyro Y")
@property
def z(self):
return self.Axis(self, self.mpu.Z, "Gyro Z")
@property
def axes(self):
return self.x, self.y, self.z
@property
def values(self):
return {"x": self.x.value, "y": self.y.value, "z": self.z.value}
@property
def offsets(self):
return {"x": self.x.offset, "y": self.y.offset, "z": self.z.offset}
@property
def range(self):
return self.get_range()
@property
def range_raw(self):
return self.get_range(raw=True)
class AccelerometerClass(object):
ACCEL_CONFIG = 0x1C
ACCEL_SCALE_MODIFIER_2G = 16384.0
ACCEL_SCALE_MODIFIER_4G = 8192.0
ACCEL_SCALE_MODIFIER_8G = 4096.0
ACCEL_SCALE_MODIFIER_16G = 2048.0
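        # Sensitivity in LSB per g for each range: 32768 / 2 = 16384 LSB/g at
        # +/-2g, halving each time the full scale range doubles.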
ACCEL_RANGE_2G = 0x00
ACCEL_RANGE_4G = 0x08
ACCEL_RANGE_8G = 0x10
ACCEL_RANGE_16G = 0x18
def __init__(self, mpu):
self.mpu = mpu
self.i2c = self.mpu.i2c
self._scale_modifier = self.ACCEL_SCALE_MODIFIER_2G
self._range = self.ACCEL_RANGE_2G
class Axis:
def __init__(self, accel, axis, name):
self._name = name
self.accel = accel
self.mpu = self.accel.mpu
self.i2c = self.accel.mpu.i2c
self.axis = axis
def get_value(self, raw=False):
"""Accelerometer Axis value, in raw format (ms^2) or G's
:param raw: bool -- unit ms^2 if True otherwise G's
:return: float -- Accelerometer axis value
"""
val = self.i2c.read_word(self.axis.ACCEL_OUT0)
# accel_range = self.accel.range_raw
accel_scale_modifier = self.accel.scale_modifier
# if accel_range == self.accel.ACCEL_RANGE_2G:
# accel_scale_modifier = self.accel.ACCEL_SCALE_MODIFIER_2G
# elif accel_range == self.accel.ACCEL_RANGE_4G:
# accel_scale_modifier = self.accel.ACCEL_SCALE_MODIFIER_4G
# elif accel_range == self.accel.ACCEL_RANGE_8G:
# accel_scale_modifier = self.accel.ACCEL_SCALE_MODIFIER_8G
# elif accel_range == self.accel.ACCEL_RANGE_16G:
# accel_scale_modifier = self.accel.ACCEL_SCALE_MODIFIER_16G
# else:
            #     if DEBUG: print("Unknown range - accel_scale_modifier set to self.ACCEL_SCALE_MODIFIER_2G")
# accel_scale_modifier = self.accel.ACCEL_SCALE_MODIFIER_2G
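                # Dividing by the sensitivity gives the reading in g; it is
                # then multiplied by standard gravity to return m/s^2.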
if raw:
return val
else:
return val / accel_scale_modifier * self.mpu.GRAVITIY_MS2
def get_offset(self):
"""Get Accelerometer axis offset
:return: int -- Accelerometer axis offset used for calibration
"""
return self.i2c.read_word(self.axis.OFFS_H)
def set_offset(self, offset):
"""Set Accelerometer axis offset
:param offset: int -- Accelerometer axis offset to use for calibration
:return:
"""
self.i2c.write_word(self.axis.OFFS_H, offset)
def set_self_test_mode(self, state=None):
                if state is None:
# save state
state = self.i2c.read_byte(self.accel.ACCEL_CONFIG)
# set to self-test mode
# set X, Y, Z bits (7,6,5) + range to +/-8g -> AFS_SEL=2 (bit 4-3 to 10) as per MPU-Register Map doc p11
self_test_mask = self.axis.SELF_TEST_SEL_BIT | self.accel.ACCEL_RANGE_8G
self.i2c.write_byte(self.accel.ACCEL_CONFIG, self_test_mask)
return state
else:
self.i2c.write_byte(self.accel.ACCEL_CONFIG, state)
def get_self_test_value(self):
# self-test mode
accel_state = self.set_self_test_mode()
# read ST registers
high = self.i2c.read_byte(self.axis.SELF_TEST_REG_H)
low = self.i2c.read_byte(self.axis.SELF_TEST_REG_L)
# mask operations to get values
self_test_value = (high & self.axis.SELF_TEST_A_4_2) | (low & self.axis.SELF_TEST_A_1_0)
# reset register to previous state
self.set_self_test_mode(accel_state)
return self_test_value
def get_factory_trim_value(self):
self_test = self.get_self_test_value()
return 4096 * 0.34 * (pow(0.92, (self_test - 1) / 30) / 0.34) if self_test != 0 else 0
@property
def value(self):
return self.get_value()
@property
def offset(self):
return self.get_offset()
@property
def name(self):
return self._name
def get_range(self, raw=False):
"""Reads the range the accelerometer is set to.
If raw is True, it will return the raw value from the ACCEL_CONFIG
register
If raw is False, it will return an integer: -1, 2, 4, 8 or 16. When it
returns -1 something went wrong.
"""
            # AFS_SEL occupies bits 4:3 of ACCEL_CONFIG, so mask with 0b00011000.
            raw_data = self.i2c.read_byte(self.ACCEL_CONFIG) & 0x18
if raw is True:
return raw_data
elif raw is False:
if raw_data == self.ACCEL_RANGE_2G:
return 2
elif raw_data == self.ACCEL_RANGE_4G:
return 4
elif raw_data == self.ACCEL_RANGE_8G:
return 8
elif raw_data == self.ACCEL_RANGE_16G:
return 16
else:
return -1
def set_range(self, value):
"""Sets the range of the accelerometer to range.
accel_range -- the range to set the accelerometer to. Using a
pre-defined range is advised.
"""
if value == self.ACCEL_RANGE_2G:
self._scale_modifier = self.ACCEL_SCALE_MODIFIER_2G
elif value == self.ACCEL_RANGE_4G:
self._scale_modifier = self.ACCEL_SCALE_MODIFIER_4G
elif value == self.ACCEL_RANGE_8G:
self._scale_modifier = self.ACCEL_SCALE_MODIFIER_8G
elif value == self.ACCEL_RANGE_16G:
self._scale_modifier = self.ACCEL_SCALE_MODIFIER_16G
else:
raise ValueError("Not within permissible values")
# First change it to 0x00 to make sure we write the correct value later
self.i2c.write_byte(self.ACCEL_CONFIG, 0x00)
# Write the new range to the ACCEL_CONFIG register
self.i2c.write_byte(self.ACCEL_CONFIG, value)
self._range = value
@property
def scale_modifier(self):
return self._scale_modifier
@property
def x(self):
return self.Axis(self, self.mpu.X, "Accelerometer X")
@property
def y(self):
return self.Axis(self, self.mpu.Y, "Accelerometer Y")
@property
def z(self):
return self.Axis(self, self.mpu.Z, "Accelerometer Z")
@property
def axes(self):
return self.x, self.y, self.z
@property
def values(self):
return {"x": self.x.value, "y": self.y.value, "z": self.z.value}
@property
def offsets(self):
return {"x": self.x.offset, "y": self.y.offset, "z": self.z.offset}
@property
def range(self):
return self.get_range()
@property
def range_raw(self):
return self.get_range(raw=True)
class DLPFClass(object):
# Digital Low Pass Filter DLPF
DLPF_CFG_5 = 6
DLPF_CFG_10 = 5
DLPF_CFG_21 = 4
DLPF_CFG_44 = 3
DLPF_CFG_94 = 2
DLPF_CFG_184 = 1
DLPF_CFG_260 = 0
DLPF_CFG_MASK = 0b00000111
def __init__(self, mpu):
self.mpu = mpu
self.i2c = self.mpu.i2c
self._value = 0
self._frequency = 0
@property
def value(self):
return self._value
@property
def frequency(self):
return self._frequency
def get(self):
# Read CONFIG register
state = self.i2c.read_byte(self.mpu.CONFIG)
# clear DLPF_CFG
return state & self.DLPF_CFG_MASK
def set(self, value):
# Read original state
state = self.i2c.read_byte(self.mpu.CONFIG)
# clear DLPF_CFG (AND with inverted mask)
config = state & ~self.DLPF_CFG_MASK
# Apply new value
config = config | value
# Write the new value to the CONFIG register
self.i2c.write_byte(self.mpu.CONFIG, config)
self._value = value
def get_frequency(self):
# /**
# * @brief Get the current DLPF setting.
# * @param[out] lpf Current LPF setting.
# * 0 if successful.
# */
freq = self.get()
if freq == self.DLPF_CFG_260:
return 260
elif freq == self.DLPF_CFG_184:
return 184
elif freq == self.DLPF_CFG_94:
return 94
elif freq == self.DLPF_CFG_44:
return 44
elif freq == self.DLPF_CFG_21:
return 21
elif freq == self.DLPF_CFG_10:
return 10
elif freq == self.DLPF_CFG_5:
return 5
else:
return 0
def set_frequency(self, value):
# /**
# * @brief Set digital low pass filter.
# * The following LPF settings are supported: 188, 98, 42, 20, 10, 5.
# * @param[in] lpf Desired LPF setting.
# * @return 0 if successful.
# */
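            # Pick the widest supported DLPF bandwidth that does not exceed
            # the requested value (260, 184, 94, 44, 21, 10 or 5 Hz); anything
            # below 5 Hz falls back to the 260 Hz default.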
if value >= 260:
val = self.DLPF_CFG_260
freq = 260
elif value >= 188:
val = self.DLPF_CFG_184
freq = 188
elif value >= 94:
val = self.DLPF_CFG_94
freq = 94
elif value >= 44:
val = self.DLPF_CFG_44
freq = 44
elif value >= 21:
val = self.DLPF_CFG_21
freq = 21
elif value >= 10:
val = self.DLPF_CFG_10
freq = 10
elif value >= 5:
val = self.DLPF_CFG_5
freq = 5
else:
val = self.DLPF_CFG_260
freq = 260
if self._value == val:
self._frequency = freq
return True
            else:
                self.set(val)
                self._frequency = freq
                return True
class DMPClass(object):
"""Digital Motion Processor Class
It covers advanced features such as tap detection, podometer (steps), orientation detection,
controllable through interrupts, as well as internal Gyro calibration
and output of the rotation quaternion.
"""
DMP_CODE_SIZE = 1929 # dmpMemory[]
DMP_CONFIG_SIZE = 192 # dmpConfig[]
DMP_UPDATES_SIZE = 47 # dmpUpdates[]
RA_BANK_SEL = 0x6D
RA_MEM_START_ADDR = 0x6E
RA_MEM_R_W = 0x6F
RA_DMP_CFG_1 = 0x70
RA_DMP_CFG_2 = 0x71
BANKSEL_PRFTCH_EN_BIT = 6
BANKSEL_CFG_USER_BANK_BIT = 5
BANKSEL_MEM_SEL_BIT = 4
BANKSEL_MEM_SEL_LENGTH = 5
RA_XG_OFFS_TC = 0x00 # [7] PWR_MODE, [6:1] XG_OFFS_TC, [0] OTP_BNK_VLD
RA_YG_OFFS_TC = 0x01 # [7] PWR_MODE, [6:1] YG_OFFS_TC, [0] OTP_BNK_VLD
RA_ZG_OFFS_TC = 0x02 # [7] PWR_MODE, [6:1] ZG_OFFS_TC, [0] OTP_BNK_VLD
TC_OTP_BNK_VLD_BIT = 0
WHO_AM_I_BIT = 6
WHO_AM_I_LENGTH = 6
DMP_MEMORY_BANKS = 8
DMP_MEMORY_BANK_SIZE = 256
DMP_MEMORY_CHUNK_SIZE = 16
DMP_CODE_START_ADDR = 0x0400
BIT_I2C_MST_VDDIO = 0x80
BIT_DMP_EN = 0x80
BIT_DMP_RST = 0x08
BIT_DMP_INT_EN = 0x02
BIT_S0_DELAY_EN = 0x01
BIT_S2_DELAY_EN = 0x04
BITS_SLAVE_LENGTH = 0x0F
BIT_SLAVE_BYTE_SW = 0x40
BIT_SLAVE_GROUP = 0x10
BIT_SLAVE_EN = 0x80
BIT_I2C_READ = 0x80
BITS_I2C_MASTER_DLY = 0x1F
DINA0A = 0x0a
DINA22 = 0x22
DINA42 = 0x42
DINA5A = 0x5a
DINA06 = 0x06
DINA0E = 0x0e
DINA16 = 0x16
DINA1E = 0x1e
DINA26 = 0x26
DINA2E = 0x2e
DINA36 = 0x36
DINA3E = 0x3e
DINA46 = 0x46
DINA4E = 0x4e
DINA56 = 0x56
DINA5E = 0x5e
DINA66 = 0x66
DINA6E = 0x6e
DINA76 = 0x76
DINA7E = 0x7e
DINA00 = 0x00
DINA08 = 0x08
DINA10 = 0x10
DINA18 = 0x18
DINA20 = 0x20
DINA28 = 0x28
DINA30 = 0x30
DINA38 = 0x38
DINA40 = 0x40
DINA48 = 0x48
DINA50 = 0x50
DINA58 = 0x58
DINA60 = 0x60
DINA68 = 0x68
DINA70 = 0x70
DINA78 = 0x78
DINA04 = 0x04
DINA0C = 0x0c
DINA14 = 0x14
DINA1C = 0x1C
DINA24 = 0x24
DINA2C = 0x2c
DINA34 = 0x34
DINA3C = 0x3c
DINA44 = 0x44
DINA4C = 0x4c
DINA54 = 0x54
DINA5C = 0x5c
DINA64 = 0x64
DINA6C = 0x6c
DINA74 = 0x74
DINA7C = 0x7c
DINA01 = 0x01
DINA09 = 0x09
DINA11 = 0x11
DINA19 = 0x19
DINA21 = 0x21
DINA29 = 0x29
DINA31 = 0x31
DINA39 = 0x39
DINA41 = 0x41
DINA49 = 0x49
DINA51 = 0x51
DINA59 = 0x59
DINA61 = 0x61
DINA69 = 0x69
DINA71 = 0x71
DINA79 = 0x79
DINA25 = 0x25
DINA2D = 0x2d
DINA35 = 0x35
DINA3D = 0x3d
DINA4D = 0x4d
DINA55 = 0x55
DINA5D = 0x5D
DINA6D = 0x6d
DINA75 = 0x75
DINA7D = 0x7d
DINADC = 0xdc
DINAF2 = 0xf2
DINAAB = 0xab
DINAAA = 0xaa
DINAF1 = 0xf1
DINADF = 0xdf
DINADA = 0xda
DINAB1 = 0xb1
DINAB9 = 0xb9
DINAF3 = 0xf3
DINA8B = 0x8b
DINAA3 = 0xa3
DINA91 = 0x91
DINAB6 = 0xb6
DINAB4 = 0xb4
DINC00 = 0x00
DINC01 = 0x01
DINC02 = 0x02
DINC03 = 0x03
DINC08 = 0x08
DINC09 = 0x09
DINC0A = 0x0a
DINC0B = 0x0b
DINC10 = 0x10
DINC11 = 0x11
DINC12 = 0x12
DINC13 = 0x13
DINC18 = 0x18
DINC19 = 0x19
DINC1A = 0x1a
DINC1B = 0x1b
DINC20 = 0x20
DINC21 = 0x21
DINC22 = 0x22
DINC23 = 0x23
DINC28 = 0x28
DINC29 = 0x29
DINC2A = 0x2a
DINC2B = 0x2b
DINC30 = 0x30
DINC31 = 0x31
DINC32 = 0x32
DINC33 = 0x33
DINC38 = 0x38
DINC39 = 0x39
DINC3A = 0x3a
DINC3B = 0x3b
DINC40 = 0x40
DINC41 = 0x41
DINC42 = 0x42
DINC43 = 0x43
DINC48 = 0x48
DINC49 = 0x49
DINC4A = 0x4a
DINC4B = 0x4b
DINC50 = 0x50
DINC51 = 0x51
DINC52 = 0x52
DINC53 = 0x53
DINC58 = 0x58
DINC59 = 0x59
DINC5A = 0x5a
DINC5B = 0x5b
DINC60 = 0x60
DINC61 = 0x61
DINC62 = 0x62
DINC63 = 0x63
DINC68 = 0x68
DINC69 = 0x69
DINC6A = 0x6a
DINC6B = 0x6b
DINC70 = 0x70
DINC71 = 0x71
DINC72 = 0x72
DINC73 = 0x73
DINC78 = 0x78
DINC79 = 0x79
DINC7A = 0x7a
DINC7B = 0x7b
DIND40 = 0x40
DINA80 = 0x80
DINA90 = 0x90
DINAA0 = 0xa0
DINAC9 = 0xc9
DINACB = 0xcb
DINACD = 0xcd
DINACF = 0xcf
DINAC8 = 0xc8
DINACA = 0xca
DINACC = 0xcc
DINACE = 0xce
DINAD8 = 0xd8
DINADD = 0xdd
DINAF8 = 0xf0
DINAFE = 0xfe
DINBF8 = 0xf8
DINAC0 = 0xb0
DINAC1 = 0xb1
DINAC2 = 0xb4
DINAC3 = 0xb5
DINAC4 = 0xb8
DINAC5 = 0xb9
DINBC0 = 0xc0
DINBC2 = 0xc2
DINBC4 = 0xc4
DINBC6 = 0xc6
# /* These defines are copied from dmpDefaultMPU6050.c in the general MPL
# * releases. These defines may change for each DMP image, so be sure to modify
# * these values when switching to a new image.
# */
CFG_LP_QUAT = 2712
END_ORIENT_TEMP = 1866
CFG_27 = 2742
CFG_20 = 2224
CFG_23 = 2745
CFG_FIFO_ON_EVENT = 2690
END_PREDICTION_UPDATE = 1761
CGNOTICE_INTR = 2620
X_GRT_Y_TMP = 1358
CFG_DR_INT = 1029
CFG_AUTH = 1035
UPDATE_PROP_ROT = 1835
END_COMPARE_Y_X_TMP2 = 1455
SKIP_X_GRT_Y_TMP = 1359
SKIP_END_COMPARE = 1435
FCFG_3 = 1088
FCFG_2 = 1066
FCFG_1 = 1062
END_COMPARE_Y_X_TMP3 = 1434
FCFG_7 = 1073
FCFG_6 = 1106
FLAT_STATE_END = 1713
SWING_END_4 = 1616
SWING_END_2 = 1565
SWING_END_3 = 1587
SWING_END_1 = 1550
CFG_8 = 2718
CFG_15 = 2727
CFG_16 = 2746
CFG_EXT_GYRO_BIAS = 1189
END_COMPARE_Y_X_TMP = 1407
DO_NOT_UPDATE_PROP_ROT = 1839
CFG_7 = 1205
FLAT_STATE_END_TEMP = 1683
END_COMPARE_Y_X = 1484
SKIP_SWING_END_1 = 1551
SKIP_SWING_END_3 = 1588
SKIP_SWING_END_2 = 1566
TILTG75_START = 1672
CFG_6 = 2753
TILTL75_END = 1669
END_ORIENT = 1884
CFG_FLICK_IN = 2573
TILTL75_START = 1643
CFG_MOTION_BIAS = 1208
X_GRT_Y = 1408
TEMPLABEL = 2324
CFG_ANDROID_ORIENT_INT = 1853
CFG_GYRO_RAW_DATA = 2722
X_GRT_Y_TMP2 = 1379
D_0_22 = 22 + 512
D_0_24 = 24 + 512
D_0_36 = 36
D_0_52 = 52
D_0_96 = 96
D_0_104 = 104
D_0_108 = 108
D_0_163 = 163
D_0_188 = 188
D_0_192 = 192
D_0_224 = 224
D_0_228 = 228
D_0_232 = 232
D_0_236 = 236
D_1_2 = 256 + 2
D_1_4 = 256 + 4
D_1_8 = 256 + 8
D_1_10 = 256 + 10
D_1_24 = 256 + 24
D_1_28 = 256 + 28
D_1_36 = 256 + 36
D_1_40 = 256 + 40
D_1_44 = 256 + 44
D_1_72 = 256 + 72
D_1_74 = 256 + 74
D_1_79 = 256 + 79
D_1_88 = 256 + 88
D_1_90 = 256 + 90
D_1_92 = 256 + 92
D_1_96 = 256 + 96
D_1_98 = 256 + 98
D_1_106 = 256 + 106
D_1_108 = 256 + 108
D_1_112 = 256 + 112
D_1_128 = 256 + 144
D_1_152 = 256 + 12
D_1_160 = 256 + 160
D_1_176 = 256 + 176
D_1_178 = 256 + 178
D_1_218 = 256 + 218
D_1_232 = 256 + 232
D_1_236 = 256 + 236
D_1_240 = 256 + 240
D_1_244 = 256 + 244
D_1_250 = 256 + 250
D_1_252 = 256 + 252
D_2_12 = 512 + 12
D_2_96 = 512 + 96
D_2_108 = 512 + 108
D_2_208 = 512 + 208
D_2_224 = 512 + 224
D_2_236 = 512 + 236
D_2_244 = 512 + 244
D_2_248 = 512 + 248
D_2_252 = 512 + 252
CPASS_BIAS_X = 35 * 16 + 4
CPASS_BIAS_Y = 35 * 16 + 8
CPASS_BIAS_Z = 35 * 16 + 12
CPASS_MTX_00 = 36 * 16
CPASS_MTX_01 = 36 * 16 + 4
CPASS_MTX_02 = 36 * 16 + 8
CPASS_MTX_10 = 36 * 16 + 12
CPASS_MTX_11 = 37 * 16
CPASS_MTX_12 = 37 * 16 + 4
CPASS_MTX_20 = 37 * 16 + 8
CPASS_MTX_21 = 37 * 16 + 12
CPASS_MTX_22 = 43 * 16 + 12
D_EXT_GYRO_BIAS_X = 61 * 16
D_EXT_GYRO_BIAS_Y = 61 * 16 + 4
D_EXT_GYRO_BIAS_Z = 61 * 16 + 8
D_ACT0 = 40 * 16
D_ACSX = 40 * 16 + 4
D_ACSY = 40 * 16 + 8
D_ACSZ = 40 * 16 + 12
FLICK_MSG = 45 * 16 + 4
FLICK_COUNTER = 45 * 16 + 8
FLICK_LOWER = 45 * 16 + 12
FLICK_UPPER = 46 * 16 + 12
D_AUTH_OUT = 992
D_AUTH_IN = 996
D_AUTH_A = 1000
D_AUTH_B = 1004
D_PEDSTD_BP_B = 768 + 0x1C
D_PEDSTD_HP_A = 768 + 0x78
D_PEDSTD_HP_B = 768 + 0x7C
D_PEDSTD_BP_A4 = 768 + 0x40
D_PEDSTD_BP_A3 = 768 + 0x44
D_PEDSTD_BP_A2 = 768 + 0x48
D_PEDSTD_BP_A1 = 768 + 0x4C
D_PEDSTD_INT_THRSH = 768 + 0x68
D_PEDSTD_CLIP = 768 + 0x6C
D_PEDSTD_SB = 768 + 0x28
D_PEDSTD_SB_TIME = 768 + 0x2C
D_PEDSTD_PEAKTHRSH = 768 + 0x98
D_PEDSTD_TIML = 768 + 0x2A
D_PEDSTD_TIMH = 768 + 0x2E
D_PEDSTD_PEAK = 768 + 0X94
D_PEDSTD_STEPCTR = 768 + 0x60
D_PEDSTD_TIMECTR = 964
D_PEDSTD_DECI = 768 + 0xA0
D_HOST_NO_MOT = 976
D_ACCEL_BIAS = 660
D_ORIENT_GAP = 76
D_TILT0_H = 48
D_TILT0_L = 50
D_TILT1_H = 52
D_TILT1_L = 54
D_TILT2_H = 56
D_TILT2_L = 58
D_TILT3_H = 60
D_TILT3_L = 62
INT_SRC_TAP = 0x01
INT_SRC_ANDROID_ORIENT = 0x08
TAP_X = 0x01
TAP_Y = 0x02
TAP_Z = 0x04
TAP_XYZ = 0x07
TAP_X_UP = 0x01
TAP_X_DOWN = 0x02
TAP_Y_UP = 0x03
TAP_Y_DOWN = 0x04
TAP_Z_UP = 0x05
TAP_Z_DOWN = 0x06
ANDROID_ORIENT_PORTRAIT = 0x00
ANDROID_ORIENT_LANDSCAPE = 0x01
ANDROID_ORIENT_REVERSE_PORTRAIT = 0x02
ANDROID_ORIENT_REVERSE_LANDSCAPE = 0x03
DMP_INT_GESTURE = 0x01
DMP_INT_CONTINUOUS = 0x02
DMP_FEATURE_TAP = 0x001
DMP_FEATURE_ANDROID_ORIENT = 0x002
DMP_FEATURE_LP_QUAT = 0x004
DMP_FEATURE_PEDOMETER = 0x008
DMP_FEATURE_6X_LP_QUAT = 0x010
DMP_FEATURE_GYRO_CAL = 0x020
DMP_FEATURE_SEND_RAW_ACCEL = 0x040
DMP_FEATURE_SEND_RAW_GYRO = 0x080
DMP_FEATURE_SEND_CAL_GYRO = 0x100
DMP_FEATURE_SEND_ANY_GYRO = DMP_FEATURE_SEND_RAW_GYRO | DMP_FEATURE_SEND_CAL_GYRO
QUAT_ERROR_THRESH = 1 << 24
QUAT_MAG_SQ_NORMALIZED = 1 << 28
QUAT_MAG_SQ_MIN = (QUAT_MAG_SQ_NORMALIZED - QUAT_ERROR_THRESH)
QUAT_MAG_SQ_MAX = (QUAT_MAG_SQ_NORMALIZED + QUAT_ERROR_THRESH)
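        # Bounds used to sanity-check quaternions coming out of the DMP FIFO:
        # a well-formed unit quaternion in the DMP's fixed-point format has a
        # squared magnitude near QUAT_MAG_SQ_NORMALIZED, so values outside
        # MIN..MAX indicate a corrupt packet.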
DMP_SAMPLE_RATE = 50 # was 200
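        # GYRO_SF is the DMP gyro scale factor; InvenSense derives it for a
        # 200 Hz reference rate, so it is rescaled by 200 / DMP_SAMPLE_RATE.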
GYRO_SF = 46850825 * 200 / DMP_SAMPLE_RATE
NUM_REG = 128
# ifdef AK89xx_SECONDARY
# , .raw_compass = 0x49,
# .s0_addr = 0x25,
# .s0_reg = 0x26,
# .s0_ctrl = 0x27,
# .s1_addr = 0x28,
# .s1_reg = 0x29,
# .s1_ctrl = 0x2A,
# .s4_ctrl = 0x34,
# .s0_do = 0x63,
S1_D0 = 0x64
# .i2c_delay_ctrl = 0x67
# endif
'''
/* ================================================================================================ *
| Default MotionApps v2.0 42-byte FIFO packet structure: |
| |
| [QUAT W][ ][QUAT X][ ][QUAT Y][ ][QUAT Z][ ][GYRO X][ ][GYRO Y][ ] |
| 0 1 2 3 4 5 6 7 8 9 10 11 12 13 14 15 16 17 18 19 20 21 22 23 |
| |
| [GYRO Z][ ][ACC X ][ ][ACC Y ][ ][ACC Z ][ ][ ] |
| 24 25 26 27 28 29 30 31 32 33 34 35 36 37 38 39 40 41 |
* ================================================================================================ */
'''
# this block of memory gets written to the MPU on start-up, and it seems
# to be volatile memory, so it has to be done each time (it only takes ~1
# second though)
# Code from MOTION DRIVER v6.12
DMP_CODE = [
# bank # 0
0x00, 0x00, 0x70, 0x00, 0x00, 0x00, 0x00, 0x24, 0x00, 0x00, 0x00, 0x02, 0x00, 0x03, 0x00, 0x00,
0x00, 0x65, 0x00, 0x54, 0xff, 0xef, 0x00, 0x00, 0xfa, 0x80, 0x00, 0x0b, 0x12, 0x82, 0x00, 0x01,
0x03, 0x0c, 0x30, 0xc3, 0x0e, 0x8c, 0x8c, 0xe9, 0x14, 0xd5, 0x40, 0x02, 0x13, 0x71, 0x0f, 0x8e,
0x38, 0x83, 0xf8, 0x83, 0x30, 0x00, 0xf8, 0x83, 0x25, 0x8e, 0xf8, 0x83, 0x30, 0x00, 0xf8, 0x83,
0xff, 0xff, 0xff, 0xff, 0x0f, 0xfe, 0xa9, 0xd6, 0x24, 0x00, 0x04, 0x00, 0x1a, 0x82, 0x79, 0xa1,
0x00, 0x00, 0x00, 0x3c, 0xff, 0xff, 0x00, 0x00, 0x00, 0x10, 0x00, 0x00, 0x38, 0x83, 0x6f, 0xa2,
0x00, 0x3e, 0x03, 0x30, 0x40, 0x00, 0x00, 0x00, 0x02, 0xca, 0xe3, 0x09, 0x3e, 0x80, 0x00, 0x00,
0x20, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x40, 0x00, 0x00, 0x00, 0x60, 0x00, 0x00, 0x00,
0x00, 0x0c, 0x00, 0x00, 0x00, 0x0c, 0x18, 0x6e, 0x00, 0x00, 0x06, 0x92, 0x0a, 0x16, 0xc0, 0xdf,
0xff, 0xff, 0x02, 0x56, 0xfd, 0x8c, 0xd3, 0x77, 0xff, 0xe1, 0xc4, 0x96, 0xe0, 0xc5, 0xbe, 0xaa,
0x00, 0x00, 0x00, 0x00, 0xff, 0xff, 0x0b, 0x2b, 0x00, 0x00, 0x16, 0x57, 0x00, 0x00, 0x03, 0x59,
0x40, 0x00, 0x00, 0x00, 0x00, 0x00, 0x1d, 0xfa, 0x00, 0x02, 0x6c, 0x1d, 0x00, 0x00, 0x00, 0x00,
0x3f, 0xff, 0xdf, 0xeb, 0x00, 0x3e, 0xb3, 0xb6, 0x00, 0x0d, 0x22, 0x78, 0x00, 0x00, 0x2f, 0x3c,
0x00, 0x00, 0x00, 0x00, 0x00, 0x19, 0x42, 0xb5, 0x00, 0x00, 0x39, 0xa2, 0x00, 0x00, 0xb3, 0x65,
0xd9, 0x0e, 0x9f, 0xc9, 0x1d, 0xcf, 0x4c, 0x34, 0x30, 0x00, 0x00, 0x00, 0x50, 0x00, 0x00, 0x00,
0x3b, 0xb6, 0x7a, 0xe8, 0x00, 0x64, 0x00, 0x00, 0x00, 0xc8, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
# bank # 1
0x10, 0x00, 0x00, 0x00, 0x10, 0x00, 0xfa, 0x92, 0x10, 0x00, 0x22, 0x5e, 0x00, 0x0d, 0x22, 0x9f,
0x00, 0x01, 0x00, 0x00, 0x00, 0x32, 0x00, 0x00, 0xff, 0x46, 0x00, 0x00, 0x63, 0xd4, 0x00, 0x00,
0x10, 0x00, 0x00, 0x00, 0x04, 0xd6, 0x00, 0x00, 0x04, 0xcc, 0x00, 0x00, 0x04, 0xcc, 0x00, 0x00,
0x00, 0x00, 0x10, 0x72, 0x00, 0x00, 0x00, 0x40, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x06, 0x00, 0x02, 0x00, 0x05, 0x00, 0x07, 0x00, 0x00, 0x00, 0x00, 0x00, 0x64, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x05, 0x00, 0x05, 0x00, 0x64, 0x00, 0x20, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x40, 0x00, 0x00, 0x00, 0x03, 0x00,
0x00, 0x00, 0x00, 0x32, 0xf8, 0x98, 0x00, 0x00, 0xff, 0x65, 0x00, 0x00, 0x83, 0x0f, 0x00, 0x00,
0xff, 0x9b, 0xfc, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x10, 0x00,
0x40, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x06, 0x00, 0x00, 0xb2, 0x6a, 0x00, 0x02, 0x00, 0x00,
0x00, 0x01, 0xfb, 0x83, 0x00, 0x68, 0x00, 0x00, 0x00, 0xd9, 0xfc, 0x00, 0x7c, 0xf1, 0xff, 0x83,
0x00, 0x00, 0x00, 0x00, 0x00, 0x65, 0x00, 0x00, 0x00, 0x64, 0x03, 0xe8, 0x00, 0x64, 0x00, 0x28,
0x00, 0x00, 0x00, 0x25, 0x00, 0x00, 0x00, 0x00, 0x16, 0xa0, 0x00, 0x00, 0x00, 0x00, 0x10, 0x00,
0x00, 0x00, 0x10, 0x00, 0x00, 0x2f, 0x00, 0x00, 0x00, 0x00, 0x01, 0xf4, 0x00, 0x00, 0x10, 0x00,
# bank # 2
0x00, 0x28, 0x00, 0x00, 0xff, 0xff, 0x45, 0x81, 0xff, 0xff, 0xfa, 0x72, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x44, 0x00, 0x05, 0x00, 0x05, 0xba, 0xc6, 0x00, 0x47, 0x78, 0xa2,
0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x06, 0x00, 0x00, 0x00, 0x00, 0x14,
0x00, 0x00, 0x25, 0x4d, 0x00, 0x2f, 0x70, 0x6d, 0x00, 0x00, 0x05, 0xae, 0x00, 0x0c, 0x02, 0xd0,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x1b, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x64, 0x00, 0x00, 0x00, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x1b, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x0e, 0x00, 0x0e,
0x00, 0x00, 0x0a, 0xc7, 0x00, 0x04, 0x00, 0x00, 0x00, 0x00, 0x00, 0x32, 0xff, 0xff, 0xff, 0x9c,
0x00, 0x00, 0x0b, 0x2b, 0x00, 0x00, 0x00, 0x02, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x64,
0xff, 0xe5, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
# bank # 3
0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x00, 0x00, 0x01, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x01, 0x80, 0x00, 0x00, 0x01, 0x80, 0x00, 0x00, 0x01, 0x80, 0x00, 0x00, 0x24, 0x26, 0xd3,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x06, 0x00, 0x10, 0x00, 0x96, 0x00, 0x3c,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x0c, 0x0a, 0x4e, 0x68, 0xcd, 0xcf, 0x77, 0x09, 0x50, 0x16, 0x67, 0x59, 0xc6, 0x19, 0xce, 0x82,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x17, 0xd7, 0x84, 0x00, 0x03, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0xc7, 0x93, 0x8f, 0x9d, 0x1e, 0x1b, 0x1c, 0x19,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x02, 0x03, 0x18, 0x85, 0x00, 0x00, 0x40, 0x00,
0x00, 0x00, 0x00, 0x03, 0x00, 0x00, 0x00, 0x03, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x40, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x00, 0x00, 0x00, 0x67, 0x7d, 0xdf, 0x7e, 0x72, 0x90, 0x2e, 0x55, 0x4c, 0xf6, 0xe6, 0x88,
0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
# bank # 4
0xd8, 0xdc, 0xb4, 0xb8, 0xb0, 0xd8, 0xb9, 0xab, 0xf3, 0xf8, 0xfa, 0xb3, 0xb7, 0xbb, 0x8e, 0x9e,
0xae, 0xf1, 0x32, 0xf5, 0x1b, 0xf1, 0xb4, 0xb8, 0xb0, 0x80, 0x97, 0xf1, 0xa9, 0xdf, 0xdf, 0xdf,
0xaa, 0xdf, 0xdf, 0xdf, 0xf2, 0xaa, 0xc5, 0xcd, 0xc7, 0xa9, 0x0c, 0xc9, 0x2c, 0x97, 0xf1, 0xa9,
0x89, 0x26, 0x46, 0x66, 0xb2, 0x89, 0x99, 0xa9, 0x2d, 0x55, 0x7d, 0xb0, 0xb0, 0x8a, 0xa8, 0x96,
0x36, 0x56, 0x76, 0xf1, 0xba, 0xa3, 0xb4, 0xb2, 0x80, 0xc0, 0xb8, 0xa8, 0x97, 0x11, 0xb2, 0x83,
0x98, 0xba, 0xa3, 0xf0, 0x24, 0x08, 0x44, 0x10, 0x64, 0x18, 0xb2, 0xb9, 0xb4, 0x98, 0x83, 0xf1,
0xa3, 0x29, 0x55, 0x7d, 0xba, 0xb5, 0xb1, 0xa3, 0x83, 0x93, 0xf0, 0x00, 0x28, 0x50, 0xf5, 0xb2,
0xb6, 0xaa, 0x83, 0x93, 0x28, 0x54, 0x7c, 0xf1, 0xb9, 0xa3, 0x82, 0x93, 0x61, 0xba, 0xa2, 0xda,
0xde, 0xdf, 0xdb, 0x81, 0x9a, 0xb9, 0xae, 0xf5, 0x60, 0x68, 0x70, 0xf1, 0xda, 0xba, 0xa2, 0xdf,
0xd9, 0xba, 0xa2, 0xfa, 0xb9, 0xa3, 0x82, 0x92, 0xdb, 0x31, 0xba, 0xa2, 0xd9, 0xba, 0xa2, 0xf8,
0xdf, 0x85, 0xa4, 0xd0, 0xc1, 0xbb, 0xad, 0x83, 0xc2, 0xc5, 0xc7, 0xb8, 0xa2, 0xdf, 0xdf, 0xdf,
0xba, 0xa0, 0xdf, 0xdf, 0xdf, 0xd8, 0xd8, 0xf1, 0xb8, 0xaa, 0xb3, 0x8d, 0xb4, 0x98, 0x0d, 0x35,
0x5d, 0xb2, 0xb6, 0xba, 0xaf, 0x8c, 0x96, 0x19, 0x8f, 0x9f, 0xa7, 0x0e, 0x16, 0x1e, 0xb4, 0x9a,
0xb8, 0xaa, 0x87, 0x2c, 0x54, 0x7c, 0xba, 0xa4, 0xb0, 0x8a, 0xb6, 0x91, 0x32, 0x56, 0x76, 0xb2,
0x84, 0x94, 0xa4, 0xc8, 0x08, 0xcd, 0xd8, 0xb8, 0xb4, 0xb0, 0xf1, 0x99, 0x82, 0xa8, 0x2d, 0x55,
0x7d, 0x98, 0xa8, 0x0e, 0x16, 0x1e, 0xa2, 0x2c, 0x54, 0x7c, 0x92, 0xa4, 0xf0, 0x2c, 0x50, 0x78,
# bank # 5
0xf1, 0x84, 0xa8, 0x98, 0xc4, 0xcd, 0xfc, 0xd8, 0x0d, 0xdb, 0xa8, 0xfc, 0x2d, 0xf3, 0xd9, 0xba,
0xa6, 0xf8, 0xda, 0xba, 0xa6, 0xde, 0xd8, 0xba, 0xb2, 0xb6, 0x86, 0x96, 0xa6, 0xd0, 0xf3, 0xc8,
0x41, 0xda, 0xa6, 0xc8, 0xf8, 0xd8, 0xb0, 0xb4, 0xb8, 0x82, 0xa8, 0x92, 0xf5, 0x2c, 0x54, 0x88,
0x98, 0xf1, 0x35, 0xd9, 0xf4, 0x18, 0xd8, 0xf1, 0xa2, 0xd0, 0xf8, 0xf9, 0xa8, 0x84, 0xd9, 0xc7,
0xdf, 0xf8, 0xf8, 0x83, 0xc5, 0xda, 0xdf, 0x69, 0xdf, 0x83, 0xc1, 0xd8, 0xf4, 0x01, 0x14, 0xf1,
0xa8, 0x82, 0x4e, 0xa8, 0x84, 0xf3, 0x11, 0xd1, 0x82, 0xf5, 0xd9, 0x92, 0x28, 0x97, 0x88, 0xf1,
0x09, 0xf4, 0x1c, 0x1c, 0xd8, 0x84, 0xa8, 0xf3, 0xc0, 0xf9, 0xd1, 0xd9, 0x97, 0x82, 0xf1, 0x29,
0xf4, 0x0d, 0xd8, 0xf3, 0xf9, 0xf9, 0xd1, 0xd9, 0x82, 0xf4, 0xc2, 0x03, 0xd8, 0xde, 0xdf, 0x1a,
0xd8, 0xf1, 0xa2, 0xfa, 0xf9, 0xa8, 0x84, 0x98, 0xd9, 0xc7, 0xdf, 0xf8, 0xf8, 0xf8, 0x83, 0xc7,
0xda, 0xdf, 0x69, 0xdf, 0xf8, 0x83, 0xc3, 0xd8, 0xf4, 0x01, 0x14, 0xf1, 0x98, 0xa8, 0x82, 0x2e,
0xa8, 0x84, 0xf3, 0x11, 0xd1, 0x82, 0xf5, 0xd9, 0x92, 0x50, 0x97, 0x88, 0xf1, 0x09, 0xf4, 0x1c,
0xd8, 0x84, 0xa8, 0xf3, 0xc0, 0xf8, 0xf9, 0xd1, 0xd9, 0x97, 0x82, 0xf1, 0x49, 0xf4, 0x0d, 0xd8,
0xf3, 0xf9, 0xf9, 0xd1, 0xd9, 0x82, 0xf4, 0xc4, 0x03, 0xd8, 0xde, 0xdf, 0xd8, 0xf1, 0xad, 0x88,
0x98, 0xcc, 0xa8, 0x09, 0xf9, 0xd9, 0x82, 0x92, 0xa8, 0xf5, 0x7c, 0xf1, 0x88, 0x3a, 0xcf, 0x94,
0x4a, 0x6e, 0x98, 0xdb, 0x69, 0x31, 0xda, 0xad, 0xf2, 0xde, 0xf9, 0xd8, 0x87, 0x95, 0xa8, 0xf2,
0x21, 0xd1, 0xda, 0xa5, 0xf9, 0xf4, 0x17, 0xd9, 0xf1, 0xae, 0x8e, 0xd0, 0xc0, 0xc3, 0xae, 0x82,
# bank # 6
0xc6, 0x84, 0xc3, 0xa8, 0x85, 0x95, 0xc8, 0xa5, 0x88, 0xf2, 0xc0, 0xf1, 0xf4, 0x01, 0x0e, 0xf1,
0x8e, 0x9e, 0xa8, 0xc6, 0x3e, 0x56, 0xf5, 0x54, 0xf1, 0x88, 0x72, 0xf4, 0x01, 0x15, 0xf1, 0x98,
0x45, 0x85, 0x6e, 0xf5, 0x8e, 0x9e, 0x04, 0x88, 0xf1, 0x42, 0x98, 0x5a, 0x8e, 0x9e, 0x06, 0x88,
0x69, 0xf4, 0x01, 0x1c, 0xf1, 0x98, 0x1e, 0x11, 0x08, 0xd0, 0xf5, 0x04, 0xf1, 0x1e, 0x97, 0x02,
0x02, 0x98, 0x36, 0x25, 0xdb, 0xf9, 0xd9, 0x85, 0xa5, 0xf3, 0xc1, 0xda, 0x85, 0xa5, 0xf3, 0xdf,
0xd8, 0x85, 0x95, 0xa8, 0xf3, 0x09, 0xda, 0xa5, 0xfa, 0xd8, 0x82, 0x92, 0xa8, 0xf5, 0x78, 0xf1,
0x88, 0x1a, 0x84, 0x9f, 0x26, 0x88, 0x98, 0x21, 0xda, 0xf4, 0x1d, 0xf3, 0xd8, 0x87, 0x9f, 0x39,
0xd1, 0xaf, 0xd9, 0xdf, 0xdf, 0xfb, 0xf9, 0xf4, 0x0c, 0xf3, 0xd8, 0xfa, 0xd0, 0xf8, 0xda, 0xf9,
0xf9, 0xd0, 0xdf, 0xd9, 0xf9, 0xd8, 0xf4, 0x0b, 0xd8, 0xf3, 0x87, 0x9f, 0x39, 0xd1, 0xaf, 0xd9,
0xdf, 0xdf, 0xf4, 0x1d, 0xf3, 0xd8, 0xfa, 0xfc, 0xa8, 0x69, 0xf9, 0xf9, 0xaf, 0xd0, 0xda, 0xde,
0xfa, 0xd9, 0xf8, 0x8f, 0x9f, 0xa8, 0xf1, 0xcc, 0xf3, 0x98, 0xdb, 0x45, 0xd9, 0xaf, 0xdf, 0xd0,
0xf8, 0xd8, 0xf1, 0x8f, 0x9f, 0xa8, 0xca, 0xf3, 0x88, 0x09, 0xda, 0xaf, 0x8f, 0xcb, 0xf8, 0xd8,
0xf2, 0xad, 0x97, 0x8d, 0x0c, 0xd9, 0xa5, 0xdf, 0xf9, 0xba, 0xa6, 0xf3, 0xfa, 0xf4, 0x12, 0xf2,
0xd8, 0x95, 0x0d, 0xd1, 0xd9, 0xba, 0xa6, 0xf3, 0xfa, 0xda, 0xa5, 0xf2, 0xc1, 0xba, 0xa6, 0xf3,
0xdf, 0xd8, 0xf1, 0xba, 0xb2, 0xb6, 0x86, 0x96, 0xa6, 0xd0, 0xca, 0xf3, 0x49, 0xda, 0xa6, 0xcb,
0xf8, 0xd8, 0xb0, 0xb4, 0xb8, 0xd8, 0xad, 0x84, 0xf2, 0xc0, 0xdf, 0xf1, 0x8f, 0xcb, 0xc3, 0xa8,
# bank # 7
0xb2, 0xb6, 0x86, 0x96, 0xc8, 0xc1, 0xcb, 0xc3, 0xf3, 0xb0, 0xb4, 0x88, 0x98, 0xa8, 0x21, 0xdb,
0x71, 0x8d, 0x9d, 0x71, 0x85, 0x95, 0x21, 0xd9, 0xad, 0xf2, 0xfa, 0xd8, 0x85, 0x97, 0xa8, 0x28,
0xd9, 0xf4, 0x08, 0xd8, 0xf2, 0x8d, 0x29, 0xda, 0xf4, 0x05, 0xd9, 0xf2, 0x85, 0xa4, 0xc2, 0xf2,
0xd8, 0xa8, 0x8d, 0x94, 0x01, 0xd1, 0xd9, 0xf4, 0x11, 0xf2, 0xd8, 0x87, 0x21, 0xd8, 0xf4, 0x0a,
0xd8, 0xf2, 0x84, 0x98, 0xa8, 0xc8, 0x01, 0xd1, 0xd9, 0xf4, 0x11, 0xd8, 0xf3, 0xa4, 0xc8, 0xbb,
0xaf, 0xd0, 0xf2, 0xde, 0xf8, 0xf8, 0xf8, 0xf8, 0xf8, 0xf8, 0xf8, 0xf8, 0xd8, 0xf1, 0xb8, 0xf6,
0xb5, 0xb9, 0xb0, 0x8a, 0x95, 0xa3, 0xde, 0x3c, 0xa3, 0xd9, 0xf8, 0xd8, 0x5c, 0xa3, 0xd9, 0xf8,
0xd8, 0x7c, 0xa3, 0xd9, 0xf8, 0xd8, 0xf8, 0xf9, 0xd1, 0xa5, 0xd9, 0xdf, 0xda, 0xfa, 0xd8, 0xb1,
0x85, 0x30, 0xf7, 0xd9, 0xde, 0xd8, 0xf8, 0x30, 0xad, 0xda, 0xde, 0xd8, 0xf2, 0xb4, 0x8c, 0x99,
0xa3, 0x2d, 0x55, 0x7d, 0xa0, 0x83, 0xdf, 0xdf, 0xdf, 0xb5, 0x91, 0xa0, 0xf6, 0x29, 0xd9, 0xfb,
0xd8, 0xa0, 0xfc, 0x29, 0xd9, 0xfa, 0xd8, 0xa0, 0xd0, 0x51, 0xd9, 0xf8, 0xd8, 0xfc, 0x51, 0xd9,
0xf9, 0xd8, 0x79, 0xd9, 0xfb, 0xd8, 0xa0, 0xd0, 0xfc, 0x79, 0xd9, 0xfa, 0xd8, 0xa1, 0xf9, 0xf9,
0xf9, 0xf9, 0xf9, 0xa0, 0xda, 0xdf, 0xdf, 0xdf, 0xd8, 0xa1, 0xf8, 0xf8, 0xf8, 0xf8, 0xf8, 0xac,
0xde, 0xf8, 0xad, 0xde, 0x83, 0x93, 0xac, 0x2c, 0x54, 0x7c, 0xf1, 0xa8, 0xdf, 0xdf, 0xdf, 0xf6,
0x9d, 0x2c, 0xda, 0xa0, 0xdf, 0xd9, 0xfa, 0xdb, 0x2d, 0xf8, 0xd8, 0xa8, 0x50, 0xda, 0xa0, 0xd0,
0xde, 0xd9, 0xd0, 0xf8, 0xf8, 0xf8, 0xdb, 0x55, 0xf8, 0xd8, 0xa8, 0x78, 0xda, 0xa0, 0xd0, 0xdf,
# bank # 8
0xd9, 0xd0, 0xfa, 0xf8, 0xf8, 0xf8, 0xf8, 0xdb, 0x7d, 0xf8, 0xd8, 0x9c, 0xa8, 0x8c, 0xf5, 0x30,
0xdb, 0x38, 0xd9, 0xd0, 0xde, 0xdf, 0xa0, 0xd0, 0xde, 0xdf, 0xd8, 0xa8, 0x48, 0xdb, 0x58, 0xd9,
0xdf, 0xd0, 0xde, 0xa0, 0xdf, 0xd0, 0xde, 0xd8, 0xa8, 0x68, 0xdb, 0x70, 0xd9, 0xdf, 0xdf, 0xa0,
0xdf, 0xdf, 0xd8, 0xf1, 0xa8, 0x88, 0x90, 0x2c, 0x54, 0x7c, 0x98, 0xa8, 0xd0, 0x5c, 0x38, 0xd1,
0xda, 0xf2, 0xae, 0x8c, 0xdf, 0xf9, 0xd8, 0xb0, 0x87, 0xa8, 0xc1, 0xc1, 0xb1, 0x88, 0xa8, 0xc6,
0xf9, 0xf9, 0xda, 0x36, 0xd8, 0xa8, 0xf9, 0xda, 0x36, 0xd8, 0xa8, 0xf9, 0xda, 0x36, 0xd8, 0xa8,
0xf9, 0xda, 0x36, 0xd8, 0xa8, 0xf9, 0xda, 0x36, 0xd8, 0xf7, 0x8d, 0x9d, 0xad, 0xf8, 0x18, 0xda,
0xf2, 0xae, 0xdf, 0xd8, 0xf7, 0xad, 0xfa, 0x30, 0xd9, 0xa4, 0xde, 0xf9, 0xd8, 0xf2, 0xae, 0xde,
0xfa, 0xf9, 0x83, 0xa7, 0xd9, 0xc3, 0xc5, 0xc7, 0xf1, 0x88, 0x9b, 0xa7, 0x7a, 0xad, 0xf7, 0xde,
0xdf, 0xa4, 0xf8, 0x84, 0x94, 0x08, 0xa7, 0x97, 0xf3, 0x00, 0xae, 0xf2, 0x98, 0x19, 0xa4, 0x88,
0xc6, 0xa3, 0x94, 0x88, 0xf6, 0x32, 0xdf, 0xf2, 0x83, 0x93, 0xdb, 0x09, 0xd9, 0xf2, 0xaa, 0xdf,
0xd8, 0xd8, 0xae, 0xf8, 0xf9, 0xd1, 0xda, 0xf3, 0xa4, 0xde, 0xa7, 0xf1, 0x88, 0x9b, 0x7a, 0xd8,
0xf3, 0x84, 0x94, 0xae, 0x19, 0xf9, 0xda, 0xaa, 0xf1, 0xdf, 0xd8, 0xa8, 0x81, 0xc0, 0xc3, 0xc5,
0xc7, 0xa3, 0x92, 0x83, 0xf6, 0x28, 0xad, 0xde, 0xd9, 0xf8, 0xd8, 0xa3, 0x50, 0xad, 0xd9, 0xf8,
0xd8, 0xa3, 0x78, 0xad, 0xd9, 0xf8, 0xd8, 0xf8, 0xf9, 0xd1, 0xa1, 0xda, 0xde, 0xc3, 0xc5, 0xc7,
0xd8, 0xa1, 0x81, 0x94, 0xf8, 0x18, 0xf2, 0xb0, 0x89, 0xac, 0xc3, 0xc5, 0xc7, 0xf1, 0xd8, 0xb8,
# bank # 9
0xb4, 0xb0, 0x97, 0x86, 0xa8, 0x31, 0x9b, 0x06, 0x99, 0x07, 0xab, 0x97, 0x28, 0x88, 0x9b, 0xf0,
0x0c, 0x20, 0x14, 0x40, 0xb0, 0xb4, 0xb8, 0xf0, 0xa8, 0x8a, 0x9a, 0x28, 0x50, 0x78, 0xb7, 0x9b,
0xa8, 0x29, 0x51, 0x79, 0x24, 0x70, 0x59, 0x44, 0x69, 0x38, 0x64, 0x48, 0x31, 0xf1, 0xbb, 0xab,
0x88, 0x00, 0x2c, 0x54, 0x7c, 0xf0, 0xb3, 0x8b, 0xb8, 0xa8, 0x04, 0x28, 0x50, 0x78, 0xf1, 0xb0,
0x88, 0xb4, 0x97, 0x26, 0xa8, 0x59, 0x98, 0xbb, 0xab, 0xb3, 0x8b, 0x02, 0x26, 0x46, 0x66, 0xb0,
0xb8, 0xf0, 0x8a, 0x9c, 0xa8, 0x29, 0x51, 0x79, 0x8b, 0x29, 0x51, 0x79, 0x8a, 0x24, 0x70, 0x59,
0x8b, 0x20, 0x58, 0x71, 0x8a, 0x44, 0x69, 0x38, 0x8b, 0x39, 0x40, 0x68, 0x8a, 0x64, 0x48, 0x31,
0x8b, 0x30, 0x49, 0x60, 0x88, 0xf1, 0xac, 0x00, 0x2c, 0x54, 0x7c, 0xf0, 0x8c, 0xa8, 0x04, 0x28,
0x50, 0x78, 0xf1, 0x88, 0x97, 0x26, 0xa8, 0x59, 0x98, 0xac, 0x8c, 0x02, 0x26, 0x46, 0x66, 0xf0,
0x89, 0x9c, 0xa8, 0x29, 0x51, 0x79, 0x24, 0x70, 0x59, 0x44, 0x69, 0x38, 0x64, 0x48, 0x31, 0xa9,
0x88, 0x09, 0x20, 0x59, 0x70, 0xab, 0x11, 0x38, 0x40, 0x69, 0xa8, 0x19, 0x31, 0x48, 0x60, 0x8c,
0xa8, 0x3c, 0x41, 0x5c, 0x20, 0x7c, 0x00, 0xf1, 0x87, 0x98, 0x19, 0x86, 0xa8, 0x6e, 0x76, 0x7e,
0xa9, 0x99, 0x88, 0x2d, 0x55, 0x7d, 0xd8, 0xb1, 0xb5, 0xb9, 0xa3, 0xdf, 0xdf, 0xdf, 0xae, 0xd0,
0xdf, 0xaa, 0xd0, 0xde, 0xf2, 0xab, 0xf8, 0xf9, 0xd9, 0xb0, 0x87, 0xc4, 0xaa, 0xf1, 0xdf, 0xdf,
0xbb, 0xaf, 0xdf, 0xdf, 0xb9, 0xd8, 0xb1, 0xf1, 0xa3, 0x97, 0x8e, 0x60, 0xdf, 0xb0, 0x84, 0xf2,
0xc8, 0xf8, 0xf9, 0xd9, 0xde, 0xd8, 0x93, 0x85, 0xf1, 0x4a, 0xb1, 0x83, 0xa3, 0x08, 0xb5, 0x83,
# bank # 10
0x9a, 0x08, 0x10, 0xb7, 0x9f, 0x10, 0xd8, 0xf1, 0xb0, 0xba, 0xae, 0xb0, 0x8a, 0xc2, 0xb2, 0xb6,
0x8e, 0x9e, 0xf1, 0xfb, 0xd9, 0xf4, 0x1d, 0xd8, 0xf9, 0xd9, 0x0c, 0xf1, 0xd8, 0xf8, 0xf8, 0xad,
0x61, 0xd9, 0xae, 0xfb, 0xd8, 0xf4, 0x0c, 0xf1, 0xd8, 0xf8, 0xf8, 0xad, 0x19, 0xd9, 0xae, 0xfb,
0xdf, 0xd8, 0xf4, 0x16, 0xf1, 0xd8, 0xf8, 0xad, 0x8d, 0x61, 0xd9, 0xf4, 0xf4, 0xac, 0xf5, 0x9c,
0x9c, 0x8d, 0xdf, 0x2b, 0xba, 0xb6, 0xae, 0xfa, 0xf8, 0xf4, 0x0b, 0xd8, 0xf1, 0xae, 0xd0, 0xf8,
0xad, 0x51, 0xda, 0xae, 0xfa, 0xf8, 0xf1, 0xd8, 0xb9, 0xb1, 0xb6, 0xa3, 0x83, 0x9c, 0x08, 0xb9,
0xb1, 0x83, 0x9a, 0xb5, 0xaa, 0xc0, 0xfd, 0x30, 0x83, 0xb7, 0x9f, 0x10, 0xb5, 0x8b, 0x93, 0xf2,
0x02, 0x02, 0xd1, 0xab, 0xda, 0xde, 0xd8, 0xf1, 0xb0, 0x80, 0xba, 0xab, 0xc0, 0xc3, 0xb2, 0x84,
0xc1, 0xc3, 0xd8, 0xb1, 0xb9, 0xf3, 0x8b, 0xa3, 0x91, 0xb6, 0x09, 0xb4, 0xd9, 0xab, 0xde, 0xb0,
0x87, 0x9c, 0xb9, 0xa3, 0xdd, 0xf1, 0xb3, 0x8b, 0x8b, 0x8b, 0x8b, 0x8b, 0xb0, 0x87, 0xa3, 0xa3,
0xa3, 0xa3, 0xb2, 0x8b, 0xb6, 0x9b, 0xf2, 0xa3, 0xa3, 0xa3, 0xa3, 0xa3, 0xa3, 0xa3, 0xa3, 0xa3,
0xa3, 0xf1, 0xb0, 0x87, 0xb5, 0x9a, 0xa3, 0xf3, 0x9b, 0xa3, 0xa3, 0xdc, 0xba, 0xac, 0xdf, 0xb9,
0xa3, 0xa3, 0xa3, 0xa3, 0xa3, 0xa3, 0xa3, 0xa3, 0xa3, 0xa3, 0xa3, 0xa3, 0xa3, 0xa3, 0xa3, 0xa3,
0xd8, 0xd8, 0xd8, 0xbb, 0xb3, 0xb7, 0xf1, 0xaa, 0xf9, 0xda, 0xff, 0xd9, 0x80, 0x9a, 0xaa, 0x28,
0xb4, 0x80, 0x98, 0xa7, 0x20, 0xb7, 0x97, 0x87, 0xa8, 0x66, 0x88, 0xf0, 0x79, 0x51, 0xf1, 0x90,
0x2c, 0x87, 0x0c, 0xa7, 0x81, 0x97, 0x62, 0x93, 0xf0, 0x71, 0x71, 0x60, 0x85, 0x94, 0x01, 0x29,
# bank # 11
0x51, 0x79, 0x90, 0xa5, 0xf1, 0x28, 0x4c, 0x6c, 0x87, 0x0c, 0x95, 0x18, 0x85, 0x78, 0xa3, 0x83,
0x90, 0x28, 0x4c, 0x6c, 0x88, 0x6c, 0xd8, 0xf3, 0xa2, 0x82, 0x00, 0xf2, 0x10, 0xa8, 0x92, 0x19,
0x80, 0xa2, 0xf2, 0xd9, 0x26, 0xd8, 0xf1, 0x88, 0xa8, 0x4d, 0xd9, 0x48, 0xd8, 0x96, 0xa8, 0x39,
0x80, 0xd9, 0x3c, 0xd8, 0x95, 0x80, 0xa8, 0x39, 0xa6, 0x86, 0x98, 0xd9, 0x2c, 0xda, 0x87, 0xa7,
0x2c, 0xd8, 0xa8, 0x89, 0x95, 0x19, 0xa9, 0x80, 0xd9, 0x38, 0xd8, 0xa8, 0x89, 0x39, 0xa9, 0x80,
0xda, 0x3c, 0xd8, 0xa8, 0x2e, 0xa8, 0x39, 0x90, 0xd9, 0x0c, 0xd8, 0xa8, 0x95, 0x31, 0x98, 0xd9,
0x0c, 0xd8, 0xa8, 0x09, 0xd9, 0xff, 0xd8, 0x01, 0xda, 0xff, 0xd8, 0x95, 0x39, 0xa9, 0xda, 0x26,
0xff, 0xd8, 0x90, 0xa8, 0x0d, 0x89, 0x99, 0xa8, 0x10, 0x80, 0x98, 0x21, 0xda, 0x2e, 0xd8, 0x89,
0x99, 0xa8, 0x31, 0x80, 0xda, 0x2e, 0xd8, 0xa8, 0x86, 0x96, 0x31, 0x80, 0xda, 0x2e, 0xd8, 0xa8,
0x87, 0x31, 0x80, 0xda, 0x2e, 0xd8, 0xa8, 0x82, 0x92, 0xf3, 0x41, 0x80, 0xf1, 0xd9, 0x2e, 0xd8,
0xa8, 0x82, 0xf3, 0x19, 0x80, 0xf1, 0xd9, 0x2e, 0xd8, 0x82, 0xac, 0xf3, 0xc0, 0xa2, 0x80, 0x22,
0xf1, 0xa6, 0x2e, 0xa7, 0x2e, 0xa9, 0x22, 0x98, 0xa8, 0x29, 0xda, 0xac, 0xde, 0xff, 0xd8, 0xa2,
0xf2, 0x2a, 0xf1, 0xa9, 0x2e, 0x82, 0x92, 0xa8, 0xf2, 0x31, 0x80, 0xa6, 0x96, 0xf1, 0xd9, 0x00,
0xac, 0x8c, 0x9c, 0x0c, 0x30, 0xac, 0xde, 0xd0, 0xde, 0xff, 0xd8, 0x8c, 0x9c, 0xac, 0xd0, 0x10,
0xac, 0xde, 0x80, 0x92, 0xa2, 0xf2, 0x4c, 0x82, 0xa8, 0xf1, 0xca, 0xf2, 0x35, 0xf1, 0x96, 0x88,
0xa6, 0xd9, 0x00, 0xd8, 0xf1, 0xff
]
# thanks to Noah Zerkin for piecing this stuff together!
DMP_CONFIG = [
# BANK OFFSET LENGTH [DATA]
0x03, 0x7B, 0x03, 0x4C, 0xCD, 0x6C, # FCFG_1 inv_set_gyro_calibration
0x03, 0xAB, 0x03, 0x36, 0x56, 0x76, # FCFG_3 inv_set_gyro_calibration
0x00, 0x68, 0x04, 0x02, 0xCB, 0x47, 0xA2, # D_0_104 inv_set_gyro_calibration
0x02, 0x18, 0x04, 0x00, 0x05, 0x8B, 0xC1, # D_0_24 inv_set_gyro_calibration
0x01, 0x0C, 0x04, 0x00, 0x00, 0x00, 0x00, # D_1_152 inv_set_accel_calibration
0x03, 0x7F, 0x06, 0x0C, 0xC9, 0x2C, 0x97, 0x97, 0x97, # FCFG_2 inv_set_accel_calibration
0x03, 0x89, 0x03, 0x26, 0x46, 0x66, # FCFG_7 inv_set_accel_calibration
0x00, 0x6C, 0x02, 0x20, 0x00, # D_0_108 inv_set_accel_calibration
0x02, 0x40, 0x04, 0x00, 0x00, 0x00, 0x00, # CPASS_MTX_00 inv_set_compass_calibration
0x02, 0x44, 0x04, 0x00, 0x00, 0x00, 0x00, # CPASS_MTX_01
0x02, 0x48, 0x04, 0x00, 0x00, 0x00, 0x00, # CPASS_MTX_02
0x02, 0x4C, 0x04, 0x00, 0x00, 0x00, 0x00, # CPASS_MTX_10
0x02, 0x50, 0x04, 0x00, 0x00, 0x00, 0x00, # CPASS_MTX_11
0x02, 0x54, 0x04, 0x00, 0x00, 0x00, 0x00, # CPASS_MTX_12
0x02, 0x58, 0x04, 0x00, 0x00, 0x00, 0x00, # CPASS_MTX_20
0x02, 0x5C, 0x04, 0x00, 0x00, 0x00, 0x00, # CPASS_MTX_21
0x02, 0xBC, 0x04, 0x00, 0x00, 0x00, 0x00, # CPASS_MTX_22
0x01, 0xEC, 0x04, 0x00, 0x00, 0x40, 0x00, # D_1_236 inv_apply_endian_accel
0x03, 0x7F, 0x06, 0x0C, 0xC9, 0x2C, 0x97, 0x97, 0x97, # FCFG_2 inv_set_mpu_sensors
0x04, 0x02, 0x03, 0x0D, 0x35, 0x5D, # CFG_MOTION_BIAS inv_turn_on_bias_from_no_motion
0x04, 0x09, 0x04, 0x87, 0x2D, 0x35, 0x3D, # FCFG_5 inv_set_bias_update
0x00, 0xA3, 0x01, 0x00, # D_0_163 inv_set_dead_zone
# SPECIAL 0x01 = enable interrupts
0x00, 0x00, 0x00, 0x01, # SET INT_ENABLE at i=22, SPECIAL INSTRUCTION
0x07, 0x86, 0x01, 0xFE, # CFG_6 inv_set_fifo_interrupt
0x07, 0x41, 0x05, 0xF1, 0x20, 0x28, 0x30, 0x38, # CFG_8 inv_send_quaternion
0x07, 0x7E, 0x01, 0x30, # CFG_16 inv_set_footer
0x07, 0x46, 0x01, 0x9A, # CFG_GYRO_SOURCE inv_send_gyro
0x07, 0x47, 0x04, 0xF1, 0x28, 0x30, 0x38, # CFG_9 inv_send_gyro -> inv_construct3_fifo
0x07, 0x6C, 0x04, 0xF1, 0x28, 0x30, 0x38, # CFG_12 inv_send_accel -> inv_construct3_fifo
0x02, 0x16, 0x02, 0x00, 0x01 # D_0_22 inv_set_fifo_rate
# This very last 0x01 WAS a 0x09, which drops the FIFO rate down to 20 Hz. 0x07 is 25 Hz,
# 0x01 is 100Hz. Going faster than 100Hz (0x00=200Hz) tends to result in very noisy data.
# DMP output frequency is calculated easily using this equation: (200Hz / (1 + value))
# It is important to make sure the host processor can keep up with reading and processing
# the FIFO output at the desired rate. Handling FIFO overflow cleanly is also a good idea.
]
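# Worked example of the FIFO-rate divider described in the comment above (a sketch,
# restating the same formula): DMP output rate = 200Hz / (1 + value), so
# value 0x00 -> 200Hz, 0x01 -> 100Hz, 0x03 -> 50Hz, 0x07 -> 25Hz, 0x09 -> 20Hz.
# The last two bytes of the D_0_22 entry above hold this divider as a big-endian
# word, here 0x0001 for 100Hz.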
DMP_UPDATE = [
0x01, 0xB2, 0x02, 0xFF, 0xFF,
0x01, 0x90, 0x04, 0x09, 0x23, 0xA1, 0x35,
0x01, 0x6A, 0x02, 0x06, 0x00,
0x01, 0x60, 0x08, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00, 0x00,
0x00, 0x60, 0x04, 0x40, 0x00, 0x00, 0x00,
0x01, 0x62, 0x02, 0x00, 0x00,
0x00, 0x60, 0x04, 0x00, 0x40, 0x00, 0x00
]
class FIFOClass(object):
"""FIFO Class
The DMP uses a FIFO buffer that needs to be configured / reset to work properly."""
RA_FIFO_COUNTH = 0x72
RA_FIFO_COUNTL = 0x73
RA_FIFO_R_W = 0x74
RA_FIFO_ENABLE = 0x23
RA_INT_ENABLE = 0x38
RA_USER_CTRL = 0x6A
RA_INT_STATUS = 0x3A
BIT_FIFO_EN = 0x40
BIT_FIFO_RST = 0x04
BIT_FIFO_SIZE_1024 = 0x40
BIT_FIFO_SIZE_2048 = 0x80
BIT_FIFO_SIZE_4096 = 0xC0
BIT_FIFO_OVERFLOW = 0x10
MAX_FIFO = 1024
MAX_PACKET_LENGTH = 32
# FIFO init
def __init__(self, dmp):
self.DMP = dmp
self.mpu = dmp.mpu
self.i2c = dmp.mpu.i2c
self._rate = dmp.DMP_SAMPLE_RATE
self._fifo_enable = 0x00
self._no_reads = 0
@property
def rate(self):
"""FIFO sampling rate"""
return self._rate
@property
def fifo_enable_mask(self):
"""FIFO sensor enabled mask: used to determine which sensors will be read whe nreadign the FIFO.
Disabled sensors data should be discarded.
"""
return self._fifo_enable
def set_fifo_enable_mask(self, value):
self._fifo_enable = value
def reset(self):
"""Reset the FIFO
:return:
"""
if self.mpu.sensors == 0:
raise ValueError("No sensor is defined. ")
data = 0
self.i2c.write_byte(self.RA_INT_ENABLE, data)
self.i2c.write_byte(self.RA_FIFO_ENABLE, data)
self.i2c.write_byte(self.RA_USER_CTRL, data)
if self.DMP.enabled:
if DEBUG: print("DMP enabled, restting FIFO")
data = self.BIT_FIFO_RST | self.DMP.BIT_DMP_RST
self.i2c.write_byte(self.RA_USER_CTRL, data)
time.sleep(0.05)
data = self.DMP.BIT_DMP_EN | self.BIT_FIFO_EN
if self.mpu.sensors & self.mpu.INV_XYZ_COMPASS:
data |= self.mpu.BIT_AUX_IF_EN
self.i2c.write_byte(self.RA_USER_CTRL, data)
if self.DMP.int_enabled:
data = self.DMP.BIT_DMP_INT_EN
else:
data = 0
self.i2c.write_byte(self.RA_INT_ENABLE, data)
data = 0
self.i2c.write_byte(self.RA_FIFO_ENABLE, data)
# if (i2c_write(st.hw->addr, st.reg->fifo_en, 1, &data))
# return -1;
# } else {
data = self.BIT_FIFO_RST
self.i2c.write_byte(self.RA_USER_CTRL, data)
if self.mpu.bypass_mode or not (self.mpu.sensors & self.mpu.INV_XYZ_COMPASS):
data = self.BIT_FIFO_EN
else:
data = self.BIT_FIFO_EN | self.mpu.BIT_AUX_IF_EN
self.i2c.write_byte(self.RA_USER_CTRL, data)
time.sleep(0.05)
if self.DMP.int_enabled:
data = self.DMP.BIT_DATA_RDY_EN
else:
data = 0
self.i2c.write_byte(self.RA_INT_ENABLE, data)
self.i2c.write_byte(self.RA_FIFO_ENABLE, self._fifo_enable)
if DEBUG: print("fifo enable: " + str(hex(self._fifo_enable)))
return True
def set_rate(self, rate):
"""Set DMP FIFO sampling rate. It should not be higher than the DMP sampling rate (200Hz)
Only used when DMP is ON.
:param rate: int -- sampling rate in Hz (2 - 200Hz)
:return:
"""
regs_end = [
self.DMP.DINAFE, self.DMP.DINAF2, self.DMP.DINAAB,
0xc4, self.DMP.DINAAA, self.DMP.DINAF1,
self.DMP.DINADF, self.DMP.DINADF, 0xBB,
0xAF, self.DMP.DINADF, self.DMP.DINADF
]
if rate > self.DMP.DMP_SAMPLE_RATE:
raise ValueError("Sample rate too high: {} > {}".format(rate, self.DMP.DMP_SAMPLE_RATE))
div = self.DMP.DMP_SAMPLE_RATE / rate - 1
tmp = [0x00] * 2
tmp[0] = (div >> 8) & 0xFF
tmp[1] = div & 0xFF
self.DMP._write_mem(self.DMP.D_0_22, 2, tmp)
self.DMP._write_mem(self.DMP.CFG_6, 12, regs_end)
if DEBUG: print("Setting FIFO rate to: " + str(rate))
self._rate = rate
return True
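# Usage sketch (assumes a configured MPU6050 instance `mpu` with the DMP firmware loaded):
# mpu.DMP.fifo.set_rate(50) computes div = 200 / 50 - 1 = 3 and writes the big-endian
# word [0x00, 0x03] to D_0_22, the same divider location used by DMP_CONFIG above.
# Asking for more than DMP_SAMPLE_RATE (200Hz) raises ValueError.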
def get_rate(self):
"""Get current DMP FIFO sampling rate (Hz)
:return: int -- DMP FIFO sampling rate (Hz)
"""
return self._rate
def read_stream(self, length):
"""Read unparsed packets from the FIFO buffer
:param length: int -- number of bytes to read in one FIFO packet
:return: (list, int) -- FIFO packet as a byte array, number of packets left to read
"""
# /**
# * @brief Get one unparsed packet from the FIFO.
# * This function should be used if the packet is to be parsed elsewhere.
# * @param[in] length Length of one FIFO packet.
# * @param[in] data FIFO packet.
# * @param[in] more Number of remaining packets.
# */
if not self.DMP.enabled:
raise Exception("DMP is not enabled" + str(self.DMP.enabled == True))
# if not self.mpu.sensors:
# raise Exception("No sensor defined. DMP needs to be enabled")
# tmp = self.i2c.read_word(self.RA_FIFO_COUNTH)
fifo_count = self.i2c.read_word(self.RA_FIFO_COUNTH)
# if DEBUG: print("FIFO count: " + str(fifo_count) + " length requested: " + str(length))
# fifo_count = (tmp[0] << 8) | tmp[1];
while fifo_count < length + 8: # to account for the extra bytes
# loop here rather than in the read
fifo_count = self.i2c.read_word(self.RA_FIFO_COUNTH)
if DEBUG: print("." * fifo_count),
# more = 0
# return [], more # raise IOError("fifo.read_stream: Could not read the number of bytes requested")
if fifo_count > (self.MAX_FIFO >> 1):
# FIFO is 50% full, better check overflow bit.
tmp = self.i2c.read_byte(self.RA_INT_STATUS)
# if DEBUG: print("FIFO OVERFLOW BIT: " + str(hex(tmp)))
if (tmp & 0xFF) & self.BIT_FIFO_OVERFLOW:
self.reset()
return [], 0
# raise IOError("FIFO has been reset")
# for some reason when the fifo has more than the number of bytes in the packet, it has some
# extra junk on front. We dump it here.
extra = []
if fifo_count > length:
# if fifo_count - length > self.MAX_PACKET_LENGTH:
# # clean FIFO if too many bytes to read
# self.reset()
# else:
# else dump the difference
extra = self.i2c.read_bytes(self.RA_FIFO_R_W, fifo_count - length) # dump the rest ???
if DEBUG: print(":".join(["%0.2x" % x for x in extra]))
# read out data
data = self.i2c.read_bytes(self.RA_FIFO_R_W, length)
more = float(fifo_count / length) - 1 > 0
if DEBUG: print(":".join(["%0.2x" % x for x in data]) + " -- count: " + str(fifo_count) + " -- read: " + str(length) + " -- more: " + str(more))
return extra + data, more
def read(self):
# /**
# * @brief Get one packet from the FIFO.
# * If @e sensors does not contain a particular sensor, disregard the data
# * returned to that pointer.
# * \n @e sensors can contain a combination of the following flags:
# * \n INV_X_GYRO, INV_Y_GYRO, INV_Z_GYRO
# * \n INV_XYZ_GYRO
# * \n INV_XYZ_ACCEL
# * \n INV_WXYZ_QUAT
# * \n If the FIFO has no new data, @e sensors will be zero.
# * \n If the FIFO is disabled, @e sensors will be zero and this function will
# * return a non-zero error code.
# * @param[out] gyro Gyro data in hardware units.
# * @param[out] accel Accel data in hardware units.
# * @param[out] quat 3-axis quaternion data in hardware units.
# * @param[out] timestamp Timestamp in milliseconds.
# * @param[out] sensors Mask of sensors read from FIFO.
# * @param[out] more Number of remaining packets.
# * @return 0 if successful.
# */
fifo_data = [0x00] * self.MAX_PACKET_LENGTH
quat = [0x00] * 4
accel = [0x00] * 3
gyro = [0x00] * 3
ii = 0
# /* TODO: sensors[0] only changes when dmp_enable_feature is called. We can
# * cache this value and save some cycles.
# */
sensors = 0
timestamp = time.time()
if DEBUG: print("No reads: " + str(self._no_reads) + " Packet length: " + str(self.DMP.packet_length))
# Get a packet. Packet length defined by sensors setup
fifo_data, more = self.read_stream(self.DMP.packet_length)
# should never happen since we check on the stream read
while len(fifo_data) == 0:
fifo_data, more = self.read_stream(self.DMP.packet_length)
self._no_reads += 1
if self._no_reads > 100:
raise IOError("Could not read anything from FIFO after 100 tries")
self._no_reads = 0
# Parse DMP packet.
if DEBUG: print("enabled features: " + str(hex(self.DMP.get_enabled_features())))
if self.DMP.get_enabled_features() & (self.DMP.DMP_FEATURE_LP_QUAT | self.DMP.DMP_FEATURE_6X_LP_QUAT):
# ifdef FIFO_CORRUPTION_CHECK
quat_q14 = [0x00] * 4
# endif
quat[0] = (fifo_data[0] << 24) | (fifo_data[1] << 16) | (fifo_data[2] << 8) | fifo_data[3]
quat[1] = (fifo_data[4] << 24) | (fifo_data[5] << 16) | (fifo_data[6] << 8) | fifo_data[7]
quat[2] = (fifo_data[8] << 24) | (fifo_data[9] << 16) | (fifo_data[10] << 8) | fifo_data[11]
quat[3] = (fifo_data[12] << 24) | (fifo_data[13] << 16) | (fifo_data[14] << 8) | fifo_data[15]
ii += 16
ii += 8 # to account for extra bytes read
# ifdef FIFO_CORRUPTION_CHECK
# /* We can detect a corrupted FIFO by monitoring the quaternion data and
# * ensuring that the magnitude is always normalized to one. This
# * shouldn't happen in normal operation, but if an I2C error occurs,
# * the FIFO reads might become misaligned.
# *
# * Let's start by scaling down the quaternion data to avoid long long
# * math.
# */
quat_q14[0] = (quat[0] >> 16) & 0xFFFF
quat_q14[1] = (quat[1] >> 16) & 0xFFFF
quat_q14[2] = (quat[2] >> 16) & 0xFFFF
quat_q14[3] = (quat[3] >> 16) & 0xFFFF
quat_q14 = [ x - 65536 if x >= 0x8000 else x for x in quat_q14]
quat_mag_sq = quat_q14[0] * quat_q14[0] + quat_q14[1] * quat_q14[1] + quat_q14[2] * quat_q14[2] + quat_q14[3] * quat_q14[3]
if DEBUG: print("quat_q14: " + str(quat_q14) + " mag_sqrt: " + str(quat_mag_sq))
# if (quat_mag_sq < self.DMP.QUAT_MAG_SQ_MIN) or (quat_mag_sq > self.DMP.QUAT_MAG_SQ_MAX):
# # Quaternion is outside of the acceptable threshold.
# self.reset()
# sensors = 0
# if DEBUG: print("Quaternion outside of acceptable threshold")
# self._no_reads += 1
# if self._no_reads > 100:
# self._no_reads = 0
# raise IOError("Could not read anything from FIFO after 100 tries")
# return self.read()
# return gyro, accel, quat, timestamp, sensors, more
# # raise ValueError("Quaternion outside of acceptable threshold")
sensors |= self.mpu.INV_WXYZ_QUAT
# endif
if self.DMP.get_enabled_features() & self.DMP.DMP_FEATURE_SEND_RAW_ACCEL:
accel[0] = (fifo_data[ii + 0] << 8) | fifo_data[ii + 1]
accel[1] = (fifo_data[ii + 2] << 8) | fifo_data[ii + 3]
accel[2] = (fifo_data[ii + 4] << 8) | fifo_data[ii + 5]
ii += 6
sensors |= self.mpu.INV_XYZ_ACCEL
if self.DMP.get_enabled_features() & self.DMP.DMP_FEATURE_SEND_ANY_GYRO:
gyro[0] = (fifo_data[ii + 0] << 8) | fifo_data[ii + 1]
gyro[1] = (fifo_data[ii + 2] << 8) | fifo_data[ii + 3]
gyro[2] = (fifo_data[ii + 4] << 8) | fifo_data[ii + 5]
ii += 6
sensors |= self.mpu.INV_XYZ_GYRO
# /* Gesture data is at the end of the DMP packet. Parse it and call
# * the gesture callbacks (if registered).
# */
if self.DMP.get_enabled_features() & (self.DMP.DMP_FEATURE_TAP | self.DMP.DMP_FEATURE_ANDROID_ORIENT):
self.DMP.decode_gesture(fifo_data[ii:])
return gyro, accel, quat, timestamp, sensors, more
def configure(self, sensors):
# /**
# * @brief Select which sensors are pushed to FIFO.
# * @e sensors can contain a combination of the following flags:
# * \n INV_X_GYRO, INV_Y_GYRO, INV_Z_GYRO
# * \n INV_XYZ_GYRO
# * \n INV_XYZ_ACCEL
# * @param[in] sensors Mask of sensors to push to FIFO.
# * @return 0 if successful.
# */
# /* Compass data isn't going into the FIFO. Stop trying. */
sensors &= ~self.mpu.INV_XYZ_COMPASS
if self.DMP.enabled:
print("DMP enabled, quitting")
return True
else:
if self.mpu.sensors == 0:
raise ValueError("Sensor not defined")
prev = self._fifo_enable
self._fifo_enable = sensors & self.mpu.sensors
if self._fifo_enable != sensors:
raise AssertionError("You're not getting what you asked for. Some sensors are not activated in MPU")
else:
result = 0
if sensors or self.mpu.low_power_mode:
if DEBUG: print("Setting interrupt enable")
self.mpu.set_int_enable(True)
else:
if DEBUG: print("Disabling interrupt enable")
self.mpu.set_int_enable(False)
if sensors:
if not self.reset():
self._fifo_enable = prev
raise IOError("Could not reset FIFO")
return result
# DMP init
def __init__(self, mpu):
self.mpu = mpu
self.i2c = self.mpu.i2c
self._fifo = self.FIFOClass(self)
self._loaded = False
self._enabled = False
self._int_enabled = False
self._sample_rate = 0
self._feature_mask = 0x00
self._packet_length = 12
self._tap_cb = None
self._android_orient_cb = None
self._gyro_cal_enabled = False
self._lp_quat_enabled = False
self._lp_6x_quat_enabled = False
@property
def fifo(self):
return self._fifo
@property
def loaded(self):
return self._loaded
@property
def enabled(self):
return self._enabled
@property
def int_enabled(self):
return self._int_enabled
@property
def sample_rate(self):
return self._sample_rate
@property
def packet_length(self):
return self._packet_length
@property
def gyro_cal_enabled(self):
return self._gyro_cal_enabled
@property
def lp_quat_enabled(self):
return self._lp_quat_enabled
@property
def lp_6x_quat_enabled(self):
return self._lp_6x_quat_enabled
def load_motion_driver_firmware(self):
return self._load_firmware(self.DMP_CODE_SIZE, self.DMP_CODE, self.DMP_CODE_START_ADDR, self.DMP_SAMPLE_RATE)
def _load_firmware(self, length, firmware, start_address, sample_rate):
if self._loaded:
print("firmware already loaded")
return False
# raise Exception("firmware already loaded")
if not firmware:
raise Exception("firmware buffer empty")
this_write = self.DMP_MEMORY_CHUNK_SIZE
for i in range(0, length, this_write):
this_write = min(self.DMP_MEMORY_CHUNK_SIZE, length - i)
if DEBUG: print("i: " + str(i) + " writing: \r\n" + str(["0x%0.2x" % x for x in firmware[i:this_write + i]]))
self._write_mem(i, this_write, firmware[i:this_write + i])
dump = self._read_mem(i, this_write)
self._mem_compare(firmware[i:this_write + i], dump)
# Set program start address.
tmp = [0x00] * 2
tmp[0] = start_address >> 8
tmp[1] = start_address & 0xFF
self.i2c.write_bytes(self.RA_DMP_CFG_1, 2, tmp)
self._loaded = True
self._sample_rate = sample_rate
return True
def _write_mem(self, mem_addr, length, data):
# /**
# * @brief Write to the DMP memory.
# * This function prevents I2C writes past the bank boundaries. The DMP memory
# * is only accessible when the chip is awake.
# * @param[in] mem_addr Memory location (bank << 8 | start address)
# * @param[in] length Number of bytes to write.
# * @param[in] data Bytes to write to memory.
# * @return 0 if successful.
# */
if DEBUG: print("writing at " + "0x%0.4x" % mem_addr)
if not data or not isinstance(data, list):
raise ValueError("firmware not in array format")
# if not self.mpu.sensors:
# raise ValueError("No sensor is defined.")
tmp = [0x00] * 2
tmp[0] = mem_addr >> 8
tmp[1] = mem_addr & 0x00FF
# Check bank boundaries.
if tmp[1] + length > self.DMP_MEMORY_BANK_SIZE:
raise ValueError("write -> length beyond bank memory")
# NOTE: this is different from writing a WORD, the BANK SELECT value seems to be a command rather than a REGISTER address
self.i2c.write_bytes(self.RA_BANK_SEL, 2, tmp) # Bank select + Start Memory Addr.
self.i2c.write_bytes(self.RA_MEM_R_W, length, data)
return True
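# Example of the mem_addr encoding (a sketch): mem_addr = (bank << 8) | offset, so D_0_22
# (presumably bank 0x02, offset 0x16, matching the DMP_CONFIG entry above) would be
# addressed as 0x0216, and tmp becomes [0x02, 0x16] before the RA_BANK_SEL write.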
def _read_mem(self, mem_addr, length):
# /**
# * @brief Read from the DMP memory.
# * This function prevents I2C reads past the bank boundaries. The DMP memory
# * is only accessible when the chip is awake.
# * @param[in] mem_addr Memory location (bank << 8 | start address)
# * @param[in] length Number of bytes to read.
# * @param[out] data Bytes read from memory.
# * @return 0 if successful.
# */
if DEBUG: print("reading at " + "0x%0.4x" % mem_addr)
data = [0x00] * length
# if not self.mpu.sensors:
# raise ValueError("No sensor is defined. DMP needs to be enabled")
tmp = [0x00] * 2
tmp[0] = mem_addr >> 8
tmp[1] = mem_addr & 0x00FF
# Check bank boundaries.
if tmp[1] + length > self.DMP_MEMORY_BANK_SIZE:
raise Exception("read -> length beyond bank memory")
# NOTE: this is different from writing a WORD, the BANK SELECT value seems to be a command rather than a REGISTER address
self.i2c.write_bytes(self.RA_BANK_SEL, 2, tmp)
data = self.i2c.read_bytes(self.RA_MEM_R_W, length)
if not data or not isinstance(data, list):
raise Exception("read data is not an array")
return data
def _mem_compare(self, data_in, data_out):
if len(data_in) != len(data_out):
raise Exception("buffer sizes do not match")
for i in range(0, len(data_in)):
if data_in[i] != data_out[i]:
raise Exception(
"data does not match \r\n" + str(["0x%0.2x" % x for x in data_in]) + "\r\n" + str(["0x%0.2x" % x for x in data_out]))
return True
def enable_feature(self, mask):
# /**
# * @brief Enable DMP features.
# * The following \#define's are used in the input mask:
# * \n DMP_FEATURE_TAP
# * \n DMP_FEATURE_ANDROID_ORIENT
# * \n DMP_FEATURE_LP_QUAT
# * \n DMP_FEATURE_6X_LP_QUAT
# * \n DMP_FEATURE_GYRO_CAL
# * \n DMP_FEATURE_SEND_RAW_ACCEL
# * \n DMP_FEATURE_SEND_RAW_GYRO
# * \n NOTE: DMP_FEATURE_LP_QUAT and DMP_FEATURE_6X_LP_QUAT are mutually
# * exclusive.
# * \n NOTE: DMP_FEATURE_SEND_RAW_GYRO and DMP_FEATURE_SEND_CAL_GYRO are also
# * mutually exclusive.
# * @param[in] mask Mask of features to enable.
# * @return 0 if successful.
# */
tmp = [0x00] * 10
# /* TODO: All of these settings can probably be integrated into the default
# * DMP image.
# */
# Set integration scale factor.
tmp[0] = (self.GYRO_SF >> 24) & 0xFF
tmp[1] = (self.GYRO_SF >> 16) & 0xFF
tmp[2] = (self.GYRO_SF >> 8) & 0xFF
tmp[3] = self.GYRO_SF & 0xFF
self._write_mem(self.D_0_104, 4, tmp)
# Send sensor data to the FIFO.
tmp[0] = 0xA3
if mask & self.DMP_FEATURE_SEND_RAW_ACCEL:
tmp[1] = 0xC0
tmp[2] = 0xC8
tmp[3] = 0xC2
else:
tmp[1] = 0xA3
tmp[2] = 0xA3
tmp[3] = 0xA3
if mask & self.DMP_FEATURE_SEND_ANY_GYRO:
tmp[4] = 0xC4
tmp[5] = 0xCC
tmp[6] = 0xC6
else:
tmp[4] = 0xA3
tmp[5] = 0xA3
tmp[6] = 0xA3
tmp[7] = 0xA3
tmp[8] = 0xA3
tmp[9] = 0xA3
self._write_mem(self.CFG_15, 10, tmp)
# Send gesture data to the FIFO.
if mask & (self.DMP_FEATURE_TAP | self.DMP_FEATURE_ANDROID_ORIENT):
tmp[0] = self.DINA20
else:
tmp[0] = 0xD8
self._write_mem(self.CFG_27, 1, tmp)
if mask & self.DMP_FEATURE_GYRO_CAL:
self._enable_gyro_cal(True)
else:
self._enable_gyro_cal(False)
if mask & self.DMP_FEATURE_SEND_ANY_GYRO:
if mask & self.DMP_FEATURE_SEND_CAL_GYRO:
tmp[0] = 0xB2
tmp[1] = 0x8B
tmp[2] = 0xB6
tmp[3] = 0x9B
else:
tmp[0] = self.DINAC0
tmp[1] = self.DINA80
tmp[2] = self.DINAC2
tmp[3] = self.DINA90
self._write_mem(self.CFG_GYRO_RAW_DATA, 4, tmp)
if mask & self.DMP_FEATURE_TAP:
# Enable tap.
tmp[0] = 0xF8
self._write_mem(self.CFG_20, 1, tmp)
self.set_tap_thresh(self.TAP_XYZ, 250)
self.set_tap_axes(self.TAP_XYZ)
self.set_tap_count(1)
self.set_tap_time(100)
self.set_tap_time_multi(500)
self.set_shake_reject_thresh(self.GYRO_SF, 200)
self.set_shake_reject_time(40)
self.set_shake_reject_timeout(10)
else:
tmp[0] = 0xD8
self._write_mem(self.CFG_20, 1, tmp)
if mask & self.DMP_FEATURE_ANDROID_ORIENT:
tmp[0] = 0xD9
else:
tmp[0] = 0xD8
self._write_mem(self.CFG_ANDROID_ORIENT_INT, 1, tmp)
if mask & self.DMP_FEATURE_LP_QUAT:
self._enable_lp_quat(True)
else:
self._enable_lp_quat(False)
if mask & self.DMP_FEATURE_6X_LP_QUAT:
self._enable_6x_lp_quat(True)
else:
self._enable_6x_lp_quat(False)
# Pedometer is always enabled.
self._feature_mask = mask | self.DMP_FEATURE_PEDOMETER
self._fifo.reset()
self._packet_length = 0
if mask & self.DMP_FEATURE_SEND_RAW_ACCEL:
self._packet_length += 6
if mask & self.DMP_FEATURE_SEND_ANY_GYRO:
self._packet_length += 6
if mask & (self.DMP_FEATURE_LP_QUAT | self.DMP_FEATURE_6X_LP_QUAT):
self._packet_length += 16
if mask & (self.DMP_FEATURE_TAP | self.DMP_FEATURE_ANDROID_ORIENT):
self._packet_length += 4
# self._packet_length += 4
if DEBUG: print("Setting packet length to: " + str(self._packet_length))
return True
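# Packet-length sketch for the feature set used by enable_DMP() further down
# (raw accel + calibrated gyro + gyro cal + 6-axis LP quaternion), assuming
# DMP_FEATURE_SEND_ANY_GYRO covers the calibrated-gyro flag as in the InvenSense driver:
# 16 (quat) + 6 (accel) + 6 (gyro) = 28 bytes per FIFO packet; tap/orient would add 4 more.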
def get_enabled_features(self):
# /**
# * @brief Get list of currently enabled DMP features.
# * @param[out] Mask of enabled features.
# * @return 0 if successful.
# */
return self._feature_mask
def _enable_gyro_cal(self, enable):
# /**
# * @brief Calibrate the gyro data in the DMP.
# * After eight seconds of no motion, the DMP will compute gyro biases and
# * subtract them from the quaternion output. If @e dmp_enable_feature is
# * called with @e DMP_FEATURE_SEND_CAL_GYRO, the biases will also be
# * subtracted from the gyro output.
# * @param[in] enable 1 to enable gyro calibration.
# * @return 0 if successful.
# */
if enable:
regs = [0xb8, 0xaa, 0xb3, 0x8d, 0xb4, 0x98, 0x0d, 0x35, 0x5d]
self._write_mem(self.CFG_MOTION_BIAS, 9, regs)
self._gyro_cal_enabled = True
return
else:
regs = [0xb8, 0xaa, 0xaa, 0xaa, 0xb0, 0x88, 0xc3, 0xc5, 0xc7]
self._write_mem(self.CFG_MOTION_BIAS, 9, regs)
self._gyro_cal_enabled = False
return
def _enable_lp_quat(self, enable):
# /**
# * @brief Generate 3-axis quaternions from the DMP.
# * In this driver, the 3-axis and 6-axis DMP quaternion features are mutually
# * exclusive.
# * @param[in] enable 1 to enable 3-axis quaternion.
# * @return 0 if successful.
# */
if enable:
regs = [self.DINBC0, self.DINBC2, self.DINBC4, self.DINBC6]
else:
regs = [0x8B, 0x8B, 0x8B, 0x8B]
self._write_mem(self.CFG_LP_QUAT, 4, regs)
self._lp_quat_enabled = enable
return self._fifo.reset()
def _enable_6x_lp_quat(self, enable):
# /**
# * @brief Generate 6-axis quaternions from the DMP.
# * In this driver, the 3-axis and 6-axis DMP quaternion features are mutually
# * exclusive.
# * @param[in] enable 1 to enable 6-axis quaternion.
# * @return 0 if successful.
# */
if enable:
regs = [self.DINA20, self.DINA28, self.DINA30, self.DINA38]
else:
regs = [0xA3, 0xA3, 0xA3, 0xA3]
self._write_mem(self.CFG_8, 4, regs)
self._lp_6x_quat_enabled = enable
return self._fifo.reset()
'''
/**
* @brief Set tap threshold for a specific axis.
* @param[in] axis 1, 2, and 4 for XYZ accel, respectively.
* @param[in] thresh Tap threshold, in mg/ms.
* @return 0 if successful.
*/
int dmp_set_tap_thresh(unsigned char axis, unsigned short thresh)
{
unsigned char tmp[4], accel_fsr;
float scaled_thresh;
unsigned short dmp_thresh, dmp_thresh_2;
if (!(axis & TAP_XYZ) || thresh > 1600)
return -1;
scaled_thresh = (float)thresh / DMP_SAMPLE_RATE;
mpu_get_accel_fsr(&accel_fsr);
switch (accel_fsr) {
case 2:
dmp_thresh = (unsigned short)(scaled_thresh * 16384);
/* dmp_thresh * 0.75 */
dmp_thresh_2 = (unsigned short)(scaled_thresh * 12288);
break;
case 4:
dmp_thresh = (unsigned short)(scaled_thresh * 8192);
/* dmp_thresh * 0.75 */
dmp_thresh_2 = (unsigned short)(scaled_thresh * 6144);
break;
case 8:
dmp_thresh = (unsigned short)(scaled_thresh * 4096);
/* dmp_thresh * 0.75 */
dmp_thresh_2 = (unsigned short)(scaled_thresh * 3072);
break;
case 16:
dmp_thresh = (unsigned short)(scaled_thresh * 2048);
/* dmp_thresh * 0.75 */
dmp_thresh_2 = (unsigned short)(scaled_thresh * 1536);
break;
default:
return -1;
}
tmp[0] = (unsigned char)(dmp_thresh >> 8);
tmp[1] = (unsigned char)(dmp_thresh & 0xFF);
tmp[2] = (unsigned char)(dmp_thresh_2 >> 8);
tmp[3] = (unsigned char)(dmp_thresh_2 & 0xFF);
if (axis & TAP_X) {
if (mpu_write_mem(DMP_TAP_THX, 2, tmp))
return -1;
if (mpu_write_mem(D_1_36, 2, tmp+2))
return -1;
}
if (axis & TAP_Y) {
if (mpu_write_mem(DMP_TAP_THY, 2, tmp))
return -1;
if (mpu_write_mem(D_1_40, 2, tmp+2))
return -1;
}
if (axis & TAP_Z) {
if (mpu_write_mem(DMP_TAP_THZ, 2, tmp))
return -1;
if (mpu_write_mem(D_1_44, 2, tmp+2))
return -1;
}
return 0;
}
/**
* @brief Set which axes will register a tap.
* @param[in] axis 1, 2, and 4 for XYZ, respectively.
* @return 0 if successful.
*/
int dmp_set_tap_axes(unsigned char axis)
{
unsigned char tmp = 0;
if (axis & TAP_X)
tmp |= 0x30;
if (axis & TAP_Y)
tmp |= 0x0C;
if (axis & TAP_Z)
tmp |= 0x03;
return mpu_write_mem(D_1_72, 1, &tmp);
}
/**
* @brief Set minimum number of taps needed for an interrupt.
* @param[in] min_taps Minimum consecutive taps (1-4).
* @return 0 if successful.
*/
int dmp_set_tap_count(unsigned char min_taps)
{
unsigned char tmp;
if (min_taps < 1)
min_taps = 1;
else if (min_taps > 4)
min_taps = 4;
tmp = min_taps - 1;
return mpu_write_mem(D_1_79, 1, &tmp);
}
/**
* @brief Set length between valid taps.
* @param[in] time Milliseconds between taps.
* @return 0 if successful.
*/
int dmp_set_tap_time(unsigned short time)
{
unsigned short dmp_time;
unsigned char tmp[2];
dmp_time = time / (1000 / DMP_SAMPLE_RATE);
tmp[0] = (unsigned char)(dmp_time >> 8);
tmp[1] = (unsigned char)(dmp_time & 0xFF);
return mpu_write_mem(DMP_TAPW_MIN, 2, tmp);
}
/**
* @brief Set max time between taps to register as a multi-tap.
* @param[in] time Max milliseconds between taps.
* @return 0 if successful.
*/
int dmp_set_tap_time_multi(unsigned short time)
{
unsigned short dmp_time;
unsigned char tmp[2];
dmp_time = time / (1000 / DMP_SAMPLE_RATE);
tmp[0] = (unsigned char)(dmp_time >> 8);
tmp[1] = (unsigned char)(dmp_time & 0xFF);
return mpu_write_mem(D_1_218, 2, tmp);
}
/**
* @brief Set shake rejection threshold.
* If the DMP detects a gyro sample larger than @e thresh, taps are rejected.
* @param[in] sf Gyro scale factor.
* @param[in] thresh Gyro threshold in dps.
* @return 0 if successful.
*/
int dmp_set_shake_reject_thresh(long sf, unsigned short thresh)
{
unsigned char tmp[4];
long thresh_scaled = sf / 1000 * thresh;
tmp[0] = (unsigned char)(((long)thresh_scaled >> 24) & 0xFF);
tmp[1] = (unsigned char)(((long)thresh_scaled >> 16) & 0xFF);
tmp[2] = (unsigned char)(((long)thresh_scaled >> 8) & 0xFF);
tmp[3] = (unsigned char)((long)thresh_scaled & 0xFF);
return mpu_write_mem(D_1_92, 4, tmp);
}
/**
* @brief Set shake rejection time.
* Sets the length of time that the gyro must be outside of the threshold set
* by @e gyro_set_shake_reject_thresh before taps are rejected. A mandatory
* 60 ms is added to this parameter.
* @param[in] time Time in milliseconds.
* @return 0 if successful.
*/
int dmp_set_shake_reject_time(unsigned short time)
{
unsigned char tmp[2];
time /= (1000 / DMP_SAMPLE_RATE);
tmp[0] = time >> 8;
tmp[1] = time & 0xFF;
return mpu_write_mem(D_1_90,2,tmp);
}
/**
* @brief Set shake rejection timeout.
* Sets the length of time after a shake rejection that the gyro must stay
* inside of the threshold before taps can be detected again. A mandatory
* 60 ms is added to this parameter.
* @param[in] time Time in milliseconds.
* @return 0 if successful.
*/
int dmp_set_shake_reject_timeout(unsigned short time)
{
unsigned char tmp[2];
time /= (1000 / DMP_SAMPLE_RATE);
tmp[0] = time >> 8;
tmp[1] = time & 0xFF;
return mpu_write_mem(D_1_88,2,tmp);
}
/**
* @brief Get current step count.
* @param[out] count Number of steps detected.
* @return 0 if successful.
*/
int dmp_get_pedometer_step_count(unsigned long *count)
{
unsigned char tmp[4];
if (!count)
return -1;
if (mpu_read_mem(D_PEDSTD_STEPCTR, 4, tmp))
return -1;
count[0] = ((unsigned long)tmp[0] << 24) | ((unsigned long)tmp[1] << 16) |
((unsigned long)tmp[2] << 8) | tmp[3];
return 0;
}
/**
* @brief Overwrite current step count.
* WARNING: This function writes to DMP memory and could potentially encounter
* a race condition if called while the pedometer is enabled.
* @param[in] count New step count.
* @return 0 if successful.
*/
int dmp_set_pedometer_step_count(unsigned long count)
{
unsigned char tmp[4];
tmp[0] = (unsigned char)((count >> 24) & 0xFF);
tmp[1] = (unsigned char)((count >> 16) & 0xFF);
tmp[2] = (unsigned char)((count >> 8) & 0xFF);
tmp[3] = (unsigned char)(count & 0xFF);
return mpu_write_mem(D_PEDSTD_STEPCTR, 4, tmp);
}
/**
* @brief Get duration of walking time.
* @param[in] time Walk time in milliseconds.
* @return 0 if successful.
*/
int dmp_get_pedometer_walk_time(unsigned long *time)
{
unsigned char tmp[4];
if (!time)
return -1;
if (mpu_read_mem(D_PEDSTD_TIMECTR, 4, tmp))
return -1;
time[0] = (((unsigned long)tmp[0] << 24) | ((unsigned long)tmp[1] << 16) |
((unsigned long)tmp[2] << 8) | tmp[3]) * 20;
return 0;
}
/**
* @brief Overwrite current walk time.
* WARNING: This function writes to DMP memory and could potentially encounter
* a race condition if called while the pedometer is enabled.
* @param[in] time New walk time in milliseconds.
*/
int dmp_set_pedometer_walk_time(unsigned long time)
{
unsigned char tmp[4];
time /= 20;
tmp[0] = (unsigned char)((time >> 24) & 0xFF);
tmp[1] = (unsigned char)((time >> 16) & 0xFF);
tmp[2] = (unsigned char)((time >> 8) & 0xFF);
tmp[3] = (unsigned char)(time & 0xFF);
return mpu_write_mem(D_PEDSTD_TIMECTR, 4, tmp);
}
'''
def set_tap_callback(self, cb):
self._tap_cb = cb
def set_android_orient_callback(self, cb):
self._android_orient_cb = cb
def decode_gesture(self, gesture):
# /**
# * @brief Decode the four-byte gesture data and execute any callbacks.
# * @param[in] gesture Gesture data from DMP packet.
# * @return 0 if successful.
# */
android_orient = gesture[3] & 0xC0
tap = 0x3F & gesture[3]
if gesture[1] & self.INT_SRC_TAP:
direction = tap >> 3
count = (tap % 8) + 1
if self._tap_cb and callable(self._tap_cb):
self._tap_cb(direction, count)
if gesture[1] & self.INT_SRC_ANDROID_ORIENT:
if self._android_orient_cb and callable(self._android_orient_cb):
self._android_orient_cb(android_orient >> 6)
return True
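# Usage sketch for the gesture callbacks (handler names below are hypothetical):
# def on_tap(direction, count): print("tap", direction, count)
# def on_orient(orientation): print("orientation", orientation)
# mpu.DMP.set_tap_callback(on_tap)
# mpu.DMP.set_android_orient_callback(on_orient)
# decode_gesture() then invokes them when the matching INT_SRC_* bit is set in gesture[1].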
def enable_tap(self, enable=True):
if enable:
self.enable_feature(self._feature_mask | self.DMP_FEATURE_TAP)
else:
self.enable_feature(self._feature_mask & ~self.DMP_FEATURE_TAP)
def enable_android_orient(self, enable=True):
if enable:
self.enable_feature(self._feature_mask | self.DMP_FEATURE_ANDROID_ORIENT)
else:
self.enable_feature(self._feature_mask & ~self.DMP_FEATURE_ANDROID_ORIENT)
def enable_lp_quat(self, enable=True):
if enable:
self.enable_feature((self._feature_mask | self.DMP_FEATURE_LP_QUAT) & ~self.DMP_FEATURE_6X_LP_QUAT)
else:
self.enable_feature(self._feature_mask & ~self.DMP_FEATURE_LP_QUAT)
def enable_lp_6x_quat(self, enable=True):
if enable:
self.enable_feature((self._feature_mask | self.DMP_FEATURE_6X_LP_QUAT) & ~self.DMP_FEATURE_LP_QUAT)
else:
self.enable_feature(self._feature_mask & ~self.DMP_FEATURE_6X_LP_QUAT)
def enable_send_raw_gyro(self, enable=True):
if enable:
self.enable_feature((self._feature_mask | self.DMP_FEATURE_SEND_RAW_GYRO) & ~self.DMP_FEATURE_SEND_CAL_GYRO)
else:
self.enable_feature(self._feature_mask & ~self.DMP_FEATURE_SEND_RAW_GYRO)
def enable_send_raw_accel(self, enable=True):
if enable:
self.enable_feature(self._feature_mask | self.DMP_FEATURE_SEND_RAW_ACCEL)
else:
self.enable_feature(self._feature_mask & ~self.DMP_FEATURE_SEND_RAW_ACCEL)
def enable_gyro_calibration(self, enable=True):
if enable:
self.enable_feature(self._feature_mask | self.DMP_FEATURE_GYRO_CAL)
else:
self.enable_feature(self._feature_mask & ~self.DMP_FEATURE_GYRO_CAL)
def enable_send_calibrated_gyro(self, enable=True):
if enable:
self.enable_feature((self._feature_mask | self.DMP_FEATURE_SEND_CAL_GYRO) & ~self.DMP_FEATURE_SEND_RAW_GYRO)
else:
self.enable_feature(self._feature_mask & ~self.DMP_FEATURE_SEND_CAL_GYRO)
def set_interrupt_mode(self, mode):
# /**
# * @brief Specify when a DMP interrupt should occur.
# * A DMP interrupt can be configured to trigger on either of the two
# * conditions below:
# * \n a. One FIFO period has elapsed (set by @e mpu_set_sample_rate).
# * \n b. A tap event has been detected.
# * @param[in] mode DMP_INT_GESTURE or DMP_INT_CONTINUOUS.
# * @return 0 if successful.
# */
regs_continuous = [0xd8, 0xb1, 0xb9, 0xf3, 0x8b, 0xa3, 0x91, 0xb6, 0x09, 0xb4, 0xd9]
regs_gesture = [0xda, 0xb1, 0xb9, 0xf3, 0x8b, 0xa3, 0x91, 0xb6, 0xda, 0xb4, 0xda]
if mode == self.DMP_INT_CONTINUOUS:
return self._write_mem(self.CFG_FIFO_ON_EVENT, 11, regs_continuous)
elif mode == self.DMP_INT_GESTURE:
return self._write_mem(self.CFG_FIFO_ON_EVENT, 11, regs_gesture)
else:
raise ValueError("Mode should be one of: {} > {}".format(self.DMP_INT_CONTINUOUS, self.DMP_INT_GESTURE))
def get_data(self, raw=False):
'''Returns all data from DMP fifo
:return: gyro, accel, quat, timestamp, sensors, more
'''
INV_TWO_POWER_NEG_30 = 9.313225746154785e-10
gyro, accel, quat, timestamp, sensors, more = self._fifo.read()
if not raw:
gyro_scale_modifier = self.mpu.gyro.scale_modifier
gyro = [x - 65536 if x >= 0x8000 else x for x in gyro]
gyro = [x / gyro_scale_modifier for x in gyro]
accel_scale_modifier = self.mpu.accelerometer.scale_modifier
accel = [x - 65536 if x >= 0x8000 else x for x in accel]
accel = [x / accel_scale_modifier * self.mpu.GRAVITIY_MS2 for x in accel]
quat = [x - 4294967296 if x >= 0x80000000 else x for x in quat]
quat = [x * INV_TWO_POWER_NEG_30 for x in quat]
return gyro, accel, quat, timestamp, sensors, more
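# Usage sketch (assumes enable_DMP() has already been called on the parent MPU6050 instance):
# gyro, accel, quat, ts, sensors, more = mpu.DMP.get_data()
# gyro comes back divided by the gyro scale modifier (deg/s at the default range), accel is
# scaled to m/s^2 via GRAVITIY_MS2, and quat is near unit length after the Q30 scaling above;
# pass raw=True to get the untouched hardware units instead.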
def set_state(self, enable):
# /**
# * @brief Enable/disable DMP support.
# * @param[in] enable 1 to turn on the DMP.
# * @return 0 if successful.
# */
if self._enabled == enable:
return True
if enable:
if not self._loaded:
raise IOError("DMP not loaded")
# Disable data ready interrupt.
if DEBUG: print("Disable Data Ready interrupt")
self.mpu.set_int_enable(False)
# Disable bypass mode.
if DEBUG: print("Disable Bypass mode")
self.mpu.set_bypass_mode(False)
# Keep constant sample rate, FIFO rate controlled by DMP.
if DEBUG: print("Set MPU / FIFO sampling rate to DMP sampling rate: " + str(self._sample_rate))
self.mpu.set_sample_rate(self._sample_rate)
# Remove FIFO elements.
if DEBUG: print("Disable FIFO")
self.i2c.write_byte(self._fifo.RA_FIFO_ENABLE, 0x00)
self._enabled = True
# Enable DMP interrupt.
if DEBUG: print("Enable DMP interrupt")
self.mpu.set_int_enable(True)
if DEBUG: print("Reset FIFO")
self._fifo.reset()
else:
# Disable DMP interrupt.
if DEBUG: print("Disable DMP interrupt")
self.mpu.set_int_enable(False)
# Restore FIFO settings.
if DEBUG: print("Restore FIFO to: " + str(self._fifo.fifo_enable_mask))
tmp = self._fifo.fifo_enable_mask
self.i2c.write_byte(self._fifo.RA_FIFO_ENABLE, tmp)
self._enabled = False
if DEBUG: print("Reset FIFO")
self._fifo.reset()
return True
def get_state(self):
# /**
# * @brief Get DMP state.
# * @param[out] enabled 1 if enabled.
# * @return 0 if successful.
# */
return self._enabled
def init(self):
# Reset device
# if DEBUG: print("Resetting MPU6050...")
# self.mpu.reset()
# time.sleep(0.003) # wait after reset
# # enable sleep mode and wake cycle
# if DEBUG: print("Enabling sleep mode...")
# self.mpu.set_sleep_mode(True)
# if DEBUG: print("Enabling wake cycle...")
# self.mpu.set_wake_cycle_mode(True)
#
# # disable sleep mode
# if DEBUG: print("Disabling sleep mode...")
# self.mpu.set_sleep_mode(False)
#
# # get MPU hardware revision
# if DEBUG: print("Selecting user bank 16...")
# self.set_memory_bank(0x10, True, True)
# if DEBUG: print("Selecting memory byte 6...")
# self.set_memory_start_address(0x06)
# if DEBUG: print("Checking hardware revision...")
# hw_revision = self.read_memory_byte()
# if DEBUG: print("Revision @ user[16][6] = " + str(hw_revision))
# if DEBUG: print("Resetting memory bank selection to 0...")
# self.set_memory_bank(0, False, False)
#
# # check OTP bank valid
# if DEBUG: print("Reading OTP bank valid flag...")
# otp_valid = self.get_OTP_bank_valid()
# if DEBUG: print("OTP bank is " + ("valid" if otp_valid else "invalid"))
# get X/Y/Z gyro offsets
if DEBUG: print("Reading gyro offset values...")
offsets = [
self.mpu.gyro.x.offset,
self.mpu.gyro.y.offset,
self.mpu.gyro.z.offset
]
# # setup weird slave stuff (?)
# if DEBUG: print("Setting slave 0 address to 0x7F...")
# self.i2c.set_slave_address(0, 0x7F)
# if DEBUG: print("Disabling I2C Master mode...")
# self.i2c.set_master_mode(False)
# if DEBUG: print("Setting slave 0 address to 0x68 (self)...")
# self.i2c.set_slave_address(0, 0x68)
# if DEBUG: print("Resetting I2C Master control...")
# self.i2c.reset_master()
# time.sleep(0.02)
# load DMP code into memory banks
if DEBUG: print("Writing DMP code to MPU memory banks (" + str(self.DMP_CODE_SIZE) + " bytes)")
if self.load_motion_driver_firmware():
if DEBUG: print("Success! DMP code written and verified.")
self.set_state(enable=True)
if DEBUG: print("sample rate: " + str(self._sample_rate))
self._fifo.set_rate(self._sample_rate)
def __init__(self, bus=1, address=0x68):
"""Constructor: create an instance of the MPU6050
:param bus: smbus.SMBus( bus number )
:param address: int
"""
self.address = address
self.bus = smbus.SMBus(bus)
self._i2c = self.I2CClass(self)
self._DLPF = self.DLPFClass(self)
self._gyro = self.GyroClass(self)
self._accelerometer = self.AccelerometerClass(self)
self._temperature = self.TemperatureClass(self)
self._accel_half = 0
self._sample_rate = 0xFF
self._sensors = 0xFF
self._clock_source = 0xFF
self._int_enable = 0
self._active_low_int = 1
self._latched_int = 0
self._bypass_mode = False
self._int_motion_only = 0
self._low_power_mode = 0
self._DMP = self.DMPClass(self)
# # Wake up the MPU-6050 since it starts in sleep mode
# # and set clock select to X GYRO for lower noise
# self.bus.write_byte_data(self.address, self.PWR_MGMT_1, 0x00 | self.CLK_SEL_XGYRO)
self.set_sleep_mode(False)
@property
def i2c(self):
return self._i2c
@property
def gyro(self):
"""Gyro Object
:return: GyroClass instance
"""
return self._gyro
@property
def accelerometer(self):
"""Accelerometer Object
:return: Accelerometer instance
"""
return self._accelerometer
@property
def temperature(self):
"""Temperature Object
:return: TemperatureClass instance
"""
return self._temperature
@property
def DLPF(self):
"""Digital Low Pass Filter Object
:return: DLPFClass instance
"""
return self._DLPF
@property
def DMP(self):
"""Digital Motion Processor Object
:return: DMPClass instance
"""
return self._DMP
@property
def sensors(self):
return self._sensors
@property
def clock_source(self):
return self._clock_source
@property
def low_power_mode(self):
return self._low_power_mode
@property
def bypass_mode(self):
return self._bypass_mode
def set_sleep_mode(self, enable):
state = self.i2c.read_byte(self.PWR_MGMT_1)
if enable:
self.i2c.write_byte(self.PWR_MGMT_1, state | self.SLEEP_MODE) # set SLEEP bit to 1
else:
self.i2c.write_byte(self.PWR_MGMT_1, state & ~self.SLEEP_MODE) # set SLEEP bit to 0
def get_sleep_mode(self):
return self.i2c.read_byte(self.PWR_MGMT_1) & self.SLEEP_MODE
def set_wake_cycle_mode(self, enable):
state = self.i2c.read_byte(self.PWR_MGMT_1)
if enable:
self.i2c.write_byte(self.PWR_MGMT_1, state & ~self.SLEEP_MODE) # set SLEEP bit to 0/disabled
self.i2c.write_byte(self.PWR_MGMT_1, state | self.CYCLE_MODE) # set CYCLE bit to 1
else:
self.i2c.write_byte(self.PWR_MGMT_1, state & ~self.CYCLE_MODE) # set CYCLE bit to 0
def get_wake_cycle_mode(self):
return self.i2c.read_byte(self.PWR_MGMT_1) & self.CYCLE_MODE
def get_sample_rate(self):
# /**
# * @brief Get sampling rate.
# * @param[out] rate Current sampling rate (Hz).
# * @return 0 if successful.
# */
if not self.DMP.enabled:
raise IOError("DMP not enabled")
else:
return self._sample_rate
def set_sample_rate(self, rate):
# /**
# * @brief Set sampling rate.
# * Sampling rate must be between 4Hz and 1kHz.
# * @param[in] rate Desired sampling rate (Hz).
# * @return 0 if successful.
# */
if DEBUG: print("Setting DMP sampling rate")
if self._sensors == 0:
raise IOError("No sensors defined")
if self.DMP.enabled:
raise IOError("DMP is enabled")
else:
if self._low_power_mode != 0:
if DEBUG: print("Low power mode")
if rate and (rate <= 40):
# Just stay in low-power accel mode.
self.set_low_power_mode(rate) # mpu_lp_accel_mode(rate)
return 0
# Requested rate exceeds the allowed frequencies in LP accel mode,
# switch back to full-power mode.
#
self.set_low_power_mode(0) # mpu_lp_accel_mode(0)
if rate < 4:
rate = 4
elif rate > 1000:
rate = 1000
data = 1000 / rate - 1
self.i2c.write_byte(self.RA_RATE_DIV, data)
# if (i2c_write(st.hw->addr, st.reg->rate_div, 1, &data))
self._sample_rate = 1000 / (1 + data)
# ifdef AK89xx_SECONDARY
# mpu_set_compass_sample_rate(min(st.chip_cfg.compass_sample_rate, MAX_COMPASS_SAMPLE_RATE));
# endif
# Automatically set LPF to 1/2 sampling rate.
self.DLPF.set_frequency(self._sample_rate >> 1)
return True
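# Worked example (sketch): set_sample_rate(100) writes 1000/100 - 1 = 9 to RA_RATE_DIV,
# giving an effective rate of 1000/(1+9) = 100Hz, and the DLPF is then set to half of
# that (50Hz) by the call above.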
def set_int_enable(self, enable):
# /**
# * @brief Enable/disable data ready interrupt.
# * If the DMP is on, the DMP interrupt is enabled. Otherwise, the data ready
# * interrupt is used.
# * @param[in] enable 1 to enable interrupt.
# * @return 0 if successful.
# */
if self.DMP.enabled:
if DEBUG: print("DMP is enabled")
if enable:
if DEBUG: print("Set DMP int enable")
tmp = self.DMP.BIT_DMP_INT_EN
else:
if DEBUG: print("Set DMP disable")
tmp = 0x00
self.i2c.write_byte(self.DMP.fifo.RA_INT_ENABLE, tmp)
self._int_enable = tmp
else:
if DEBUG: print("DMP is disabled")
if self._sensors == 0:
raise ValueError("No sensor defined")
if enable and self._int_enable:
return True
if enable:
if DEBUG: print("Set Data Ready int enable")
tmp = self.BIT_DATA_RDY_EN
else:
if DEBUG: print("Set Data Ready int disable")
tmp = 0x00
self.i2c.write_byte(self.DMP.fifo.RA_INT_ENABLE, tmp)
self._int_enable = tmp
return True
def set_bypass_mode(self, enable):
# /**
# * @brief Set device to bypass mode.
# * @param[in] bypass_on 1 to enable bypass mode.
# * @return 0 if successful.
# */
if self._bypass_mode == enable:
return True
if enable:
tmp = self.i2c.read_byte(self.DMP.fifo.RA_USER_CTRL)
tmp &= ~self.BIT_AUX_IF_EN
self.i2c.write_byte(self.DMP.fifo.RA_USER_CTRL, tmp)
time.sleep(0.003)
tmp = self.BIT_BYPASS_EN
if self._active_low_int:
tmp |= self.BIT_ACTL
if self._latched_int:
tmp |= self.BIT_LATCH_EN | self.BIT_ANY_RD_CLR
self.i2c.write_byte(self.RA_INT_PIN_CFG, tmp)
else:
# Enable I2C master mode if compass is being used.
tmp = self.i2c.read_byte(self.DMP.fifo.RA_USER_CTRL)
if self._sensors & self.INV_XYZ_COMPASS:
tmp |= self.BIT_AUX_IF_EN
else:
tmp &= ~self.BIT_AUX_IF_EN
self.i2c.write_byte(self.DMP.fifo.RA_USER_CTRL, tmp)
time.sleep(0.003)
if self._active_low_int:
tmp = self.BIT_ACTL
else:
tmp = 0
if self._latched_int:
tmp |= self.BIT_LATCH_EN | self.BIT_ANY_RD_CLR
self.i2c.write_byte(self.RA_INT_PIN_CFG, tmp)
self._bypass_mode = enable
return True
def set_int_level(self, active_low):
# /**
# * @brief Set interrupt level.
# * @param[in] active_low 1 for active low, 0 for active high.
# * @return 0 if successful.
# */
self._active_low_int = active_low
def set_int_latched(self, enable):
# /**
# * @brief Enable latched interrupts.
# * Any MPU register will clear the interrupt.
# * @param[in] enable 1 to enable, 0 to disable.
# * @return 0 if successful.
# */
if self._latched_int == enable:
return True
if enable:
tmp = self.BIT_LATCH_EN | self.BIT_ANY_RD_CLR
else:
tmp = 0
if self._bypass_mode:
tmp |= self.BIT_BYPASS_EN
if self._active_low_int:
tmp |= self.BIT_ACTL
self.i2c.write_byte(self.RA_INT_PIN_CFG, tmp)
self._latched_int = enable
return True
def get_low_power_mode(self):
return self._low_power_mode
def set_low_power_mode(self, rate):
# /**
# * @brief Enter low-power accel-only mode.
# * In low-power accel mode, the chip goes to sleep and only wakes up to sample
# * the accelerometer at one of the following frequencies:
# * \n MPU6050: 1.25Hz, 5Hz, 20Hz, 40Hz
# * \n MPU6500: 1.25Hz, 2.5Hz, 5Hz, 10Hz, 20Hz, 40Hz, 80Hz, 160Hz, 320Hz, 640Hz
# * \n If the requested rate is not one listed above, the device will be set to
# * the next highest rate. Requesting a rate above the maximum supported
# * frequency will result in an error.
# * \n To select a fractional wake-up frequency, round down the value passed to
# * @e rate.
# * @param[in] rate Minimum sampling rate, or zero to disable LP
# * accel mode.
# * @return 0 if successful.
# */
if DEBUG: print("Accel low power mode settings:")
if rate > 40:
raise ValueError("Rate " + str(rate) + " > 40 is too high")
if rate == 0:
self.set_int_latched(0) # mpu_set_int_latched(0)
tmp = 0 << 8
tmp |= self.BIT_STBY_XYZG & 0x00FF
self.i2c.write_word(self.PWR_MGMT_1, tmp) # i2c_write(st.hw->addr, st.reg->pwr_mgmt_1, 2, tmp))
self._low_power_mode = 0
return True
# /* For LP accel, we automatically configure the hardware to produce latched
# * interrupts. In LP accel mode, the hardware cycles into sleep mode before
# * it gets a chance to deassert the interrupt pin; therefore, we shift this
# * responsibility over to the MCU.
# *
# * Any register read will clear the interrupt.
# */
self.set_int_latched(True)
# mpu_set_int_latched(1)
# if defined MPU6050
tmp = [0x00] * 2
tmp[0] = self.BIT_LPA_CYCLE
if rate == 1:
tmp[1] = self.INV_LPA_1_25HZ
self.DLPF.set_frequency(5)
elif rate <= 5:
tmp[1] = self.INV_LPA_5HZ
self.DLPF.set_frequency(5) # mpu_set_lpf(5)
elif rate <= 20:
tmp[1] = self.INV_LPA_20HZ
self.DLPF.set_frequency(10) # mpu_set_lpf(10)
else:
tmp[1] = self.INV_LPA_40HZ
self.DLPF.set_frequency(20) # mpu_set_lpf(20);
tmp[1] = (tmp[1] << 6) | self.BIT_STBY_XYZG
self.i2c.write_word(self.PWR_MGMT_1, (tmp[0] << 8) | tmp[1])
# if (i2c_write(st.hw->addr, st.reg->pwr_mgmt_1, 2, tmp))
# elif defined MPU6500
# /* Set wake frequency. */
# if (rate == 1)
# tmp[0] = INV_LPA_1_25HZ;
# else if (rate == 2)
# tmp[0] = INV_LPA_2_5HZ;
# else if (rate <= 5)
# tmp[0] = INV_LPA_5HZ;
# else if (rate <= 10)
# tmp[0] = INV_LPA_10HZ;
# else if (rate <= 20)
# tmp[0] = INV_LPA_20HZ;
# else if (rate <= 40)
# tmp[0] = INV_LPA_40HZ;
# else if (rate <= 80)
# tmp[0] = INV_LPA_80HZ;
# else if (rate <= 160)
# tmp[0] = INV_LPA_160HZ;
# else if (rate <= 320)
# tmp[0] = INV_LPA_320HZ;
# else
# tmp[0] = INV_LPA_640HZ;
# if (i2c_write(st.hw->addr, st.reg->lp_accel_odr, 1, tmp))
# return -1;
# tmp[0] = BIT_LPA_CYCLE;
# if (i2c_write(st.hw->addr, st.reg->pwr_mgmt_1, 1, tmp))
# return -1;
# endif
self._sensors = self.INV_XYZ_ACCEL
self._clock_source = 0
self._low_power_mode = 1
self.DMP.fifo.configure(0)
return True
def set_sensors(self, sensors):
# /**
# * @brief Turn specific sensors on/off.
# * @e sensors can contain a combination of the following flags:
# * \n INV_X_GYRO, INV_Y_GYRO, INV_Z_GYRO
# * \n INV_XYZ_GYRO
# * \n INV_XYZ_ACCEL
# * \n INV_XYZ_COMPASS
# * @param[in] sensors Mask of sensors to wake.
# * @return 0 if successful.
# */
# unsigned char data;
# ifdef AK89xx_SECONDARY
# unsigned char user_ctrl;
# endif
if sensors & self.INV_XYZ_GYRO:
if DEBUG: print("Set PLL clock source")
data = 0 #self.CLK_SEL_PLL
elif sensors != 0:
data = 0
else:
data = self.BIT_SLEEP
self.i2c.write_byte(self.PWR_MGMT_1, data)
self._clock_source = data & ~self.BIT_SLEEP
data = 0
if not (sensors & self.INV_X_GYRO):
if DEBUG: print("Set Gyro X in standby")
data |= self.BIT_STBY_XG
if not (sensors & self.INV_Y_GYRO):
if DEBUG: print("Set Gyro Y in standby")
data |= self.BIT_STBY_YG
if not (sensors & self.INV_Z_GYRO):
if DEBUG: print("Set Gyro Z in standby")
data |= self.BIT_STBY_ZG
if not (sensors & self.INV_XYZ_ACCEL):
if DEBUG: print("Set Accel in standby")
data |= self.BIT_STBY_XYZA
if DEBUG: print("Sensor enable config: " + str(hex(data)))
self.i2c.write_byte(self.PWR_MGMT_2, data)
if sensors and (sensors != self.INV_XYZ_ACCEL):
# Latched interrupts only used in LP accel mode.
if DEBUG: print("Disable Latched interrupt")
self.set_int_latched(0) # mpu_set_int_latched(0)
# ifdef AK89xx_SECONDARY
# ifdef AK89xx_BYPASS
# if sensors & INV_XYZ_COMPASS:
# mpu_set_bypass(1)
# else:
# mpu_set_bypass(0)
# else
user_ctrl = self.i2c.read_byte(self.DMP.fifo.RA_USER_CTRL)
if DEBUG: print("User Control Register before: " + str(hex(user_ctrl)))
# Handle AKM power management.
# if sensors & self.INV_XYZ_COMPASS:
# data = self.AKM_SINGLE_MEASUREMENT
# user_ctrl |= self.DMP.BIT_AUX_IF_EN
# else:
# data = self.AKM_POWER_DOWN
# user_ctrl &= ~self.DMP.BIT_AUX_IF_EN
# self.i2c.write_byte(self.DMP.S1_D0, data)
if self.DMP.enabled:
user_ctrl |= self.DMP.BIT_DMP_EN
else:
user_ctrl &= ~self.DMP.BIT_DMP_EN
if DEBUG: print("User Control Register after: " + str(hex(user_ctrl)))
# Enable/disable I2C master mode.
self.i2c.write_byte(self.DMP.fifo.RA_USER_CTRL, user_ctrl)
# endif
# endif
self._sensors = sensors
self._low_power_mode = 0
time.sleep(0.05)
return True
def set_debug(self, enable):
global DEBUG
if enable:
DEBUG = True
else:
DEBUG = False
# Utilities
@staticmethod
def rolling_average(avg, new_val, n=300):
"""Calculate rolling average
:param avg: float -- current average
:param new_val: float -- value to add to the average
:param n: int -- number of historical values to use
:return: float -- new average
"""
avg -= float(avg) / float(n)
avg += float(new_val) / float(n)
return avg
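# Note (sketch): this is an exponential moving average with smoothing factor 1/n,
# e.g. rolling_average(avg=0.0, new_val=300.0, n=300) returns 1.0.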
def average(self, axes, n=300, raw=True):
if isinstance(axes, list):
avg = []
for axis in axes:
avg.append(axis[0].get_value(raw))
for i in range(0, n, 1):
val = []
for axis in axes:
val.append(axis[0].get_value(raw))
avg = [self.rolling_average(a, b, n=n) for a, b in zip(avg, val)]
return [a + axis[1] for a, axis in zip(avg, axes)]
else:
avg = axes.get_value(raw)
for i in range(0, n, 1):
avg = self.rolling_average(avg, axes.get_value(raw), n=n)
return avg
# MPU-6050 Methods
def enable_DMP(self):
self.DMP.init()
self.DMP.enable_feature(self.DMP.DMP_FEATURE_SEND_RAW_ACCEL | self.DMP.DMP_FEATURE_SEND_CAL_GYRO | self.DMP.DMP_FEATURE_GYRO_CAL | self.DMP.DMP_FEATURE_6X_LP_QUAT)
# | self.DMP.DMP_FEATURE_GYRO_CAL
def reset(self):
if DEBUG: print("Reseting MPU")
self.i2c.write_byte(self.PWR_MGMT_1, self.BIT_RESET)
time.sleep(0.1)
def calibrate(self, spec_a=10, spec_g=150):
# adjustment values: offset to add to the read_value to converge to 0
# it is 0 for all axes except for the z accelerometer which returns 1g when horizontal,
# corresponding to -16384 raw value
adjust = [0, 0, -16384, 0, 0, 0]
limits = [4095, 4095, 4095, 31, 31, 31]
# axes to calibrate (i.e. all)
axes = [
self.accelerometer.x,
self.accelerometer.y,
self.accelerometer.z,
self.gyro.x,
self.gyro.y,
self.gyro.z
]
def clamp(val, limit):
if val <= -1.0 * limit:
return -1.0 * limit
elif val >= limit:
return limit
else:
return val
        def get_estimated_offsets():
            """Estimate per-axis offsets from the slope of the raw-value vs. offset curve.

            The value read back is roughly linear in the applied offset (except at the
            extremes), so the slope of the 'value_read vs. offset' curve can be estimated
            by sampling 2 offsets within a reasonable range; about 2/3 of the offset range
            is used here. For the gyro the offset range is 6 bits (+/-5 bits); for the accel
            it is 15 bits but the usable range is about +/-10 bits. 2/3 of the range is
            ~ +/-2730 for accel and ~ +/-20 for gyro.
            """
            # set offsets
offsets = [2048, 2048, 2048, 16, 16, 16]
for i, axis in enumerate(axes):
axis.set_offset(-1 * offsets[i])
time.sleep(0.2)
n = 500
rolling_avg_minus = self.average([(axis, adj) for axis, adj in zip(axes, adjust)], n=n, raw=True)
if DEBUG: print("Low Offset: " + str(rolling_avg_minus))
for i, axis in enumerate(axes):
axis.set_offset(offsets[i])
time.sleep(0.2)
rolling_avg_plus = self.average([(axis, adj) for axis, adj in zip(axes, adjust)], n=n, raw=True)
if DEBUG: print("High Offset: " + str(rolling_avg_plus))
delta_avg = [a - b for a, b in zip(rolling_avg_plus, rolling_avg_minus)]
if DEBUG: print("Delta Value: " + str(delta_avg))
delta_offset = [2 * a for a in offsets] # offset - (- offset) = 2 * offset
if DEBUG: print("Delta Offset: " + str(delta_offset))
slope = [float(a) / float(b) for a, b in zip(delta_offset, delta_avg)]
if DEBUG: print("Slope:" + str(slope))
# input_offset = slope * read_value + offset_to_find => offset_to_find = input_offset - read_value * slope
for i, axis in enumerate(axes):
axis.set_offset(0)
time.sleep(0.2)
rolling_avg_zero = self.average([(axis, adj) for axis, adj in zip(axes, adjust)], n=n, raw=True)
if DEBUG: print("Zero Offset: " + str(rolling_avg_zero))
# return [int(float(a) - (float(b) * float(c))) for a, b, c in zip(offsets, slope, rolling_avg_plus)]
return [clamp(-1.0 * (float(a) * float(b)), l) for a, b, l in zip(slope, rolling_avg_zero, limits)], slope
if DEBUG: print("Calibrating with Accelerometer raw precision < " + str(spec_a) + " and Gyro raw precision < " + str(spec_g))
specs = [spec_a] * 3 + [spec_g] * 3 # array of the specs
        # Observed values: accel data at zero offset is ~10x the offset;
        # gyro factor is related to scale at +/-250 deg/s
factors = [10.0] * 3 + [131] * 3
offsets = [axis.offset for axis in axes]
if DEBUG: print("Original offsets:" + str(offsets))
offsets, slope = get_estimated_offsets()
if DEBUG: print("Estimated offsets:" + str(offsets))
for i, axis in enumerate(axes):
axis.set_offset(int(offsets[i]))
time.sleep(0.5)
n = 1000
rolling_avg = self.average([(axis, adj) for axis, adj in zip(axes, adjust)], n=n, raw=True)
if DEBUG: print("avg data:" + str(rolling_avg))
# data = [
# self.accelerometer.x.get_value(raw=True),
# self.accelerometer.y.get_value(raw=True),
# self.accelerometer.z.get_value(raw=True) - 16384,
# self.gyro.x.get_value(raw=True),
# self.gyro.y.get_value(raw=True),
# self.gyro.z.get_value(raw=True)
# ]
# rolling_avg = data
n = 1000
while True:
rolling_avg = self.average([(axis, adj) for axis, adj in zip(axes, adjust)], n=n, raw=True)
# count = 300
# for i in range(0, n, 1):
# data = [
# self.accelerometer.x.get_value(raw=True),
# self.accelerometer.y.get_value(raw=True),
# self.accelerometer.z.get_value(raw=True) - 16384,
# self.gyro.x.get_value(raw=True),
# self.gyro.y.get_value(raw=True),
# self.gyro.z.get_value(raw=True)
# ]
#
# rolling_avg = [self.rolling_average(avg, new_value) for avg, new_value in zip(rolling_avg, data)]
# # time.sleep(0.01)
# # count -= 1
# check if we meet the specs
calibrated = all([abs(val) < spec for val, spec in zip(rolling_avg, specs)])
if calibrated:
break
else:
if DEBUG: print("avg data:" + str(rolling_avg))
offsets = [clamp(float(offset) - (0.66 * float(avg) * float(s)), l) for avg, offset, s, l in
zip(rolling_avg, offsets, slope, limits)]
if DEBUG: print("offsets: " + str(offsets))
if DEBUG: print("")
# set offsets
for i, axis in enumerate(axes):
axis.set_offset(int(offsets[i]))
time.sleep(0.2)
# self.accelerometer.x.set_offset(offsets[0])
# self.accelerometer.y.set_offset(offsets[1])
# self.accelerometer.z.set_offset(offsets[2])
# self.gyro.x.set_offset(offsets[3])
# self.gyro.y.set_offset(offsets[4])
# self.gyro.z.set_offset(offsets[5])
def self_test(self):
def check(axis):
ft = axis.get_factory_trim_value()
state = axis.set_self_test_mode()
st_enabled = axis.get_value(raw=True)
axis.set_self_test_mode(state)
st_disabled = axis.get_value(raw=True)
st_response = st_enabled - st_disabled
change = 1 + (st_response - ft) / ft # formula in the doc seems wrong: it doesn't give change in % but as a factor
if abs(change) < 0.14:
if DEBUG: print(axis.name + " Self Test: Passed " + str(change))
return True
else:
if DEBUG: print(axis.name + " Self Test: Failed " + str(change))
if DEBUG: print("ST enabled: " + str(st_enabled))
if DEBUG: print("ST disabled: " + str(st_disabled))
if DEBUG: print("Factory trim: " + str(ft))
return False
return [
check(self.gyro.x),
check(self.gyro.y),
check(self.gyro.z),
check(self.accelerometer.x),
check(self.accelerometer.y),
check(self.accelerometer.z)
]
def _characterize_axis(self, axis, offset_range):
avg = axis.get_value(raw=True)
f = open(axis.name.replace(" ", "_") + ".csv", 'w')
for offset in offset_range:
axis.set_offset(offset)
n = 5
for i in range(0, n, 1):
val = axis.get_value(raw=True)
avg = self.rolling_average(avg, val, n)
f.write(str(",".join([str(offset), str(avg)])) + "\r\n")
if DEBUG: print(str(",".join([str(offset), str(avg)])))
f.close()
def characterize(self):
self._characterize_axis(self.accelerometer.x, range(-4096, 4096, 16))
self._characterize_axis(self.accelerometer.y, range(-4096, 4096, 16))
self._characterize_axis(self.accelerometer.z, range(-4096, 4096, 16))
self._characterize_axis(self.gyro.x, range(-32, 32, 1))
self._characterize_axis(self.gyro.y, range(-32, 32, 1))
self._characterize_axis(self.gyro.z, range(-32, 32, 1))
def run(self):
"""
:return:
"""
data = {"accelerometer": self.accelerometer.values, "gyro": self.gyro.values, "temperature": self.temperature.value}
print("data " + json.dumps(data, indent=4, sort_keys=True))
def run_DMP(self):
while True:
gyro, accel, quaternion, timestamp, sensors, more = self.DMP.get_data(raw=False)
print(json.dumps(
{"gyro_dmp": {"x": gyro[0],
"y": gyro[1],
"z": gyro[2]
},
"gyro": self.gyro.values,
"accel_dmp": {"x": accel[0],
"y": accel[1],
"z": accel[2]},
"accel": self.accelerometer.values,
"quat": {"w": quaternion[1],
"x": quaternion[2],
"y": quaternion[3],
"z": quaternion[0]
}
}, indent=4, sort_keys=True))
x, y, z, w = quaternion
q = Quaternion(w,x,y,z)
v = Vector(0,0,1)
u = v.get_rotated(q)
print("rotated world vector: " + str(u))
time.sleep(0.005) # need to run faster than the FIFO
def run_loop(self):
while True:
self.run()
time.sleep(0.1)
# MPU init
def init(self, enable_dmp=True):
'''
/**
* @brief Initialize hardware.
* Initial configuration:\n
* Gyro FSR: +/- 2000DPS\n
* Accel FSR +/- 2G\n
* DLPF: 42Hz\n
* FIFO rate: 50Hz\n
* Clock source: Gyro PLL\n
* FIFO: Disabled.\n
* Data ready interrupt: Disabled, active low, unlatched.
* @param[in] int_param Platform-specific parameters to interrupt API.
* @return 0 if successful.
*/
'''
data = [0x00] * 6
# Reset device.
self.reset()
# Wake up chip.
self.set_sleep_mode(False)
# self.i2c.write_byte(self.PWR_MGMT_1, 0x00)
self._accel_half = 0
# #ifdef MPU6500
# /* MPU6500 shares 4kB of memory between the DMP and the FIFO. Since the
# * first 3kB are needed by the DMP, we'll use the last 1kB for the FIFO.
# */
# data[0] = BIT_FIFO_SIZE_1024 | 0x8;
# if (i2c_write(st.hw->addr, st.reg->accel_cfg2, 1, data))
# return -1;
# #endif
# Set to invalid values to ensure no I2C writes are skipped.
# self._sensors = 0xFF
# # self.gyro.set_range(0xFF)
# # self.accelerometer.set_range(0xFF)
# # self.DLPF.set(0xFF)
# self._sample_rate = 0xFFFF
# self.DMP.fifo.set_fifo_enable_mask(0xFF)
# self._bypass_mode = 0xFF
# # ifdef AK89xx_SECONDARY
# # st.chip_cfg.compass_sample_rate = 0xFFFF
# # endif
# # mpu_set_sensors always preserves this setting.
# self._clock_source = self.CLK_SEL_PLL
# Handled in next call to mpu_set_bypass.
self._active_low_int = 1
self._latched_int = 0
self._int_motion_only = 0
self._low_power_mode = 0
# memset(&st.chip_cfg.cache, 0, sizeof(st.chip_cfg.cache))
# following is set in their own class init
# self.DMP.enabled = False
# self.DMP.loaded = 0
# self.DMP.sample_rate = 0
self.gyro.set_range(self.gyro.GYRO_RANGE_2000DEG)
self.accelerometer.set_range(self.accelerometer.ACCEL_RANGE_2G)
self.DLPF.set(self.DLPF.DLPF_CFG_5)
self.DMP.set_state(False)
self.set_sample_rate(50) # 50
self.DMP.fifo.configure(0)
# ifndef EMPL_TARGET_STM32F4
# if (int_param)
# reg_int_cb(int_param)
# endif
# ifdef AK89xx_SECONDARY
# setup_compass();
# if (mpu_set_compass_sample_rate(10))
# return -1;
# else
# Already disabled by setup_compass.
self.set_bypass_mode(False)
# if (mpu_set_bypass(0))
# return -1;
# endif
self.set_sensors(0)
# if enable_dmp:
# self.enable_DMP()
return True
def enable_sensors(self):
self.set_sensors(self.INV_XYZ_ACCEL | self.INV_XYZ_GYRO)
self.DMP.fifo.configure(self.INV_XYZ_ACCEL | self.INV_XYZ_GYRO)
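# Typical bring-up sketch (illustrative only; mirrors the defaults documented in init() above --
# gyro FSR 2000 dps, accel FSR 2 g, ~50 Hz sample rate, FIFO initially disabled):
#     mpu = MPU6050(bus=2, address=0x68)
#     mpu.enable_sensors()   # wake accel + gyro and route them into the FIFO
#     mpu.enable_DMP()       # optional: quaternion output via the DMP
#     mpu.run_loop()         # or mpu.run_DMP() when the DMP is enabled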
if __name__ == "__main__":
mpu = MPU6050(bus=2, address=0x68)
# mpu.calibrate()
# mpu.self_test()
mpu.DLPF.set(mpu.DLPF.DLPF_CFG_5)
print("Gyro Offsets: " + str(mpu.gyro.offsets))
print("Accelerometer Offsets: " + str(mpu.accelerometer.offsets))
mpu.run_loop()
| streamnsight/mpu6050 | mpu6050/mpu6050.py | Python | mit | 147,325 |
from L500analysis.data_io.get_cluster_data import GetClusterData
from L500analysis.utils.utils import aexp2redshift
from L500analysis.plotting.tools.figure_formatting import *
from L500analysis.plotting.profiles.tools.profiles_percentile \
import *
from L500analysis.utils.constants import rbins, linear_rbins
from derived_field_functions import *
color = matplotlib.cm.afmhot_r
matplotlib.rcParams['legend.handlelength'] = 0
matplotlib.rcParams['legend.numpoints'] = 1
matplotlib.rcParams['legend.fontsize'] = 12
aexps = [1.0,0.9,0.8,0.7,0.6,0.5,0.45,0.4,0.35]
db_name = 'L500_NR_0'
db_dir = '/home/babyostrich/Documents/Repos/L500analysis/'
profiles_list = ['T_mw', 'r_mid',
'vel_gas_rad_avg',
'Tmw_cm_per_s_2_r500c',
'Vr2_cm_per_s_2_r500c',
'R/R500c']
halo_properties_list=['r500c','M_total_500c']
Tmw_Vr2_ratio=r"$\Xi=T_{mw}/V^2_{r}$"
fXz1=r"$\Xi/\Xi(z=1)$"
pa = PlotAxes(figname='Tmw_Vr2_ratio_500c',
axes=[[0.15,0.4,0.80,0.55],[0.15,0.15,0.80,0.24]],
axes_labels=[Tmw_Vr2_ratio,fXz1],
xlabel=r"$R/R_{500c}$",
ylog=[True,False],
xlim=(0.2,5),
ylims=[(1e-1,1e2),(0.4,1.6)])
TratioV2={}
plots=[TratioV2]
clkeys=['Tmw_Vr2_ratio_500c']
for aexp in aexps :
cldata = GetClusterData(aexp=aexp,db_name=db_name,
db_dir=db_dir,
profiles_list=profiles_list,
halo_properties_list=halo_properties_list)
Tmw = calculate_profiles_mean_variance(cldata['Tmw_cm_per_s_2_r500c'])
Vr2 = calculate_profiles_mean_variance(cldata['Vr2_cm_per_s_2_r500c'])
TratioV2[aexp] = get_profiles_division_mean_variance(
mean_profile1=Tmw['mean'], var_profile1=Tmw['var'],
mean_profile2=Vr2['mean'], var_profile2=Vr2['var'])
pa.axes[Tmw_Vr2_ratio].plot( rbins, TratioV2[aexp]['mean'],
color=color(aexp),ls='-',
label="$z=%3.1f$" % aexp2redshift(aexp))
for aexp in aexps :
fractional_evolution = get_profiles_division_mean_variance(
mean_profile1=TratioV2[aexp]['mean'],
var_profile1=TratioV2[aexp]['var'],
mean_profile2=TratioV2[0.5]['mean'],
var_profile2=TratioV2[0.5]['var'],
)
pa.axes[fXz1].plot( rbins, fractional_evolution['mean'],
color=color(aexp),ls='-')
pa.axes[Tmw_Vr2_ratio].tick_params(labelsize=12)
pa.axes[fXz1].tick_params(labelsize=12)
pa.axes[fXz1].set_yticks(arange(0.6,1.4,0.2))
pa.set_legend(axes_label=Tmw_Vr2_ratio,ncol=3,loc='best', frameon=False)
pa.color_legend_texts(axes_label=Tmw_Vr2_ratio)
pa.savefig()
| cavestruz/L500analysis | plotting/profiles/T_Vr_evolution/Tmw_Vr_evolution/plot_Tmw_Vr_r500c.py | Python | mit | 2,821 |
# -*- coding: utf-8 -*-
# Copyright (c) 2009 - 2015 Detlev Offenbach <[email protected]>
#
"""
Module implementing a network access manager proxy for web pages.
"""
from __future__ import unicode_literals
from PyQt5.QtNetwork import QNetworkAccessManager, QNetworkRequest
try:
from PyQt5.QtNetwork import QSslError # __IGNORE_EXCEPTION__ __IGNORE_WARNING__
SSL_AVAILABLE = True
except ImportError:
SSL_AVAILABLE = False
class NetworkAccessManagerProxy(QNetworkAccessManager):
"""
Class implementing a network access manager proxy for web pages.
"""
primaryManager = None
def __init__(self, parent=None):
"""
Constructor
@param parent reference to the parent object (QObject)
"""
super(NetworkAccessManagerProxy, self).__init__(parent)
self.__webPage = None
def setWebPage(self, page):
"""
Public method to set the reference to a web page.
@param page reference to the web page object (HelpWebPage)
"""
assert page is not None
self.__webPage = page
def setPrimaryNetworkAccessManager(self, manager):
"""
Public method to set the primary network access manager.
@param manager reference to the network access manager object
(QNetworkAccessManager)
"""
assert manager is not None
if self.__class__.primaryManager is None:
self.__class__.primaryManager = manager
self.setCookieJar(self.__class__.primaryManager.cookieJar())
# do not steal ownership
self.cookieJar().setParent(self.__class__.primaryManager)
if SSL_AVAILABLE:
self.sslErrors.connect(self.__class__.primaryManager.sslErrors)
self.proxyAuthenticationRequired.connect(
self.__class__.primaryManager.proxyAuthenticationRequired)
self.authenticationRequired.connect(
self.__class__.primaryManager.authenticationRequired)
self.finished.connect(self.__class__.primaryManager.finished)
def createRequest(self, op, request, outgoingData=None):
"""
Public method to create a request.
@param op the operation to be performed
(QNetworkAccessManager.Operation)
@param request reference to the request object (QNetworkRequest)
@param outgoingData reference to an IODevice containing data to be sent
(QIODevice)
@return reference to the created reply object (QNetworkReply)
"""
if self.primaryManager is not None:
pageRequest = QNetworkRequest(request)
if self.__webPage is not None:
self.__webPage.populateNetworkRequest(pageRequest)
return self.primaryManager.createRequest(
op, pageRequest, outgoingData)
else:
return QNetworkAccessManager.createRequest(
self, op, request, outgoingData)
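# Illustrative wiring (added sketch, not part of the original module): a browser window would
# typically share one primary manager across all of its pages, roughly:
#
#     proxy = NetworkAccessManagerProxy()
#     proxy.setWebPage(help_page)                            # a HelpWebPage instance
#     proxy.setPrimaryNetworkAccessManager(shared_manager)   # the application-wide manager
#     help_page.setNetworkAccessManager(proxy)
#
# createRequest() then forwards every request through the shared manager, so cookies,
# authentication and SSL error handling stay centralised.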
| testmana2/test | Helpviewer/Network/NetworkAccessManagerProxy.py | Python | gpl-3.0 | 3,013 |
#!/usr/bin/env python
from setuptools import setup, find_packages
__version__ = "1.4"
setup(
name="awscodedeploy",
version=__version__,
description="CLI for Code Deploy supporting docker-compose",
author="Location Labs",
author_email="[email protected]",
url="http://locationlabs.com",
packages=find_packages(exclude=["*.tests"]),
setup_requires=[
"nose>=1.3.7"
],
install_requires=[
"awscli>=1.9.5",
"awsenv>=1.7",
"PyYAML>=3.11",
"termcolor>=1.1.0",
],
tests_require=[
"PyHamcrest>=1.8.5",
"mock>=1.0.1",
"coverage>=4.0.1",
],
test_suite="awscodedeploy.tests",
entry_points={
"console_scripts": [
"aws-code-deploy = awscodedeploy.main:main",
]
}
)
| locationlabs/aws-code-deploy | setup.py | Python | apache-2.0 | 813 |
# Given an array S of n integers, are there elements a, b, c in S such that a + b + c = 0?
# Find all unique triplets in the array which gives the sum of zero.
# Elements in a triplet (a,b,c) must be in non-descending order. (ie, a <= b <= c)
# The solution set must not contain duplicate triplets.
# Solution:
# This problem is the extension of the problem below:
# Given a set S of n integers, find all pairs of integers of a and b in S
# such that a + b = k?
# The above problem can be solved in O(n) time, assuming that the set S is already sorted. Using
# two indices, first and last, pointing at the first and last elements, we look at the element
# pointed to by first, which we call A. We need to find B = k - A, the complement of A. If the
# element pointed to by last is less than B, the sum is too small, so we increment first by one
# step. Similarly, if the element pointed to by last is greater than B, we decrement last by one
# step. We are progressively refining the sum step by step. Since each step moves a pointer by
# one position, there are at most n steps, which gives the complexity of O(n).
# By incorporating the solution above, we can solve the 3sum problem in O(n^2) time, which is
# a straight forward extension.
'''
Created on 2013-5-19
@author: Yubin Bai
'''
class Solution:
# @return an integer
def threeSumClosest(self, num, target):
num.sort()
size = len(num)
result = [1 << 33, -1, -1, -1] # a large number
for first in range(size - 2):
left = first + 1
right = size - 1
while left < right:
curr = num[first] + num[left] + num[right]
distance = abs(curr - target)
if distance < result[0]:
result = [distance, num[first], num[left], num[right]]
if curr < target:
left += 1
else:
right -= 1
return result[1] + result[2] + result[3]
# if __name__ == '__main__':
# data = [0,0,0]
# target = 1
# s = Solution()
# print(s.threeSumClosest(data, target))
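# Worked example (added for illustration, kept commented out like the block above): for
# num = [-1, 2, 1, -4] and target = 1 the sorted array is [-4, -1, 1, 2] and the scan settles
# on -1 + 1 + 2 = 2, the achievable sum closest to the target.
# if __name__ == '__main__':
#     print(Solution().threeSumClosest([-1, 2, 1, -4], 1)) # expected: 2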
| asraf209/leetcode | src/3SumClosest/main.py | Python | gpl-3.0 | 2,156 |
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
dependencies = [
('printy', '0001_initial'),
]
operations = [
migrations.CreateModel(
name='PostItModel',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('width', models.FloatField()),
('height', models.FloatField()),
],
),
migrations.AlterField(
model_name='postit',
name='print_page',
field=models.ForeignKey(related_name='posts', to='printy.PrintPage'),
),
migrations.AddField(
model_name='printpage',
name='post_it_model',
field=models.ForeignKey(default=1, to='printy.PostItModel'),
preserve_default=False,
),
]
| jdsolucoes/Ppostit | printy/migrations/0002_auto_20150921_2215.py | Python | apache-2.0 | 967 |
import AnnounceSharePlugin
| OliverCole/ZeroNet | plugins/AnnounceShare/__init__.py | Python | gpl-2.0 | 27 |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
# HORTON: Helpful Open-source Research TOol for N-fermion systems.
# Copyright (C) 2011-2017 The HORTON Development Team
#
# This file is part of HORTON.
#
# HORTON is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 3
# of the License, or (at your option) any later version.
#
# HORTON is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, see <http://www.gnu.org/licenses/>
#
# --
from nose.tools import assert_raises
from horton import * # pylint: disable=wildcard-import,unused-wildcard-import
def test_becke_n2_hfs_sto3g():
fn_fchk = context.get_fn('test/n2_hfs_sto3g.fchk')
mol = IOData.from_file(fn_fchk)
rtf = ExpRTransform(1e-3, 1e1, 100)
rgrid = RadialGrid(rtf)
grid = BeckeMolGrid(mol.coordinates, mol.numbers, mol.pseudo_numbers, (rgrid, 110), random_rotate=False, mode='only')
dm_full = mol.get_dm_full()
moldens = mol.obasis.compute_grid_density_dm(dm_full, grid.points)
bp = BeckeWPart(mol.coordinates, mol.numbers, mol.pseudo_numbers, grid, moldens)
bp.do_populations()
assert abs(bp['populations'] - 7).max() < 1e-4
bp.do_charges()
assert abs(bp['charges']).max() < 1e-4
bp.clear()
with assert_raises(KeyError):
bp['charges']
bp.do_charges()
assert abs(bp['populations'] - 7).max() < 1e-4
assert abs(bp['charges']).max() < 1e-4
def test_becke_nonlocal_lih_hf_321g():
fn_fchk = context.get_fn('test/li_h_3-21G_hf_g09.fchk')
mol = IOData.from_file(fn_fchk)
rtf = ExpRTransform(1e-3, 1e1, 100)
rgrid = RadialGrid(rtf)
dm_full = mol.get_dm_full()
grid1 = BeckeMolGrid(mol.coordinates, mol.numbers, mol.pseudo_numbers, (rgrid, 110), random_rotate=False, mode='only')
moldens = mol.obasis.compute_grid_density_dm(dm_full, grid1.points)
bp1 = BeckeWPart(mol.coordinates, mol.numbers, mol.pseudo_numbers, grid1, moldens)
grid2 = BeckeMolGrid(mol.coordinates, mol.numbers, mol.pseudo_numbers, (rgrid, 110), random_rotate=False, mode='discard')
moldens = mol.obasis.compute_grid_density_dm(dm_full, grid2.points)
bp2 = BeckeWPart(mol.coordinates, mol.numbers, mol.pseudo_numbers, grid2, moldens, local=False)
bp1.do_charges()
bp2.do_charges()
assert abs(bp1['charges'] - bp2['charges']).max() < 5e-4
def check_becke_azirine(key, expected):
fn_fchk = context.get_fn('test/2h-azirine-%s.fchk' % key)
mol = IOData.from_file(fn_fchk)
grid = BeckeMolGrid(mol.coordinates, mol.numbers, mol.pseudo_numbers, random_rotate=False, mode='only')
dm_full = mol.get_dm_full()
moldens = mol.obasis.compute_grid_density_dm(dm_full, grid.points)
bp = BeckeWPart(mol.coordinates, mol.numbers, mol.pseudo_numbers, grid, moldens)
bp.do_charges()
c = bp['charges']
assert abs(c[0] - expected[0]) < 1e-3
assert abs(c[2] - expected[1]) < 1e-3
assert abs(c[5] - expected[2]) < 1e-3
def test_becke_azirine_ccd():
check_becke_azirine('cc', [-0.0656538087277, -0.0770555290299, 0.123503410725])
def test_becke_azirine_cis():
check_becke_azirine('ci', [-0.122893896731, -0.266685240737, 0.137147967309])
def test_becke_azirine_mp2():
check_becke_azirine('mp2', [-0.0656579068849, -0.0761190062373, 0.126890127581])
def test_becke_azirine_mp3():
check_becke_azirine('mp3', [-0.0665919182085, -0.0769654765789, 0.125587673579])
def test_becke_ch3_hf_sto3g():
fn_fchk = context.get_fn('test/ch3_hf_sto3g.fchk')
mol = IOData.from_file(fn_fchk)
grid = BeckeMolGrid(mol.coordinates, mol.numbers, mol.pseudo_numbers, random_rotate=False, mode='only')
dm_full = mol.get_dm_full()
dm_spin = mol.get_dm_spin()
moldens = mol.obasis.compute_grid_density_dm(dm_full, grid.points)
spindens = mol.obasis.compute_grid_density_dm(dm_spin, grid.points)
bp = BeckeWPart(mol.coordinates, mol.numbers, mol.pseudo_numbers, grid, moldens, spindens)
bp.do_all()
sc = bp['spin_charges']
assert abs(sc - [1.08458698, -0.02813376, -0.02813376, -0.02815979]).max() < 1e-3
| FarnazH/horton | horton/part/test/test_becke.py | Python | gpl-3.0 | 4,428 |
#!/usr/bin/env python
#coding=utf-8
from django.shortcuts import render_to_response
def index(request):
data = {'request':request}
return render_to_response('index.html', data)
| JuanbingTeam/djangobbs | djangobbs/index.py | Python | apache-2.0 | 197 |
#!/usr/bin/env python
"""
This tests the ability to render GPU resident data in VTK.
"""
import sys
import vtk
from vtk.test import Testing
from PistonTestCommon import *
class TestRendering(Testing.vtkTest):
def testRendering(self):
global args
renderer = vtk.vtkRenderer()
renwin = vtk.vtkRenderWindow()
renwin.AddRenderer(renderer)
iren = vtk.vtkRenderWindowInteractor()
iren.SetRenderWindow(renwin)
renwin.Render()
if "GPURender" in args:
print "Testing GPU direct render path"
vtk.vtkPistonMapper.InitCudaGL(renwin)
else:
print "Testing CPU indirect render path"
src = vtk.vtkSphereSource()
d2p = vtk.vtkDataSetToPiston()
d2p.SetInputConnection(src.GetOutputPort())
mapper = vtk.vtkPistonMapper()
mapper.SetInputConnection(d2p.GetOutputPort())
mapper.Update() #TODO: shouldn't need this
actor = vtk.vtkActor()
actor.SetMapper(mapper)
renderer.AddActor(actor)
renderer.ResetCamera()
renwin.Render()
img_file = "TestRendering.png"
Testing.compareImage(renwin, Testing.getAbsImagePath(img_file))
if Testing.isInteractive():
iren.Start()
if __name__ == "__main__":
global args
args = parseArgs()
Testing.main([(TestRendering, 'test')])
| HopeFOAM/HopeFOAM | ThirdParty-0.1/ParaView-5.0.1/VTK/Accelerators/Piston/Testing/Python/TestRendering.py | Python | gpl-3.0 | 1,290 |
import _surface
import chimera
try:
import chimera.runCommand
except:
pass
from VolumePath import markerset as ms
try:
from VolumePath import Marker_Set, Link
new_marker_set=Marker_Set
except:
from VolumePath import volume_path_dialog
d= volume_path_dialog(True)
new_marker_set= d.new_marker_set
marker_sets={}
surf_sets={}
if "particle_0 geometry" not in marker_sets:
s=new_marker_set('particle_0 geometry')
marker_sets["particle_0 geometry"]=s
s= marker_sets["particle_0 geometry"]
mark=s.place_marker((11373, 1728.13, 2526.72), (0.7, 0.7, 0.7), 890.203)
if "particle_1 geometry" not in marker_sets:
s=new_marker_set('particle_1 geometry')
marker_sets["particle_1 geometry"]=s
s= marker_sets["particle_1 geometry"]
mark=s.place_marker((10259.4, 2429.11, 3723.81), (0.7, 0.7, 0.7), 792.956)
if "particle_2 geometry" not in marker_sets:
s=new_marker_set('particle_2 geometry')
marker_sets["particle_2 geometry"]=s
s= marker_sets["particle_2 geometry"]
mark=s.place_marker((8532.69, 2758.2, 2960.41), (0.7, 0.7, 0.7), 856.786)
if "particle_3 geometry" not in marker_sets:
s=new_marker_set('particle_3 geometry')
marker_sets["particle_3 geometry"]=s
s= marker_sets["particle_3 geometry"]
mark=s.place_marker((9825.94, 1135.07, 1915.57), (0.7, 0.7, 0.7), 963.679)
if "particle_4 geometry" not in marker_sets:
s=new_marker_set('particle_4 geometry')
marker_sets["particle_4 geometry"]=s
s= marker_sets["particle_4 geometry"]
mark=s.place_marker((8864.47, 1070.22, 618.553), (0.7, 0.7, 0.7), 761.442)
if "particle_5 geometry" not in marker_sets:
s=new_marker_set('particle_5 geometry')
marker_sets["particle_5 geometry"]=s
s= marker_sets["particle_5 geometry"]
mark=s.place_marker((6980.33, 2671, 607.184), (0.7, 0.7, 0.7), 961.183)
if "particle_6 geometry" not in marker_sets:
s=new_marker_set('particle_6 geometry')
marker_sets["particle_6 geometry"]=s
s= marker_sets["particle_6 geometry"]
mark=s.place_marker((6195.69, 4157.38, 303.329), (0.7, 0.7, 0.7), 753.151)
if "particle_7 geometry" not in marker_sets:
s=new_marker_set('particle_7 geometry')
marker_sets["particle_7 geometry"]=s
s= marker_sets["particle_7 geometry"]
mark=s.place_marker((6609.67, 3545.04, -210.293), (1, 0.7, 0), 1098.07)
if "particle_8 geometry" not in marker_sets:
s=new_marker_set('particle_8 geometry')
marker_sets["particle_8 geometry"]=s
s= marker_sets["particle_8 geometry"]
mark=s.place_marker((5721.24, 5834.87, 871.259), (0.7, 0.7, 0.7), 1010.42)
if "particle_9 geometry" not in marker_sets:
s=new_marker_set('particle_9 geometry')
marker_sets["particle_9 geometry"]=s
s= marker_sets["particle_9 geometry"]
mark=s.place_marker((6201.03, 7254.05, 117.531), (1, 0.7, 0), 821.043)
if "particle_10 geometry" not in marker_sets:
s=new_marker_set('particle_10 geometry')
marker_sets["particle_10 geometry"]=s
s= marker_sets["particle_10 geometry"]
mark=s.place_marker((5645.43, 8268.96, 1609.79), (0.7, 0.7, 0.7), 873.876)
if "particle_11 geometry" not in marker_sets:
s=new_marker_set('particle_11 geometry')
marker_sets["particle_11 geometry"]=s
s= marker_sets["particle_11 geometry"]
mark=s.place_marker((6394.9, 7902.04, 2465.86), (0.7, 0.7, 0.7), 625.532)
if "particle_12 geometry" not in marker_sets:
s=new_marker_set('particle_12 geometry')
marker_sets["particle_12 geometry"]=s
s= marker_sets["particle_12 geometry"]
mark=s.place_marker((7011.82, 8106.65, 3867.87), (0.7, 0.7, 0.7), 880.474)
if "particle_13 geometry" not in marker_sets:
s=new_marker_set('particle_13 geometry')
marker_sets["particle_13 geometry"]=s
s= marker_sets["particle_13 geometry"]
mark=s.place_marker((8277.34, 7554.86, 3180), (0.7, 0.7, 0.7), 659.161)
if "particle_14 geometry" not in marker_sets:
s=new_marker_set('particle_14 geometry')
marker_sets["particle_14 geometry"]=s
s= marker_sets["particle_14 geometry"]
mark=s.place_marker((10001.2, 8459.06, 4337.94), (0.7, 0.7, 0.7), 831.745)
if "particle_15 geometry" not in marker_sets:
s=new_marker_set('particle_15 geometry')
marker_sets["particle_15 geometry"]=s
s= marker_sets["particle_15 geometry"]
mark=s.place_marker((11030.8, 7840.88, 7191.3), (0.7, 0.7, 0.7), 803.065)
if "particle_16 geometry" not in marker_sets:
s=new_marker_set('particle_16 geometry')
marker_sets["particle_16 geometry"]=s
s= marker_sets["particle_16 geometry"]
mark=s.place_marker((9790.37, 6419.19, 7692.39), (0.7, 0.7, 0.7), 610.262)
if "particle_17 geometry" not in marker_sets:
s=new_marker_set('particle_17 geometry')
marker_sets["particle_17 geometry"]=s
s= marker_sets["particle_17 geometry"]
mark=s.place_marker((8981.52, 7521.64, 7751.38), (0.7, 0.7, 0.7), 741.265)
if "particle_18 geometry" not in marker_sets:
s=new_marker_set('particle_18 geometry')
marker_sets["particle_18 geometry"]=s
s= marker_sets["particle_18 geometry"]
mark=s.place_marker((8220.56, 7937.02, 6324.34), (0.7, 0.7, 0.7), 748.625)
if "particle_19 geometry" not in marker_sets:
s=new_marker_set('particle_19 geometry')
marker_sets["particle_19 geometry"]=s
s= marker_sets["particle_19 geometry"]
mark=s.place_marker((7969.62, 9244.24, 5681.34), (0.7, 0.7, 0.7), 677.181)
if "particle_20 geometry" not in marker_sets:
s=new_marker_set('particle_20 geometry')
marker_sets["particle_20 geometry"]=s
s= marker_sets["particle_20 geometry"]
mark=s.place_marker((6422.64, 7890.12, 4392.43), (0.7, 0.7, 0.7), 616.015)
if "particle_21 geometry" not in marker_sets:
s=new_marker_set('particle_21 geometry')
marker_sets["particle_21 geometry"]=s
s= marker_sets["particle_21 geometry"]
mark=s.place_marker((7520.07, 8550.61, 5993.54), (0.7, 0.7, 0.7), 653.154)
if "particle_22 geometry" not in marker_sets:
s=new_marker_set('particle_22 geometry')
marker_sets["particle_22 geometry"]=s
s= marker_sets["particle_22 geometry"]
mark=s.place_marker((7047.03, 8185.55, 6463.58), (0.7, 0.7, 0.7), 595.33)
if "particle_23 geometry" not in marker_sets:
s=new_marker_set('particle_23 geometry')
marker_sets["particle_23 geometry"]=s
s= marker_sets["particle_23 geometry"]
mark=s.place_marker((7378.79, 7775.83, 7641.96), (0.7, 0.7, 0.7), 627.901)
if "particle_24 geometry" not in marker_sets:
s=new_marker_set('particle_24 geometry')
marker_sets["particle_24 geometry"]=s
s= marker_sets["particle_24 geometry"]
mark=s.place_marker((8530.81, 7008.83, 7696.77), (0.7, 0.7, 0.7), 663.941)
if "particle_25 geometry" not in marker_sets:
s=new_marker_set('particle_25 geometry')
marker_sets["particle_25 geometry"]=s
s= marker_sets["particle_25 geometry"]
mark=s.place_marker((9913.91, 6961.43, 8384.84), (0.7, 0.7, 0.7), 663.899)
if "particle_26 geometry" not in marker_sets:
s=new_marker_set('particle_26 geometry')
marker_sets["particle_26 geometry"]=s
s= marker_sets["particle_26 geometry"]
mark=s.place_marker((8860.24, 7431.45, 7335.88), (0.7, 0.7, 0.7), 644.694)
if "particle_27 geometry" not in marker_sets:
s=new_marker_set('particle_27 geometry')
marker_sets["particle_27 geometry"]=s
s= marker_sets["particle_27 geometry"]
mark=s.place_marker((8090.35, 6968.45, 5332.62), (0.7, 0.7, 0.7), 896.802)
if "particle_28 geometry" not in marker_sets:
s=new_marker_set('particle_28 geometry')
marker_sets["particle_28 geometry"]=s
s= marker_sets["particle_28 geometry"]
mark=s.place_marker((6851.21, 7706.77, 5771.66), (0.7, 0.7, 0.7), 576.38)
if "particle_29 geometry" not in marker_sets:
s=new_marker_set('particle_29 geometry')
marker_sets["particle_29 geometry"]=s
s= marker_sets["particle_29 geometry"]
mark=s.place_marker((5952.06, 8094.26, 4905.62), (0.7, 0.7, 0.7), 635.092)
if "particle_30 geometry" not in marker_sets:
s=new_marker_set('particle_30 geometry')
marker_sets["particle_30 geometry"]=s
s= marker_sets["particle_30 geometry"]
mark=s.place_marker((5799.93, 7527.38, 5476.16), (0.7, 0.7, 0.7), 651.505)
if "particle_31 geometry" not in marker_sets:
s=new_marker_set('particle_31 geometry')
marker_sets["particle_31 geometry"]=s
s= marker_sets["particle_31 geometry"]
mark=s.place_marker((6126, 7861.56, 3801.22), (0.7, 0.7, 0.7), 718.042)
if "particle_32 geometry" not in marker_sets:
s=new_marker_set('particle_32 geometry')
marker_sets["particle_32 geometry"]=s
s= marker_sets["particle_32 geometry"]
mark=s.place_marker((5802.45, 8900.88, 5188.73), (0.7, 0.7, 0.7), 726.714)
if "particle_33 geometry" not in marker_sets:
s=new_marker_set('particle_33 geometry')
marker_sets["particle_33 geometry"]=s
s= marker_sets["particle_33 geometry"]
mark=s.place_marker((6338.03, 7969.54, 6239.43), (0.7, 0.7, 0.7), 673.585)
if "particle_34 geometry" not in marker_sets:
s=new_marker_set('particle_34 geometry')
marker_sets["particle_34 geometry"]=s
s= marker_sets["particle_34 geometry"]
mark=s.place_marker((7091.35, 8316.54, 6768.38), (0.7, 0.7, 0.7), 598.418)
if "particle_35 geometry" not in marker_sets:
s=new_marker_set('particle_35 geometry')
marker_sets["particle_35 geometry"]=s
s= marker_sets["particle_35 geometry"]
mark=s.place_marker((8333.31, 8746.99, 7453.63), (0.7, 0.7, 0.7), 693.382)
if "particle_36 geometry" not in marker_sets:
s=new_marker_set('particle_36 geometry')
marker_sets["particle_36 geometry"]=s
s= marker_sets["particle_36 geometry"]
mark=s.place_marker((6988.8, 7884.05, 5598.72), (0.7, 0.7, 0.7), 804.038)
if "particle_37 geometry" not in marker_sets:
s=new_marker_set('particle_37 geometry')
marker_sets["particle_37 geometry"]=s
s= marker_sets["particle_37 geometry"]
mark=s.place_marker((7476.25, 8193.36, 7405.65), (0.7, 0.7, 0.7), 816.178)
if "particle_38 geometry" not in marker_sets:
s=new_marker_set('particle_38 geometry')
marker_sets["particle_38 geometry"]=s
s= marker_sets["particle_38 geometry"]
mark=s.place_marker((7390.96, 7143.03, 7047.55), (0.7, 0.7, 0.7), 776.628)
if "particle_39 geometry" not in marker_sets:
s=new_marker_set('particle_39 geometry')
marker_sets["particle_39 geometry"]=s
s= marker_sets["particle_39 geometry"]
mark=s.place_marker((6904.97, 8626.37, 7274.7), (0.7, 0.7, 0.7), 750.656)
if "particle_40 geometry" not in marker_sets:
s=new_marker_set('particle_40 geometry')
marker_sets["particle_40 geometry"]=s
s= marker_sets["particle_40 geometry"]
mark=s.place_marker((5613.73, 8187.52, 6292.66), (0.7, 0.7, 0.7), 709.625)
if "particle_41 geometry" not in marker_sets:
s=new_marker_set('particle_41 geometry')
marker_sets["particle_41 geometry"]=s
s= marker_sets["particle_41 geometry"]
mark=s.place_marker((4568.56, 9275.14, 5206.84), (0.7, 0.7, 0.7), 927.681)
if "particle_42 geometry" not in marker_sets:
s=new_marker_set('particle_42 geometry')
marker_sets["particle_42 geometry"]=s
s= marker_sets["particle_42 geometry"]
mark=s.place_marker((3976.98, 10301.6, 7563.46), (0.7, 0.7, 0.7), 1088.21)
if "particle_43 geometry" not in marker_sets:
s=new_marker_set('particle_43 geometry')
marker_sets["particle_43 geometry"]=s
s= marker_sets["particle_43 geometry"]
mark=s.place_marker((3836.02, 9880.54, 5747.76), (0.7, 0.7, 0.7), 736.147)
if "particle_44 geometry" not in marker_sets:
s=new_marker_set('particle_44 geometry')
marker_sets["particle_44 geometry"]=s
s= marker_sets["particle_44 geometry"]
mark=s.place_marker((5037.13, 8933.57, 6425.4), (0.7, 0.7, 0.7), 861.101)
if "particle_45 geometry" not in marker_sets:
s=new_marker_set('particle_45 geometry')
marker_sets["particle_45 geometry"]=s
s= marker_sets["particle_45 geometry"]
mark=s.place_marker((4504.93, 7641.84, 5010.54), (0.7, 0.7, 0.7), 924.213)
if "particle_46 geometry" not in marker_sets:
s=new_marker_set('particle_46 geometry')
marker_sets["particle_46 geometry"]=s
s= marker_sets["particle_46 geometry"]
mark=s.place_marker((3183.5, 6754.03, 6210.95), (0.7, 0.7, 0.7), 881.828)
if "particle_47 geometry" not in marker_sets:
s=new_marker_set('particle_47 geometry')
marker_sets["particle_47 geometry"]=s
s= marker_sets["particle_47 geometry"]
mark=s.place_marker((2216.87, 8382.44, 7025.17), (0.7, 0.7, 0.7), 927.681)
if "particle_48 geometry" not in marker_sets:
s=new_marker_set('particle_48 geometry')
marker_sets["particle_48 geometry"]=s
s= marker_sets["particle_48 geometry"]
mark=s.place_marker((2138.87, 6825.93, 5977.47), (0.7, 0.7, 0.7), 831.576)
if "particle_49 geometry" not in marker_sets:
s=new_marker_set('particle_49 geometry')
marker_sets["particle_49 geometry"]=s
s= marker_sets["particle_49 geometry"]
mark=s.place_marker((2829.52, 5877.95, 4507.91), (0.7, 0.7, 0.7), 859.494)
if "particle_50 geometry" not in marker_sets:
s=new_marker_set('particle_50 geometry')
marker_sets["particle_50 geometry"]=s
s= marker_sets["particle_50 geometry"]
mark=s.place_marker((1938.63, 6667.8, 4552.73), (0.7, 0.7, 0.7), 704.845)
if "particle_51 geometry" not in marker_sets:
s=new_marker_set('particle_51 geometry')
marker_sets["particle_51 geometry"]=s
s= marker_sets["particle_51 geometry"]
mark=s.place_marker((3329.23, 6425.84, 3600.9), (0.7, 0.7, 0.7), 804.461)
if "particle_52 geometry" not in marker_sets:
s=new_marker_set('particle_52 geometry')
marker_sets["particle_52 geometry"]=s
s= marker_sets["particle_52 geometry"]
mark=s.place_marker((4922.93, 5946.57, 2935.68), (0.7, 0.7, 0.7), 934.111)
if "particle_53 geometry" not in marker_sets:
s=new_marker_set('particle_53 geometry')
marker_sets["particle_53 geometry"]=s
s= marker_sets["particle_53 geometry"]
mark=s.place_marker((4403.72, 5955.71, 1475.72), (0.7, 0.7, 0.7), 988.339)
if "particle_54 geometry" not in marker_sets:
s=new_marker_set('particle_54 geometry')
marker_sets["particle_54 geometry"]=s
s= marker_sets["particle_54 geometry"]
mark=s.place_marker((3630.22, 6008.52, 1582.26), (1, 0.7, 0), 803.7)
if "particle_55 geometry" not in marker_sets:
s=new_marker_set('particle_55 geometry')
marker_sets["particle_55 geometry"]=s
s= marker_sets["particle_55 geometry"]
mark=s.place_marker((3480.44, 5567.3, 3624.98), (0.7, 0.7, 0.7), 812.118)
if "particle_56 geometry" not in marker_sets:
s=new_marker_set('particle_56 geometry')
marker_sets["particle_56 geometry"]=s
s= marker_sets["particle_56 geometry"]
mark=s.place_marker((3945.15, 3453.93, 3835.48), (0.7, 0.7, 0.7), 1177.93)
if "particle_57 geometry" not in marker_sets:
s=new_marker_set('particle_57 geometry')
marker_sets["particle_57 geometry"]=s
s= marker_sets["particle_57 geometry"]
mark=s.place_marker((3717.07, 1650.17, 5600.99), (0.7, 0.7, 0.7), 1038.21)
if "particle_58 geometry" not in marker_sets:
s=new_marker_set('particle_58 geometry')
marker_sets["particle_58 geometry"]=s
s= marker_sets["particle_58 geometry"]
mark=s.place_marker((3663.76, 1306.4, 6075.99), (1, 0.7, 0), 758.016)
if "particle_59 geometry" not in marker_sets:
s=new_marker_set('particle_59 geometry')
marker_sets["particle_59 geometry"]=s
s= marker_sets["particle_59 geometry"]
mark=s.place_marker((3990.02, 761.7, 5522.78), (0.7, 0.7, 0.7), 824.046)
if "particle_60 geometry" not in marker_sets:
s=new_marker_set('particle_60 geometry')
marker_sets["particle_60 geometry"]=s
s= marker_sets["particle_60 geometry"]
mark=s.place_marker((3399.52, 1631.45, 5369.95), (0.7, 0.7, 0.7), 793.379)
if "particle_61 geometry" not in marker_sets:
s=new_marker_set('particle_61 geometry')
marker_sets["particle_61 geometry"]=s
s= marker_sets["particle_61 geometry"]
mark=s.place_marker((2713.75, 1178.27, 5209.31), (0.7, 0.7, 0.7), 1011.56)
if "particle_62 geometry" not in marker_sets:
s=new_marker_set('particle_62 geometry')
marker_sets["particle_62 geometry"]=s
s= marker_sets["particle_62 geometry"]
mark=s.place_marker((3810.28, 2650, 4668.6), (0.7, 0.7, 0.7), 1097.01)
if "particle_63 geometry" not in marker_sets:
s=new_marker_set('particle_63 geometry')
marker_sets["particle_63 geometry"]=s
s= marker_sets["particle_63 geometry"]
mark=s.place_marker((2777.87, 1076.28, 4581.24), (0.7, 0.7, 0.7), 851.626)
if "particle_64 geometry" not in marker_sets:
s=new_marker_set('particle_64 geometry')
marker_sets["particle_64 geometry"]=s
s= marker_sets["particle_64 geometry"]
mark=s.place_marker((1873.05, -660.377, 5160.6), (0.7, 0.7, 0.7), 869.434)
if "particle_65 geometry" not in marker_sets:
s=new_marker_set('particle_65 geometry')
marker_sets["particle_65 geometry"]=s
s= marker_sets["particle_65 geometry"]
mark=s.place_marker((932.413, 883.584, 5086.19), (0.7, 0.7, 0.7), 818.463)
if "particle_66 geometry" not in marker_sets:
s=new_marker_set('particle_66 geometry')
marker_sets["particle_66 geometry"]=s
s= marker_sets["particle_66 geometry"]
mark=s.place_marker((784.351, 339.071, 6656.75), (0.7, 0.7, 0.7), 759.539)
if "particle_67 geometry" not in marker_sets:
s=new_marker_set('particle_67 geometry')
marker_sets["particle_67 geometry"]=s
s= marker_sets["particle_67 geometry"]
mark=s.place_marker((2119.43, 1753.82, 5176.13), (0.7, 0.7, 0.7), 1088.59)
if "particle_68 geometry" not in marker_sets:
s=new_marker_set('particle_68 geometry')
marker_sets["particle_68 geometry"]=s
s= marker_sets["particle_68 geometry"]
mark=s.place_marker((2093.33, -222.816, 5756.59), (0.7, 0.7, 0.7), 822.312)
if "particle_69 geometry" not in marker_sets:
s=new_marker_set('particle_69 geometry')
marker_sets["particle_69 geometry"]=s
s= marker_sets["particle_69 geometry"]
mark=s.place_marker((1527.28, -885.659, 6357.32), (0.7, 0.7, 0.7), 749.81)
if "particle_70 geometry" not in marker_sets:
s=new_marker_set('particle_70 geometry')
marker_sets["particle_70 geometry"]=s
s= marker_sets["particle_70 geometry"]
mark=s.place_marker((2286.52, -55.4312, 7428.72), (0.7, 0.7, 0.7), 764.488)
for k in surf_sets.keys():
chimera.openModels.add([surf_sets[k]])
| batxes/4Cin | SHH_WT_models/SHH_WT_models_final_output_0.1_-0.1_11000/SHH_WT_models11702.py | Python | gpl-3.0 | 17,574 |
# Copyright 2015 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# ==============================================================================
"""TensorFlow API compatibility tests.
This test ensures all changes to the public API of TensorFlow are intended.
If this test fails, it means a change has been made to the public API. Backwards
incompatible changes are not allowed. You can run the test with
"--update_goldens" flag set to "True" to update goldens when making changes to
the public TF python API.
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import argparse
import os
import re
import sys
import unittest
import tensorflow as tf
from google.protobuf import message
from google.protobuf import text_format
from tensorflow.python.lib.io import file_io
from tensorflow.python.platform import resource_loader
from tensorflow.python.platform import test
from tensorflow.python.platform import tf_logging as logging
from tensorflow.tools.api.lib import api_objects_pb2
from tensorflow.tools.api.lib import python_object_to_proto_visitor
from tensorflow.tools.common import public_api
from tensorflow.tools.common import traverse
# FLAGS defined at the bottom:
FLAGS = None
# DEFINE_boolean, update_goldens, default False:
_UPDATE_GOLDENS_HELP = """
Update stored golden files if API is updated. WARNING: All API changes
have to be authorized by TensorFlow leads.
"""
# DEFINE_boolean, verbose_diffs, default True:
_VERBOSE_DIFFS_HELP = """
If set to true, print line by line diffs on all libraries. If set to
false, only print which libraries have differences.
"""
_API_GOLDEN_FOLDER_V1 = 'tensorflow/tools/api/golden/v1'
_API_GOLDEN_FOLDER_V2 = 'tensorflow/tools/api/golden/v2'
_TEST_README_FILE = 'tensorflow/tools/api/tests/README.txt'
_UPDATE_WARNING_FILE = 'tensorflow/tools/api/tests/API_UPDATE_WARNING.txt'
def _KeyToFilePath(key, api_version):
"""From a given key, construct a filepath.
Filepath will be inside golden folder for api_version.
"""
def _ReplaceCapsWithDash(matchobj):
match = matchobj.group(0)
return '-%s' % (match.lower())
case_insensitive_key = re.sub('([A-Z]{1})', _ReplaceCapsWithDash, key)
api_folder = (
_API_GOLDEN_FOLDER_V2 if api_version == 2 else _API_GOLDEN_FOLDER_V1)
return os.path.join(api_folder, '%s.pbtxt' % case_insensitive_key)
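# Worked example (added note): _KeyToFilePath('tf.FixedLenFeature', 2) rewrites every capital
# letter as '-<lowercase>' and yields
#     'tensorflow/tools/api/golden/v2/tf.-fixed-len-feature.pbtxt'
# _FileNameToKey() below applies the inverse transformation when golden files are read back.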
def _FileNameToKey(filename):
"""From a given filename, construct a key we use for api objects."""
def _ReplaceDashWithCaps(matchobj):
match = matchobj.group(0)
return match[1].upper()
base_filename = os.path.basename(filename)
base_filename_without_ext = os.path.splitext(base_filename)[0]
api_object_key = re.sub(
'((-[a-z]){1})', _ReplaceDashWithCaps, base_filename_without_ext)
return api_object_key
def _VerifyNoSubclassOfMessageVisitor(path, parent, unused_children):
"""A Visitor that crashes on subclasses of generated proto classes."""
# If the traversed object is a proto Message class
if not (isinstance(parent, type) and
issubclass(parent, message.Message)):
return
if parent is message.Message:
return
# Check that it is a direct subclass of Message.
if message.Message not in parent.__bases__:
raise NotImplementedError(
'Object tf.%s is a subclass of a generated proto Message. '
'They are not yet supported by the API tools.' % path)
class ApiCompatibilityTest(test.TestCase):
def __init__(self, *args, **kwargs):
super(ApiCompatibilityTest, self).__init__(*args, **kwargs)
golden_update_warning_filename = os.path.join(
resource_loader.get_root_dir_with_all_resources(),
_UPDATE_WARNING_FILE)
self._update_golden_warning = file_io.read_file_to_string(
golden_update_warning_filename)
test_readme_filename = os.path.join(
resource_loader.get_root_dir_with_all_resources(),
_TEST_README_FILE)
self._test_readme_message = file_io.read_file_to_string(
test_readme_filename)
def _AssertProtoDictEquals(self,
expected_dict,
actual_dict,
verbose=False,
update_goldens=False,
additional_missing_object_message='',
api_version=2):
"""Diff given dicts of protobufs and report differences a readable way.
Args:
expected_dict: a dict of TFAPIObject protos constructed from golden
files.
      actual_dict: a dict of TFAPIObject protos constructed by reading from the
TF package linked to the test.
verbose: Whether to log the full diffs, or simply report which files were
different.
update_goldens: Whether to update goldens when there are diffs found.
additional_missing_object_message: Message to print when a symbol is
missing.
api_version: TensorFlow API version to test.
"""
diffs = []
verbose_diffs = []
expected_keys = set(expected_dict.keys())
actual_keys = set(actual_dict.keys())
only_in_expected = expected_keys - actual_keys
only_in_actual = actual_keys - expected_keys
all_keys = expected_keys | actual_keys
# This will be populated below.
updated_keys = []
for key in all_keys:
diff_message = ''
verbose_diff_message = ''
# First check if the key is not found in one or the other.
if key in only_in_expected:
diff_message = 'Object %s expected but not found (removed). %s' % (
key, additional_missing_object_message)
verbose_diff_message = diff_message
elif key in only_in_actual:
diff_message = 'New object %s found (added).' % key
verbose_diff_message = diff_message
else:
# Do not truncate diff
        self.maxDiff = None  # pylint: disable=invalid-name
# Now we can run an actual proto diff.
try:
self.assertProtoEquals(expected_dict[key], actual_dict[key])
except AssertionError as e:
updated_keys.append(key)
diff_message = 'Change detected in python object: %s.' % key
verbose_diff_message = str(e)
# All difference cases covered above. If any difference found, add to the
# list.
if diff_message:
diffs.append(diff_message)
verbose_diffs.append(verbose_diff_message)
# If diffs are found, handle them based on flags.
if diffs:
diff_count = len(diffs)
logging.error(self._test_readme_message)
logging.error('%d differences found between API and golden.', diff_count)
messages = verbose_diffs if verbose else diffs
for i in range(diff_count):
print('Issue %d\t: %s' % (i + 1, messages[i]), file=sys.stderr)
if update_goldens:
# Write files if requested.
logging.warning(self._update_golden_warning)
# If the keys are only in expected, some objects are deleted.
# Remove files.
for key in only_in_expected:
filepath = _KeyToFilePath(key, api_version)
file_io.delete_file(filepath)
# If the files are only in actual (current library), these are new
# modules. Write them to files. Also record all updates in files.
for key in only_in_actual | set(updated_keys):
filepath = _KeyToFilePath(key, api_version)
file_io.write_string_to_file(
filepath, text_format.MessageToString(actual_dict[key]))
else:
# Fail if we cannot fix the test by updating goldens.
self.fail('%d differences found between API and golden.' % diff_count)
else:
logging.info('No differences found between API and golden.')
def testNoSubclassOfMessage(self):
visitor = public_api.PublicAPIVisitor(_VerifyNoSubclassOfMessageVisitor)
visitor.do_not_descend_map['tf'].append('contrib')
# Skip compat.v1 and compat.v2 since they are validated in separate tests.
visitor.private_map['tf.compat'] = ['v1', 'v2']
traverse.traverse(tf, visitor)
def testNoSubclassOfMessageV1(self):
if not hasattr(tf.compat, 'v1'):
return
visitor = public_api.PublicAPIVisitor(_VerifyNoSubclassOfMessageVisitor)
visitor.do_not_descend_map['tf'].append('contrib')
traverse.traverse(tf.compat.v1, visitor)
def testNoSubclassOfMessageV2(self):
if not hasattr(tf.compat, 'v2'):
return
visitor = public_api.PublicAPIVisitor(_VerifyNoSubclassOfMessageVisitor)
visitor.do_not_descend_map['tf'].append('contrib')
traverse.traverse(tf.compat.v2, visitor)
def _checkBackwardsCompatibility(
self, root, golden_file_pattern, api_version,
additional_private_map=None):
# Extract all API stuff.
visitor = python_object_to_proto_visitor.PythonObjectToProtoVisitor()
public_api_visitor = public_api.PublicAPIVisitor(visitor)
public_api_visitor.do_not_descend_map['tf'].append('contrib')
public_api_visitor.do_not_descend_map['tf.GPUOptions'] = [
'Experimental']
if additional_private_map:
public_api_visitor.private_map.update(additional_private_map)
traverse.traverse(root, public_api_visitor)
proto_dict = visitor.GetProtos()
# Read all golden files.
golden_file_list = file_io.get_matching_files(golden_file_pattern)
def _ReadFileToProto(filename):
"""Read a filename, create a protobuf from its contents."""
ret_val = api_objects_pb2.TFAPIObject()
text_format.Merge(file_io.read_file_to_string(filename), ret_val)
return ret_val
golden_proto_dict = {
_FileNameToKey(filename): _ReadFileToProto(filename)
for filename in golden_file_list
}
# Diff them. Do not fail if called with update.
# If the test is run to update goldens, only report diffs but do not fail.
self._AssertProtoDictEquals(
golden_proto_dict,
proto_dict,
verbose=FLAGS.verbose_diffs,
update_goldens=FLAGS.update_goldens,
api_version=api_version)
@unittest.skipUnless(
sys.version_info.major == 2,
      'API compatibility test goldens are generated using python2.')
def testAPIBackwardsCompatibility(self):
api_version = 1
golden_file_pattern = os.path.join(
resource_loader.get_root_dir_with_all_resources(),
_KeyToFilePath('*', api_version))
self._checkBackwardsCompatibility(
tf,
golden_file_pattern,
api_version,
# Skip compat.v1 and compat.v2 since they are validated
# in separate tests.
additional_private_map={'tf.compat': ['v1', 'v2']})
@unittest.skipUnless(
sys.version_info.major == 2,
      'API compatibility test goldens are generated using python2.')
def testAPIBackwardsCompatibilityV1(self):
if not hasattr(tf.compat, 'v1'):
return
api_version = 1
golden_file_pattern = os.path.join(
resource_loader.get_root_dir_with_all_resources(),
_KeyToFilePath('*', api_version))
self._checkBackwardsCompatibility(
tf.compat.v1, golden_file_pattern, api_version)
@unittest.skipUnless(
sys.version_info.major == 2,
      'API compatibility test goldens are generated using python2.')
def testAPIBackwardsCompatibilityV2(self):
if not hasattr(tf.compat, 'v2'):
return
api_version = 2
golden_file_pattern = os.path.join(
resource_loader.get_root_dir_with_all_resources(),
_KeyToFilePath('*', api_version))
self._checkBackwardsCompatibility(
tf.compat.v2, golden_file_pattern, api_version)
if __name__ == '__main__':
parser = argparse.ArgumentParser()
parser.add_argument(
'--update_goldens', type=bool, default=False, help=_UPDATE_GOLDENS_HELP)
parser.add_argument(
'--verbose_diffs', type=bool, default=True, help=_VERBOSE_DIFFS_HELP)
FLAGS, unparsed = parser.parse_known_args()
# Now update argv, so that unittest library does not get confused.
sys.argv = [sys.argv[0]] + unparsed
test.main()
| ZhangXinNan/tensorflow | tensorflow/tools/api/tests/api_compatibility_test.py | Python | apache-2.0 | 12,607 |
# Copyright 2010 United States Government as represented by the
# Administrator of the National Aeronautics and Space Administration.
# Copyright 2011 Justin Santa Barbara
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""Generic Node base class for all workers that run on hosts."""
import abc
import collections
import copy
import errno
import io
import logging
import os
import random
import signal
import six
import sys
import time
import eventlet
from eventlet import event
from oslo_concurrency import lockutils
from oslo_service import eventlet_backdoor
from oslo_service._i18n import _LE, _LI, _LW
from oslo_service import _options
from oslo_service import systemd
from oslo_service import threadgroup
LOG = logging.getLogger(__name__)
def list_opts():
"""Entry point for oslo-config-generator."""
return [(None, copy.deepcopy(_options.eventlet_backdoor_opts +
_options.service_opts))]
def _is_daemon():
# The process group for a foreground process will match the
# process group of the controlling terminal. If those values do
# not match, or ioctl() fails on the stdout file handle, we assume
# the process is running in the background as a daemon.
# http://www.gnu.org/software/bash/manual/bashref.html#Job-Control-Basics
try:
is_daemon = os.getpgrp() != os.tcgetpgrp(sys.stdout.fileno())
except io.UnsupportedOperation:
# Could not get the fileno for stdout, so we must be a daemon.
is_daemon = True
except OSError as err:
if err.errno == errno.ENOTTY:
# Assume we are a daemon because there is no terminal.
is_daemon = True
else:
raise
return is_daemon
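# Illustrative note (added): in a foreground shell session os.getpgrp() matches
# os.tcgetpgrp(sys.stdout.fileno()), so _is_daemon() returns False; once the process has been
# detached from its controlling terminal the ioctl fails (or raises ENOTTY) and the function
# reports True.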
def _is_sighup_and_daemon(signo):
if not (SignalHandler().is_sighup_supported and signo == signal.SIGHUP):
# Avoid checking if we are a daemon, because the signal isn't
# SIGHUP.
return False
return _is_daemon()
def _check_service_base(service):
if not isinstance(service, ServiceBase):
raise TypeError("Service %(service)s must an instance of %(base)s!"
% {'service': service, 'base': ServiceBase})
@six.add_metaclass(abc.ABCMeta)
class ServiceBase(object):
"""Base class for all services."""
@abc.abstractmethod
def start(self):
"""Start service."""
@abc.abstractmethod
def stop(self):
"""Stop service."""
@abc.abstractmethod
def wait(self):
"""Wait for service to complete."""
@abc.abstractmethod
def reset(self):
"""Reset service.
        Called in case a service running in daemon mode receives SIGHUP.
"""
class Singleton(type):
_instances = {}
_semaphores = lockutils.Semaphores()
def __call__(cls, *args, **kwargs):
with lockutils.lock('singleton_lock', semaphores=cls._semaphores):
if cls not in cls._instances:
cls._instances[cls] = super(Singleton, cls).__call__(
*args, **kwargs)
return cls._instances[cls]
@six.add_metaclass(Singleton)
class SignalHandler(object):
def __init__(self, *args, **kwargs):
super(SignalHandler, self).__init__(*args, **kwargs)
# Map all signal names to signal integer values and create a
# reverse mapping (for easier + quick lookup).
self._ignore_signals = ('SIG_DFL', 'SIG_IGN')
self._signals_by_name = dict((name, getattr(signal, name))
for name in dir(signal)
if name.startswith("SIG")
and name not in self._ignore_signals)
self.signals_to_name = dict(
(sigval, name)
for (name, sigval) in self._signals_by_name.items())
self.is_sighup_supported = 'SIGHUP' in self._signals_by_name
self._signal_handlers = collections.defaultdict(set)
self.clear()
def clear(self):
for sig in self._signal_handlers:
signal.signal(sig, signal.SIG_DFL)
self._signal_handlers.clear()
def add_handlers(self, signals, handler):
for sig in signals:
self.add_handler(sig, handler)
def add_handler(self, sig, handler):
if sig == "SIGHUP" and not self.is_sighup_supported:
return
signo = self._signals_by_name[sig]
self._signal_handlers[signo].add(handler)
signal.signal(signo, self._handle_signals)
def _handle_signals(self, signo, frame):
for handler in self._signal_handlers[signo]:
handler(signo, frame)
class Launcher(object):
"""Launch one or more services and wait for them to complete."""
def __init__(self, conf):
"""Initialize the service launcher.
:returns: None
"""
self.conf = conf
conf.register_opts(_options.service_opts)
self.services = Services()
self.backdoor_port = (
eventlet_backdoor.initialize_if_enabled(self.conf))
def launch_service(self, service):
"""Load and start the given service.
:param service: The service you would like to start, must be an
instance of :class:`oslo_service.service.ServiceBase`
:returns: None
"""
_check_service_base(service)
service.backdoor_port = self.backdoor_port
self.services.add(service)
def stop(self):
"""Stop all services which are currently running.
:returns: None
"""
self.services.stop()
def wait(self):
"""Wait until all services have been stopped, and then return.
:returns: None
"""
self.services.wait()
def restart(self):
"""Reload config files and restart service.
:returns: None
"""
self.conf.reload_config_files()
self.services.restart()
class SignalExit(SystemExit):
def __init__(self, signo, exccode=1):
super(SignalExit, self).__init__(exccode)
self.signo = signo
class ServiceLauncher(Launcher):
"""Runs one or more service in a parent process."""
def __init__(self, conf):
"""Constructor.
:param conf: an instance of ConfigOpts
"""
super(ServiceLauncher, self).__init__(conf)
def _handle_signal(self, signo, frame):
"""Set signal handlers and raise an exception.
:param signo: signal number
:param frame: current stack frame
:raises SignalExit
"""
# Allow the process to be killed again and die from natural causes
SignalHandler().clear()
raise SignalExit(signo)
def handle_signal(self):
"""Set self._handle_signal as a signal handler."""
SignalHandler().add_handlers(
('SIGTERM', 'SIGHUP', 'SIGINT'),
self._handle_signal)
def _wait_for_exit_or_signal(self, ready_callback=None):
status = None
signo = 0
if self.conf.log_options:
LOG.debug('Full set of CONF:')
self.conf.log_opt_values(LOG, logging.DEBUG)
try:
if ready_callback:
ready_callback()
super(ServiceLauncher, self).wait()
except SignalExit as exc:
signame = SignalHandler().signals_to_name[exc.signo]
LOG.info(_LI('Caught %s, exiting'), signame)
status = exc.code
signo = exc.signo
except SystemExit as exc:
status = exc.code
finally:
self.stop()
return status, signo
def wait(self, ready_callback=None):
"""Wait for a service to terminate and restart it on SIGHUP.
:returns: termination status
"""
systemd.notify_once()
SignalHandler().clear()
while True:
self.handle_signal()
status, signo = self._wait_for_exit_or_signal(ready_callback)
if not _is_sighup_and_daemon(signo):
return status
self.restart()
class ServiceWrapper(object):
def __init__(self, service, workers):
self.service = service
self.workers = workers
self.children = set()
self.forktimes = []
class ProcessLauncher(object):
"""Launch a service with a given number of workers."""
def __init__(self, conf, wait_interval=0.01):
"""Constructor.
:param conf: an instance of ConfigOpts
:param wait_interval: The interval to sleep for between checks
of child process exit.
"""
self.conf = conf
conf.register_opts(_options.service_opts)
self.children = {}
self.sigcaught = None
self.running = True
self.wait_interval = wait_interval
self.launcher = None
rfd, self.writepipe = os.pipe()
self.readpipe = eventlet.greenio.GreenPipe(rfd, 'r')
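        # The parent keeps the write end of this pipe open; each child closes
        # its copy and blocks on the read end (see _pipe_watcher), so a read
        # returning means the parent died and the child should exit.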
self.signal_handler = SignalHandler()
self.handle_signal()
def handle_signal(self):
"""Add instance's signal handlers to class handlers."""
self.signal_handler.add_handlers(('SIGTERM', 'SIGHUP'),
self._handle_signal)
self.signal_handler.add_handler('SIGINT', self._fast_exit)
def _handle_signal(self, signo, frame):
"""Set signal handlers.
:param signo: signal number
:param frame: current stack frame
"""
self.sigcaught = signo
self.running = False
# Allow the process to be killed again and die from natural causes
self.signal_handler.clear()
def _fast_exit(self, signo, frame):
        LOG.info(_LI('Caught SIGINT signal, exiting instantly'))
os._exit(1)
def _pipe_watcher(self):
# This will block until the write end is closed when the parent
# dies unexpectedly
self.readpipe.read(1)
LOG.info(_LI('Parent process has died unexpectedly, exiting'))
if self.launcher:
self.launcher.stop()
sys.exit(1)
def _child_process_handle_signal(self):
# Setup child signal handlers differently
def _sigterm(*args):
SignalHandler().clear()
self.launcher.stop()
def _sighup(*args):
SignalHandler().clear()
raise SignalExit(signal.SIGHUP)
self.signal_handler.clear()
# Parent signals with SIGTERM when it wants us to go away.
self.signal_handler.add_handler('SIGTERM', _sigterm)
self.signal_handler.add_handler('SIGHUP', _sighup)
self.signal_handler.add_handler('SIGINT', self._fast_exit)
def _child_wait_for_exit_or_signal(self, launcher):
status = 0
signo = 0
# NOTE(johannes): All exceptions are caught to ensure this
        # doesn't fall back into the loop spawning children. It would
# be bad for a child to spawn more children.
try:
launcher.wait()
except SignalExit as exc:
signame = self.signal_handler.signals_to_name[exc.signo]
LOG.info(_LI('Child caught %s, exiting'), signame)
status = exc.code
signo = exc.signo
except SystemExit as exc:
status = exc.code
except BaseException:
LOG.exception(_LE('Unhandled exception'))
status = 2
return status, signo
def _child_process(self, service):
self._child_process_handle_signal()
# Reopen the eventlet hub to make sure we don't share an epoll
# fd with parent and/or siblings, which would be bad
eventlet.hubs.use_hub()
# Close write to ensure only parent has it open
os.close(self.writepipe)
# Create greenthread to watch for parent to close pipe
eventlet.spawn_n(self._pipe_watcher)
# Reseed random number generator
random.seed()
launcher = Launcher(self.conf)
launcher.launch_service(service)
return launcher
def _start_child(self, wrap):
if len(wrap.forktimes) > wrap.workers:
# Limit ourselves to one process a second (over the period of
            # number of workers * 1 second). This lets workers start up
            # quickly while ensuring we don't keep forking children that
            # die off immediately.
if time.time() - wrap.forktimes[0] < wrap.workers:
LOG.info(_LI('Forking too fast, sleeping'))
time.sleep(1)
wrap.forktimes.pop(0)
wrap.forktimes.append(time.time())
pid = os.fork()
if pid == 0:
self.launcher = self._child_process(wrap.service)
while True:
self._child_process_handle_signal()
status, signo = self._child_wait_for_exit_or_signal(
self.launcher)
if not _is_sighup_and_daemon(signo):
self.launcher.wait()
break
self.launcher.restart()
os._exit(status)
LOG.info(_LI('Started child %d'), pid)
wrap.children.add(pid)
self.children[pid] = wrap
return pid
def launch_service(self, service, workers=1):
"""Launch a service with a given number of workers.
:param service: a service to launch, must be an instance of
:class:`oslo_service.service.ServiceBase`
:param workers: a number of processes in which a service
will be running
"""
_check_service_base(service)
wrap = ServiceWrapper(service, workers)
LOG.info(_LI('Starting %d workers'), wrap.workers)
while self.running and len(wrap.children) < wrap.workers:
self._start_child(wrap)
def _wait_child(self):
try:
# Don't block if no child processes have exited
pid, status = os.waitpid(0, os.WNOHANG)
if not pid:
return None
except OSError as exc:
if exc.errno not in (errno.EINTR, errno.ECHILD):
raise
return None
if os.WIFSIGNALED(status):
sig = os.WTERMSIG(status)
LOG.info(_LI('Child %(pid)d killed by signal %(sig)d'),
dict(pid=pid, sig=sig))
else:
code = os.WEXITSTATUS(status)
LOG.info(_LI('Child %(pid)s exited with status %(code)d'),
dict(pid=pid, code=code))
if pid not in self.children:
LOG.warning(_LW('pid %d not in child list'), pid)
return None
wrap = self.children.pop(pid)
wrap.children.remove(pid)
return wrap
def _respawn_children(self):
while self.running:
wrap = self._wait_child()
if not wrap:
# Yield to other threads if no children have exited
# Sleep for a short time to avoid excessive CPU usage
# (see bug #1095346)
eventlet.greenthread.sleep(self.wait_interval)
continue
while self.running and len(wrap.children) < wrap.workers:
self._start_child(wrap)
def wait(self):
"""Loop waiting on children to die and respawning as necessary."""
systemd.notify_once()
if self.conf.log_options:
LOG.debug('Full set of CONF:')
self.conf.log_opt_values(LOG, logging.DEBUG)
try:
while True:
self.handle_signal()
self._respawn_children()
# No signal means that stop was called. Don't clean up here.
if not self.sigcaught:
return
signame = self.signal_handler.signals_to_name[self.sigcaught]
LOG.info(_LI('Caught %s, stopping children'), signame)
if not _is_sighup_and_daemon(self.sigcaught):
break
self.conf.reload_config_files()
for service in set(
[wrap.service for wrap in self.children.values()]):
service.reset()
for pid in self.children:
os.kill(pid, signal.SIGHUP)
self.running = True
self.sigcaught = None
except eventlet.greenlet.GreenletExit:
LOG.info(_LI("Wait called after thread killed. Cleaning up."))
self.stop()
def stop(self):
"""Terminate child processes and wait on each."""
self.running = False
LOG.debug("Stop services.")
for service in set(
[wrap.service for wrap in self.children.values()]):
service.stop()
LOG.debug("Killing children.")
for pid in self.children:
try:
os.kill(pid, signal.SIGTERM)
except OSError as exc:
if exc.errno != errno.ESRCH:
raise
# Wait for children to die
if self.children:
LOG.info(_LI('Waiting on %d children to exit'), len(self.children))
while self.children:
self._wait_child()
class Service(ServiceBase):
"""Service object for binaries running on hosts."""
def __init__(self, threads=1000):
self.tg = threadgroup.ThreadGroup(threads)
# signal that the service is done shutting itself down:
self._done = event.Event()
def reset(self):
"""Reset a service in case it received a SIGHUP."""
# NOTE(Fengqian): docs for Event.reset() recommend against using it
self._done = event.Event()
def start(self):
"""Start a service."""
def stop(self, graceful=False):
"""Stop a service.
:param graceful: indicates whether to wait for all threads to finish
or terminate them instantly
"""
self.tg.stop(graceful)
self.tg.wait()
# Signal that service cleanup is done:
if not self._done.ready():
self._done.send()
def wait(self):
"""Wait for a service to shut down."""
self._done.wait()
class Services(object):
def __init__(self):
self.services = []
self.tg = threadgroup.ThreadGroup()
self.done = event.Event()
def add(self, service):
"""Add a service to a list and create a thread to run it.
:param service: service to run
"""
self.services.append(service)
self.tg.add_thread(self.run_service, service, self.done)
def stop(self):
"""Wait for graceful shutdown of services and kill the threads."""
for service in self.services:
service.stop()
# Each service has performed cleanup, now signal that the run_service
# wrapper threads can now die:
if not self.done.ready():
self.done.send()
# reap threads:
self.tg.stop()
def wait(self):
"""Wait for services to shut down."""
for service in self.services:
service.wait()
self.tg.wait()
def restart(self):
"""Reset services and start them in new threads."""
self.stop()
self.done = event.Event()
for restart_service in self.services:
restart_service.reset()
self.tg.add_thread(self.run_service, restart_service, self.done)
@staticmethod
def run_service(service, done):
"""Service start wrapper.
:param service: service to run
:param done: event to wait on until a shutdown is triggered
:returns: None
"""
service.start()
done.wait()
def launch(conf, service, workers=1):
"""Launch a service with a given number of workers.
:param conf: an instance of ConfigOpts
:param service: a service to launch, must be an instance of
:class:`oslo_service.service.ServiceBase`
:param workers: a number of processes in which a service will be running
:returns: instance of a launcher that was used to launch the service
"""
if workers is None or workers == 1:
launcher = ServiceLauncher(conf)
launcher.launch_service(service)
else:
launcher = ProcessLauncher(conf)
launcher.launch_service(service, workers=workers)
return launcher
| poznyakandrey/oslo.service | oslo_service/service.py | Python | apache-2.0 | 20,955 |
"""Implementation of treadmill admin CLI API invocation.
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from __future__ import unicode_literals
import inspect
import io
import click
import decorator
import jsonschema
from treadmill import authz as authz_mod
from treadmill import cli
from treadmill import context
from treadmill import plugin_manager
from treadmill import utils
from treadmill import yamlwrapper as yaml
from treadmill import api as api_mod
def make_command(parent, name, func):
"""Make a command using reflection on the function."""
# Disable "Too many branches" warning.
#
# pylint: disable=R0912
argspec = decorator.getargspec(func)
args = list(argspec.args)
defaults = argspec.defaults
if defaults is None:
defaults = []
else:
defaults = list(defaults)
@parent.command(name=name, help=func.__doc__)
def command(*args, **kwargs):
"""Constructs a command handler."""
try:
if 'rsrc' in kwargs:
with io.open(kwargs['rsrc'], 'rb') as fd:
kwargs['rsrc'] = yaml.load(stream=fd)
formatter = cli.make_formatter(None)
cli.out(formatter(func(*args, **kwargs)))
except jsonschema.exceptions.ValidationError as input_err:
click.echo(input_err, err=True)
except jsonschema.exceptions.RefResolutionError as res_error:
click.echo(res_error, err=True)
except authz_mod.AuthorizationError as auth_err:
click.echo('Not authorized.', err=True)
click.echo(auth_err, err=True)
except TypeError as type_err:
click.echo(type_err, err=True)
while defaults:
arg = args.pop()
defarg = defaults.pop()
if defarg is not None:
argtype = type(defarg)
else:
argtype = str
if defarg == ():
# redefinition of the type from tuple to list.
argtype = cli.LIST
defarg = None
click.option('--' + arg, default=defarg, type=argtype)(command)
if not args:
return
arg = args.pop(0)
click.argument(arg)(command)
while args:
if len(args) == 1:
arg = args.pop(0)
click.argument(
arg,
type=click.Path(exists=True, readable=True)
)(command)
else:
arg = args.pop(0)
click.argument(arg)(command)
if args:
raise click.UsageError('Non-standard API: %s, %r' % (name, argspec))
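# Rough illustration of the reflection above (hypothetical API method): keyword
# arguments with defaults such as "memory=None" or "tags=()" become "--memory"
# and "--tags" click options (tuple defaults turn into list options), while the
# remaining positional parameters are exposed as click arguments.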
def make_resource_group(ctx, parent, resource_type, api=None):
"""Make click group for a resource type."""
if api is None:
mod = plugin_manager.load('treadmill.api', resource_type)
if not mod:
return
try:
api_cls = getattr(mod, 'API')
api = ctx.build_api(api_cls)
except AttributeError:
return
@parent.group(name=resource_type, help=api.__doc__)
def _rsrc_group():
"""Creates a CLI group for the given resource type."""
pass
for verb in dir(api):
if verb.startswith('__'):
continue
func = getattr(api, verb)
if inspect.isclass(func):
make_resource_group(ctx, _rsrc_group, verb, func)
elif inspect.isfunction(func):
make_command(_rsrc_group, verb, func)
else:
pass
def init():
"""Constructs parent level CLI group."""
ctx = api_mod.Context()
@click.group(name='invoke')
@click.option('--authz', required=False)
@click.option('--cell', required=True,
envvar='TREADMILL_CELL',
callback=cli.handle_context_opt,
expose_value=False)
def invoke_grp(authz):
"""Directly invoke Treadmill API without REST."""
if authz is not None:
ctx.authorizer = authz_mod.ClientAuthorizer(
utils.get_current_username, authz
)
else:
ctx.authorizer = authz_mod.NullAuthorizer()
if cli.OUTPUT_FORMAT is None:
raise click.BadParameter('must use --outfmt [json|yaml]')
for resource in sorted(plugin_manager.names('treadmill.api')):
# TODO: for now, catch the ContextError as endpoint.py and state.py are
# calling context.GLOBAL.zk.conn, which fails, as cell is not set yet
try:
make_resource_group(ctx, invoke_grp, resource)
except context.ContextError:
pass
return invoke_grp
| bretttegart/treadmill | lib/python/treadmill/cli/admin/invoke.py | Python | apache-2.0 | 4,629 |
# -*- coding: utf-8 -*-
# Generated by Django 1.11.3 on 2017-10-12 22:15
from __future__ import unicode_literals
from django.db import migrations
def add_sector_teams(apps, schema_editor):
SectorTeam = apps.get_model('fdi', 'SectorTeam')
sector_teams = {'Aero': 'Aerospace',
'AESC': 'Advanced Engineering and Supply Chains',
'Agri': 'Agriculture',
'Auto': 'Automotive',
'BPS': 'Business and Professional Services',
'Chem': 'Chemicals',
'Creative': 'Creative',
'D&S': 'Defence and Security',
'F&D': 'Food and Drink',
'FS': 'Financial Services',
'Infrastructure': 'Infrastructure',
'Life S': 'Life Sciences',
'Marine': 'Marine',
'Nuclear': 'Nuclear Energy',
'O&G': 'Oil and Gas',
'Rail': 'Railways',
'Renew': 'Renewable Energy',
'Retail': 'Retail',
'Space': 'Space',
'Tech': 'Technology',
'Other': 'Other', }
for name, description in sector_teams.items():
SectorTeam(name=name, description=description).save()
def add_sector_team_sector_mapping(apps, schema_editor):
SectorTeam = apps.get_model('fdi', 'SectorTeam')
Sector = apps.get_model('fdi', 'Sector')
SectorTeamSector = apps.get_model('fdi', 'SectorTeamSector')
mapping = {'Aero': 'Aerospace : Aircraft Design',
'Aero': 'Aerospace : Component Manufacturing : Engines',
'Aero': 'Aerospace : Component Manufacturing',
'Aero': 'Aerospace : Maintenance',
'Aero': 'Aerospace : Manufacturing and Assembly : Aircraft',
'Aero': 'Aerospace : Manufacturing and Assembly : Helicopters',
'Aero': 'Aerospace : Manufacturing and Assembly : UAVs',
'Aero': 'Aerospace : Manufacturing and Assembly',
'Aero': 'Aerospace',
'AESC': 'Advanced Engineering',
'AESC': 'Mechanical Electrical and Process Engineering',
'AESC': 'Metallurgical Process Plant',
'AESC': 'Metals, Minerals and Materials : Ceramics',
'AESC': 'Metals, Minerals and Materials : Composite Materials',
'AESC': 'Metals, Minerals and Materials : Elastomers and Rubbers',
'AESC': 'Metals, Minerals and Materials : Metals',
'AESC': 'Metals, Minerals and Materials : Minerals',
'AESC': 'Metals, Minerals and Materials : Plastics',
'AESC': 'Metals, Minerals and Materials',
'AESC': 'Textiles, Interior Textiles and Carpets',
'Agri': 'Agriculture, Horticulture and Fisheries',
'Auto': 'Automotive : Automotive Maintenance',
'Auto': 'Automotive : Automotive Retail',
'Auto': 'Automotive : Component Manufacturing : Bodies and Coachwork',
'Auto': 'Automotive : Component Manufacturing : Electronic Components',
'Auto': 'Automotive : Component Manufacturing : Engines and Transmission',
'Auto': 'Automotive : Component Manufacturing : Tyres',
'Auto': 'Automotive : Component Manufacturing',
'Auto': 'Automotive : Design Engineering',
'Auto': 'Automotive : Manufacturing and Assembly : Agricultural Machinery',
'Auto': 'Automotive : Manufacturing and Assembly : Bicycles',
'Auto': 'Automotive : Manufacturing and Assembly : Caravans',
'Auto': 'Automotive : Manufacturing and Assembly : Cars',
'Auto': 'Automotive : Manufacturing and Assembly : Containers',
'Auto': 'Automotive : Manufacturing and Assembly : Invalid Carriages',
'Auto': 'Automotive : Manufacturing and Assembly : Lorries',
'Auto': 'Automotive : Manufacturing and Assembly : Motorcycles',
'Auto': 'Automotive : Manufacturing and Assembly : Trailers',
'Auto': 'Automotive : Manufacturing and Assembly : Vans',
'Auto': 'Automotive : Manufacturing and Assembly',
'Auto': 'Automotive : Motorsport',
'Auto': 'Automotive',
'BPS': 'Business (and Consumer) Services : Commercial Real Estate Services',
'BPS': 'Business (and Consumer) Services : Contact Centres',
'BPS': 'Business (and Consumer) Services : HR Services',
'BPS': 'Business (and Consumer) Services : Marketing Services : Market Research',
'BPS': 'Business (and Consumer) Services : Marketing Services',
'BPS': 'Business (and Consumer) Services : Shared Service Centres',
'BPS': 'Business (and Consumer) Services',
'Chem': 'Chemicals : Agricultural Chemicals',
'Chem': 'Chemicals : Basic Chemicals',
'Chem': 'Chemicals : Cleaning Preparations',
'Chem': 'Chemicals : Miscellaneous Chemicals',
'Chem': 'Chemicals : Paint, Coating and Adhesive Products',
'Chem': 'Chemicals : Synthetic Materials',
'Chem': 'Chemicals',
'Creative': 'Creative and Media : Architecture',
'Creative': 'Creative and Media : Art, Design and Creativity : Artistic and Literary Creation',
'Creative': 'Creative and Media : Art, Design and Creativity : Arts Facilities Operation',
'Creative': 'Creative and Media : Art, Design and Creativity : Design',
'Creative': 'Creative and Media : Art, Design and Creativity : Fashion',
'Creative': 'Creative and Media : Art, Design and Creativity : Live Theatrical Presentations',
'Creative': 'Creative and Media : Art, Design and Creativity',
'Creative': 'Creative and Media : Creative and Media Distribution : Film and Video',
'Creative': 'Creative and Media : Creative and Media Distribution',
'Creative': 'Creative and Media : Creative and Media Equipment : Musical Instrument Manufacture',
'Creative': 'Creative and Media : Creative and Media Equipment : Photo and Cinema Equipment',
'Creative': 'Creative and Media : Creative and Media Equipment',
'Creative': 'Creative and Media : Creative and Media Retail : Antiques and Antiquities',
'Creative': 'Creative and Media : Creative and Media Retail : Art',
'Creative': 'Creative and Media : Creative and Media Retail : Books, Newspapers and Stationery',
'Creative': 'Creative and Media : Creative and Media Retail',
'Creative': 'Creative and Media : Creative and Media Wholesaling : Multimedia Sales',
'Creative': 'Creative and Media : Creative and Media Wholesaling : Musical Instruments',
'Creative': 'Creative and Media : Creative and Media Wholesaling : Photographic Goods',
'Creative': 'Creative and Media : Creative and Media Wholesaling',
'Creative': 'Creative and Media : Events and Attractions',
'Creative': 'Creative and Media : Media : Advertising',
'Creative': 'Creative and Media : Media : Film, Photography and Animation',
'Creative': 'Creative and Media : Media : Music',
'Creative': 'Creative and Media : Media : Publishing',
'Creative': 'Creative and Media : Media : TV and Radio',
'Creative': 'Creative and Media : Media : Video Games',
'Creative': 'Creative and Media : Media Reproduction : Printing',
'Creative': 'Creative and Media : Media Reproduction : Reproduction',
'Creative': 'Creative and Media : Media Reproduction',
'Creative': 'Creative and Media : Media',
'Creative': 'Creative and Media',
'D&S': 'Defence and Security',
'D&S': 'Defence',
'D&S': 'Security',
'F&D': 'Food and Drink : Bakery Products',
'F&D': 'Food and Drink : Beverages and Alcoholic Drinks',
'F&D': 'Food and Drink : Brewing',
'F&D': 'Food and Drink : Dairy Products',
'F&D': 'Food and Drink : Food and Drink Manufacturing',
'F&D': 'Food and Drink : Frozen and Chilled Foods',
'F&D': 'Food and Drink : Fruit and Vegetables',
'F&D': 'Food and Drink : Meat Products',
'F&D': 'Food and Drink : Pet Food',
'F&D': 'Food and Drink : Ready Meals',
'F&D': 'Food and Drink : Secondary Food Processing',
'F&D': 'Food and Drink : Tobacco Products',
'F&D': 'Food and Drink',
'FS': 'Financial Services (including Professional Services) : Asset Management',
'FS': 'Financial Services (including Professional Services) : Banking : Commercial Banking',
'FS': 'Financial Services (including Professional Services) : Banking : Investment Banking',
'FS': 'Financial Services (including Professional Services) : Banking : Private Banking',
'FS': 'Financial Services (including Professional Services) : Banking : Retail Banking',
'FS': 'Financial Services (including Professional Services) : Banking',
'FS': 'Financial Services (including Professional Services) : Capital Markets : Hedge Funds',
'FS': 'Financial Services (including Professional Services) : Capital Markets : Private Equity',
'FS': 'Financial Services (including Professional Services) : Capital Markets : Venture Capital',
'FS': 'Financial Services (including Professional Services) : Capital Markets',
'FS': 'Financial Services (including Professional Services) : Foreign Exchange',
'FS': 'Financial Services (including Professional Services) : Insurance : Commercial Insurance',
'FS': 'Financial Services (including Professional Services) : Insurance : Home Insurance',
'FS': 'Financial Services (including Professional Services) : Insurance : Life Insurance',
'FS': 'Financial Services (including Professional Services) : Insurance : Motor Insurance',
'FS': 'Financial Services (including Professional Services) : Insurance : Travel Insurance',
'FS': 'Financial Services (including Professional Services) : Insurance',
'FS': 'Financial Services (including Professional Services) : Listings',
'FS': 'Financial Services (including Professional Services) : Professional Services : Accountancy Services',
'FS': 'Financial Services (including Professional Services) : Professional Services : Legal Services',
'FS': 'Financial Services (including Professional Services) : Professional Services : Management Consultancy',
'FS': 'Financial Services (including Professional Services) : Professional Services',
'FS': 'Financial Services (including Professional Services)',
'FS': 'Software and Computer Services Business to Business (B2B) : Financial Applications',
'Infrastructure': 'Airports',
'Infrastructure': 'Construction',
'Infrastructure': 'Environment : Air Pollution and Noise Control',
'Infrastructure': 'Environment : Environmental Monitoring',
'Infrastructure': 'Environment : Fuel Cells',
'Infrastructure': 'Environment : Marine Pollution Control',
'Infrastructure': 'Environment : Sanitation and Remediation',
'Infrastructure': 'Environment : Waste Management : Hazardous Waste Management',
'Infrastructure': 'Environment : Waste Management : Non-Metal Waste and Scrap Recycling',
'Infrastructure': 'Environment : Waste Management : Sewage Collection and Treatment',
'Infrastructure': 'Environment : Waste Management',
'Infrastructure': 'Environment : Waste to Energy',
'Infrastructure': 'Environment : Water Management',
'Infrastructure': 'Environment and Water',
'Infrastructure': 'Environment',
'Infrastructure': 'Global Sports Projects : Major Events',
'Infrastructure': 'Global Sports Projects',
'Infrastructure': 'Mass Transport',
'Infrastructure': 'Mining',
'Infrastructure': 'Ports and Logistics',
'Infrastructure': 'Water',
'Life S': 'Biotechnology and Pharmaceuticals : Bio and Pharma Marketing and Sales : Bio and Pharma Retail',
'Life S': 'Biotechnology and Pharmaceuticals : Bio and Pharma Marketing and Sales : Bio and Pharma Wholesale',
'Life S': 'Biotechnology and Pharmaceuticals : Bio and Pharma Marketing and Sales',
'Life S': 'Biotechnology and Pharmaceuticals : Biotechnology : Agribio',
'Life S': 'Biotechnology and Pharmaceuticals : Biotechnology : Biodiagnostics',
'Life S': 'Biotechnology and Pharmaceuticals : Biotechnology : Biomanufacturing',
'Life S': 'Biotechnology and Pharmaceuticals : Biotechnology : Bioremediation',
'Life S': 'Biotechnology and Pharmaceuticals : Biotechnology : Biotherapeutics',
'Life S': 'Biotechnology and Pharmaceuticals : Biotechnology : Industrialbio',
'Life S': 'Biotechnology and Pharmaceuticals : Biotechnology : Platform Technologies',
'Life S': 'Biotechnology and Pharmaceuticals : Biotechnology',
'Life S': 'Biotechnology and Pharmaceuticals : Clinical Trials',
'Life S': 'Biotechnology and Pharmaceuticals : Lab Services : Contract Research',
'Life S': 'Biotechnology and Pharmaceuticals : Lab Services : Reagents, Consumables and Instruments',
'Life S': 'Biotechnology and Pharmaceuticals : Lab Services',
'Life S': 'Biotechnology and Pharmaceuticals : Pharmaceuticals : Basic Pharmaceutical Products',
'Life S': 'Biotechnology and Pharmaceuticals : Pharmaceuticals : Drug Discovery',
'Life S': 'Biotechnology and Pharmaceuticals : Pharmaceuticals : Drug Manufacture',
'Life S': 'Biotechnology and Pharmaceuticals : Pharmaceuticals : Neutraceuticals',
'Life S': 'Biotechnology and Pharmaceuticals : Pharmaceuticals',
'Life S': 'Biotechnology and Pharmaceuticals : Vaccines',
'Life S': 'Biotechnology and Pharmaceuticals',
'Life S': 'Healthcare and Medical : Healthcare Marketing and Sales : Healthcare Retail',
'Life S': 'Healthcare and Medical : Healthcare Marketing and Sales : Healthcare Wholesale',
'Life S': 'Healthcare and Medical : Healthcare Marketing and Sales',
'Life S': 'Healthcare and Medical : Healthcare Services : Dentists',
'Life S': 'Healthcare and Medical : Healthcare Services : Medical Practice',
'Life S': 'Healthcare and Medical : Healthcare Services : Nursing Homes',
'Life S': 'Healthcare and Medical : Healthcare Services : Private Sector',
'Life S': 'Healthcare and Medical : Healthcare Services : Public Sector',
'Life S': 'Healthcare and Medical : Healthcare Services : Vets',
'Life S': 'Healthcare and Medical : Healthcare Services',
'Life S': 'Healthcare and Medical : Medical Consumables',
'Life S': 'Healthcare and Medical : Medical Devices and Systems : Optical Precision Instruments',
'Life S': 'Healthcare and Medical : Medical Devices and Systems',
'Life S': 'Healthcare and Medical : Medical Equipment : Dental Aesthetics',
'Life S': 'Healthcare and Medical : Medical Equipment : Glass',
'Life S': 'Healthcare and Medical : Medical Equipment : Spectacles and Unmounted Lenses',
'Life S': 'Healthcare and Medical : Medical Equipment',
'Life S': 'Healthcare and Medical : Medical Lab Services',
'Life S': 'Healthcare and Medical',
'Life S': 'Life Sciences',
'Marine': 'Marine',
'Nuclear': 'Power : Nuclear : Nuclear De-commissiong',
'Nuclear': 'Power : Nuclear',
'O&G': 'Oil and Gas',
'Other': 'Clothing, Footwear and Fashion : Clothing : Workwear',
'Other': 'Clothing, Footwear and Fashion : Clothing',
'Other': 'Clothing, Footwear and Fashion : Footwear',
'Other': 'Clothing, Footwear and Fashion',
'Other': 'Education and Training',
'Other': 'Energy',
'Other': 'Giftware, Jewellery and Tableware',
'Other': 'Household Goods, Furniture and Furnishings',
'Other': 'Leisure and Tourism : Gaming : Casino Gambling',
'Other': 'Leisure and Tourism : Gaming : Mass-Market Gambling',
'Other': 'Leisure and Tourism : Gaming',
'Other': 'Leisure and Tourism : Sports and Leisure Infrastructure',
'Other': 'Leisure and Tourism',
'Other': 'Power',
'Rail': 'Railways',
'Renew': 'Renewable Energy : Biomass',
'Renew': 'Renewable Energy : Geothermal',
'Renew': 'Renewable Energy : Hydro',
'Renew': 'Renewable Energy : Solar',
'Renew': 'Renewable Energy : Tidal',
'Renew': 'Renewable Energy : Wave',
'Renew': 'Renewable Energy : Wind : Renewable energy: Wind: Offshore',
'Renew': 'Renewable Energy : Wind : Renewable energy: Wind: Onshore',
'Renew': 'Renewable Energy : Wind',
'Renew': 'Renewable Energy',
'Retail': 'Retail',
'Space': 'Aerospace : Manufacturing and Assembly : Space Technology',
'Tech': 'Communications : Broadband',
'Tech': 'Communications : Communications Wholesale',
'Tech': 'Communications : Convergent',
'Tech': 'Communications : Fixed Line',
'Tech': 'Communications : Mobile : 3G Services',
'Tech': 'Communications : Mobile : GSM',
'Tech': 'Communications : Mobile',
'Tech': 'Communications : Retail',
'Tech': 'Communications : Wireless : Wi-Fi',
'Tech': 'Communications : Wireless : Wi-Max',
'Tech': 'Communications : Wireless',
'Tech': 'Communications',
'Tech': 'Electronics and IT Hardware : Electronic Instruments',
'Tech': 'Electronics and IT Hardware : Electronics and IT Technologies : Broadcasting',
'Tech': 'Electronics and IT Hardware : Electronics and IT Technologies : Component Technologies',
'Tech': 'Electronics and IT Hardware : Electronics and IT Technologies : Computing',
'Tech': 'Electronics and IT Hardware : Electronics and IT Technologies : Display Technologies',
'Tech': 'Electronics and IT Hardware : Electronics and IT Technologies : Network Technologies',
'Tech': 'Electronics and IT Hardware : Electronics and IT Technologies : Security Technologies',
'Tech': 'Electronics and IT Hardware : Electronics and IT Technologies',
'Tech': 'Electronics and IT Hardware',
'Tech': 'ICT',
'Tech': 'Security : Cyber Security',
'Tech': 'Software and Computer Services Business to Business (B2B) : Biometrics',
'Tech': 'Software and Computer Services Business to Business (B2B) : E-Procurement',
'Tech': 'Software and Computer Services Business to Business (B2B) : Healthcare Applications',
'Tech': 'Software and Computer Services Business to Business (B2B) : Industry Applications',
'Tech': 'Software and Computer Services Business to Business (B2B) : Online Retailing',
'Tech': 'Software and Computer Services Business to Business (B2B) : Security Related Software',
'Tech': 'Software and Computer Services Business to Business (B2B) : Support Services : Equipment Maintenance and Repair',
'Tech': 'Software and Computer Services Business to Business (B2B) : Support Services : Internet Service Providers',
'Tech': 'Software and Computer Services Business to Business (B2B) : Support Services',
'Tech': 'Software and Computer Services Business to Business (B2B)',
}
for team_name, sector_name in mapping.items():
sector_team = SectorTeam.objects.get(name=team_name)
sector = Sector.objects.get(name=sector_name)
SectorTeamSector(team=sector_team, sector=sector).save()
class Migration(migrations.Migration):
dependencies = [
('fdi', '0019_auto_20171016_1038'),
]
operations = [
migrations.RunPython(add_sector_teams),
migrations.RunPython(add_sector_team_sector_mapping),
]
| UKTradeInvestment/export-wins-data | fdi/migrations/0020_auto_20171012_2215.py | Python | gpl-3.0 | 21,669 |
#!/usr/bin/env python
import werkzeug
import datetime
import hashlib
import uuid
import sys
from sqlalchemy import Integer, Column, String, DateTime, Enum
from sqlalchemy import create_engine, Text
from sqlalchemy.orm import Session
from sqlalchemy.ext.declarative import declarative_base, declared_attr
# TODO fetch this from a config
engine = create_engine("sqlite:////tmp/quotepy.sqlite")
session = Session(engine)
class Base(object):
"""Base class which provides automated table names
and a primary key column."""
@declared_attr
def __tablename__(cls):
return cls.__name__.lower()
id = Column(Integer, primary_key=True)
Base = declarative_base(cls=Base)
class HasDates(object):
"""Define attributes present on all dated content."""
pub_date = Column(DateTime)
chg_date = Column(DateTime)
class Quote(HasDates, Base):
raw = Column(Text)
# We also need an accepted date
acc_date = Column(DateTime)
# We could have votes in a separate table but meh
score = Column(Integer, default=0)
status = Column(Enum("pending", "accepted", "removed"))
def _create_id(self):
# XXX This should organically grow as more is used, probably depending
# on how often collissions occur.
# Aside from that we should never repeat hashes which have been used before
# without keeping the pastes in the database.
return hashlib.sha224(str(uuid.uuid4())).hexdigest()[:8]
def __init__(self, raw):
self.pub_date = datetime.datetime.utcnow()
self.chg_date = datetime.datetime.utcnow()
self.raw = raw
self.status = "pending"
def __repr__(self):
return "<Quote(quote_id=%s)>" % (self.quote_id,)
| x89/quotepy | quotepy/models.py | Python | mit | 1,736 |
# Copyright 2020 Akretion Renato Lima <[email protected]>
# License AGPL-3.0 or later (https://www.gnu.org/licenses/agpl).
{
"name": "Sale MRP BOM",
"category": "Sale",
"license": "AGPL-3",
"author": "Akretion, Odoo Community Association (OCA)",
"version": "14.0.1.0.0",
"website": "https://github.com/OCA/sale-workflow",
"summary": "Allows define a BOM in the sales lines.",
"depends": [
"sale_stock",
"mrp",
],
"data": [
"security/security.xml",
"views/sale_order.xml",
"views/sale_order_line.xml",
],
"installable": True,
}
| OCA/sale-workflow | sale_mrp_bom/__manifest__.py | Python | agpl-3.0 | 625 |
"""
Role Assigner cog.
Randomly assigns roles to users.
"""
import os # Used to create folder path.
import random
import itertools
import discord
from discord.ext import commands
from __main__ import send_cmd_help # pylint: disable=no-name-in-module
from .utils import config, checks # pylint: disable=relative-beyond-top-level
SAVE_FOLDER = "data/lui-cogs/roleAssigner"
MAX_LENGTH = 2000 # For a message
def checkFolder():
"""Used to create the data folder at first startup"""
if not os.path.exists(SAVE_FOLDER):
print("Creating {} folder...".format(SAVE_FOLDER))
os.makedirs(SAVE_FOLDER)
class RoleAssigner:
"""Randomly assign roles to users."""
def __init__(self, bot):
self.bot = bot
self.config = config.Config("settings.json",
cogname="lui-cogs/roleAssigner")
self.roles = self.config.get("roles")
@checks.mod_or_permissions(manage_messages=True)
@commands.group(name="roleassigner", aliases=["ra"], pass_context=True,
no_pm=True)
async def roleAssigner(self, ctx):
"""Role assigner, one role per user from a list of roles."""
if ctx.invoked_subcommand is None:
await send_cmd_help(ctx)
@roleAssigner.command(name="add", pass_context=False)
async def raAdd(self, roleName: discord.Role):
"""Add a role to be randomly assigned."""
if not self.roles:
self.roles = []
elif roleName.id in self.roles:
await self.bot.say(":warning: **Role Assigner - Add:** The role "
"already exists in the list!")
return
self.roles.append(roleName.id)
await self.config.put("roles", self.roles)
await self.bot.say(":white_check_mark: **Role Assigner - Add:** Role "
"added")
@roleAssigner.command(name="remove", pass_context=False,
aliases=["del", "rm"])
async def raRemove(self, roleName: discord.Role):
"""Remove a role to be randomly assigned."""
if not self.roles: # pylint: disable=no-else-return
await self.bot.say(":warning: **Role Assigner - Remove:** There are "
"no roles on the list. Please add one first!")
return
elif roleName.id not in self.roles:
await self.bot.say(":warning: **Role Assigner - Remove:** The role "
"doesn't exist on the list!")
return
self.roles.remove(roleName.id)
await self.config.put("roles", self.roles)
await self.bot.say(":white_check_mark: **Role Assigner - Remove:** "
"Role removed.")
@roleAssigner.command(name="list", pass_context=True)
async def raList(self, ctx):
"""List roles for random assignment."""
msg = ":information_source: **Role Assigner - List:** One of the " \
"following roles will be assigned to each user:\n"
if not self.roles:
await self.bot.say(":warning: **Role Assigner - List:** No roles "
"added!")
return
msg += "```\n"
for roleId in self.roles:
roleName = discord.utils.get(ctx.message.server.roles, id=roleId)
msg += "{}\n".format(roleName)
msg += "```"
await self.bot.say(msg)
@roleAssigner.command(name="assign", pass_context=True)
async def raAssign(self, ctx, role: discord.Role = None):
"""
Randomly assign roles to users.
Optionally apply to a subset of users with a certain role.
"""
users = ctx.message.server.members
if role:
users = [user for user in users if role in user.roles]
numberOfRoles = len(self.roles)
msgId = await self.bot.say(":hourglass: **Role Assigner - Assign:** "
"Assigning roles, please wait...")
roles = []
roleList = ctx.message.server.roles
for roleId in self.roles:
roleObject = discord.utils.get(roleList, id=roleId)
roles.append(roleObject)
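        # Hand the roles out round-robin: user N receives roles[N % numberOfRoles],
        # so e.g. with three roles the fourth user wraps back to the first role.
        # Users already holding one of the listed roles are left untouched.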
for index, user in enumerate(users):
anyRoles = [i for i in user.roles if i in roles]
if not anyRoles:
await self.bot.add_roles(user, roles[index % numberOfRoles])
msg = ":white_check_mark: **Role Assigner - Assign:** Roles assigned"
if role:
msg += " to users with the {} role.".format(role.name)
else:
msg += "."
await self.bot.edit_message(msgId, msg)
@roleAssigner.command(name="unassign", pass_context=True)
async def raUnassign(self, ctx, role: discord.Role = None):
"""Remove roles on the list from ALL users"""
users = ctx.message.server.members
if role:
users = [user for user in users if role in user.roles]
msgId = await self.bot.say(":hourglass: **Role Assigner - Unassign:** "
"Unassigning roles, please wait...")
roles = []
roleList = ctx.message.server.roles
for roleId in self.roles:
roleObject = discord.utils.get(roleList, id=roleId)
roles.append(roleObject)
for userObject, roleObject in itertools.product(users, roles):
if roleObject in userObject.roles:
await self.bot.remove_roles(userObject, roleObject)
msg = ":white_check_mark: **Role Assigner - Unassign:** Roles removed"
if role:
msg += " from users with the {} role.".format(role.name)
else:
msg += "."
await self.bot.edit_message(msgId, msg)
@roleAssigner.command(name="random", pass_context=True)
async def raRandom(self, ctx, fromRole: discord.Role, number: int,
assignRole: discord.Role,
excludeFromRole: discord.Role = None):
"""Assign a role to some users from a certain role.
Pick `number` of users from fromRole at random, and assign assignRole to
those users.
"""
if number <= 0:
await self.bot.say(":negative_squared_cross_mark: **Role Assigner - "
"Random:** Please enter a positive number!")
return
users = ctx.message.server.members
if excludeFromRole:
eligibleUsers = [user for user in users if fromRole in user.roles and
excludeFromRole not in user.roles and assignRole not
in user.roles]
else:
eligibleUsers = [user for user in users if fromRole in user.roles and
assignRole not in user.roles]
if number > len(eligibleUsers):
# Assign role to all eligible users.
picked = eligibleUsers
else:
# Randomize and select the first `number` users.
random.shuffle(eligibleUsers)
picked = eligibleUsers[0:number]
if not picked:
await self.bot.say(":negative_squared_cross_mark: **Role Assigner - "
"Random:** Nobody was eligible to be assigned!")
return
status = await self.bot.say(":hourglass: **Role Assigner - Random:** Randomly "
"picking users from the role **{}** and assigning "
"them to the role **{}**. Please wait...\n"
"Users being assigned:"
.format(fromRole.name, assignRole.name))
msg = "**|** "
for user in picked:
await self.bot.add_roles(user, assignRole)
if len(msg) > MAX_LENGTH:
await self.bot.say(msg)
msg = "**|** "
msg += "{} **|** ".format(user.name)
await self.bot.say(msg)
msg = (":white_check_mark: **Role Assigner - Random:** The following users "
"were picked from the **{}** role and assigned to the role **{}**:"
.format(fromRole.name, assignRole.name))
await self.bot.edit_message(status, msg)
def setup(bot):
"""Add the cog to the bot."""
checkFolder()
bot.add_cog(RoleAssigner(bot))
| Injabie3/Red-DiscordBot | cogs/role_assigner.py | Python | gpl-3.0 | 8,368 |
# Playlist.py
#
# reads all available playlists, adjusts song paths, removes not copied songs,
# writes resulting playlist to destination
import mlsSong as sng
import config
import glob
import os
import sys
import codecs
def Playlist():
# get a list of all playlists
playlists = glob.glob(config.SOURCE_PLAYLISTFOLDER + "\\*.m3u*")
# keep only the file name
for (i, playlist) in enumerate(playlists):
(filepath, filename) = os.path.split(playlist)
playlists[i] = filename
# Winamp fail: playlists are saved with pretty random-looking names.
# Look up the new names in a look-up file. Playlists that are not found
# won't be copied.
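    # PLAYLIST_LUT is expected to be a sequence of (old name, new name) pairs,
    # e.g. config.PLAYLIST_LUT = [("plf4A63.m3u8", "Favourites.m3u8")]
    # (the file names here are made-up examples).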
for oldPlaylist in playlists:
newPlaylist = ""
for lutPlaylist in config.PLAYLIST_LUT:
print oldPlaylist
print lutPlaylist[0]
if lutPlaylist[0] == oldPlaylist:
newPlaylist = lutPlaylist[1]
print "Playlist name conversion: from", oldPlaylist, "to", newPlaylist
break
if newPlaylist == "":
print "No playlist name conversion found for", oldPlaylist
break
# "s" as in Source_playlist
# -------------------------
# open source playlist
try:
s = codecs.open(config.SOURCE_PLAYLISTFOLDER + "\\" + oldPlaylist, 'r', encoding='UTF-8')
## s = open(config.SOURCE_PLAYLISTFOLDER + "\\" + oldPlaylist, 'r')
except:
print "Playlist", oldPlaylist, "could not be read!"
continue
# "d" as in Destination_playlist
# ------------------------------
# check if destination playlist file already exists
try:
d = open(config.DEST_PLAYLISTFOLDER + "\\" + newPlaylist, 'r')
except:
# file does not exist, create it
d = open(config.DEST_PLAYLISTFOLDER + "\\" + newPlaylist, 'w')
else:
# file already exists, delete it and create a new one
d.close()
os.remove(config.DEST_PLAYLISTFOLDER + "\\" + newPlaylist)
d = open(config.DEST_PLAYLISTFOLDER + "\\" + newPlaylist, 'w')
# write header line
d.write("#EXTM3U\n")
        # read the playlist line by line; the first line of an extended
        # playlist is '#EXTM3U', EXTINF lines describe the following entry
        extm3u = False
        a = ""
        for line in s:
            line = line.rstrip("\r\n")
            if not line:
                continue
            if line.startswith('#EXTM3U'):
                print "EXTM3U playlist."
                extm3u = True
                continue
            if line.startswith('#EXTINF'):
                # 'EXTINF:' song.trackLength,Artist - Title
                # This line can be left unchanged.
                a = line + "\n"
                continue
            # line is a file path
            print line
            # if the path is relative, make it absolute (M3U paths are
            # relative to the folder containing the playlist)
            if not os.path.isabs(line):
                line = os.path.normpath(
                    os.path.join(config.SOURCE_PLAYLISTFOLDER, line))
            # find the song that was copied from this path
            # (assumes mlsSong.fileNameOld holds the source path and
            # mlsSong.fileNameNew the path at the destination)
            song = None
            for candidate in config.songList:
                if candidate.fileNameOld == line:
                    song = candidate
                    break
            if song is None or not song.added:
                # song was not copied; don't add it to the playlist
                a = ""
                continue
            # write new path to b
            b = song.fileNameNew + "\n"
            if not extm3u or a == "":
                # create line a from the song metadata
                a = "#EXTINF:" + song.trackLength + ","
                a = a + song.trackArtist + " - "
                a = a + song.trackTitle + "\n"
            d.write(a)
            d.write(b)
            a = ""
s.close()
d.close()
| RalpH-himself/MusicLibrarySyncForMSC | mlsPlaylist.py | Python | gpl-3.0 | 3,927 |
# encoding: utf-8
from __future__ import unicode_literals
from .mtv import MTVServicesInfoExtractor
class SouthParkIE(MTVServicesInfoExtractor):
IE_NAME = 'southpark.cc.com'
_VALID_URL = r'https?://(?:www\.)?(?P<url>southpark\.cc\.com/(?:clips|full-episodes)/(?P<id>.+?)(\?|#|$))'
_FEED_URL = 'http://www.southparkstudios.com/feeds/video-player/mrss'
_TESTS = [{
'url': 'http://southpark.cc.com/clips/104437/bat-daded#tab=featured',
'info_dict': {
'id': 'a7bff6c2-ed00-11e0-aca6-0026b9414f30',
'ext': 'mp4',
'title': 'South Park|Bat Daded',
'description': 'Randy disqualifies South Park by getting into a fight with Bat Dad.',
'timestamp': 1112760000,
'upload_date': '20050406',
},
}]
class SouthParkEsIE(SouthParkIE):
IE_NAME = 'southpark.cc.com:español'
_VALID_URL = r'https?://(?:www\.)?(?P<url>southpark\.cc\.com/episodios-en-espanol/(?P<id>.+?)(\?|#|$))'
_LANG = 'es'
_TESTS = [{
'url': 'http://southpark.cc.com/episodios-en-espanol/s01e01-cartman-consigue-una-sonda-anal#source=351c1323-0b96-402d-a8b9-40d01b2e9bde&position=1&sort=!airdate',
'info_dict': {
'title': 'Cartman Consigue Una Sonda Anal',
'description': 'Cartman Consigue Una Sonda Anal',
},
'playlist_count': 4,
}]
class SouthParkDeIE(SouthParkIE):
IE_NAME = 'southpark.de'
_VALID_URL = r'https?://(?:www\.)?(?P<url>southpark\.de/(?:clips|alle-episoden)/(?P<id>.+?)(\?|#|$))'
_FEED_URL = 'http://www.southpark.de/feeds/video-player/mrss/'
_TESTS = [{
'url': 'http://www.southpark.de/clips/uygssh/the-government-wont-respect-my-privacy#tab=featured',
'info_dict': {
'id': '85487c96-b3b9-4e39-9127-ad88583d9bf2',
'ext': 'mp4',
'title': 'South Park|The Government Won\'t Respect My Privacy',
'description': 'Cartman explains the benefits of "Shitter" to Stan, Kyle and Craig.',
'timestamp': 1380160800,
'upload_date': '20130926',
},
}, {
# non-ASCII characters in initial URL
'url': 'http://www.southpark.de/alle-episoden/s18e09-hashtag-aufwärmen',
'info_dict': {
'title': 'Hashtag „Aufwärmen“',
'description': 'Kyle will mit seinem kleinen Bruder Ike Videospiele spielen. Als der nicht mehr mit ihm spielen will, hat Kyle Angst, dass er die Kids von heute nicht mehr versteht.',
},
'playlist_count': 3,
}, {
# non-ASCII characters in redirect URL
'url': 'http://www.southpark.de/alle-episoden/s18e09',
'info_dict': {
'title': 'Hashtag „Aufwärmen“',
'description': 'Kyle will mit seinem kleinen Bruder Ike Videospiele spielen. Als der nicht mehr mit ihm spielen will, hat Kyle Angst, dass er die Kids von heute nicht mehr versteht.',
},
'playlist_count': 3,
}]
class SouthParkNlIE(SouthParkIE):
IE_NAME = 'southpark.nl'
_VALID_URL = r'https?://(?:www\.)?(?P<url>southpark\.nl/(?:clips|full-episodes)/(?P<id>.+?)(\?|#|$))'
_FEED_URL = 'http://www.southpark.nl/feeds/video-player/mrss/'
_TESTS = [{
'url': 'http://www.southpark.nl/full-episodes/s18e06-freemium-isnt-free',
'info_dict': {
'title': 'Freemium Isn\'t Free',
'description': 'Stan is addicted to the new Terrance and Phillip mobile game.',
},
'playlist_mincount': 3,
}]
class SouthParkDkIE(SouthParkIE):
IE_NAME = 'southparkstudios.dk'
_VALID_URL = r'https?://(?:www\.)?(?P<url>southparkstudios\.dk/(?:clips|full-episodes)/(?P<id>.+?)(\?|#|$))'
_FEED_URL = 'http://www.southparkstudios.dk/feeds/video-player/mrss/'
_TESTS = [{
'url': 'http://www.southparkstudios.dk/full-episodes/s18e07-grounded-vindaloop',
'info_dict': {
'title': 'Grounded Vindaloop',
'description': 'Butters is convinced he\'s living in a virtual reality.',
},
'playlist_mincount': 3,
}]
| Rudloff/youtube-dl | youtube_dl/extractor/southpark.py | Python | unlicense | 4,108 |
#!/usr/bin/python
#
# Copyright 2008, Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""This code sample creates a new campaign with ad scheduling."""
import SOAPpy
# Provide AdWords login information.
email = 'INSERT_LOGIN_EMAIL_HERE'
password = 'INSERT_PASSWORD_HERE'
client_email = 'INSERT_CLIENT_LOGIN_EMAIL_HERE'
useragent = 'INSERT_COMPANY_NAME: AdWords API Python Sample Code'
developer_token = 'INSERT_DEVELOPER_TOKEN_HERE'
application_token = 'INSERT_APPLICATION_TOKEN_HERE'
# Define SOAP headers.
headers = SOAPpy.Types.headerType()
headers.email = email
headers.password = password
headers.clientEmail = client_email
headers.useragent = useragent
headers.developerToken = developer_token
headers.applicationToken = application_token
# Set up service connection. To view XML request/response, change value of
# campaign_service.config.debug to 1. To send requests to production
# environment, replace "sandbox.google.com" with "adwords.google.com".
namespace = 'https://sandbox.google.com/api/adwords/v12'
campaign_service = SOAPpy.SOAPProxy(namespace + '/CampaignService',
header=headers)
campaign_service.config.debug = 0
# Create new campaign structure with ad scheduling set to show ads on Monday,
# Wednesday, and Friday from 8:00am to 5:00pm. Each bid is multiplied by 1.0.
interval_template = """
<intervals>
<day>%s</day>
<endHour>%s</endHour>
<endMinute>%s</endMinute>
<multiplier>%s</multiplier>
<startHour>%s</startHour>
<startMinute>%s</startMinute>
</intervals>"""
schedule_template = """
%s
<status>%s</status>"""
days = ['Monday', 'Wednesday', 'Friday']
intervals = ''
for index in range(len(days)):
intervals += interval_template % (days[index], '17', '0', '1.0', '8', '0')
schedule = SOAPpy.Types.untypedType(schedule_template % (intervals, 'Enabled'))
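# The Monday entry produced by interval_template above essentially renders as:
#
#   <intervals>
#     <day>Monday</day>
#     <endHour>17</endHour>
#     <endMinute>0</endMinute>
#     <multiplier>1.0</multiplier>
#     <startHour>8</startHour>
#     <startMinute>0</startMinute>
#   </intervals>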
# Create new campaign structure.
campaign = {
'name': 'Sample Campaign',
'budgetAmount': SOAPpy.Types.untypedType('100000'),
'budgetPeriod': SOAPpy.Types.untypedType('Daily'),
'geoTargeting': {'countryTargets': {'countries': ['US']}},
'languageTargeting': {'languages': ['en']},
'schedule': schedule
}
# Add campaign.
campaign = campaign_service.addCampaign(campaign)
# Display new campaign.
print 'New campaign with name "%s" and id "%s" was created.' % \
(campaign['name'], campaign['id'])
| chrisjowen/nLess | lib/PEG_GrammarExplorer/PEG_GrammarExplorer/PegSamples/python_2_5_2/input/adwords/awapi_python_samples_1.0.0/src/add_campaign.py | Python | apache-2.0 | 2,901 |
# Authors:
# Rob Crittenden <[email protected]>
# Pavel Zuna <[email protected]>
#
# Copyright (C) 2008 Red Hat
# see file 'COPYING' for use and warranty information
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
"""
Test the `ipaserver/plugins/automount.py' module.
"""
import textwrap
import tempfile
import shutil
import pytest
from ipalib import api
from ipalib import errors
from ipapython.dn import DN
import six
from nose.tools import raises, assert_raises # pylint: disable=E0611
from ipatests.test_xmlrpc.xmlrpc_test import XMLRPC_test, assert_attr_equal
from ipatests.util import assert_deepequal
if six.PY3:
unicode = str
class MockTextui(list):
"""Collects output lines"""
# Extend the mock object if other textui methods are called
def print_plain(self, line):
self.append(unicode(line))
class AutomountTest(XMLRPC_test):
"""Provides common functionality for automount tests"""
locname = u'testlocation'
tofiles_output = '' # To be overridden
def check_tofiles(self):
"""Check automountlocation_tofiles output against self.tofiles_output
"""
res = api.Command['automountlocation_tofiles'](self.locname)
mock_ui = MockTextui()
command = api.Command['automountlocation_tofiles']
command.output_for_cli(mock_ui, res, self.locname, version=u'2.88')
expected_output = self.tofiles_output
assert_deepequal(expected_output, u'\n'.join(mock_ui))
def check_import_roundtrip(self):
"""Check automountlocation_tofiles/automountlocation_import roundtrip
Loads self.tofiles_output (which should correspond to
automountlocation_tofiles output), then checks the resulting map
against tofiles_output again.
Do not use this if the test creates maps that aren't connected to
auto.master -- these can't be imported successfully.
"""
conf_directory = tempfile.mkdtemp()
# Parse the tofiles_output into individual files, replace /etc/ by
# our temporary directory name
current_file = None
for line in self.tofiles_output.splitlines():
line = line.replace('/etc/', '%s/' % conf_directory)
if line.startswith(conf_directory) and line.endswith(':'):
current_file = open(line.rstrip(':'), 'w')
elif '--------' in line:
current_file.close()
elif line.startswith('maps not connected to '):
break
else:
current_file.write(line + '\n')
        assert current_file is not None, ('The input file does not contain any '
'records of files to be opened.')
current_file.close()
self.failsafe_add(api.Object.automountlocation, self.locname)
try:
# Feed the files to automountlocation_import & check
master_file = u'%s/auto.master' % conf_directory
automountlocation_import = api.Command['automountlocation_import']
res = automountlocation_import(self.locname, master_file,
version=u'2.88')
assert_deepequal(dict(
result=dict(
keys=lambda k: k,
maps=lambda m: m,
skipped=(),
duplicatemaps=(),
duplicatekeys=(),
)), res)
self.check_tofiles()
finally:
res = api.Command['automountlocation_del'](self.locname)['result']
assert res
assert not res['failed']
# Success; delete the temporary directory
shutil.rmtree(conf_directory)
@pytest.mark.tier1
class test_automount(AutomountTest):
"""
Test the `automount` plugin.
"""
mapname = u'testmap'
keyname = u'testkey'
keyname_rename = u'testkey_rename'
keyname2 = u'testkey2'
description = u'description of map'
info = u'ro'
newinfo = u'rw'
map_kw = {'automountmapname': mapname, 'description': description, 'raw': True}
key_kw = {'automountkey': keyname, 'automountinformation': info, 'raw': True}
key_kw2 = {'automountkey': keyname2, 'automountinformation': info, 'raw': True}
tofiles_output = textwrap.dedent(u"""
/etc/auto.master:
/-\t/etc/auto.direct
---------------------------
/etc/auto.direct:
maps not connected to /etc/auto.master:
---------------------------
/etc/testmap:
testkey2\tro
""").strip()
def test_0_automountlocation_add(self):
"""
        Test adding a location using the `xmlrpc.automountlocation_add` method.
"""
ret = self.failsafe_add(
api.Object.automountlocation, self.locname
)
entry = ret['result']
assert_attr_equal(entry, 'cn', self.locname)
def test_1_automountmap_add(self):
"""
        Test adding a map using the `xmlrpc.automountmap_add` method.
"""
res = api.Command['automountmap_add'](self.locname, **self.map_kw)['result']
assert res
assert_attr_equal(res, 'automountmapname', self.mapname)
def test_2_automountkey_add(self):
"""
Test adding a key using `xmlrpc.automountkey_add` method.
"""
res = api.Command['automountkey_add'](self.locname, self.mapname, **self.key_kw2)['result']
assert res
assert_attr_equal(res, 'automountkey', self.keyname2)
def test_3_automountkey_add(self):
"""
Test adding a key using `xmlrpc.automountkey_add` method.
"""
res = api.Command['automountkey_add'](self.locname, self.mapname, **self.key_kw)['result']
assert res
assert_attr_equal(res, 'automountkey', self.keyname)
@raises(errors.DuplicateEntry)
def test_4_automountkey_add(self):
"""
Test adding a duplicate key using `xmlrpc.automountkey_add` method.
"""
api.Command['automountkey_add'](
self.locname, self.mapname, **self.key_kw)
def test_5_automountmap_show(self):
"""
Test the `xmlrpc.automountmap_show` method.
"""
res = api.Command['automountmap_show'](self.locname, self.mapname, raw=True)['result']
assert res
assert_attr_equal(res, 'automountmapname', self.mapname)
def test_6_automountmap_find(self):
"""
Test the `xmlrpc.automountmap_find` method.
"""
res = api.Command['automountmap_find'](self.locname, self.mapname, raw=True)['result']
assert_attr_equal(res[0], 'automountmapname', self.mapname)
def test_7_automountkey_show(self):
"""
Test the `xmlrpc.automountkey_show` method.
"""
        showkey_kw = {'automountkey': self.keyname, 'automountinformation': self.info, 'raw': True}
res = api.Command['automountkey_show'](self.locname, self.mapname, **showkey_kw)['result']
assert res
assert_attr_equal(res, 'automountkey', self.keyname)
assert_attr_equal(res, 'automountinformation', self.info)
def test_8_automountkey_find(self):
"""
Test the `xmlrpc.automountkey_find` method.
"""
res = api.Command['automountkey_find'](self.locname, self.mapname, raw=True)['result']
assert res
assert len(res) == 2
assert_attr_equal(res[0], 'automountkey', self.keyname)
assert_attr_equal(res[0], 'automountinformation', self.info)
def test_9_automountkey_mod(self):
"""
Test the `xmlrpc.automountkey_mod` method.
"""
self.key_kw['newautomountinformation'] = self.newinfo
self.key_kw['rename'] = self.keyname_rename
res = api.Command['automountkey_mod'](self.locname, self.mapname, **self.key_kw)['result']
assert res
assert_attr_equal(res, 'automountinformation', self.newinfo)
assert_attr_equal(res, 'automountkey', self.keyname_rename)
def test_a1_automountmap_mod(self):
"""
Test the `xmlrpc.automountmap_mod` method.
"""
mod_kw = {'description': u'new description'}
res = api.Command['automountmap_mod'](self.locname, self.mapname, **mod_kw)['result']
assert res
assert_attr_equal(res, 'description', 'new description')
def test_a2_automountmap_tofiles(self):
"""
Test the `automountlocation_tofiles` command.
"""
res = api.Command['automountlocation_tofiles'](self.locname,
version=u'2.88')
assert_deepequal(dict(
result=dict(
keys={'auto.direct': ()},
orphanmaps=(dict(
dn=DN(('automountmapname', self.mapname),
('cn', self.locname),
('cn', 'automount'), api.env.basedn),
description=(u'new description',),
automountmapname=(u'testmap',)),),
orphankeys=[(
dict(
dn=DN(('description', self.keyname2),
('automountmapname', 'testmap'),
('cn', self.locname),
('cn', 'automount'), api.env.basedn),
automountkey=(self.keyname2,),
description=(self.keyname2,),
automountinformation=(u'ro',),
),
dict(
dn=DN(('description', self.keyname_rename),
('automountmapname', 'testmap'),
('cn', self.locname),
('cn', 'automount'), api.env.basedn),
automountkey=(self.keyname_rename,),
description=(self.keyname_rename,),
automountinformation=(u'rw',),
))],
maps=(
dict(
dn=DN(('description', '/- auto.direct'),
('automountmapname', 'auto.master'),
('cn', self.locname),
('cn', 'automount'), api.env.basedn),
automountkey=(u'/-',),
description=(u'/- auto.direct',),
automountinformation=(u'auto.direct',)
),
))), res)
# Also check the CLI output
self.check_tofiles()
def test_b_automountkey_del(self):
"""
Test the `xmlrpc.automountkey_del` method.
"""
        delkey_kw = {'automountkey': self.keyname_rename, 'automountinformation': self.newinfo}
res = api.Command['automountkey_del'](self.locname, self.mapname, **delkey_kw)['result']
assert res
assert not res['failed']
# Verify that it is gone
with assert_raises(errors.NotFound):
api.Command['automountkey_show'](self.locname, self.mapname, **delkey_kw)
def test_c_automountlocation_del(self):
"""
Test the `xmlrpc.automountlocation_del` method.
"""
res = api.Command['automountlocation_del'](self.locname)['result']
assert res
assert not res['failed']
# Verify that it is gone
with assert_raises(errors.NotFound):
api.Command['automountlocation_show'](self.locname)
def test_d_automountmap_del(self):
"""
Test that the `xmlrpc.automountlocation_del` method removes all maps and keys
"""
# Verify that the second key we added is gone
key_kw = {'automountkey': self.keyname2, 'automountinformation': self.info, 'raw': True}
with assert_raises(errors.NotFound):
api.Command['automountkey_show'](self.locname, self.mapname, **key_kw)
@pytest.mark.tier1
class test_automount_direct(AutomountTest):
"""
    Test the `automount` plugin direct map functionality.
"""
mapname = u'auto.direct2'
keyname = u'/-'
direct_kw = { 'key' : keyname }
tofiles_output = textwrap.dedent(u"""
/etc/auto.master:
/-\t/etc/auto.direct
/-\t/etc/auto.direct2
---------------------------
/etc/auto.direct:
---------------------------
/etc/auto.direct2:
maps not connected to /etc/auto.master:
""").strip()
def test_0_automountlocation_add(self):
"""
Test adding a location.
"""
res = api.Command['automountlocation_add'](self.locname, raw=True)['result']
assert res
assert_attr_equal(res, 'cn', self.locname)
def test_1_automountmap_add_direct(self):
"""
        Test adding a second direct map with different info.
"""
res = api.Command['automountmap_add_indirect'](self.locname, self.mapname, **self.direct_kw)['result']
assert res
assert_attr_equal(res, 'automountmapname', self.mapname)
@raises(errors.DuplicateEntry)
def test_2_automountmap_add_duplicate(self):
"""
Test adding a duplicate direct map.
"""
api.Command['automountmap_add_indirect'](
self.locname, self.mapname, **self.direct_kw)
def test_2a_automountmap_tofiles(self):
"""Test the `automountmap_tofiles` command"""
self.check_tofiles()
def test_3_automountlocation_del(self):
"""
Remove the location.
"""
res = api.Command['automountlocation_del'](self.locname)['result']
assert res
assert not res['failed']
        # Verify that it is gone
with assert_raises(errors.NotFound):
api.Command['automountlocation_show'](self.locname)
def test_z_import_roundtrip(self):
"""Check automountlocation_tofiles/automountlocation_import roundtrip
"""
self.check_import_roundtrip()
@pytest.mark.tier1
class test_automount_indirect(AutomountTest):
"""
Test the `automount` plugin indirect map functionality.
"""
mapname = u'auto.home'
keyname = u'/home'
parentmap = u'auto.master'
map_kw = {'key': keyname, 'parentmap': parentmap, 'raw': True}
key_kw = {'automountkey': keyname, 'automountinformation': mapname}
tofiles_output = textwrap.dedent(u"""
/etc/auto.master:
/-\t/etc/auto.direct
/home\t/etc/auto.home
---------------------------
/etc/auto.direct:
---------------------------
/etc/auto.home:
maps not connected to /etc/auto.master:
""").strip()
def test_0_automountlocation_add(self):
"""
Test adding a location.
"""
res = api.Command['automountlocation_add'](self.locname, raw=True)['result']
assert res
assert_attr_equal(res, 'cn', self.locname)
def test_1_automountmap_add_indirect(self):
"""
Test adding an indirect map.
"""
res = api.Command['automountmap_add_indirect'](self.locname, self.mapname, **self.map_kw)['result']
assert res
assert_attr_equal(res, 'automountmapname', self.mapname)
@raises(errors.DuplicateEntry)
def test_1a_automountmap_add_indirect(self):
"""
Test adding a duplicate indirect map.
"""
api.Command['automountmap_add_indirect'](self.locname, self.mapname, **self.map_kw)
def test_2_automountmap_show(self):
"""
Test the `xmlrpc.automountmap_show` method.
"""
res = api.Command['automountmap_show'](self.locname, self.mapname, raw=True)['result']
assert res
assert_attr_equal(res, 'automountmapname', self.mapname)
def test_2a_automountmap_tofiles(self):
"""Test the `automountmap_tofiles` command"""
self.check_tofiles()
def test_3_automountkey_del(self):
"""
Remove the indirect key /home.
"""
res = api.Command['automountkey_del'](self.locname, self.parentmap, **self.key_kw)['result']
assert res
assert not res['failed']
# Verify that it is gone
with assert_raises(errors.NotFound):
api.Command['automountkey_show'](self.locname, self.parentmap, **self.key_kw)
def test_4_automountmap_del(self):
"""
Remove the indirect map for auto.home.
"""
res = api.Command['automountmap_del'](self.locname, self.mapname)['result']
assert res
assert not res['failed']
# Verify that it is gone
with assert_raises(errors.NotFound):
api.Command['automountmap_show'](self.locname, self.mapname)
def test_5_automountlocation_del(self):
"""
Remove the location.
"""
res = api.Command['automountlocation_del'](self.locname)['result']
assert res
assert not res['failed']
        # Verify that it is gone
with assert_raises(errors.NotFound):
api.Command['automountlocation_show'](self.locname)
def test_z_import_roundtrip(self):
"""Check automountlocation_tofiles/automountlocation_import roundtrip
"""
self.check_import_roundtrip()
@pytest.mark.tier1
class test_automount_indirect_no_parent(AutomountTest):
"""
    Test the `automount` plugin indirect map functionality when no parent map is specified.
"""
mapname = u'auto.home'
keyname = u'/home'
mapname2 = u'auto.direct2'
keyname2 = u'direct2'
parentmap = u'auto.master'
map_kw = {'key': keyname, 'raw': True}
map_kw2 = {'key': keyname2, 'raw': True}
tofiles_output = textwrap.dedent(u"""
/etc/auto.master:
/-\t/etc/auto.direct
/home\t/etc/auto.home
---------------------------
/etc/auto.direct:
---------------------------
/etc/auto.home:
direct2\t-fstype=autofs ldap:auto.direct2
maps not connected to /etc/auto.master:
---------------------------
/etc/auto.direct2:
""").strip()
def test_0_automountlocation_add(self):
"""
Test adding a location.
"""
res = api.Command['automountlocation_add'](self.locname, raw=True)['result']
assert res
assert_attr_equal(res, 'cn', self.locname)
def test_1_automountmap_add_indirect(self):
"""
Test adding an indirect map with default parent.
"""
res = api.Command['automountmap_add_indirect'](self.locname, self.mapname, **self.map_kw)['result']
assert res
assert_attr_equal(res, 'automountmapname', self.mapname)
def test_2_automountkey_show(self):
"""
Test the `xmlrpc.automountkey_show` method with default parent.
"""
showkey_kw = {'automountkey': self.keyname, 'automountinformation': self.mapname, 'raw': True}
res = api.Command['automountkey_show'](self.locname, self.parentmap, **showkey_kw)['result']
assert res
assert_attr_equal(res, 'automountkey', self.keyname)
def test_2a_automountmap_add_indirect(self):
"""
        Test adding an indirect map with a non-default parent.
"""
res = api.Command['automountmap_add_indirect'](self.locname,
u'auto.direct2', parentmap=self.mapname, **self.map_kw2)['result']
assert res
assert_attr_equal(res, 'automountmapname', self.mapname2)
def test_2b_automountmap_tofiles(self):
"""Test the `automountmap_tofiles` command"""
self.check_tofiles()
def test_3_automountkey_del(self):
"""
Remove the indirect key /home.
"""
        delkey_kw = {'automountkey': self.keyname, 'automountinformation': self.mapname}
res = api.Command['automountkey_del'](self.locname, self.parentmap, **delkey_kw)['result']
assert res
assert not res['failed']
# Verify that it is gone
with assert_raises(errors.NotFound):
api.Command['automountkey_show'](self.locname, self.parentmap, **delkey_kw)
def test_4_automountmap_del(self):
"""
Remove the indirect map for auto.home.
"""
res = api.Command['automountmap_del'](self.locname, self.mapname)['result']
assert res
assert not res['failed']
# Verify that it is gone
with assert_raises(errors.NotFound):
api.Command['automountmap_show'](self.locname, self.mapname)
def test_5_automountlocation_del(self):
"""
Remove the location.
"""
res = api.Command['automountlocation_del'](self.locname)['result']
assert res
assert not res['failed']
        # Verify that it is gone
with assert_raises(errors.NotFound):
api.Command['automountlocation_show'](self.locname)
| ofayans/freeipa | ipatests/test_xmlrpc/test_automount_plugin.py | Python | gpl-3.0 | 21,419 |
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
dependencies = [
('api', '0019_auto_20150216_0813'),
]
operations = [
migrations.AddField(
model_name='event',
name='japanese_t1_points',
field=models.PositiveIntegerField(null=True),
preserve_default=True,
),
migrations.AddField(
model_name='event',
name='japanese_t1_rank',
field=models.PositiveIntegerField(null=True),
preserve_default=True,
),
migrations.AddField(
model_name='event',
name='japanese_t2_points',
field=models.PositiveIntegerField(null=True),
preserve_default=True,
),
migrations.AddField(
model_name='event',
name='japanese_t2_rank',
field=models.PositiveIntegerField(null=True),
preserve_default=True,
),
migrations.AddField(
model_name='event',
name='note',
field=models.CharField(max_length=200, null=True, blank=True),
preserve_default=True,
),
migrations.AlterField(
model_name='userpreferences',
name='accept_friend_requests',
field=models.BooleanField(default=True, verbose_name='Accept friend requests'),
preserve_default=True,
),
migrations.AlterField(
model_name='userpreferences',
name='best_girl',
field=models.CharField(max_length=200, null=True, verbose_name='Best girl', blank=True),
preserve_default=True,
),
migrations.AlterField(
model_name='userpreferences',
name='color',
field=models.CharField(blank=True, max_length=6, null=True, verbose_name='Color', choices=[(b'Smile', 'Smile'), (b'Pure', 'Pure'), (b'Cool', 'Cool'), (b'All', 'All')]),
preserve_default=True,
),
migrations.AlterField(
model_name='userpreferences',
name='description',
field=models.TextField(help_text='Write whatever you want. You can add formatting and links using Markdown.', null=True, verbose_name='Description'),
preserve_default=True,
),
migrations.AlterField(
model_name='userpreferences',
name='location',
field=models.CharField(help_text='The city you live in.', max_length=200, null=True, verbose_name='Location', blank=True),
preserve_default=True,
),
migrations.AlterField(
model_name='userpreferences',
name='private',
field=models.BooleanField(default=False, help_text='If your profile is private, people will only see your center.', verbose_name='Private Profile'),
preserve_default=True,
),
]
| SchoolIdolTomodachi/SchoolIdolAPI | api/migrations/0020_auto_20150217_1849.py | Python | apache-2.0 | 2,998 |
import gtk
class AcceleratorGroup:
"""Accelerator group provides customizable keyboard shortcuts for plugins
with automatically generated configuration."""
def __init__(self, application):
self._application = application
self._manager = self._application.accelerator_manager
self._active = False
self._name = None
self._title = None
self._window = None
self._menus = []
# accelerator containers
self._methods = {}
self._primary = {}
self._secondary = {}
self._paths = {}
self._disabled = []
# method name cache
self._method_names = {}
# accelerator implementation
self._accel_group = None
def _register_group(self):
"""Register group with manager"""
self._manager.register_group(self)
def _create_group(self):
"""Create group and connect accelerators"""
self._accel_group = gtk.AccelGroup()
# create accelerators
self._create_accelerators()
self._create_accelerators(primary=False)
# connect paths
self._connect_paths()
# register group with manager
self._register_group()
def _connect_paths(self):
"""Connect accelerator paths with callbacks"""
for method_name, path in self._paths.items():
callback = self._methods[method_name]['callback']
self._accel_group.connect_by_path(path, callback)
def _create_accelerators(self, primary=True):
"""Create accelerators from specified list"""
accelerator_list = (self._secondary, self._primary)[primary]
# connect all methods in list
for method_name in self._methods.keys():
if method_name in self._disabled:
continue # skip if method is disabled
# try to get saved key combination from manager
accelerator = self._manager.get_accelerator(self._name, method_name, primary)
# if we don't have saved key combination, use default
if accelerator is None and method_name in accelerator_list:
accelerator = accelerator_list[method_name]
# finally connect accelerator to specified method
if accelerator is not None and accelerator[0] > 0:
keyval = accelerator[0]
modifier = accelerator[1]
# create method name cache based on key combination
label = gtk.accelerator_get_label(keyval, modifier)
self._method_names[label] = method_name
# connect accelerator
self._accel_group.connect_group(keyval, modifier, 0, self._handle_activate)
def _handle_activate(self, group, widget, keyval, modifier):
"""Handle accelerator activation"""
label = gtk.accelerator_get_label(keyval, modifier)
name = self._method_names[label]
data = self._methods[name]['data']
callback_method = self._methods[name]['callback']
# call user method
if data is None:
result = callback_method(widget, label)
else:
result = callback_method(widget, data)
return result
def activate(self, window):
"""Activate accelerator group for specified window"""
if not self._active:
self._window = window
# connect accelerators if they are not already
if self._accel_group is None:
self._create_group()
# add accelerator group to specified window
self._window.add_accel_group(self._accel_group)
# activate menus
for menu in self._menus:
menu.set_accel_group(self._accel_group)
self._active = True
def deactivate(self):
"""Deactivate accelerator group"""
if self._active:
# remove accelerator group from window
self._window.remove_accel_group(self._accel_group)
# deactivate menus
for menu in self._menus:
menu.set_accel_group(None)
self._active = False
def invalidate(self):
"""Force reloading accelerators"""
pass
def set_name(self, name):
"""Set accelerator group name"""
self._name = name.replace(' ', '_')
def get_name(self):
"""Get group name"""
return self._name
def set_title(self, title):
"""Set accelerator group title"""
self._title = title
def add_method(self, name, title, callback, data=None):
"""Add new method to group"""
self._methods[name] = {
'title': title,
'callback': callback,
'data': data
}
def add_menu(self, menu):
"""Add menu to be connected with accelerator group on activate"""
self._menus.append(menu)
def set_accelerator(self, name, keyval, modifier):
"""Set primary accelerator for specified method name"""
self._primary[name] = (keyval, modifier)
def set_alt_accelerator(self, name, keyval, modifier):
"""Set secondary accelerator for specified method name"""
self._secondary[name] = (keyval, modifier)
def set_path(self, name, path):
"""Set activation path for specified method name"""
self._paths[name] = path
def get_accelerator(self, name, primary=True):
"""Get accelerator for specified method"""
result = None
group = (self._secondary, self._primary)[primary]
if name in group:
result = group[name]
return result
def get_method_title(self, name):
"""Get title for specified accelerator"""
result = None
if name in self._methods:
result = self._methods[name]['title']
return result
def reset_accelerator(self, name):
"""Resets accelerator shortcuts"""
if name in self._primary:
del self._primary[name]
if name in self._secondary:
del self._secondary[name]
# remove any cache
		for label in list(self._method_names):
if self._method_names[label] == name:
del self._method_names[label]
def disable_accelerator(self, name):
"""Disable specified accelerator"""
self._disabled.append(name)
def trigger_accelerator(self, keyval, modifier):
"""Manually trigger accelerator"""
result = False
modifier = modifier & gtk.accelerator_get_default_mod_mask() # filter out unneeded mods
label = gtk.accelerator_get_label(keyval, modifier)
# trigger accelerator only if we have method connected
if label in self._method_names:
result = self._handle_activate(self._accel_group, self._window, keyval, modifier)
return result
def get_collisions(self, keyval, modifier):
"""Get list of colliding accelerators"""
result = []
# check for collisions in primary accelerators
for name, data in self._primary.items():
if cmp((keyval, modifier), data) == 0:
result.append((self, name, True))
# check for collisions in secondary accelerators
for name, data in self._secondary.items():
if cmp((keyval, modifier), data) == 0:
result.append((self, name, False))
return result
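# Typical usage sketch (hypothetical plugin code, not part of this module;
# names such as 'copy_selection' and self._copy_files are illustrative):
#
#   group = AcceleratorGroup(application)
#   group.set_name('file_list')
#   group.set_title('File List')
#   group.add_method('copy_selection', 'Copy selected items', self._copy_files)
#   group.set_accelerator('copy_selection', gtk.keysyms.c, gtk.gdk.CONTROL_MASK)
#   group.activate(main_window)
#
# Once activated, the application's accelerator manager (queried through
# get_accelerator() in _create_accelerators) can override these defaults with
# user-configured key combinations.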
| Hammer2900/SunflowerX | application/accelerator_group.py | Python | gpl-3.0 | 7,451 |
# -*- coding: utf-8 -*-
"""ANTS Apply Transforms interface
Change directory to provide relative paths for doctests
>>> import os
>>> filepath = os.path.dirname( os.path.realpath( __file__ ) )
>>> datadir = os.path.realpath(os.path.join(filepath, '../../testing/data'))
>>> os.chdir(datadir)
"""
from __future__ import print_function, division, unicode_literals, absolute_import
import os
from ..base import TraitedSpec, File, traits, InputMultiPath
from .base import ANTSCommand, ANTSCommandInputSpec
class AverageAffineTransformInputSpec(ANTSCommandInputSpec):
dimension = traits.Enum(3, 2, argstr='%d', usedefault=False, mandatory=True,
position=0, desc='image dimension (2 or 3)')
output_affine_transform = File(argstr='%s', mandatory=True, position=1,
desc='Outputfname.txt: the name of the resulting transform.')
transforms = InputMultiPath(File(exists=True), argstr='%s', mandatory=True,
position=3, desc='transforms to average')
class AverageAffineTransformOutputSpec(TraitedSpec):
affine_transform = File(exists=True, desc='average transform file')
class AverageAffineTransform(ANTSCommand):
"""
Examples
--------
>>> from nipype.interfaces.ants import AverageAffineTransform
>>> avg = AverageAffineTransform()
>>> avg.inputs.dimension = 3
>>> avg.inputs.transforms = ['trans.mat', 'func_to_struct.mat']
>>> avg.inputs.output_affine_transform = 'MYtemplatewarp.mat'
>>> avg.cmdline # doctest: +ALLOW_UNICODE
'AverageAffineTransform 3 MYtemplatewarp.mat trans.mat func_to_struct.mat'
"""
_cmd = 'AverageAffineTransform'
input_spec = AverageAffineTransformInputSpec
output_spec = AverageAffineTransformOutputSpec
def _format_arg(self, opt, spec, val):
return super(AverageAffineTransform, self)._format_arg(opt, spec, val)
def _list_outputs(self):
outputs = self._outputs().get()
outputs['affine_transform'] = os.path.abspath(
self.inputs.output_affine_transform)
return outputs
class AverageImagesInputSpec(ANTSCommandInputSpec):
dimension = traits.Enum(3, 2, argstr='%d', mandatory=True,
position=0, desc='image dimension (2 or 3)')
output_average_image = File(
"average.nii", argstr='%s', position=1, usedefault=True, hash_files=False,
desc='the name of the resulting image.')
normalize = traits.Bool(
argstr="%d", mandatory=True, position=2,
desc='Normalize: if true, the 2nd image is divided by its mean. '
'This will select the largest image to average into.')
images = InputMultiPath(
File(exists=True), argstr='%s', mandatory=True, position=3,
desc='image to apply transformation to (generally a coregistered functional)')
class AverageImagesOutputSpec(TraitedSpec):
output_average_image = File(exists=True, desc='average image file')
class AverageImages(ANTSCommand):
"""
Examples
--------
>>> from nipype.interfaces.ants import AverageImages
>>> avg = AverageImages()
>>> avg.inputs.dimension = 3
>>> avg.inputs.output_average_image = "average.nii.gz"
>>> avg.inputs.normalize = True
>>> avg.inputs.images = ['rc1s1.nii', 'rc1s1.nii']
>>> avg.cmdline # doctest: +ALLOW_UNICODE
'AverageImages 3 average.nii.gz 1 rc1s1.nii rc1s1.nii'
"""
_cmd = 'AverageImages'
input_spec = AverageImagesInputSpec
output_spec = AverageImagesOutputSpec
def _format_arg(self, opt, spec, val):
return super(AverageImages, self)._format_arg(opt, spec, val)
def _list_outputs(self):
outputs = self._outputs().get()
outputs['output_average_image'] = os.path.realpath(
self.inputs.output_average_image)
return outputs
class MultiplyImagesInputSpec(ANTSCommandInputSpec):
dimension = traits.Enum(3, 2, argstr='%d', usedefault=False, mandatory=True, position=0,
desc='image dimension (2 or 3)')
first_input = File(argstr='%s', exists=True,
mandatory=True, position=1, desc='image 1')
second_input = traits.Either(
File(exists=True), traits.Float, argstr='%s', mandatory=True, position=2,
desc='image 2 or multiplication weight')
output_product_image = File(argstr='%s', mandatory=True, position=3,
desc='Outputfname.nii.gz: the name of the resulting image.')
class MultiplyImagesOutputSpec(TraitedSpec):
output_product_image = File(exists=True, desc='average image file')
class MultiplyImages(ANTSCommand):
"""
Examples
--------
>>> from nipype.interfaces.ants import MultiplyImages
>>> test = MultiplyImages()
>>> test.inputs.dimension = 3
>>> test.inputs.first_input = 'moving2.nii'
>>> test.inputs.second_input = 0.25
>>> test.inputs.output_product_image = "out.nii"
>>> test.cmdline # doctest: +ALLOW_UNICODE
'MultiplyImages 3 moving2.nii 0.25 out.nii'
"""
_cmd = 'MultiplyImages'
input_spec = MultiplyImagesInputSpec
output_spec = MultiplyImagesOutputSpec
def _format_arg(self, opt, spec, val):
return super(MultiplyImages, self)._format_arg(opt, spec, val)
def _list_outputs(self):
outputs = self._outputs().get()
outputs['output_product_image'] = os.path.abspath(
self.inputs.output_product_image)
return outputs
class CreateJacobianDeterminantImageInputSpec(ANTSCommandInputSpec):
imageDimension = traits.Enum(3, 2, argstr='%d', usedefault=False, mandatory=True,
position=0, desc='image dimension (2 or 3)')
deformationField = File(argstr='%s', exists=True, mandatory=True,
position=1, desc='deformation transformation file')
outputImage = File(argstr='%s', mandatory=True,
position=2,
desc='output filename')
doLogJacobian = traits.Enum(0, 1, argstr='%d', position=3,
desc='return the log jacobian')
useGeometric = traits.Enum(0, 1, argstr='%d', position=4,
desc='return the geometric jacobian')
class CreateJacobianDeterminantImageOutputSpec(TraitedSpec):
jacobian_image = File(exists=True, desc='jacobian image')
class CreateJacobianDeterminantImage(ANTSCommand):
"""
Examples
--------
>>> from nipype.interfaces.ants import CreateJacobianDeterminantImage
>>> jacobian = CreateJacobianDeterminantImage()
>>> jacobian.inputs.imageDimension = 3
>>> jacobian.inputs.deformationField = 'ants_Warp.nii.gz'
>>> jacobian.inputs.outputImage = 'out_name.nii.gz'
>>> jacobian.cmdline # doctest: +ALLOW_UNICODE
'CreateJacobianDeterminantImage 3 ants_Warp.nii.gz out_name.nii.gz'
"""
_cmd = 'CreateJacobianDeterminantImage'
input_spec = CreateJacobianDeterminantImageInputSpec
output_spec = CreateJacobianDeterminantImageOutputSpec
def _format_arg(self, opt, spec, val):
return super(CreateJacobianDeterminantImage, self)._format_arg(opt, spec, val)
def _list_outputs(self):
outputs = self._outputs().get()
outputs['jacobian_image'] = os.path.abspath(
self.inputs.outputImage)
return outputs
class AffineInitializerInputSpec(ANTSCommandInputSpec):
dimension = traits.Enum(3, 2, usedefault=True, position=0, argstr='%s',
desc='dimension')
fixed_image = File(exists=True, mandatory=True, position=1, argstr='%s',
desc='reference image')
moving_image = File(exists=True, mandatory=True, position=2, argstr='%s',
desc='moving image')
out_file = File('transform.mat', usedefault=True, position=3, argstr='%s',
desc='output transform file')
# Defaults in antsBrainExtraction.sh -> 15 0.1 0 10
search_factor = traits.Float(15.0, usedefault=True, position=4, argstr='%f',
desc='increments (degrees) for affine search')
radian_fraction = traits.Range(0.0, 1.0, value=0.1, usedefault=True, position=5,
argstr='%f', desc='search this arc +/- principal axes')
principal_axes = traits.Bool(
False, usedefault=True, position=6, argstr='%d',
desc='whether the rotation is searched around an initial principal axis alignment.')
local_search = traits.Int(
10, usedefault=True, position=7, argstr='%d',
desc=' determines if a local optimization is run at each search point for the set '
'number of iterations')
class AffineInitializerOutputSpec(TraitedSpec):
out_file = File(desc='output transform file')
class AffineInitializer(ANTSCommand):
"""
Initialize an affine transform (as in antsBrainExtraction.sh)
>>> from nipype.interfaces.ants import AffineInitializer
>>> init = AffineInitializer()
>>> init.inputs.fixed_image = 'fixed1.nii'
>>> init.inputs.moving_image = 'moving1.nii'
>>> init.cmdline # doctest: +ALLOW_UNICODE
'antsAffineInitializer 3 fixed1.nii moving1.nii transform.mat 15.000000 0.100000 0 10'
"""
_cmd = 'antsAffineInitializer'
input_spec = AffineInitializerInputSpec
output_spec = AffineInitializerOutputSpec
def _list_outputs(self):
return {'out_file': os.path.abspath(self.inputs.out_file)}
class ComposeMultiTransformInputSpec(ANTSCommandInputSpec):
dimension = traits.Enum(3, 2, argstr='%d', usedefault=True, position=0,
desc='image dimension (2 or 3)')
output_transform = File(argstr='%s', position=1, name_source=['transforms'],
name_template='%s_composed', keep_ext=True,
desc='the name of the resulting transform.')
reference_image = File(argstr='%s', position=2,
desc='Reference image (only necessary when output is warpfield)')
transforms = InputMultiPath(File(exists=True), argstr='%s', mandatory=True,
position=3, desc='transforms to average')
class ComposeMultiTransformOutputSpec(TraitedSpec):
output_transform = File(exists=True, desc='Composed transform file')
class ComposeMultiTransform(ANTSCommand):
"""
Take a set of transformations and convert them to a single transformation matrix/warpfield.
Examples
--------
>>> from nipype.interfaces.ants import ComposeMultiTransform
>>> compose_transform = ComposeMultiTransform()
>>> compose_transform.inputs.dimension = 3
>>> compose_transform.inputs.transforms = ['struct_to_template.mat', 'func_to_struct.mat']
>>> compose_transform.cmdline # doctest: +ALLOW_UNICODE
'ComposeMultiTransform 3 struct_to_template_composed struct_to_template.mat func_to_struct.mat'
"""
_cmd = 'ComposeMultiTransform'
input_spec = ComposeMultiTransformInputSpec
output_spec = ComposeMultiTransformOutputSpec
| mick-d/nipype | nipype/interfaces/ants/utils.py | Python | bsd-3-clause | 11,113 |
# Copyright (c) 2013 OpenStack Foundation
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from debtcollector import moves
from neutron_lib import constants as n_const
from oslo_db import exception as db_exc
from oslo_log import log
from oslo_utils import uuidutils
import six
from sqlalchemy import or_
from sqlalchemy.orm import exc
from neutron._i18n import _, _LE
from neutron.callbacks import events
from neutron.callbacks import registry
from neutron.callbacks import resources
from neutron.db.models import securitygroup as sg_models
from neutron.db import models_v2
from neutron.db import segments_db
from neutron.extensions import portbindings
from neutron import manager
from neutron.plugins.ml2 import models
from neutron.services.segments import exceptions as seg_exc
LOG = log.getLogger(__name__)
# limit the number of port OR LIKE statements in one query
MAX_PORTS_PER_QUERY = 500
# The API methods from segments_db
add_network_segment = moves.moved_function(
segments_db.add_network_segment, 'add_network_segment', __name__,
version='Newton', removal_version='Ocata')
get_network_segments = moves.moved_function(
segments_db.get_network_segments, 'get_network_segments', __name__,
version='Newton', removal_version='Ocata')
get_networks_segments = moves.moved_function(
segments_db.get_networks_segments, 'get_networks_segments', __name__,
version='Newton', removal_version='Ocata')
get_segment_by_id = moves.moved_function(
segments_db.get_segment_by_id, 'get_segment_by_id', __name__,
version='Newton', removal_version='Ocata')
get_dynamic_segment = moves.moved_function(
segments_db.get_dynamic_segment, 'get_dynamic_segment', __name__,
version='Newton', removal_version='Ocata')
delete_network_segment = moves.moved_function(
segments_db.delete_network_segment, 'delete_network_segment', __name__,
version='Newton', removal_version='Ocata')
def add_port_binding(session, port_id):
with session.begin(subtransactions=True):
record = models.PortBinding(
port_id=port_id,
vif_type=portbindings.VIF_TYPE_UNBOUND)
session.add(record)
return record
def get_locked_port_and_binding(session, port_id):
"""Get port and port binding records for update within transaction."""
try:
# REVISIT(rkukura): We need the Port and PortBinding records
# to both be added to the session and locked for update. A
# single joined query should work, but the combination of left
# outer joins and postgresql doesn't seem to work.
port = (session.query(models_v2.Port).
enable_eagerloads(False).
filter_by(id=port_id).
with_lockmode('update').
one())
binding = (session.query(models.PortBinding).
enable_eagerloads(False).
filter_by(port_id=port_id).
with_lockmode('update').
one())
return port, binding
except exc.NoResultFound:
return None, None
def set_binding_levels(session, levels):
if levels:
for level in levels:
session.add(level)
LOG.debug("For port %(port_id)s, host %(host)s, "
"set binding levels %(levels)s",
{'port_id': levels[0].port_id,
'host': levels[0].host,
'levels': levels})
else:
LOG.debug("Attempted to set empty binding levels")
def get_binding_levels(session, port_id, host):
if host:
result = (session.query(models.PortBindingLevel).
filter_by(port_id=port_id, host=host).
order_by(models.PortBindingLevel.level).
all())
LOG.debug("For port %(port_id)s, host %(host)s, "
"got binding levels %(levels)s",
{'port_id': port_id,
'host': host,
'levels': result})
return result
def clear_binding_levels(session, port_id, host):
if host:
(session.query(models.PortBindingLevel).
filter_by(port_id=port_id, host=host).
delete())
LOG.debug("For port %(port_id)s, host %(host)s, "
"cleared binding levels",
{'port_id': port_id,
'host': host})
def ensure_distributed_port_binding(session, port_id, host, router_id=None):
record = (session.query(models.DistributedPortBinding).
filter_by(port_id=port_id, host=host).first())
if record:
return record
try:
with session.begin(subtransactions=True):
record = models.DistributedPortBinding(
port_id=port_id,
host=host,
router_id=router_id,
vif_type=portbindings.VIF_TYPE_UNBOUND,
vnic_type=portbindings.VNIC_NORMAL,
status=n_const.PORT_STATUS_DOWN)
session.add(record)
return record
except db_exc.DBDuplicateEntry:
LOG.debug("Distributed Port %s already bound", port_id)
return (session.query(models.DistributedPortBinding).
filter_by(port_id=port_id, host=host).one())
def delete_distributed_port_binding_if_stale(session, binding):
if not binding.router_id and binding.status == n_const.PORT_STATUS_DOWN:
with session.begin(subtransactions=True):
LOG.debug("Distributed port: Deleting binding %s", binding)
session.delete(binding)
def get_port(session, port_id):
"""Get port record for update within transaction."""
with session.begin(subtransactions=True):
try:
record = (session.query(models_v2.Port).
enable_eagerloads(False).
filter(models_v2.Port.id.startswith(port_id)).
one())
return record
except exc.NoResultFound:
return
except exc.MultipleResultsFound:
LOG.error(_LE("Multiple ports have port_id starting with %s"),
port_id)
return
def get_port_from_device_mac(context, device_mac):
LOG.debug("get_port_from_device_mac() called for mac %s", device_mac)
qry = context.session.query(models_v2.Port).filter_by(
mac_address=device_mac)
return qry.first()
def get_ports_and_sgs(context, port_ids):
"""Get ports from database with security group info."""
# break large queries into smaller parts
if len(port_ids) > MAX_PORTS_PER_QUERY:
LOG.debug("Number of ports %(pcount)s exceeds the maximum per "
"query %(maxp)s. Partitioning queries.",
{'pcount': len(port_ids), 'maxp': MAX_PORTS_PER_QUERY})
return (get_ports_and_sgs(context, port_ids[:MAX_PORTS_PER_QUERY]) +
get_ports_and_sgs(context, port_ids[MAX_PORTS_PER_QUERY:]))
LOG.debug("get_ports_and_sgs() called for port_ids %s", port_ids)
if not port_ids:
# if port_ids is empty, avoid querying to DB to ask it for nothing
return []
ports_to_sg_ids = get_sg_ids_grouped_by_port(context, port_ids)
return [make_port_dict_with_security_groups(port, sec_groups)
for port, sec_groups in six.iteritems(ports_to_sg_ids)]
def get_sg_ids_grouped_by_port(context, port_ids):
sg_ids_grouped_by_port = {}
sg_binding_port = sg_models.SecurityGroupPortBinding.port_id
with context.session.begin(subtransactions=True):
# partial UUIDs must be individually matched with startswith.
# full UUIDs may be matched directly in an IN statement
partial_uuids = set(port_id for port_id in port_ids
if not uuidutils.is_uuid_like(port_id))
full_uuids = set(port_ids) - partial_uuids
or_criteria = [models_v2.Port.id.startswith(port_id)
for port_id in partial_uuids]
if full_uuids:
or_criteria.append(models_v2.Port.id.in_(full_uuids))
query = context.session.query(
models_v2.Port,
sg_models.SecurityGroupPortBinding.security_group_id)
query = query.outerjoin(sg_models.SecurityGroupPortBinding,
models_v2.Port.id == sg_binding_port)
query = query.filter(or_(*or_criteria))
for port, sg_id in query:
if port not in sg_ids_grouped_by_port:
sg_ids_grouped_by_port[port] = []
if sg_id:
sg_ids_grouped_by_port[port].append(sg_id)
return sg_ids_grouped_by_port
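# Note: the mapping returned by get_sg_ids_grouped_by_port() is keyed by Port
# model instances, each value being the (possibly empty) list of security group
# ids bound to that port -- e.g. {<Port>: [u'<sg-uuid>'], ...} (shape shown for
# illustration only).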
def make_port_dict_with_security_groups(port, sec_groups):
plugin = manager.NeutronManager.get_plugin()
port_dict = plugin._make_port_dict(port)
port_dict['security_groups'] = sec_groups
port_dict['security_group_rules'] = []
port_dict['security_group_source_groups'] = []
port_dict['fixed_ips'] = [ip['ip_address']
for ip in port['fixed_ips']]
return port_dict
def get_port_binding_host(session, port_id):
try:
with session.begin(subtransactions=True):
query = (session.query(models.PortBinding).
filter(models.PortBinding.port_id.startswith(port_id)).
one())
except exc.NoResultFound:
LOG.debug("No binding found for port %(port_id)s",
{'port_id': port_id})
return
except exc.MultipleResultsFound:
LOG.error(_LE("Multiple ports have port_id starting with %s"),
port_id)
return
return query.host
def generate_distributed_port_status(session, port_id):
# an OR'ed value of status assigned to parent port from the
# distributedportbinding bucket
query = session.query(models.DistributedPortBinding)
final_status = n_const.PORT_STATUS_BUILD
for bind in query.filter(models.DistributedPortBinding.port_id == port_id):
if bind.status == n_const.PORT_STATUS_ACTIVE:
return bind.status
elif bind.status == n_const.PORT_STATUS_DOWN:
final_status = bind.status
return final_status
def get_distributed_port_binding_by_host(session, port_id, host):
with session.begin(subtransactions=True):
binding = (session.query(models.DistributedPortBinding).
filter(models.DistributedPortBinding.port_id.startswith(port_id),
models.DistributedPortBinding.host == host).first())
if not binding:
LOG.debug("No binding for distributed port %(port_id)s with host "
"%(host)s", {'port_id': port_id, 'host': host})
return binding
def get_distributed_port_bindings(session, port_id):
with session.begin(subtransactions=True):
bindings = (session.query(models.DistributedPortBinding).
filter(models.DistributedPortBinding.port_id.startswith(
port_id)).all())
if not bindings:
LOG.debug("No bindings for distributed port %s", port_id)
return bindings
def is_dhcp_active_on_any_subnet(context, subnet_ids):
if not subnet_ids:
return False
return bool(context.session.query(models_v2.Subnet).
enable_eagerloads(False).filter_by(enable_dhcp=True).
filter(models_v2.Subnet.id.in_(subnet_ids)).count())
def _prevent_segment_delete_with_port_bound(resource, event, trigger,
context, segment):
"""Raise exception if there are any ports bound with segment_id."""
segment_id = segment['id']
query = context.session.query(models_v2.Port)
query = query.join(
models.PortBindingLevel,
models.PortBindingLevel.port_id == models_v2.Port.id)
query = query.filter(models.PortBindingLevel.segment_id == segment_id)
port_ids = [p.id for p in query]
# There are still some ports in the segment, segment should not be deleted
# TODO(xiaohhui): Should we delete the dhcp port automatically here?
if port_ids:
reason = _("The segment is still bound with port(s) "
"%s") % ", ".join(port_ids)
raise seg_exc.SegmentInUse(segment_id=segment_id, reason=reason)
def subscribe():
registry.subscribe(_prevent_segment_delete_with_port_bound,
resources.SEGMENT,
events.BEFORE_DELETE)
subscribe()
| sebrandon1/neutron | neutron/plugins/ml2/db.py | Python | apache-2.0 | 12,959 |
"""
Extra functions for build-in datasets
"""
import torchvision.transforms as transforms
def build_transforms(normalize=True, center_crop=None, image_size=None,
random_crop=None, flip=None, random_resize_crop=None,
random_sized_crop=None, use_sobel=False):
"""
Args:
normalize:
center_crop:
image_size:
random_crop:
flip:
random_resize_crop:
random_sized_crop:
use_sobel:
Returns:
"""
transform_ = []
if random_resize_crop:
transform_.append(transforms.RandomResizedCrop(random_resize_crop))
elif random_crop:
transform_.append(transforms.RandomCrop(random_crop))
elif center_crop:
transform_.append(transforms.CenterCrop(center_crop))
elif random_sized_crop:
transform_.append(transforms.RandomSizedCrop(random_sized_crop))
if image_size:
if isinstance(image_size, int):
image_size = (image_size, image_size)
transform_.append(transforms.Resize(image_size))
if flip:
transform_.append(transforms.RandomHorizontalFlip())
transform_.append(transforms.ToTensor())
if normalize:
if isinstance(normalize, transforms.Normalize):
transform_.append(normalize)
else:
transform_.append(transforms.Normalize(*normalize))
transform = transforms.Compose(transform_)
return transform
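# Example usage (sketch; the dataset and normalization statistics below are
# illustrative, not values mandated by this module):
#
#   transform = build_transforms(
#       normalize=((0.5, 0.5, 0.5), (0.5, 0.5, 0.5)),
#       image_size=64,
#       flip=True,
#   )
#   dataset = torchvision.datasets.CIFAR10(root, download=True, transform=transform)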
| rdevon/cortex | cortex/built_ins/datasets/utils.py | Python | bsd-3-clause | 1,457 |
#!/usr/bin/python
#
# Copyright (C) 2007 Saket Sathe
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
#
# $LastChangedBy: xaeroman $
# $LastChangedDate: 2007-07-31 17:42:11 +0200 (Tue, 31 Jul 2007) $
# $LastChangedRevision: 56 $
#
#
""" Module for handling common system related things (for ex. logging)
@namespace System
\defgroup System System
"""
__all__ = ['PyGelLogging']
| xaeroman/pygrel | pygel/System/__init__.py | Python | apache-2.0 | 907 |
from __future__ import absolute_import, print_function
import logging
from datetime import time
from managers.models import Manager
from workshift.models import Semester, WorkshiftPool, WorkshiftType, \
RegularWorkshift
from workshift.utils import get_year_season, get_semester_start_end, \
make_manager_workshifts
# Items of form (title, description, quick_tips, rateable)
WORKSHIFT_TYPES = [
("Kitchen / Dinning Room Clean",
"""<ol>
<li>Put away any perishables left out.</li>
<li>clear all dishes from dining room into dishroom.</li>
<li>Throw away yesterday's paper.</li>
<li>Put away your cleaning supplies.<li>
<li>Clean and sanitize all counters and tables with sponge and a spray bottle.</li>
</ol>
""", "", True),
("Food Put Away",
"""<ol>
<li>Put away food.</li>
<li>Place opened food in containers and label containers.</li>
</ol>
""", "", True),
("Pots",
"""<ol>
<li>Wash and sanitize all pots.</li>
<li>Clean up pots area after all pots are washed and sanitized.</li>
<li>Soak any pots that you can't wash in soap water.</li>
<li>Clean out all scraps from the disposals.</li>
<li>Allow pots to air dry.</li>
</ol>
""", "", True),
("Basement / Laundry Room Clean",
"""<ol>
<li>Take all dishes to the dishroom.</li>
<li>Throw away any trash lying around.</li>
<li>Organize the laundry room and maintenance hallway.</li>
<li>Sweep laundry room floor.</li>
<li>Organize free pile by category. Throw away anything that's obviously trash.</li>
<li>Make sure basement doors are closed. These should never be left open.</li>
</ol>
""", "", True),
("Bathroom Clean",
"""<ol>
<li>Clean all sinks, toilets and handles.</li>
<li>Sweep and mop the floors.</li>
<li>Scrub the grout and surfaces in the showers.</li>
<li>Take out all trash, recycling, and compost.</li>
</ol>
""", "", True),
("Bike / Living / Study Room Clean",
"""<ol>
<li>Clear out the rooms of any trash.</li>
<li>Pick up dishes and food and move them to the dish room.</li>
<li>Recycle any cans, bottles, or paper.</li>
</ol>
""", "", True),
("Roofdeck Clean & Top Two Floors",
"""
""", "", True),
("Ramp and Amphitheater Clean", "", "", True),
("Ramp and Gazebo Clean", "", "", True),
("Pantry / Fridge Clean", "", "", True),
("Dishroom Clean", "", "", True),
("Free Pile Clean", "", "", True),
("Fruit Cage / Bread Area Clean", "", "", True),
("Bread Run", "", "", True),
("Brunch", "", "", True),
("Extra bagels", "", "", True),
("Dishes", "", "", True),
("Dairy / Non-perishables Run", "", "", True),
("Farmer's Market Run", "", "", True),
("Hummus", "", "", True),
("Granola", "", "", True),
("Laundry", "", "", True),
("Sweep & Mop", "", "", True),
("Cook", "", "", True),
("IKC", "", "", True),
("Main Entrance / Front Walk Clean", "", "", True),
("Mail Sort / Forward", "", "", True),
("Vacuum", "", "", True),
("Maintenance Assistant", "", "", True),
("CO Member Resource", "", "", False),
("CO WRM", "", "", False),
("CO Health Worker", "", "", False),
("ConComm", "", "", False),
("ConComm AA", "", "", False),
("BSC President", "", "", False),
]
# (type_title, hours, days, count, start, end)
REGULAR_WORKSHIFTS = [
# Morning Clean
("Kitchen / Dinning Room Clean", 1, [0, 1, 2, 3, 4, 5, 6], 1, None, time(11)),
# Afternoon Clean
("Kitchen / Dinning Room Clean", 0.5, [0, 1, 2, 3, 4, 5, 6], 1, time(13), time(15)),
# After Dinner Clean
("Kitchen / Dinning Room Clean", 1, [1, 2, 4, 6], 1, time(20), time(21)),
# Morning Pots
("Pots", 1, [0, 1, 2, 3, 4, 5, 6], 1, None, time(11)),
# Afternoon Pots
("Pots", 1, [0, 2, 4, 5, 6], 2, time(13), time(17)),
("Pots", 1, [1, 3], 1, time(13), time(17)),
# Evening Pots
("Pots", 2, [1, 2, 6], 2, time(20), None),
# Morning Dishes
("Dishes", 1, [0, 1, 2, 3, 4, 5, 6], 1, None, time(11)),
# Early Afternoon Dishes
("Dishes", 1, [0, 1, 2, 3, 4, 5, 6], 1, time(13), time(16)),
# Before Dinner Dishes
("Dishes", 1, [0, 1, 2, 3, 4, 6], 1, time(17), time(19)),
# Evening Dishes
("Dishes", 1, [1, 2, 4, 5, 6], 1, time(20), None),
# Evening Sweep & Mop
("Sweep & Mop", 1.5, [1, 2, 6], 1, time(21), None),
("Main Entrance / Front Walk Clean", 1, [1, 3], 1, None, None),
("Bike / Living / Study Room Clean", 1, [1, 4], 1, None, time(21)),
("Roofdeck Clean & Top Two Floors", 1, [1, 4], 1, None, time(19)),
("Ramp and Amphitheater Clean", 1, [2], 1, None, None),
("Pantry / Fridge Clean", 1, [5], 1, time(20), None),
("Free Pile Clean", 1.5, [2], 1, None, None),
("Dishroom Clean", 1, [3], 1, None, time(22)),
("Vacuum", 2, [1, 6], 1, None, time(22)),
("Food Put Away", 0.5, [0, 3], 1, time(13), time(16)),
# Afternoon Food Put Away
("Food Put Away", 1, [3], 1, time(16), time(19)),
("Fruit Cage / Bread Area Clean", 0.5, [2], 1, None, time(22)),
("Bread Run", 2, [3], 1, None, None),
# ("Dairy / Non-perishables Run", 2, [3], 2, None, None),
("Cook", 3, [0, 1, 2, 3, 4, 6], 3, time(16), time(19)),
# Monday IKC
("IKC", 2, [0], 7, time(20), time(23)),
# Thursday IKC
("IKC", 2, [3], 7, time(20), time(23)),
]
# (type_title, hours, count)
WEEK_LONG = (
("Basement / Laundry Room Clean", 2, 1),
("Laundry", 1, 1),
("CO Member Resource", 5, 1),
("CO WRM", 5, 1),
("CO Health Worker", 5, 1),
("ConComm", 2, 1),
("ConComm AA", 2, 1),
("BSC President", 5, 1),
("Maintenance Assistant", 3, 1),
("Farmer's Market Run", 2, 1),
("Granola", 2, 1),
("Hummus", 2, 1),
("Mail Sort / Forward", 1, 1),
)
# (type_title, hours, days, count, start, end)
HUMOR_WORKSHIFTS = [
("Pots", 2, [4, 5], 2, time(20), time(0)),
("Sweep & Mop", 2, [4, 5], 1, time(20), time(0)),
]
# (type_title, hours, days, count, start, end)
BATHROOM_WORKSHIFTS = [
("Bathroom Clean", 2, [1, 3, 5], 3, None, None),
]
def _get_semester():
# Start the Workshift Semester
year, season = get_year_season()
start_date, end_date = get_semester_start_end(year, season)
try:
semester = Semester.objects.get(current=True)
except Semester.DoesNotExist:
semester, created = Semester.objects.get_or_create(
year=year,
season=season,
defaults=dict(
rate=13.30,
start_date=start_date,
end_date=end_date,
),
)
else:
created = False
if created:
logging.info("Started a new workshift semester")
return semester
def _fill_workshift_types():
# Workshift Types
for title, description, quick_tips, rateable in WORKSHIFT_TYPES:
WorkshiftType.objects.get_or_create(
title=title,
defaults=dict(
description=description,
quick_tips=quick_tips,
rateable=rateable,
),
)
def reset_all_shifts(semester=None, pool=None):
if semester is None:
semester = _get_semester()
shifts = RegularWorkshift.objects.filter(pool__semester=semester)
if pool is not None:
shifts = shifts.filter(pool=pool)
shift_count = shifts.count()
shifts.delete()
make_manager_workshifts(semester)
return shift_count
def fill_regular_shifts(regular_hours=5, semester=None):
if semester is None:
semester = _get_semester()
_fill_workshift_types()
# Regular Weekly Workshift Hours
pool, created = WorkshiftPool.objects.get_or_create(
semester=semester,
is_primary=True,
defaults=dict(hours=regular_hours, any_blown=True),
)
if created:
pool.managers = Manager.objects.filter(workshift_manager=True)
else:
pool.hours = regular_hours
pool.weeks_per_period = 0
pool.save()
_fill_workshift_types()
    num_created = 0  # number of newly created shifts
# Regular Workshifts
for type_title, hours, days, count, start, end in REGULAR_WORKSHIFTS:
wtype = WorkshiftType.objects.get(title=type_title)
for day in days:
shift, created = RegularWorkshift.objects.get_or_create(
workshift_type=wtype,
pool=pool,
day=day,
start_time=start,
end_time=end,
defaults=dict(
count=count,
hours=hours,
),
)
if not created:
shift.hours = hours
shift.count = count
shift.save()
else:
                num_created += 1
for type_title, hours, count in WEEK_LONG:
wtype = WorkshiftType.objects.get(title=type_title)
shift, created = RegularWorkshift.objects.get_or_create(
workshift_type=wtype,
pool=pool,
count=count,
week_long=True,
defaults=dict(
start_time=None,
end_time=None,
hours=hours,
),
)
if not created:
shift.hours = hours
shift.count = count
shift.save()
else:
            num_created += 1
    return num_created
def fill_bathroom_shifts(bathroom_hours=4, semester=None):
if semester is None:
semester = _get_semester()
pool, created = WorkshiftPool.objects.get_or_create(
title="Bathroom Shift",
semester=semester,
defaults=dict(any_blown=True, hours=bathroom_hours, weeks_per_period=0),
)
if not created:
        pool.hours = bathroom_hours
pool.weeks_per_period = 0
pool.save()
_fill_workshift_types()
    num_created = 0  # number of newly created shifts
for type_title, hours, days, count, start, end in BATHROOM_WORKSHIFTS:
wtype = WorkshiftType.objects.get(title=type_title)
for day in days:
shift, created = RegularWorkshift.objects.get_or_create(
workshift_type=wtype,
pool=pool,
day=day,
defaults=dict(
start_time=start,
end_time=end,
hours=hours,
count=count,
),
)
if not created:
shift.hours = hours
shift.count = count
shift.save()
else:
                num_created += 1
    return num_created
def fill_hi_shifts(hi_hours=5, semester=None):
if semester is None:
semester = _get_semester()
# HI Hours
pool, created = WorkshiftPool.objects.get_or_create(
title="Home Improvement",
semester=semester,
defaults=dict(hours=hi_hours, weeks_per_period=0),
)
if created:
pool.managers = Manager.objects.filter(title="Maintenance Manager")
else:
pool.hours = hi_hours
pool.weeks_per_period = 0
pool.save()
count = 0
return count
def fill_social_shifts(social_hours=1, semester=None):
if semester is None:
semester = _get_semester()
# Social Hours
pool, created = WorkshiftPool.objects.get_or_create(
title="Social",
semester=semester,
defaults=dict(hours=social_hours, weeks_per_period=0),
)
if created:
pool.managers = Manager.objects.filter(title="Social Manager")
else:
pool.hours = social_hours
pool.weeks_per_period = 0
pool.save()
count = 0
return count
def fill_humor_shifts(humor_hours=2, semester=None):
if semester is None:
semester = _get_semester()
# Humor Shift
pool, created = WorkshiftPool.objects.get_or_create(
title="Humor Shift",
semester=semester,
defaults=dict(any_blown=True, hours=humor_hours, weeks_per_period=6),
)
if created:
pool.managers = Manager.objects.filter(workshift_manager=True)
else:
pool.hours = humor_hours
pool.weeks_per_period = 0
pool.save()
_fill_workshift_types()
    num_created = 0  # number of newly created shifts
# Humor Workshifts
for type_title, hours, days, count, start, end in HUMOR_WORKSHIFTS:
wtype = WorkshiftType.objects.get(title=type_title)
for day in days:
shift, created = RegularWorkshift.objects.get_or_create(
workshift_type=wtype,
pool=pool,
day=day,
defaults=dict(
start_time=start,
end_time=end,
hours=hours,
count=count,
),
)
if not created:
shift.hours = hours
shift.count = count
shift.save()
else:
                num_created += 1
    return num_created
| knagra/farnsworth | workshift/fill.py | Python | bsd-2-clause | 12,986 |
# This file is part of HDL Checker.
#
# Copyright (c) 2015 - 2019 suoto (Andre Souto)
#
# HDL Checker is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# HDL Checker is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with HDL Checker. If not, see <http://www.gnu.org/licenses/>.
"Exceptions raised by hdl_checker"
class HdlCheckerBaseException(Exception):
"""
    Base class for exceptions raised by hdl_checker
"""
class SanityCheckError(HdlCheckerBaseException):
"""
Exception raised when a builder fails to execute its sanity check
"""
def __init__(self, builder, msg):
self._msg = msg
self.builder = builder
super(SanityCheckError, self).__init__()
def __str__(self): # pragma: no cover
return "Failed to create builder '%s' with message '%s'" % (
self.builder,
self._msg,
)
class UnknownParameterError(HdlCheckerBaseException):
"""
Exception raised when an unknown parameter is found in a
configuration file
"""
def __init__(self, parameter):
self._parameter = parameter
super(UnknownParameterError, self).__init__()
def __str__(self): # pragma: no cover
return "Unknown parameter '%s'" % self._parameter
class UnknownTypeExtension(HdlCheckerBaseException):
"""
Exception thrown when trying to get the file type of an unknown extension.
Known extensions are one of '.vhd', '.vhdl', '.v', '.vh', '.sv', '.svh'
"""
def __init__(self, path):
super(UnknownTypeExtension, self).__init__()
self._path = path
def __str__(self):
return "Couldn't determine file type for path '%s'" % self._path
| suoto/hdlcc | hdl_checker/exceptions.py | Python | gpl-3.0 | 2,134 |
#!/usr/bin/env python
"""
Parse a simple Makefile and execute it.
Doesn't support comments, variables, patterns or anything complex...
Doesn't check the file system; all targets are always built.
"""
import os
def parse_makefile(fname):
"""Parses a makefile.
Input consists of only 2 kinds of lines::
target: dependency1 dependency2...
command1
command2
...
target2: dependency3...
command3
(any leading whitespace means command.)
Returns tuple (rules, commands) where:
- rules is a dict {'target': ['dependency1', ...], ...}
- commands is a dict {'target': ['command1', ...], ...}
"""
rules = {}
commands = {}
for line in open(fname):
if not line[0].isspace():
# dep line (parse, set `target`)
target, rest = line.split(':')
target = target.strip()
rules[target] = rest.split()
commands[target] = []
else:
# command line (`target` was set by last dep line)
commands[target].append(line.strip())
return rules, commands
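# Example (sketch) of what parse_makefile produces: a makefile containing
#     all: hello.o
#         gcc -o hello hello.o
# parses into
#     rules    == {'all': ['hello.o']}
#     commands == {'all': ['gcc -o hello hello.o']}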
def build(target, rules, commands):
"""Compute order in which things should be built for target.
>>> build('a', {'a': ['b', 'c'], 'b': ['d']}, {'a': [], 'b': []})
== Building d -> b ==
== Building b, c -> a ==
"""
if target not in rules:
# Nothing needs to be done.
# (A real make tool would verify that the file exists.)
return
# Build all dependencies.
for dep in rules[target]:
build(dep, rules, commands)
# And then build the target.
# (A real make tool would compare file dates to see if it's needed.)
build_one(target, rules, commands)
def build_one(target, rules, commands):
"""Execute commands for one target."""
print('== Building {0} -> {1} =='.format(', '.join(rules[target]), target))
for command in commands[target]:
print(command)
os.system(command)
import doctest
doctest.testmod()
rules, commands = parse_makefile('make_bonus.txt')
build('all', rules, commands)
| veltzer/demos-python | src/exercises/advanced/build_system/solution2.py | Python | gpl-3.0 | 2,163 |
# -*- coding: utf-8 -*-
from __future__ import with_statement
from cms.admin import forms
from cms.admin.forms import PageUserForm
from cms.api import create_page, create_page_user
from cms.forms.fields import PageSelectFormField, SuperLazyIterator
from cms.forms.utils import (get_site_choices, get_page_choices,
update_site_and_page_choices)
from cms.test_utils.testcases import CMSTestCase
from cms.test_utils.util.context_managers import SettingsOverride
from django.contrib.auth.models import User
from django.contrib.sites.models import Site
from django.core.cache import cache
class Mock_PageSelectFormField(PageSelectFormField):
def __init__(self, required=False):
# That's to have a proper mock object, without having to resort
# to dirtier tricks. We want to test *just* compress here.
self.required = required
self.error_messages = {}
self.error_messages['invalid_page'] = 'Invalid_page'
class FormsTestCase(CMSTestCase):
def setUp(self):
cache.clear()
def test_get_site_choices(self):
result = get_site_choices()
self.assertEquals(result, [])
def test_get_page_choices(self):
result = get_page_choices()
self.assertEquals(result, [('', '----')])
def test_get_site_choices_without_moderator(self):
with SettingsOverride(CMS_MODERATOR=False):
result = get_site_choices()
self.assertEquals(result, [])
def test_get_site_choices_without_moderator_with_superuser(self):
with SettingsOverride(CMS_MODERATOR=False):
# boilerplate (creating a page)
user_super = User(username="super", is_staff=True, is_active=True,
is_superuser=True)
user_super.set_password("super")
user_super.save()
with self.login_user_context(user_super):
create_page("home", "nav_playground.html", "en", created_by=user_super)
# The proper test
result = get_site_choices()
self.assertEquals(result, [(1,'example.com')])
def test_compress_function_raises_when_page_is_none(self):
raised = False
try:
fake_field = Mock_PageSelectFormField(required=True)
            data_list = (0, None)  # (site_id, page_id); site_id is not used
fake_field.compress(data_list)
self.fail('compress function didn\'t raise!')
except forms.ValidationError:
raised = True
self.assertTrue(raised)
def test_compress_function_returns_none_when_not_required(self):
fake_field = Mock_PageSelectFormField(required=False)
        data_list = (0, None)  # (site_id, page_id); site_id is not used
result = fake_field.compress(data_list)
self.assertEquals(result, None)
def test_compress_function_returns_none_when_no_data_list(self):
fake_field = Mock_PageSelectFormField(required=False)
data_list = None
result = fake_field.compress(data_list)
self.assertEquals(result, None)
def test_compress_function_gets_a_page_when_one_exists(self):
# boilerplate (creating a page)
user_super = User(username="super", is_staff=True, is_active=True,
is_superuser=True)
user_super.set_password("super")
user_super.save()
with self.login_user_context(user_super):
home_page = create_page("home", "nav_playground.html", "en", created_by=user_super)
# The actual test
fake_field = Mock_PageSelectFormField()
        data_list = (0, home_page.pk)  # (site_id, page_id); site_id is not used
result = fake_field.compress(data_list)
self.assertEquals(home_page,result)
def test_update_site_and_page_choices(self):
with SettingsOverride(CMS_MODERATOR=False):
Site.objects.all().delete()
site = Site.objects.create(domain='http://www.django-cms.org', name='Django CMS')
page1 = create_page('Page 1', 'nav_playground.html', 'en', site=site)
page2 = create_page('Page 2', 'nav_playground.html', 'de', site=site)
            # enforce the choices to be cast to a list
site_choices, page_choices = [list(bit) for bit in update_site_and_page_choices('en')]
self.assertEqual(page_choices, [
('', '----'),
(site.name, [
(page1.pk, 'Page 1'),
(page2.pk, 'Page 2'),
])
])
self.assertEqual(site_choices, [(site.pk, site.name)])
def test_superlazy_iterator_behaves_properly_for_sites(self):
normal_result = get_site_choices()
lazy_result = SuperLazyIterator(get_site_choices)
self.assertEquals(normal_result, list(lazy_result))
def test_superlazy_iterator_behaves_properly_for_pages(self):
normal_result = get_page_choices()
lazy_result = SuperLazyIterator(get_page_choices)
self.assertEquals(normal_result, list(lazy_result))
def test_page_user_form_initial(self):
myuser = User.objects.create_superuser("myuser", "[email protected]", "myuser")
user = create_page_user(myuser, myuser, grant_all=True)
puf = PageUserForm(instance=user)
names = ['can_add_page', 'can_change_page', 'can_delete_page',
'can_add_pageuser', 'can_change_pageuser',
'can_delete_pageuser', 'can_add_pagepermission',
'can_change_pagepermission', 'can_delete_pagepermission']
for name in names:
self.assertTrue(puf.initial.get(name, False))
| driesdesmet/django-cms | cms/tests/forms.py | Python | bsd-3-clause | 5,792 |
#!/usr/bin/env pythonw
import tkinter as Tk
from tkinter import ttk
import matplotlib
import numpy as np
import numpy.ma as ma
import new_cmaps
from new_cnorms import PowerNormWithNeg
from Numba2DHist import Fast2DHist, Fast2DWeightedHist, vecLog10Norm
import matplotlib.colors as mcolors
import matplotlib.gridspec as gridspec
import matplotlib.patheffects as PathEffects
class PhasePanel:
# A dictionary of all of the parameters for this plot with the default parameters
plot_param_dict = {'twoD' : 1,
'mom_dim': 0,
'masked': 1,
'cnorm_type': 'Log', #Colormap normalization. Opts are Log or Linear
'prtl_type': 0,
'cpow_num': 0.6,
'show_cbar': True,
'weighted': False,
'show_shock': False,
'show_int_region': True,
'xbins' : 200,
'pbins' : 200,
'v_min': -2.0,
'v_max' : 0,
'set_v_min': False,
'set_v_max': False,
'p_min': -2.0,
'p_max' : 2,
'set_E_min' : False,
'E_min': 1.0,
'set_E_max': False,
'E_max': 200.0,
'set_p_min': False,
'set_p_max': False,
'spatial_x': True,
'spatial_y': False,
'symmetric': False,
'interpolation': 'nearest',
'face_color': 'gainsboro'}
prtl_opts = ['proton_p', 'electron_p']
direction_opts = ['x-x', 'y-x', 'z-x']
# Old labels:
#ylabel_list =[
# [[r'$P_{px}\ [m_i c]$', r'$P_{py}\ [m_i c]$',r'$P_{pz}\ [m_i c]$'],
# [r'$P_{ex}\ [m_e c]$', r'$P_{ey}\ [m_e c]$',r'$P_{ez}\ [m_e c]$']],
# [[r'$P\prime_{px}\ [m_i c]$', r'$P\prime_{py}\ [m_i c]$',r'$P\prime_{pz}\ [m_i c]$'],
# [r'$P\prime_{ex}\ [m_e c]$', r'$P\prime_{ey}\ [m_e c]$',r'$P\prime_{ez}\ [m_e c]$']]
# ]
ylabel_list =[
[[r'$\gamma_i\beta_{x,i}$',r'$\gamma_i\beta_{y,i}$',r'$\gamma_i\beta_{z,i}$'],
[r'$\gamma_e\beta_{x,e}$',r'$\gamma_e\beta_{y,e}$',r'$\gamma_e\beta_{z,e}$']],
[[r'$\gamma\prime_i\beta\prime_{x,i}$',r'$\gamma\prime_i\beta\prime_{y,i}$',r'$\gamma\prime_i\beta\prime_{z,i}$'],
[r'$\gamma\prime_e\beta\prime_{x,e}$',r'$\gamma\prime_e\beta\prime_{y,e}$',r'$\gamma\prime_e\beta\prime_{z,e}$']]
]
gradient = np.linspace(0, 1, 256)# A way to make the colorbar display better
gradient = np.vstack((gradient, gradient))
def __init__(self, parent, figwrapper):
self.settings_window = None
self.FigWrap = figwrapper
self.parent = parent
self.ChartTypes = self.FigWrap.PlotTypeDict.keys()
self.chartType = self.FigWrap.chartType
self.figure = self.FigWrap.figure
self.InterpolationMethods = ['none','nearest', 'bilinear', 'bicubic', 'spline16',
'spline36', 'hanning', 'hamming', 'hermite', 'kaiser', 'quadric',
'catrom', 'gaussian', 'bessel', 'mitchell', 'sinc', 'lanczos']
# A variable that controls whether the energy integration region
# is shown
self.IntRegVar = Tk.IntVar()
self.IntRegVar.set(self.GetPlotParam('show_int_region'))
self.IntRegVar.trace('w', self.IntVarHandler)
        # Figure out the energy color of the integration region
if self.GetPlotParam('prtl_type') == 1: #electons
self.energy_color = self.parent.electron_color
else:
self.energy_color = self.parent.ion_color
# A list that will hold any lines for the integration region
def IntVarHandler(self, *args):
# This should only be called by the user-interaction when all the plots already exist...
        # so we can take some shortcuts and assume a lot of things are already created.
self.SetPlotParam('show_int_region', self.IntRegVar.get(), update_plot = False)
if self.IntRegVar.get() == True:
# We need to show the integration region.
# Look for all the spectra plots and plot the lines.
for i in range(self.parent.MainParamDict['NumOfRows']):
for j in range(self.parent.MainParamDict['NumOfCols']):
if self.parent.SubPlotList[i][j].chartType == 'SpectraPlot':
k = min(self.parent.SubPlotList[i][j].graph.spect_num, len(self.parent.dashes_options)-1)
                        # figure out if we are an ion phase diagram or an electron one
if self.GetPlotParam('prtl_type') == 0:
# Append the left line to the list
self.IntRegionLines.append(self.axes.axvline(
max(self.parent.SubPlotList[i][j].graph.i_left_loc, self.xmin+1),
linewidth = 1.5, linestyle = '-', color = self.energy_color))
# Choose the right dashes pattern
self.IntRegionLines[-1].set_dashes(self.parent.dashes_options[k])
                            # Append the right line to the list
self.IntRegionLines.append(self.axes.axvline(
min(self.parent.SubPlotList[i][j].graph.i_right_loc, self.xmax-1),
linewidth = 1.5, linestyle = '-', color = self.energy_color))
# Choose the right dashes pattern
self.IntRegionLines[-1].set_dashes(self.parent.dashes_options[k])
else:
# Append the left line to the list
self.IntRegionLines.append(self.axes.axvline(
max(self.parent.SubPlotList[i][j].graph.e_left_loc, self.xmin+1),
linewidth = 1.5, linestyle = '-', color = self.energy_color))
# Choose the right dashes pattern
self.IntRegionLines[-1].set_dashes(self.parent.dashes_options[k])
                            # Append the right line to the list
self.IntRegionLines.append(self.axes.axvline(
min(self.parent.SubPlotList[i][j].graph.e_right_loc, self.xmax-1),
linewidth = 1.5, linestyle = '-', color = self.energy_color))
# Choose the right dashes pattern
self.IntRegionLines[-1].set_dashes(self.parent.dashes_options[k])
# CLOSES IF. NOW IF WE TURN OFF THE INTEGRATION REGIONS, we have to delete all the lines.
else:
            for i in range(len(self.IntRegionLines)):
self.IntRegionLines.pop(0).remove()
# Update the canvas
self.parent.canvas.draw()
self.parent.canvas.get_tk_widget().update_idletasks()
def ChangePlotType(self, str_arg):
self.FigWrap.ChangeGraph(str_arg)
def norm(self, vmin=None, vmax=None):
if self.GetPlotParam('cnorm_type') == "Log":
return mcolors.LogNorm(vmin, vmax)
else:
return mcolors.Normalize(vmin, vmax)
def set_plot_keys(self):
        '''A helper function that will ensure that each hdf5 file will only be
opened once per time step'''
self.arrs_needed = ['c_omp', 'bx', 'istep', 'me', 'mi']
# First see if we will need to know the energy of the particle
# (requied for lorentz boosts and setting e_min and e_max)
Need_Energy = self.parent.MainParamDict['DoLorentzBoost'] and np.abs(self.parent.MainParamDict['GammaBoost'])>1E-8
Need_Energy = Need_Energy or self.GetPlotParam('set_E_min')
Need_Energy = Need_Energy or self.GetPlotParam('set_E_max')
if self.GetPlotParam('prtl_type') == 0:
self.arrs_needed.append('xi')
if self.GetPlotParam('weighted'):
self.arrs_needed.append('chi')
if Need_Energy:
self.arrs_needed.append('ui')
self.arrs_needed.append('vi')
self.arrs_needed.append('wi')
elif self.GetPlotParam('mom_dim') == 0:
self.arrs_needed.append('ui')
elif self.GetPlotParam('mom_dim') == 1:
self.arrs_needed.append('vi')
elif self.GetPlotParam('mom_dim') == 2:
self.arrs_needed.append('wi')
if self.GetPlotParam('prtl_type') == 1:
self.arrs_needed.append('xe')
if self.GetPlotParam('weighted'):
self.arrs_needed.append('che')
if Need_Energy:
self.arrs_needed.append('ue')
self.arrs_needed.append('ve')
self.arrs_needed.append('we')
elif self.GetPlotParam('mom_dim') == 0:
self.arrs_needed.append('ue')
elif self.GetPlotParam('mom_dim') == 1:
self.arrs_needed.append('ve')
elif self.GetPlotParam('mom_dim') == 2:
self.arrs_needed.append('we')
return self.arrs_needed
def LoadData(self):
''' A helper function that checks if the histogram has
already been calculated and if it hasn't, it calculates
it then stores it.'''
self.key_name = str(self.GetPlotParam('pbins')) + 'x' + str(self.GetPlotParam('xbins'))
if self.GetPlotParam('masked'):
self.key_name += 'masked_'
if self.GetPlotParam('weighted'):
self.key_name += 'weighted_'
if self.GetPlotParam('set_E_min'):
self.key_name += 'Emin_'+str(self.GetPlotParam('E_min')) + '_'
if self.GetPlotParam('set_E_max'):
self.key_name += 'Emax_'+str(self.GetPlotParam('E_max')) + '_'
if self.parent.MainParamDict['DoLorentzBoost'] and np.abs(self.parent.MainParamDict['GammaBoost'])>1E-8:
self.key_name += 'boosted_'+ str(self.parent.MainParamDict['GammaBoost'])+'_'
self.key_name += self.prtl_opts[self.GetPlotParam('prtl_type')]
self.key_name += self.direction_opts[self.GetPlotParam('mom_dim')]
self.key_name += str(int(self.parent.MainParamDict['PrtlStride']))
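        # Reuse the cached histogram if it was already computed for this
        # time step and parameter combination.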
if self.key_name in self.parent.DataDict.keys():
self.hist2d = self.parent.DataDict[self.key_name]
elif self.parent.MainParamDict['DoLorentzBoost'] and np.abs(self.parent.MainParamDict['GammaBoost'])>1E-8:
# Gotta boost it
self.c_omp = self.FigWrap.LoadKey('c_omp')[0]
self.istep = self.FigWrap.LoadKey('istep')[0]
self.weights = None
self.x_values = None
self.y_values = None
# x_min & x_max before boostin'
self.xmin = 0
self.xmax = self.FigWrap.LoadKey('bx').shape[2]/self.c_omp*self.istep
self.xmax = self.xmax if (self.xmax != self.xmin) else self.xmin + 1
# First calculate beta and gamma
if self.parent.MainParamDict['GammaBoost'] >=1:
self.GammaBoost = self.parent.MainParamDict['GammaBoost']
self.betaBoost = np.sqrt(1-1/self.parent.MainParamDict['GammaBoost']**2)
elif self.parent.MainParamDict['GammaBoost'] >-1:
self.betaBoost = self.parent.MainParamDict['GammaBoost']
self.GammaBoost = np.sqrt(1-self.betaBoost**2)**(-1)
else:
self.GammaBoost = -self.parent.MainParamDict['GammaBoost']
self.betaBoost = -np.sqrt(1-1/self.parent.MainParamDict['GammaBoost']**2)
# Now load the data. We require all 3 dimensions to determine
            # the velocity and LF in the boosted frame.
if self.GetPlotParam('prtl_type') == 0:
# first load everything downstream frame
self.x_values = self.FigWrap.LoadKey('xi')/self.c_omp
u = self.FigWrap.LoadKey('ui')
v = self.FigWrap.LoadKey('vi')
w = self.FigWrap.LoadKey('wi')
if self.GetPlotParam('weighted'):
self.weights = self.FigWrap.LoadKey('chi')
if self.GetPlotParam('prtl_type') == 1: #electons
self.x_values = self.FigWrap.LoadKey('xe')/self.c_omp
u = self.FigWrap.LoadKey('ue')
v = self.FigWrap.LoadKey('ve')
w = self.FigWrap.LoadKey('we')
if self.GetPlotParam('weighted'):
self.weights = self.FigWrap.LoadKey('che')
# Now calculate gamma of the particles in downstream restframe
gamma_ds = np.sqrt(u**2+v**2+w**2+1)
# calculate the velocities from the momenta
vx = u/gamma_ds
vy = v/gamma_ds
vz = w/gamma_ds
            # Now calculate the velocities in the boosted frames
tmp_helper = 1-vx*self.betaBoost
vx_prime = (vx-self.betaBoost)/tmp_helper
vy_prime = vy/self.GammaBoost/tmp_helper
vz_prime = vz/self.GammaBoost/tmp_helper
# Now calculate the LF in the boosted frames using rapidity
# Initial rapidity
rap_prtl = np.arccosh(gamma_ds)
rap_boost = np.arccosh(self.GammaBoost)
#v_tot_sq = vx_prime**2 + vy_prime**2 + vz_prime**2
#gamma_old_way = 1/np.sqrt(1-v_tot_sq)
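            # The line below is the Lorentz transform of the particle Lorentz factor,
            # gamma' = Gamma*(gamma - beta*u), with u the x component of the four-velocity:
            # sinh(rap_prtl)/sqrt(1+(v/u)**2+(w/u)**2) reduces to |u| and sinh(rap_boost)
            # to |Gamma*beta|, with signs restored by the np.sign() factors.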
gamma_prime = gamma_ds*self.GammaBoost-np.sign(u)*np.sign(self.betaBoost)*np.sinh(rap_prtl)*np.sinh(rap_boost)/np.sqrt(1+(v/u)**2+(w/u)**2)
if self.GetPlotParam('mom_dim') == 0:
self.y_values = vx_prime*gamma_prime
if self.GetPlotParam('mom_dim') == 1:
self.y_values = vy_prime*gamma_prime
if self.GetPlotParam('mom_dim') == 2:
self.y_values = vz_prime*gamma_prime
# Some of the values are becoming NaN.
# ignore them, but I don't think this should be happening anymore....
nan_ind = np.isnan(self.y_values)
self.pmin = 0.0 if len(self.y_values) == 0 else min(self.y_values)
self.pmax = 0.0 if len(self.y_values) == 0 else max(self.y_values)
self.pmax = self.pmax if (self.pmax != self.pmin) else self.pmin + 1
if self.GetPlotParam('set_E_min') or self.GetPlotParam('set_E_max'):
# We need to calculate the total energy in units m_e c^2
if self.GetPlotParam('prtl_type')==0:
energy = gamma_ds*self.FigWrap.LoadKey('mi')[0]/self.FigWrap.LoadKey('me')[0]
else:
energy = np.copy(gamma_ds)
# Now find the particles that fall in our range
if self.GetPlotParam('set_E_min'):
inRange = energy >= self.FigWrap.GetPlotParam('E_min')
if self.GetPlotParam('set_E_max'):
inRange *= energy <= self.GetPlotParam('E_max')
elif self.GetPlotParam('set_E_max'):
inRange = energy <= self.GetPlotParam('E_max')
inRange *= ~nan_ind
if self.GetPlotParam('weighted'):
self.hist2d = Fast2DWeightedHist(self.y_values[inRange], self.x_values[inRange], self.weights[inRange], self.pmin,self.pmax, self.GetPlotParam('pbins'), self.xmin,self.xmax, self.GetPlotParam('xbins')), [self.pmin, self.pmax], [self.xmin, self.xmax]
else:
self.hist2d = Fast2DHist(self.y_values[inRange], self.x_values[inRange], self.pmin,self.pmax, self.GetPlotParam('pbins'), self.xmin,self.xmax, self.GetPlotParam('xbins')), [self.pmin, self.pmax], [self.xmin, self.xmax]
else:
if self.GetPlotParam('weighted'):
self.hist2d = Fast2DWeightedHist(self.y_values, self.x_values, self.weights, self.pmin,self.pmax, self.GetPlotParam('pbins'), self.xmin,self.xmax, self.GetPlotParam('xbins')), [self.pmin, self.pmax], [self.xmin, self.xmax]
else:
self.hist2d = Fast2DHist(self.y_values, self.x_values, self.pmin,self.pmax, self.GetPlotParam('pbins'), self.xmin,self.xmax, self.GetPlotParam('xbins')), [self.pmin, self.pmax], [self.xmin, self.xmax]
try:
if self.GetPlotParam('masked'):
zval = ma.masked_array(self.hist2d[0])
zval[zval == 0] = ma.masked
zval *= float(zval.max())**(-1)
tmplist = [zval[~zval.mask].min(), zval.max()]
else:
zval = np.copy(self.hist2d[0])
zval[zval==0] = 0.5
zval *= float(zval.max())**(-1)
tmplist = [zval.min(), zval.max()]
except ValueError:
tmplist=[0.1,1]
self.hist2d = zval, self.hist2d[1], self.hist2d[2], tmplist
self.parent.DataDict[self.key_name] = self.hist2d
else:
# Generate the X-axis values
self.c_omp = self.FigWrap.LoadKey('c_omp')[0]
self.istep = self.FigWrap.LoadKey('istep')[0]
self.weights = None
self.x_values = None
self.y_values = None
# Choose the particle type and px, py, or pz
if self.GetPlotParam('prtl_type') == 0: #protons
self.x_values = self.FigWrap.LoadKey('xi')/self.c_omp
if self.GetPlotParam('weighted'):
self.weights = self.FigWrap.LoadKey('chi')
if self.GetPlotParam('mom_dim') == 0:
self.y_values = self.FigWrap.LoadKey('ui')
if self.GetPlotParam('mom_dim') == 1:
self.y_values = self.FigWrap.LoadKey('vi')
if self.GetPlotParam('mom_dim') == 2:
self.y_values = self.FigWrap.LoadKey('wi')
if self.GetPlotParam('prtl_type') == 1: #electons
self.energy_color = self.parent.electron_color
self.x_values = self.FigWrap.LoadKey('xe')/self.c_omp
if self.GetPlotParam('weighted'):
self.weights = self.FigWrap.LoadKey('che')
if self.GetPlotParam('mom_dim') == 0:
self.y_values = self.FigWrap.LoadKey('ue')
if self.GetPlotParam('mom_dim') == 1:
self.y_values = self.FigWrap.LoadKey('ve')
if self.GetPlotParam('mom_dim') == 2:
self.y_values = self.FigWrap.LoadKey('we')
self.pmin = 0.0 if len(self.y_values) == 0 else min(self.y_values)
self.pmax = 0.0 if len(self.y_values) == 0 else max(self.y_values)
self.pmax = self.pmax if (self.pmax != self.pmin) else self.pmin + 1
self.xmin = 0
self.xmax = self.FigWrap.LoadKey('bx').shape[2]/self.c_omp*self.istep
self.xmax = self.xmax if (self.xmax != self.xmin) else self.xmin + 1
if self.GetPlotParam('set_E_min') or self.GetPlotParam('set_E_max'):
# We need to calculate the total energy of each particle in
# units m_e c^2
# First load the data. We require all 3 dimensions of momentum
# to determine the energy in the downstream frame
if self.GetPlotParam('prtl_type') == 0:
u = self.FigWrap.LoadKey('ui')
v = self.FigWrap.LoadKey('vi')
w = self.FigWrap.LoadKey('wi')
if self.GetPlotParam('prtl_type') == 1: #electons
self.x_values = self.FigWrap.LoadKey('xe')/self.c_omp
u = self.FigWrap.LoadKey('ue')
v = self.FigWrap.LoadKey('ve')
w = self.FigWrap.LoadKey('we')
# Now calculate LF of the particles in downstream restframe
energy = np.sqrt(u**2+v**2+w**2+1)
            # If they are electrons this is already the energy in units m_e c^2.
# Otherwise...
if self.GetPlotParam('prtl_type')==0:
energy *= self.FigWrap.LoadKey('mi')[0]/self.FigWrap.LoadKey('me')[0]
# Now find the particles that fall in our range
if self.GetPlotParam('set_E_min'):
inRange = energy >= self.FigWrap.GetPlotParam('E_min')
if self.GetPlotParam('set_E_max'):
inRange *= energy <= self.GetPlotParam('E_max')
elif self.GetPlotParam('set_E_max'):
inRange = energy <= self.GetPlotParam('E_max')
if self.GetPlotParam('weighted'):
self.hist2d = Fast2DWeightedHist(self.y_values[inRange], self.x_values[inRange], self.weights[inRange], self.pmin,self.pmax, self.GetPlotParam('pbins'), self.xmin,self.xmax, self.GetPlotParam('xbins')), [self.pmin, self.pmax], [self.xmin, self.xmax]
else:
self.hist2d = Fast2DHist(self.y_values[inRange], self.x_values[inRange], self.pmin,self.pmax, self.GetPlotParam('pbins'), self.xmin,self.xmax, self.GetPlotParam('xbins')), [self.pmin, self.pmax], [self.xmin, self.xmax]
else:
if self.GetPlotParam('weighted'):
self.hist2d = Fast2DWeightedHist(self.y_values, self.x_values, self.weights, self.pmin,self.pmax, self.GetPlotParam('pbins'), self.xmin,self.xmax, self.GetPlotParam('xbins')), [self.pmin, self.pmax], [self.xmin, self.xmax]
else:
self.hist2d = Fast2DHist(self.y_values, self.x_values, self.pmin,self.pmax, self.GetPlotParam('pbins'), self.xmin,self.xmax, self.GetPlotParam('xbins')), [self.pmin, self.pmax], [self.xmin, self.xmax]
try:
if self.GetPlotParam('masked'):
zval = ma.masked_array(self.hist2d[0])
zval[zval == 0] = ma.masked
zval *= float(zval.max())**(-1)
tmplist = [zval[~zval.mask].min(), zval.max()]
else:
zval = np.copy(self.hist2d[0])
zval[zval==0] = 0.5
zval *= float(zval.max())**(-1)
tmplist = [zval.min(), zval.max()]
except ValueError:
tmplist = [0.1,1]
self.hist2d = zval, self.hist2d[1], self.hist2d[2], tmplist
self.parent.DataDict[self.key_name] = self.hist2d
def UpdateLabelsandColors(self):
# set the colors
if self.GetPlotParam('prtl_type') == 0: #protons
self.energy_color = self.parent.ion_color
else: #electons
self.energy_color = self.parent.electron_color
for line in self.IntRegionLines:
line.set_color(self.energy_color)
#set the xlabels
if self.parent.MainParamDict['DoLorentzBoost'] and np.abs(self.parent.MainParamDict['GammaBoost'])>1E-8:
self.x_label = r'$x\prime\ [c/\omega_{\rm pe}]$'
else:
self.x_label = r'$x\ [c/\omega_{\rm pe}]$'
#set the ylabel
self.y_label = self.ylabel_list[self.parent.MainParamDict['DoLorentzBoost']][self.GetPlotParam('prtl_type')][self.GetPlotParam('mom_dim')]
def draw(self):
# In order to speed up the plotting, we only recalculate everything
# if necessary.
self.IntRegionLines = []
# Figure out the color and ylabel
# Choose the particle type and px, py, or pz
self.UpdateLabelsandColors()
self.xmin = self.hist2d[2][0]
self.xmax = self.hist2d[2][-1]
self.ymin = self.hist2d[1][0]
self.ymax = self.hist2d[1][-1]
if self.GetPlotParam('masked'):
self.tick_color = 'k'
else:
self.tick_color = 'white'
self.clim = list(self.hist2d[3])
if self.GetPlotParam('set_v_min'):
self.clim[0] = 10**self.GetPlotParam('v_min')
if self.GetPlotParam('set_v_max'):
self.clim[1] = 10**self.GetPlotParam('v_max')
self.gs = gridspec.GridSpecFromSubplotSpec(100,100, subplot_spec = self.parent.gs0[self.FigWrap.pos])#, bottom=0.2,left=0.1,right=0.95, top = 0.95)
if self.parent.MainParamDict['LinkSpatial'] == 1:
if self.FigWrap.pos == self.parent.first_x:
self.axes = self.figure.add_subplot(self.gs[self.parent.axes_extent[0]:self.parent.axes_extent[1], self.parent.axes_extent[2]:self.parent.axes_extent[3]])
else:
self.axes = self.figure.add_subplot(self.gs[self.parent.axes_extent[0]:self.parent.axes_extent[1], self.parent.axes_extent[2]:self.parent.axes_extent[3]], sharex = self.parent.SubPlotList[self.parent.first_x[0]][self.parent.first_x[1]].graph.axes)
else:
self.axes = self.figure.add_subplot(self.gs[self.parent.axes_extent[0]:self.parent.axes_extent[1], self.parent.axes_extent[2]:self.parent.axes_extent[3]])
self.cax = self.axes.imshow(self.hist2d[0],
cmap = new_cmaps.cmaps[self.parent.MainParamDict['ColorMap']],
norm = self.norm(), origin = 'lower',
aspect = 'auto',
interpolation=self.GetPlotParam('interpolation'))
self.cax.set_extent([self.xmin, self.xmax, self.ymin, self.ymax])
self.cax.set_clim(self.clim)
self.shock_line = self.axes.axvline(self.parent.shock_loc, linewidth = 1.5, linestyle = '--', color = self.parent.shock_color, path_effects=[PathEffects.Stroke(linewidth=2, foreground='k'),
PathEffects.Normal()])
if not self.GetPlotParam('show_shock'):
self.shock_line.set_visible(False)
self.axC = self.figure.add_subplot(self.gs[self.parent.cbar_extent[0]:self.parent.cbar_extent[1], self.parent.cbar_extent[2]:self.parent.cbar_extent[3]])
self.parent.cbarList.append(self.axC)
        # Technically I should use the colorbar class here,
        # but I found some of its limitations annoying.
if self.parent.MainParamDict['HorizontalCbars']:
self.cbar = self.axC.imshow(self.gradient, aspect='auto',
cmap=new_cmaps.cmaps[self.parent.MainParamDict['ColorMap']])
            # Make the colorbar axis more like the real colorbar
self.axC.tick_params(axis='x',
                                 which = 'both', # both major and minor ticks
top = False, # turn off top ticks
labelsize=self.parent.MainParamDict['NumFontSize'])
self.axC.tick_params(axis='y', # changes apply to the y-axis
which='both', # both major and minor ticks are affected
                                 left=False,      # ticks along the left edge are off
                                 right=False,         # ticks along the right edge are off
labelleft=False)
else:
self.cbar = self.axC.imshow(np.transpose(self.gradient)[::-1], aspect='auto',
cmap=new_cmaps.cmaps[self.parent.MainParamDict['ColorMap']])
            # Make the colorbar axis more like the real colorbar
self.axC.tick_params(axis='x',
                                 which = 'both', # both major and minor ticks
top = False, # turn off top ticks
bottom = False,
labelbottom = False,
labelsize=self.parent.MainParamDict['NumFontSize'])
self.axC.tick_params(axis='y', # changes apply to the y-axis
which='both', # both major and minor ticks are affected
                                 left=False,      # ticks along the left edge are off
                                 right=True,         # ticks along the right edge are on
labelleft=False,
labelright=True,
labelsize=self.parent.MainParamDict['NumFontSize'])
self.cbar.set_extent([0, 1.0, 0, 1.0])
if not self.GetPlotParam('show_cbar'):
self.axC.set_visible(False)
if int(matplotlib.__version__[0]) < 2:
self.axes.set_axis_bgcolor(self.GetPlotParam('face_color'))
else:
self.axes.set_facecolor(self.GetPlotParam('face_color'))
self.axes.tick_params(labelsize = self.parent.MainParamDict['NumFontSize'], color=self.tick_color)
self.axes.set_xlabel(self.x_label, labelpad = self.parent.MainParamDict['xLabelPad'], color = 'black', size = self.parent.MainParamDict['AxLabelSize'])
self.axes.set_ylabel(self.y_label, labelpad = self.parent.MainParamDict['yLabelPad'], color = 'black', size = self.parent.MainParamDict['AxLabelSize'])
self.refresh()
def refresh(self):
        '''This is a function that will be called only if self.axes already
        holds a density type plot. We only update things that are shown. If
        something hasn't changed, or isn't viewed, don't touch it. The difference from
        last time is that we won't actually do any drawing in the plot. The plot
        will be redrawn after all subplots' data is changed. '''
# Main goal, only change what is showing..
self.xmin = self.hist2d[2][0]
self.xmax = self.hist2d[2][-1]
self.ymin = self.hist2d[1][0]
self.ymax = self.hist2d[1][-1]
self.clim = list(self.hist2d[3])
self.cax.set_data(self.hist2d[0])
self.cax.set_extent([self.xmin,self.xmax, self.ymin, self.ymax])
if self.GetPlotParam('set_v_min'):
self.clim[0] = 10**self.GetPlotParam('v_min')
if self.GetPlotParam('set_v_max'):
self.clim[1] = 10**self.GetPlotParam('v_max')
self.cax.set_clim(self.clim)
if self.GetPlotParam('show_cbar'):
self.CbarTickFormatter()
if self.GetPlotParam('show_shock'):
self.shock_line.set_xdata([self.parent.shock_loc,self.parent.shock_loc])
self.UpdateLabelsandColors()
self.axes.set_xlabel(self.x_label, labelpad = self.parent.MainParamDict['xLabelPad'], color = 'black', size = self.parent.MainParamDict['AxLabelSize'])
self.axes.set_ylabel(self.y_label, labelpad = self.parent.MainParamDict['yLabelPad'], color = 'black', size = self.parent.MainParamDict['AxLabelSize'])
if self.GetPlotParam('set_p_min'):
self.ymin = self.GetPlotParam('p_min')
if self.GetPlotParam('set_p_max'):
self.ymax = self.GetPlotParam('p_max')
if self.GetPlotParam('symmetric'):
self.ymin = -max(abs(self.ymin), abs(self.ymax))
self.ymax = abs(self.ymin)
self.axes.set_ylim(self.ymin, self.ymax)
if self.parent.MainParamDict['SetxLim'] and self.parent.MainParamDict['LinkSpatial'] == 1:
if self.parent.MainParamDict['xLimsRelative']:
self.axes.set_xlim(self.parent.MainParamDict['xLeft'] + self.parent.shock_loc,
self.parent.MainParamDict['xRight'] + self.parent.shock_loc)
else:
self.axes.set_xlim(self.parent.MainParamDict['xLeft'], self.parent.MainParamDict['xRight'])
else:
self.axes.set_xlim(self.xmin,self.xmax)
def CbarTickFormatter(self):
''' A helper function that sets the cbar ticks & labels. This used to be
        easier, but because I am no longer using the colorbar class I have to do
stuff manually.'''
clim = np.copy(self.cax.get_clim())
if self.GetPlotParam('show_cbar'):
if self.GetPlotParam('cnorm_type') == "Log":
if self.parent.MainParamDict['HorizontalCbars']:
self.cbar.set_extent([np.log10(clim[0]),np.log10(clim[1]),0,1])
self.axC.set_xlim(np.log10(clim[0]),np.log10(clim[1]))
self.axC.xaxis.set_label_position("top")
if self.GetPlotParam('prtl_type') ==0:
self.axC.set_xlabel(r'$\log{\ \ f_i(p)}$', size = self.parent.MainParamDict['AxLabelSize'])#, labelpad =15, rotation = -90)
else:
self.axC.set_xlabel(r'$\log{\ \ f_e(p)}$', size = self.parent.MainParamDict['AxLabelSize'])#, size = 12,labelpad =15, rotation = -90)
else:
self.cbar.set_extent([0,1,np.log10(clim[0]),np.log10(clim[1])])
self.axC.set_ylim(np.log10(clim[0]),np.log10(clim[1]))
self.axC.locator_params(axis='y',nbins=6)
self.axC.yaxis.set_label_position("right")
if self.GetPlotParam('prtl_type') ==0:
self.axC.set_ylabel(r'$\log{\ \ f_i(p)}$', labelpad = self.parent.MainParamDict['cbarLabelPad'], rotation = -90, size = self.parent.MainParamDict['AxLabelSize'])
else:
self.axC.set_ylabel(r'$\log{\ \ f_e(p)}$', labelpad = self.parent.MainParamDict['cbarLabelPad'], rotation = -90, size = self.parent.MainParamDict['AxLabelSize'])
else:# self.GetPlotParam('cnorm_type') == "Linear":
if self.parent.MainParamDict['HorizontalCbars']:
self.cbar.set_extent([clim[0], clim[1], 0, 1])
self.axC.set_xlim(clim[0], clim[1])
if self.GetPlotParam('prtl_type') ==0:
self.axC.set_xlabel(r'$f_i(p)$', size = self.parent.MainParamDict['AxLabelSize'])
else:
self.axC.set_xlabel(r'$f_e(p)$', size = self.parent.MainParamDict['AxLabelSize'])
else:
self.cbar.set_extent([0, 1, clim[0], clim[1]])
self.axC.set_ylim(clim[0], clim[1])
self.axC.locator_params(axis='y', nbins=6)
self.axC.yaxis.set_label_position("right")
if self.GetPlotParam('prtl_type') ==0:
self.axC.set_ylabel(r'$f_i(p)$', labelpad = self.parent.MainParamDict['cbarLabelPad'], rotation = -90, size = self.parent.MainParamDict['AxLabelSize'])
else:
self.axC.set_ylabel(r'$f_e(p)$', labelpad = self.parent.MainParamDict['cbarLabelPad'], rotation = -90, size = self.parent.MainParamDict['AxLabelSize'])
def GetPlotParam(self, keyname):
return self.FigWrap.GetPlotParam(keyname)
def SetPlotParam(self, keyname, value, update_plot = True):
self.FigWrap.SetPlotParam(keyname, value, update_plot = update_plot)
def OpenSettings(self):
if self.settings_window is None:
self.settings_window = PhaseSettings(self)
else:
self.settings_window.destroy()
self.settings_window = PhaseSettings(self)
class PhaseSettings(Tk.Toplevel):
def __init__(self, parent):
self.parent = parent
Tk.Toplevel.__init__(self)
self.wm_title('Phase Plot (%d,%d) Settings' % self.parent.FigWrap.pos)
self.parent = parent
frm = ttk.Frame(self)
frm.pack(fill=Tk.BOTH, expand=True)
self.protocol('WM_DELETE_WINDOW', self.OnClosing)
self.bind('<Return>', self.TxtEnter)
        # Create the OptionMenu to choose the Interpolation Type:
self.InterpolVar = Tk.StringVar(self)
self.InterpolVar.set(self.parent.GetPlotParam('interpolation')) # default value
self.InterpolVar.trace('w', self.InterpolChanged)
ttk.Label(frm, text="Interpolation Method:").grid(row=0, column = 2)
InterplChooser = ttk.OptionMenu(frm, self.InterpolVar, self.parent.GetPlotParam('interpolation'), *tuple(self.parent.InterpolationMethods))
InterplChooser.grid(row =0, column = 3, sticky = Tk.W + Tk.E)
        # Create the OptionMenu to choose the Chart Type:
self.ctypevar = Tk.StringVar(self)
self.ctypevar.set(self.parent.chartType) # default value
self.ctypevar.trace('w', self.ctypeChanged)
ttk.Label(frm, text="Choose Chart Type:").grid(row=0, column = 0)
cmapChooser = ttk.OptionMenu(frm, self.ctypevar, self.parent.chartType, *tuple(self.parent.ChartTypes))
cmapChooser.grid(row =0, column = 1, sticky = Tk.W + Tk.E)
# the Radiobox Control to choose the particle
self.prtlList = ['ion', 'electron']
self.pvar = Tk.IntVar()
self.pvar.set(self.parent.GetPlotParam('prtl_type'))
ttk.Label(frm, text='Particle:').grid(row = 1, sticky = Tk.W)
for i in range(len(self.prtlList)):
ttk.Radiobutton(frm,
text=self.prtlList[i],
variable=self.pvar,
command = self.RadioPrtl,
value=i).grid(row = 2+i, sticky =Tk.W)
# the Radiobox Control to choose the momentum dim
self.dimList = ['x-px', 'x-py', 'x-pz']
self.dimvar = Tk.IntVar()
self.dimvar.set(self.parent.GetPlotParam('mom_dim'))
        ttk.Label(frm, text='Dimension:').grid(row = 1, column = 1, sticky = Tk.W)
for i in range(len(self.dimList)):
ttk.Radiobutton(frm,
text=self.dimList[i],
variable=self.dimvar,
command = self.RadioDim,
value=i).grid(row = 2+i, column = 1, sticky = Tk.W)
# Control whether or not Cbar is shown
self.CbarVar = Tk.IntVar()
self.CbarVar.set(self.parent.GetPlotParam('show_cbar'))
cb = ttk.Checkbutton(frm, text = "Show Color bar",
variable = self.CbarVar,
command = self.CbarHandler)
cb.grid(row = 6, sticky = Tk.W)
# show shock
self.ShockVar = Tk.IntVar()
self.ShockVar.set(self.parent.GetPlotParam('show_shock'))
cb = ttk.Checkbutton(frm, text = "Show Shock",
variable = self.ShockVar,
command = self.ShockVarHandler)
cb.grid(row = 6, column = 1, sticky = Tk.W)
# Use full div cmap
self.SymVar = Tk.IntVar()
self.SymVar.set(self.parent.GetPlotParam('symmetric'))
cb = ttk.Checkbutton(frm, text = "Symmetric about zero",
variable = self.SymVar,
command = self.SymmetricHandler)
cb.grid(row = 8, column = 1, sticky = Tk.W)
        # Control if the plot is weighted
self.WeightVar = Tk.IntVar()
self.WeightVar.set(self.parent.GetPlotParam('weighted'))
cb = ttk.Checkbutton(frm, text = "Weight by charge",
variable = self.WeightVar,
command = lambda:
self.parent.SetPlotParam('weighted', self.WeightVar.get()))
cb.grid(row = 7, sticky = Tk.W)
# Show energy integration region
#self.IntRegVar = Tk.IntVar()
#self.IntRegVar.set(self.parent.GetPlotParam('show_int_region'))
cb = ttk.Checkbutton(frm, text = "Show Energy Region",
variable = self.parent.IntRegVar)#,
# command = self.ShowIntRegionHandler)
cb.grid(row = 7, column = 1, sticky = Tk.W)
# control mask
self.MaskVar = Tk.IntVar()
self.MaskVar.set(self.parent.GetPlotParam('masked'))
cb = ttk.Checkbutton(frm, text = "Mask Zeros",
variable = self.MaskVar,
command = lambda:
self.parent.SetPlotParam('masked', self.MaskVar.get()))
cb.grid(row = 8, sticky = Tk.W)
self.TrueVar = Tk.IntVar()
self.TrueVar.set(1)
self.pBins = Tk.StringVar()
self.pBins.set(str(self.parent.GetPlotParam('pbins')))
ttk.Label(frm, text ='# of pbins').grid(row = 9, column = 0, sticky = Tk.W)
ttk.Entry(frm, textvariable=self.pBins, width=7).grid(row = 9, column = 1)
self.xBins = Tk.StringVar()
self.xBins.set(str(self.parent.GetPlotParam('xbins')))
ttk.Label(frm, text ='# of xbins').grid(row = 10, column = 0, sticky = Tk.W)
ttk.Entry(frm, textvariable=self.xBins, width=7).grid(row = 10, column = 1)
# ttk.Label(frm, text = 'If the zero values are not masked they are set to z_min/2').grid(row =9, columnspan =2)
# Define functions for the events
# Now the field lim
self.setVminVar = Tk.IntVar()
self.setVminVar.set(self.parent.GetPlotParam('set_v_min'))
self.setVminVar.trace('w', self.setVminChanged)
self.setVmaxVar = Tk.IntVar()
self.setVmaxVar.set(self.parent.GetPlotParam('set_v_max'))
self.setVmaxVar.trace('w', self.setVmaxChanged)
self.Vmin = Tk.StringVar()
self.Vmin.set(str(self.parent.GetPlotParam('v_min')))
self.Vmax = Tk.StringVar()
self.Vmax.set(str(self.parent.GetPlotParam('v_max')))
cb = ttk.Checkbutton(frm, text ='Set log(f) min',
variable = self.setVminVar)
cb.grid(row = 3, column = 2, sticky = Tk.W)
self.VminEnter = ttk.Entry(frm, textvariable=self.Vmin, width=7)
self.VminEnter.grid(row = 3, column = 3)
cb = ttk.Checkbutton(frm, text ='Set log(f) max',
variable = self.setVmaxVar)
cb.grid(row = 4, column = 2, sticky = Tk.W)
self.VmaxEnter = ttk.Entry(frm, textvariable=self.Vmax, width=7)
self.VmaxEnter.grid(row = 4, column = 3)
# Now the y lim
self.setPminVar = Tk.IntVar()
self.setPminVar.set(self.parent.GetPlotParam('set_p_min'))
self.setPminVar.trace('w', self.setPminChanged)
self.setPmaxVar = Tk.IntVar()
self.setPmaxVar.set(self.parent.GetPlotParam('set_p_max'))
self.setPmaxVar.trace('w', self.setPmaxChanged)
self.Pmin = Tk.StringVar()
self.Pmin.set(str(self.parent.GetPlotParam('p_min')))
self.Pmax = Tk.StringVar()
self.Pmax.set(str(self.parent.GetPlotParam('p_max')))
cb = ttk.Checkbutton(frm, text ='Set y_axis min',
variable = self.setPminVar)
cb.grid(row = 5, column = 2, sticky = Tk.W)
self.PminEnter = ttk.Entry(frm, textvariable=self.Pmin, width=7)
self.PminEnter.grid(row = 5, column = 3)
cb = ttk.Checkbutton(frm, text ='Set y_axis max',
variable = self.setPmaxVar)
cb.grid(row = 6, column = 2, sticky = Tk.W)
self.PmaxEnter = ttk.Entry(frm, textvariable=self.Pmax, width=7)
self.PmaxEnter.grid(row = 6, column = 3)
# Now the E lim
self.setEminVar = Tk.IntVar()
self.setEminVar.set(self.parent.GetPlotParam('set_E_min'))
self.setEminVar.trace('w', self.setEminChanged)
self.setEmaxVar = Tk.IntVar()
self.setEmaxVar.set(self.parent.GetPlotParam('set_E_max'))
self.setEmaxVar.trace('w', self.setEmaxChanged)
self.Emin = Tk.StringVar()
self.Emin.set(str(self.parent.GetPlotParam('E_min')))
self.Emax = Tk.StringVar()
self.Emax.set(str(self.parent.GetPlotParam('E_max')))
cb = ttk.Checkbutton(frm, text ='Set E_min (m_e c^2)',
variable = self.setEminVar)
cb.grid(row = 7, column = 2, sticky = Tk.W)
self.EminEnter = ttk.Entry(frm, textvariable=self.Emin, width=7)
self.EminEnter.grid(row = 7, column = 3)
cb = ttk.Checkbutton(frm, text ='Set E_max (m_e c^2)',
variable = self.setEmaxVar)
cb.grid(row = 8, column = 2, sticky = Tk.W)
self.EmaxEnter = ttk.Entry(frm, textvariable=self.Emax, width=7)
self.EmaxEnter.grid(row = 8, column = 3)
def ShockVarHandler(self, *args):
if self.parent.GetPlotParam('show_shock')== self.ShockVar.get():
pass
else:
self.parent.shock_line.set_visible(self.ShockVar.get())
self.parent.SetPlotParam('show_shock', self.ShockVar.get())
def CbarHandler(self, *args):
if self.parent.GetPlotParam('show_cbar')== self.CbarVar.get():
pass
else:
self.parent.axC.set_visible(self.CbarVar.get())
self.parent.SetPlotParam('show_cbar', self.CbarVar.get(), update_plot =self.parent.GetPlotParam('twoD'))
def ctypeChanged(self, *args):
if self.ctypevar.get() == self.parent.chartType:
pass
else:
self.parent.ChangePlotType(self.ctypevar.get())
self.destroy()
def InterpolChanged(self, *args):
if self.InterpolVar.get() == self.parent.GetPlotParam('interpolation'):
pass
else:
self.parent.cax.set_interpolation(self.InterpolVar.get())
self.parent.SetPlotParam('interpolation', self.InterpolVar.get())
def RadioPrtl(self):
if self.pvar.get() == self.parent.GetPlotParam('prtl_type'):
pass
else:
self.parent.SetPlotParam('prtl_type', self.pvar.get(), update_plot = False)
self.parent.UpdateLabelsandColors()
self.parent.axes.set_ylabel(self.parent.y_label, labelpad = self.parent.parent.MainParamDict['yLabelPad'], color = 'black', size = self.parent.parent.MainParamDict['AxLabelSize'])
#self.parent.lineleft.set_color(self.parent.energy_color)
#self.parent.lineright.set_color(self.parent.energy_color)
self.parent.SetPlotParam('prtl_type', self.pvar.get())
def RadioDim(self):
if self.dimvar.get() == self.parent.GetPlotParam('mom_dim'):
pass
else:
self.parent.SetPlotParam('mom_dim', self.dimvar.get(), update_plot = False)
self.parent.UpdateLabelsandColors()
self.parent.axes.set_ylabel(self.parent.y_label, labelpad = self.parent.parent.MainParamDict['yLabelPad'], color = 'black', size = self.parent.parent.MainParamDict['AxLabelSize'])
self.parent.SetPlotParam('mom_dim', self.dimvar.get())
def SymmetricHandler(self, *args):
if self.parent.GetPlotParam('symmetric') == self.SymVar.get():
pass
else:
self.parent.SetPlotParam('symmetric', self.SymVar.get(), update_plot = True)
def setVminChanged(self, *args):
if self.setVminVar.get() == self.parent.GetPlotParam('set_v_min'):
pass
else:
self.parent.SetPlotParam('set_v_min', self.setVminVar.get())
def setVmaxChanged(self, *args):
if self.setVmaxVar.get() == self.parent.GetPlotParam('set_v_max'):
pass
else:
self.parent.SetPlotParam('set_v_max', self.setVmaxVar.get())
def setPminChanged(self, *args):
if self.setPminVar.get() == self.parent.GetPlotParam('set_p_min'):
pass
else:
self.parent.SetPlotParam('set_p_min', self.setPminVar.get())
def setPmaxChanged(self, *args):
if self.setPmaxVar.get() == self.parent.GetPlotParam('set_p_max'):
pass
else:
self.parent.SetPlotParam('set_p_max', self.setPmaxVar.get())
def setEminChanged(self, *args):
if self.setEminVar.get() == self.parent.GetPlotParam('set_E_min'):
pass
else:
self.parent.SetPlotParam('set_E_min', self.setEminVar.get())
def setEmaxChanged(self, *args):
if self.setEmaxVar.get() == self.parent.GetPlotParam('set_E_max'):
pass
else:
self.parent.SetPlotParam('set_E_max', self.setEmaxVar.get())
def TxtEnter(self, e):
self.FieldsCallback()
def FieldsCallback(self):
#### First set the Float Values
tkvarLimList = [self.Vmin, self.Vmax, self.Pmin, self.Pmax, self.Emin, self.Emax]
plot_param_List = ['v_min', 'v_max', 'p_min', 'p_max', 'E_min', 'E_max']
tkvarSetList = [self.setVminVar, self.setVmaxVar, self.setPminVar, self.setPmaxVar, self.setEminVar, self.setEmaxVar]
to_reload = False
for j in range(len(tkvarLimList)):
try:
#make sure the user types in a float
if np.abs(float(tkvarLimList[j].get()) - self.parent.GetPlotParam(plot_param_List[j])) > 1E-4:
self.parent.SetPlotParam(plot_param_List[j], float(tkvarLimList[j].get()), update_plot = False)
to_reload += True*tkvarSetList[j].get()
except ValueError:
                # if they type in random stuff, just reset it to the param value
tkvarLimList[j].set(str(self.parent.GetPlotParam(plot_param_List[j])))
intVarList = [self.pBins, self.xBins]
intParamList = ['pbins', 'xbins']
for j in range(len(intVarList)):
try:
#make sure the user types in a float
intVarList[j].set(str(int(float(intVarList[j].get()))))
if int(float(intVarList[j].get())) - int(self.parent.GetPlotParam(intParamList[j])) != 0:
self.parent.SetPlotParam(intParamList[j], int(float(intVarList[j].get())), update_plot = False)
to_reload += True
            except ValueError:
                # if they type in random stuff, just reset it to the param value
                intVarList[j].set(str(self.parent.GetPlotParam(intParamList[j])))
if to_reload:
self.parent.SetPlotParam('v_min', self.parent.GetPlotParam('v_min'))
def OnClosing(self):
self.parent.settings_window = None
self.destroy()
| pcrumley/Iseult | src/phase_plots.py | Python | gpl-3.0 | 50,141 |
# coding: utf-8
"""
KubeVirt API
This is KubeVirt API an add-on for Kubernetes.
OpenAPI spec version: 1.0.0
Contact: [email protected]
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
from pprint import pformat
from six import iteritems
import re
class V1alpha1VirtualMachineTemplateSpec(object):
"""
NOTE: This class is auto generated by the swagger code generator program.
Do not edit the class manually.
"""
"""
Attributes:
swagger_types (dict): The key is attribute name
and the value is attribute type.
attribute_map (dict): The key is attribute name
and the value is json key in definition.
"""
swagger_types = {
'metadata': 'K8sIoApimachineryPkgApisMetaV1ObjectMeta',
'spec': 'V1VirtualMachineSpec'
}
attribute_map = {
'metadata': 'metadata',
'spec': 'spec'
}
def __init__(self, metadata=None, spec=None):
"""
V1alpha1VirtualMachineTemplateSpec - a model defined in Swagger
"""
self._metadata = None
self._spec = None
if metadata is not None:
self.metadata = metadata
if spec is not None:
self.spec = spec
@property
def metadata(self):
"""
Gets the metadata of this V1alpha1VirtualMachineTemplateSpec.
:return: The metadata of this V1alpha1VirtualMachineTemplateSpec.
:rtype: K8sIoApimachineryPkgApisMetaV1ObjectMeta
"""
return self._metadata
@metadata.setter
def metadata(self, metadata):
"""
Sets the metadata of this V1alpha1VirtualMachineTemplateSpec.
:param metadata: The metadata of this V1alpha1VirtualMachineTemplateSpec.
:type: K8sIoApimachineryPkgApisMetaV1ObjectMeta
"""
self._metadata = metadata
@property
def spec(self):
"""
Gets the spec of this V1alpha1VirtualMachineTemplateSpec.
VirtualMachineSpec contains the VirtualMachine specification.
:return: The spec of this V1alpha1VirtualMachineTemplateSpec.
:rtype: V1VirtualMachineSpec
"""
return self._spec
@spec.setter
def spec(self, spec):
"""
Sets the spec of this V1alpha1VirtualMachineTemplateSpec.
VirtualMachineSpec contains the VirtualMachine specification.
:param spec: The spec of this V1alpha1VirtualMachineTemplateSpec.
:type: V1VirtualMachineSpec
"""
self._spec = spec
def to_dict(self):
"""
Returns the model properties as a dict
"""
result = {}
for attr, _ in iteritems(self.swagger_types):
value = getattr(self, attr)
if isinstance(value, list):
result[attr] = list(map(
lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
value
))
elif hasattr(value, "to_dict"):
result[attr] = value.to_dict()
elif isinstance(value, dict):
result[attr] = dict(map(
lambda item: (item[0], item[1].to_dict())
if hasattr(item[1], "to_dict") else item,
value.items()
))
else:
result[attr] = value
return result
def to_str(self):
"""
Returns the string representation of the model
"""
return pformat(self.to_dict())
def __repr__(self):
"""
For `print` and `pprint`
"""
return self.to_str()
def __eq__(self, other):
"""
Returns true if both objects are equal
"""
if not isinstance(other, V1alpha1VirtualMachineTemplateSpec):
return False
return self.__dict__ == other.__dict__
def __ne__(self, other):
"""
Returns true if both objects are not equal
"""
return not self == other
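# Illustrative sketch (not part of the generated client): an empty template spec
# serialises to plain None fields; real callers pass a populated ObjectMeta and
# V1VirtualMachineSpec from this package.
if __name__ == "__main__":
    template = V1alpha1VirtualMachineTemplateSpec()
    print(template.to_dict())  # -> {'metadata': None, 'spec': None} (key order may vary)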
| kubevirt/client-python | kubevirt/models/v1alpha1_virtual_machine_template_spec.py | Python | apache-2.0 | 4,066 |
import cards
# Player: stores information about the player; used on the client side to decide actions
class Player:
    # Constructor
def __init__(self):
self.hand = []
    # Initialize attributes at the start of a round
def round(self, card_strings):
card_list = []
for card in card_strings:
card_list.append(card.split(" "))
self.hand = [cards.Card(int(card_list[i][0]), card_list[i][1]) for i in range(3)]
    # Print the cards to the screen
def printCards(self):
i = 0
print "Sua mao tem:"
for card in self.hand:
print str(i) + ") " + str(card)
i = i + 1
    # Play a card: remove it from the hand and send the message to the server
def play(self, index, socket):
message = str(self.hand[index])
del self.hand[index]
socket.send(message)
    # Return the number of cards in the hand
def numCards(self):
return len(self.hand) | jvitorfromhell/truco | player.py | Python | gpl-3.0 | 975 |
#!/usr/bin/env python
# (c) Copyright [2016] Hewlett Packard Enterprise Development LP Licensed under
# the Apache License, Version 2.0 (the "License"); you may not use this file
# except in compliance with the License. You may obtain a copy of the License
# at http://www.apache.org/licenses/LICENSE-2.0. Unless required by applicable
# law or agreed to in writing, software distributed under the License is
# distributed on an "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the specific language
# governing permissions and limitations under the License.
from canari.maltego.utils import debug
from canari.framework import configure
from canari.maltego.entities import URL
from canari.maltego.message import Label, UIMessage
from common.entities import Indicator
from common.client import search_indicator, encode_to_utf8, ThreatCentralError
__author__ = 'Bart Otten'
__copyright__ = '(c) Copyright [2016] Hewlett Packard Enterprise Development LP'
__credits__ = []
__license__ = 'Apache 2.0'
__version__ = '1'
__maintainer__ = 'Bart Otten'
__email__ = '[email protected]'
__status__ = 'Development'
__all__ = [
'dotransform'
]
@configure(
label='Search URL in Indicators',
description='Search URL on Threat Central',
uuids=['threatcentral.v2.URLToIndicators'],
inputs=[('Threat Central', URL)],
debug=False,
remote=False
)
def dotransform(request, response, config):
try:
url = request.fields['url']
except KeyError:
url = request.value
try:
indicators = search_indicator(url)
except ThreatCentralError as err:
response += UIMessage(err.value, type='PartialError')
else:
try:
for indicator in indicators:
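                # Use the Threat Central score as the Maltego entity weight, defaulting to 1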
if indicator.get('tcScore'):
weight = int(indicator.get('tcScore'))
else:
weight = 1
indicator = indicator.get('resource')
e = Indicator(encode_to_utf8(indicator.get('title')), weight=weight)
e.title = encode_to_utf8(indicator.get('title'))
# e.resourceId = indicator.get('resourceId')
e.resourceId = indicator.get('resourceId')
if indicator.get('severity'):
e += Label('Severity', indicator.get('severity', dict()).get('displayName'))
e.severity = indicator.get('severity', dict()).get('displayName')
if indicator.get('confidence'):
e += Label('Confidence', indicator.get('confidence', dict()).get('displayName'))
e.confidence = indicator.get('confidence', dict()).get('displayName')
if indicator.get('indicatorType'):
e += Label('Indicator Type', indicator.get('indicatorType', dict()).get('displayName'))
e.indicatorType = indicator.get('indicatorType', dict()).get('displayName')
if indicator.get('description'):
e += Label('Description', '<br/>'.join(encode_to_utf8(indicator.get('description')
).split('\n')))
response += e
except AttributeError as err:
response += UIMessage('Error: {}'.format(err), type='PartialError')
except ThreatCentralError as err:
response += UIMessage(err.value, type='PartialError')
except TypeError:
return response
return response
| ThreatCentral/blackberries | src/ThreatCentral/transforms/URLToIndicators.py | Python | apache-2.0 | 3,518 |
# -*- coding: utf-8 -*-
# Generated by Django 1.10.4 on 2017-04-24 01:13
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
initial = True
dependencies = [
]
operations = [
migrations.CreateModel(
name='Task',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('task', models.CharField(max_length=50, verbose_name='tarefa')),
],
options={
'verbose_name': 'tarefa',
'verbose_name_plural': 'tarefas',
'ordering': ('task',),
},
),
]
| rg3915/spark | spark/gamification/migrations/0001_initial.py | Python | mit | 739 |
"""
Copyright (c) 2020 Alex Forencich
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
THE SOFTWARE.
"""
import logging
import os
from scapy.layers.l2 import Ether, ARP
from scapy.layers.inet import IP, UDP
import cocotb_test.simulator
import cocotb
from cocotb.log import SimLog
from cocotb.clock import Clock
from cocotb.triggers import RisingEdge
from cocotbext.eth import GmiiFrame, GmiiPhy
class TB:
def __init__(self, dut, speed=1000e6):
self.dut = dut
self.log = SimLog("cocotb.tb")
self.log.setLevel(logging.DEBUG)
cocotb.start_soon(Clock(dut.clk, 8, units="ns").start())
# Ethernet
self.gmii_phy = GmiiPhy(dut.phy_txd, dut.phy_tx_er, dut.phy_tx_en, dut.phy_tx_clk, dut.phy_gtx_clk,
dut.phy_rxd, dut.phy_rx_er, dut.phy_rx_dv, dut.phy_rx_clk, speed=speed)
dut.btnu.setimmediatevalue(0)
dut.btnl.setimmediatevalue(0)
dut.btnd.setimmediatevalue(0)
dut.btnr.setimmediatevalue(0)
dut.btnc.setimmediatevalue(0)
dut.sw.setimmediatevalue(0)
dut.uart_rxd.setimmediatevalue(1)
dut.uart_cts.setimmediatevalue(1)
async def init(self):
self.dut.rst.setimmediatevalue(0)
for k in range(10):
await RisingEdge(self.dut.clk)
self.dut.rst <= 1
for k in range(10):
await RisingEdge(self.dut.clk)
self.dut.rst <= 0
@cocotb.test()
async def run_test(dut):
tb = TB(dut)
await tb.init()
tb.log.info("test UDP RX packet")
payload = bytes([x % 256 for x in range(256)])
eth = Ether(src='5a:51:52:53:54:55', dst='02:00:00:00:00:00')
ip = IP(src='192.168.1.100', dst='192.168.1.128')
udp = UDP(sport=5678, dport=1234)
test_pkt = eth / ip / udp / payload
test_frame = GmiiFrame.from_payload(test_pkt.build())
await tb.gmii_phy.rx.send(test_frame)
tb.log.info("receive ARP request")
rx_frame = await tb.gmii_phy.tx.recv()
rx_pkt = Ether(bytes(rx_frame.get_payload()))
tb.log.info("RX packet: %s", repr(rx_pkt))
assert rx_pkt.dst == 'ff:ff:ff:ff:ff:ff'
assert rx_pkt.src == test_pkt.dst
assert rx_pkt[ARP].hwtype == 1
assert rx_pkt[ARP].ptype == 0x0800
assert rx_pkt[ARP].hwlen == 6
assert rx_pkt[ARP].plen == 4
assert rx_pkt[ARP].op == 1
assert rx_pkt[ARP].hwsrc == test_pkt.dst
assert rx_pkt[ARP].psrc == test_pkt[IP].dst
assert rx_pkt[ARP].hwdst == '00:00:00:00:00:00'
assert rx_pkt[ARP].pdst == test_pkt[IP].src
tb.log.info("send ARP response")
eth = Ether(src=test_pkt.src, dst=test_pkt.dst)
arp = ARP(hwtype=1, ptype=0x0800, hwlen=6, plen=4, op=2,
hwsrc=test_pkt.src, psrc=test_pkt[IP].src,
hwdst=test_pkt.dst, pdst=test_pkt[IP].dst)
resp_pkt = eth / arp
resp_frame = GmiiFrame.from_payload(resp_pkt.build())
await tb.gmii_phy.rx.send(resp_frame)
tb.log.info("receive UDP packet")
rx_frame = await tb.gmii_phy.tx.recv()
rx_pkt = Ether(bytes(rx_frame.get_payload()))
tb.log.info("RX packet: %s", repr(rx_pkt))
assert rx_pkt.dst == test_pkt.src
assert rx_pkt.src == test_pkt.dst
assert rx_pkt[IP].dst == test_pkt[IP].src
assert rx_pkt[IP].src == test_pkt[IP].dst
assert rx_pkt[UDP].dport == test_pkt[UDP].sport
assert rx_pkt[UDP].sport == test_pkt[UDP].dport
assert rx_pkt[UDP].payload == test_pkt[UDP].payload
await RisingEdge(dut.clk)
await RisingEdge(dut.clk)
# cocotb-test
tests_dir = os.path.abspath(os.path.dirname(__file__))
rtl_dir = os.path.abspath(os.path.join(tests_dir, '..', '..', 'rtl'))
lib_dir = os.path.abspath(os.path.join(rtl_dir, '..', 'lib'))
axis_rtl_dir = os.path.abspath(os.path.join(lib_dir, 'eth', 'lib', 'axis', 'rtl'))
eth_rtl_dir = os.path.abspath(os.path.join(lib_dir, 'eth', 'rtl'))
def test_fpga_core(request):
dut = "fpga_core"
module = os.path.splitext(os.path.basename(__file__))[0]
toplevel = dut
verilog_sources = [
os.path.join(rtl_dir, f"{dut}.v"),
os.path.join(eth_rtl_dir, "iddr.v"),
os.path.join(eth_rtl_dir, "oddr.v"),
os.path.join(eth_rtl_dir, "ssio_sdr_in.v"),
os.path.join(eth_rtl_dir, "ssio_sdr_out.v"),
os.path.join(eth_rtl_dir, "gmii_phy_if.v"),
os.path.join(eth_rtl_dir, "eth_mac_1g_gmii_fifo.v"),
os.path.join(eth_rtl_dir, "eth_mac_1g_gmii.v"),
os.path.join(eth_rtl_dir, "eth_mac_1g.v"),
os.path.join(eth_rtl_dir, "axis_gmii_rx.v"),
os.path.join(eth_rtl_dir, "axis_gmii_tx.v"),
os.path.join(eth_rtl_dir, "lfsr.v"),
os.path.join(eth_rtl_dir, "eth_axis_rx.v"),
os.path.join(eth_rtl_dir, "eth_axis_tx.v"),
os.path.join(eth_rtl_dir, "udp_complete.v"),
os.path.join(eth_rtl_dir, "udp_checksum_gen.v"),
os.path.join(eth_rtl_dir, "udp.v"),
os.path.join(eth_rtl_dir, "udp_ip_rx.v"),
os.path.join(eth_rtl_dir, "udp_ip_tx.v"),
os.path.join(eth_rtl_dir, "ip_complete.v"),
os.path.join(eth_rtl_dir, "ip.v"),
os.path.join(eth_rtl_dir, "ip_eth_rx.v"),
os.path.join(eth_rtl_dir, "ip_eth_tx.v"),
os.path.join(eth_rtl_dir, "ip_arb_mux.v"),
os.path.join(eth_rtl_dir, "arp.v"),
os.path.join(eth_rtl_dir, "arp_cache.v"),
os.path.join(eth_rtl_dir, "arp_eth_rx.v"),
os.path.join(eth_rtl_dir, "arp_eth_tx.v"),
os.path.join(eth_rtl_dir, "eth_arb_mux.v"),
os.path.join(axis_rtl_dir, "arbiter.v"),
os.path.join(axis_rtl_dir, "priority_encoder.v"),
os.path.join(axis_rtl_dir, "axis_fifo.v"),
os.path.join(axis_rtl_dir, "axis_async_fifo.v"),
os.path.join(axis_rtl_dir, "axis_async_fifo_adapter.v"),
]
parameters = {}
# parameters['A'] = val
extra_env = {f'PARAM_{k}': str(v) for k, v in parameters.items()}
sim_build = os.path.join(tests_dir, "sim_build",
request.node.name.replace('[', '-').replace(']', ''))
cocotb_test.simulator.run(
python_search=[tests_dir],
verilog_sources=verilog_sources,
toplevel=toplevel,
module=module,
parameters=parameters,
sim_build=sim_build,
extra_env=extra_env,
)
| alexforencich/verilog-ethernet | example/KC705/fpga_gmii/tb/fpga_core/test_fpga_core.py | Python | mit | 7,184 |
## @package gradient
"""
Written by Daniel M. Aukes and CONTRIBUTORS
Email: danaukes<at>asu.edu.
Please see LICENSE for full license.
"""
# Functions for handling colour gradients.
from math import floor
from .enhanced_grid import Grid2D
from .image import mix_color
## A gradient without any interpolation.
class DiscreteGradient:
def __init__(self, colors):
self.colors = []
for color in colors:
self.colors.append(color)
self.color_count = len(colors)
def get_color(self, t): #assumes 0 <= t < 1
col_index = int(floor(t * self.color_count))
if (col_index >= self.color_count):
col_index = self.color_count - 1
return self.colors[col_index]
## A gradient between two colours with linear interpolation.
class SimpleGradient:
def __init__(self, color0, color1):
self.color0 = color0
self.color1 = color1
def get_color(self, t):
return mix_color(self.color0, self.color1, t)
## Maps a gradient to a grid, and returns the result as a new grid.
# @param grid A grid containing values in the range [0, 1]
def map_gradient(gradient, grid):
color_grid = Grid2D(grid.dims)
for index in grid.index_iter():
color_grid[index] = gradient.get_color(grid[index])
return color_grid
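# Illustrative usage sketch (not part of the original module): fill a small grid
# with normalised values and map a two-colour gradient over it. It assumes that
# Grid2D accepts a (width, height) dims tuple, as map_gradient above implies, and
# that colours are RGB tuples accepted by mix_color.
def _gradient_demo():
    grid = Grid2D((4, 4))
    for index in grid.index_iter():
        x, y = index
        grid[index] = (x + y) / 6.0
    gradient = SimpleGradient((0, 0, 0), (255, 255, 255))
    return map_gradient(gradient, grid)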
| danaukes/popupcad | popupcad_gazebo/gradient.py | Python | mit | 1,330 |
from rest_framework import routers
from vendor import views
router = routers.SimpleRouter()
router.register("popsicles", views.PopsicleViewSet)
router.register("machines", views.MachineViewSet)
router.register("stock/removal", views.PopsicleRemovalViewSet)
router.register("stock/entry", views.PopsicleEntryViewSet)
router.register("locations", views.LocationViewSet)
router.register("purchases", views.PurchaseViewSet)
router.register("users", views.UserViewSet)
| pi2-picole/api | vendor/urls.py | Python | mit | 466 |
# --------------------------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# --------------------------------------------------------------------------------------------
# pylint: disable=line-too-long
from azure.cli.core.commands import cli_command
from azure.cli.core.profiles import supported_api_version, PROFILE_TYPE
from azure.cli.command_modules.consumption._client_factory import usage_details_mgmt_client_factory
from azure.cli.command_modules.consumption._transformers import transform_usage_list_output
from ._exception_handler import consumption_exception_handler
if not supported_api_version(PROFILE_TYPE, max_api='2017-03-09-profile'):
cli_command(__name__, 'consumption usage list', 'azure.cli.command_modules.consumption.custom#cli_consumption_list_usage', usage_details_mgmt_client_factory, transform=transform_usage_list_output, exception_handler=consumption_exception_handler)
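    # With this registration the command is invoked as `az consumption usage list`;
    # the command path mirrors the name string above, results are reshaped by
    # transform_usage_list_output, and errors go through consumption_exception_handler.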
| samedder/azure-cli | src/command_modules/azure-cli-consumption/azure/cli/command_modules/consumption/commands.py | Python | mit | 1,078 |
# -*- coding: utf-8 -*-
#!/usr/bin/python
#####
# Made by Jose Patricio VM
#####
# Script para generar pacos a partir de ordenes de Servicio de PCI Industrial
#
#
#####
import wget
import pyexcel
import datetime, time, re
from sys import argv
import simplejson
from linkaform_api import network, utils, settings
#Nombre del campo en la la Forma: Nombre en el Archvio
class LoadFile:
def __init__(self, settings={}):
self.settings = settings
self.lkf_api = utils.Cache(settings)
self.net = network.Network(settings)
#self.cr = self.net.get_collections()
def read_file(self, file_url='', file_name=''):
#sheet = pyexcel.get_sheet(file_name="bolsa.xlsx")
if file_name:
sheet = pyexcel.get_sheet(file_name = file_name)
if file_url:
sheet = pyexcel.get_sheet(url = file_url)
records = sheet.array
header = records.pop(0)
try:
header = [str(col).lower().replace(u'\xa0',u' ').strip().replace(' ', '_') for col in header]
except UnicodeEncodeError:
header = [col.lower().replace(u'\xa0',u' ').strip().replace(' ', '_') for col in header]
return header, records
def convert_to_epoch(self, strisodate):
if type(strisodate) == datetime.date or type(strisodate) == datetime.datetime:
return time.mktime(strisodate.timetuple())
strisodate2 = re.sub(' ','',strisodate)
strisodate2 = strisodate2.split(' ')[0]
        try:
            date_object = datetime.datetime.strptime(strisodate2, '%Y-%m-%d')
        except ValueError:
            date_object = datetime.datetime.strptime(strisodate2[:8], '%d/%m/%y')
return int(date_object.strftime("%s"))
def convert_to_sting_date(self, strisodate):
if type(strisodate) == datetime.date:
return strisodate
strisodate2 = re.sub(' ','',strisodate)
strisodate2 = strisodate2.split(' ')[0]
        try:
            date_object = datetime.datetime.strptime(strisodate2, '%Y-%m-%d')
        except ValueError:
            try:
                date_object = datetime.datetime.strptime(strisodate2[:10], '%d/%m/%Y')
            except ValueError:
                date_object = datetime.datetime.strptime(strisodate2[:8], '%d/%m/%y')
return date_object.strftime('%Y-%m-%d')
    # Extracts the field metadata needed to build answers for a form field.
    def get_field_info(self, field):
res = {'field_id':field['field_id'], 'field_type':field['field_type'], 'label':field['label'], 'options':field['options']}
if field.has_key('group') and field['group']:
res['group_id'] = field['group']['group_id']
return res
def make_header_dict(self, header):
header_dict = {}
for position in range(len(header)):
content = header[position].encode('utf-8')
if str(content).lower().replace(' ' ,'') in header_dict.keys():
continue
header_dict[str(content).lower().replace(' ' ,'')] = position
return header_dict
def get_pos_field_id_dict(self, header, form_id, equivalcens_map={}):
#form_id=10378 el id de bolsa
#pos_field_id ={3: {'field_type': 'text', 'field_id': '584648c8b43fdd7d44ae63d1'}, 9: {'field_type': 'text', 'field_id': '58464a29b43fdd773c240163'}, 51: {'field_type': 'integer', 'field_id': '584648c8b43fdd7d44ae63d0'}}
#return pos_field_id
pos_field_id = {}
form_fields = self.lkf_api.get_form_id_fields(form_id)
if not form_fields:
raise ValueError('No data form FORM')
if not header:
raise ValueError('No data on HEADER')
header_dict = self.make_header_dict(header)
aa = False
assigne_headers = []
if len(form_fields) > 0:
fields = form_fields[0]['fields']
fields_json = {}
if 'folio' in header_dict.keys():
pos_field_id[header_dict['folio']] = {'field_type':'folio'}
elif equivalcens_map.has_key('folio'):
for eq_opt in equivalcens_map['folio']:
if eq_opt in header_dict.keys():
pos_field_id[header_dict[eq_opt]] = {'field_type':'folio'}
for field in fields:
label = field['label'].lower().replace(' ' ,'')
label_underscore = field['label'].lower().replace(' ' ,'_')
if label in header_dict.keys():
if label in assigne_headers:
continue
assigne_headers.append(label)
                    pos_field_id[header_dict[label]] = self.get_field_info(field)
elif label_underscore in header_dict.keys():
if label in assigne_headers:
continue
assigne_headers.append(label)
                    pos_field_id[header_dict[label_underscore]] = self.get_field_info(field)
elif field['label'] in equivalcens_map.keys():
header_lable = equivalcens_map[field['label']]
header_lable = header_lable.lower().replace(' ' ,'')
if header_lable in header_dict.keys():
if label in assigne_headers:
continue
assigne_headers.append(label)
                        pos_field_id[header_dict[header_lable]] = self.get_field_info(field)
return pos_field_id
def set_custom_values(self, pos_field_id, record):
custom_answer = {}
#set status de la orden
#custom_answer['f1054000a030000000000002'] = 'por_asignar'
return custom_answer
def update_metadata_from_record(self, header, record):
res = {}
if 'created_at' in header.keys():
pos = header['created_at']
if record[pos]:
#res['created_at'] = self.convert_to_sting_date(record[pos])
                res['created_at'] = self.convert_to_epoch(record[pos])
if 'form_id' in header.keys():
pos = header['form_id']
if record[pos]:
res['form_id'] = record[pos]
return res
def get_nongroup_fields(self, pos_field_id):
res = []
for pos, element in pos_field_id.iteritems():
if element.has_key('group_id') and element['group_id']:
continue
else:
res.append(pos)
return res
def check_record_is_group_iterration(self, non_group_fields, record):
for pos in non_group_fields:
if record[pos]:
return False
return True
def prepare_record_list(self, pos_field_id, form_id, records, header):
records_to_upload = []
metadata = self.lkf_api.get_metadata(form_id=form_id, user_id=self.settings.config['USER_ID'] )
header_dict = self.make_header_dict(header)
non_group_fields = self.get_nongroup_fields(pos_field_id)
print 'len records', len(records)
for record in records:
is_group_iteration = self.check_record_is_group_iterration(non_group_fields, record)
is_group_iteration = False
metadata.update(self.update_metadata_from_record(header_dict, record))
cont = False
answer = {}
this_record = {}
count = 0
this_record.update(metadata)
group_iteration = {}
for pos, element in pos_field_id.iteritems():
count +=1
if element['field_type'] == 'folio':
this_record['folio'] = str(record[pos])
else:
element_answer = self.lkf_api.make_infosync_json(record[pos], element)
if element.has_key('group_id') and element['group_id'] and element_answer:
if not answer.has_key(element['group_id']):
answer[element['group_id']] = []
#answer.update(element_answer)
if not group_iteration.has_key(element['group_id']):
group_iteration[element['group_id']] = {}
group_iteration[element['group_id']].update(element_answer)
else:
answer.update(element_answer)
#answer[element['group_id']].append(group_iteration)
answer.update(self.set_custom_values(pos_field_id, record ))
if is_group_iteration:
last_rec = records_to_upload[-1]
for group_id in group_iteration.keys():
last_rec['answers'][group_id].append(group_iteration[group_id])
records_to_upload[-1] = last_rec
else:
for group_id in group_iteration.keys():
answer[group_id].append(group_iteration[group_id])
this_record["answers"] = answer
records_to_upload.append(this_record)
return records_to_upload
def create_record(self, records_to_create):
error_list = self.net.post_forms_answers_list(records_to_create)
return error_list
def remove_splecial_characters(self, text, replace_with='', remove_spaces=False):
if type(text) == str:
text = text.replace('\xa0', replace_with)
text = text.replace('\xc2',replace_with)
if remove_spaces:
text = text.strip()
if type(text) == unicode:
text = text.replace(u'\xa0', replace_with)
text = text.replace(u'\xc2', replace_with)
if remove_spaces:
text = text.strip()
return text
def remove_splecial_characters_list(self, text_list):
res = []
for text in text_list:
res.append(self.remove_splecial_characters(text, '', True))
return res
def get_file_to_upload(self, file_url='', file_name='', form_id=None, equivalcens_map={}):
if not form_id:
raise ValueError('Must specify form id')
if not file_url and not file_name:
raise ValueError('Must specify either one, file_url or file_name')
if file_url:
header, records = self.read_file(file_url=file_url)
elif file_name:
header, records = self.read_file(file_name=file_name)
header = self.remove_splecial_characters_list(header)
return header, records
def upload_file(self, file_url='', file_name='', form_id=None, equivalcens_map={}):
header, records = self.get_file_to_upload(file_url=file_url, file_name=file_name, form_id=form_id, equivalcens_map=equivalcens_map)
pos_field_id = self.get_pos_field_id_dict(header, form_id, equivalcens_map)
records_to_upload = self.prepare_record_list(pos_field_id, form_id, records, header)
error_list = self.create_record(records_to_upload)
return error_list
def print_help(self):
print '---------------- HELP --------------------------'
print 'more arguments needed'
print 'the script should be run like this'
print '''python upload_excel_file.py '{"file_name":"/tmp/personal.xlsx", "form_id":"1234", "equivalcens_map":{"foo":"bar"}}' '''
        print '* form_id: where 1234 is the id of the form; this is a required argument'
        print '** file_name: file on your local machine'
        print '** file_url: file on a remote url'
        print 'if running from console you should send the settings json as a second argument'
        print 'running from console example'
        print '''python upload_excel_file.py '{"file_name":"/tmp/personal.xlsx", "form_id":"1234", "equivalcens_map":{"foo":"bar"}}' '{"USERNAME": "mike"}' '''
if __name__ == "__main__":
    if len(argv) > 1:
        # Check for the help flag before parsing the argument as JSON,
        # otherwise "help" itself would raise a JSON decoding error.
        if argv[1] == 'help' or argv[1] == '--help':
            LoadFile(settings).print_help()
        else:
            config = simplejson.loads(argv[1])
            if not config.has_key('form_id'):
                LoadFile(settings).print_help()
            elif not config.has_key('file_name') and not config.has_key('file_url'):
                LoadFile(settings).print_help()
            else:
                try:
                    if argv[2]:
                        settings.config.update(simplejson.loads(argv[2]))
                except IndexError:
                    import settings
                load_files = LoadFile(settings)
                load_files.upload_file(config.get('file_url'), config.get('file_name'),
                        config.get('form_id'), config.get('equivalcens_map'))
    else:
        LoadFile(settings).print_help()
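# Illustrative programmatic usage (library call rather than the CLI shown above);
# the file path, form id and equivalcens_map contents are assumptions for the
# example, not values taken from this module:
#
#   from linkaform_api import settings
#   loader = LoadFile(settings)
#   errors = loader.upload_file(file_name='/tmp/personal.xlsx', form_id=10378,
#                               equivalcens_map={'folio': ['folio_interno']})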
| linkaform/linkaform_api | linkaform_api/upload_file.py | Python | gpl-3.0 | 12,588 |
from hpp.corbaserver.rbprm.rbprmbuilder import Builder
from hpp.gepetto import Viewer
from hpp.corbaserver import Client
from hpp.corbaserver.robot import Robot as Parent
from hpp.corbaserver.rbprm.problem_solver import ProblemSolver
import omniORB.any
class Robot (Parent):
rootJointType = 'freeflyer'
packageName = 'hpp-rbprm-corba'
meshPackageName = 'hpp-rbprm-corba'
# URDF file describing the trunk of the robot HyQ
urdfName = 'hrp2_trunk_flexible'
urdfSuffix = ""
srdfSuffix = ""
def __init__ (self, robotName, load = True):
Parent.__init__ (self, robotName, self.rootJointType, load)
self.tf_root = "base_footprint"
self.client.basic = Client ()
self.load = load
rootJointType = 'freeflyer'
packageName = 'hpp-rbprm-corba'
meshPackageName = 'hpp-rbprm-corba'
urdfName = 'hrp2_trunk_flexible'
urdfNameRom = ['hrp2_larm_rom','hrp2_rarm_rom','hrp2_lleg_rom','hrp2_rleg_rom']
urdfSuffix = ""
srdfSuffix = ""
vMax = 4;
aMax = 6;
extraDof = 6
# Creating an instance of the helper class, and loading the robot
rbprmBuilder = Builder ()
rbprmBuilder.loadModel(urdfName, urdfNameRom, rootJointType, meshPackageName, packageName, urdfSuffix, srdfSuffix)
#rbprmBuilder.setJointBounds ("base_joint_xyz", [-1.25,2, -0.5, 5.5, 0.6, 1.8])
rbprmBuilder.setJointBounds ("base_joint_xyz", [-2,4, 0, 2, 0.2, 1.8])
rbprmBuilder.setJointBounds('CHEST_JOINT0',[0,0])
rbprmBuilder.setJointBounds('CHEST_JOINT1',[-0.35,0.1])
rbprmBuilder.setJointBounds('HEAD_JOINT0',[0,0])
rbprmBuilder.setJointBounds('HEAD_JOINT1',[0,0])
# The following lines set constraint on the valid configurations:
# a configuration is valid only if all limbs can create a contact ...
rbprmBuilder.setFilter(['hrp2_lleg_rom','hrp2_rleg_rom'])
rbprmBuilder.setAffordanceFilter('hrp2_lleg_rom', ['Support',])
rbprmBuilder.setAffordanceFilter('hrp2_rleg_rom', ['Support'])
# We also bound the rotations of the torso. (z, y, x)
rbprmBuilder.boundSO3([-0.1,0.1,-0.65,0.65,-0.2,0.2])
rbprmBuilder.client.basic.robot.setDimensionExtraConfigSpace(extraDof)
rbprmBuilder.client.basic.robot.setExtraConfigSpaceBounds([-4,4,-1,1,-2,2,0,0,0,0,0,0])
indexECS = rbprmBuilder.getConfigSize() - rbprmBuilder.client.basic.robot.getDimensionExtraConfigSpace()
# Creating an instance of HPP problem solver and the viewer
ps = ProblemSolver( rbprmBuilder )
ps.client.problem.setParameter("aMax",omniORB.any.to_any(aMax))
ps.client.problem.setParameter("vMax",omniORB.any.to_any(vMax))
ps.client.problem.setParameter("sizeFootX",omniORB.any.to_any(0.24))
ps.client.problem.setParameter("sizeFootY",omniORB.any.to_any(0.14))
r = Viewer (ps)
from hpp.corbaserver.affordance.affordance import AffordanceTool
afftool = AffordanceTool ()
afftool.setAffordanceConfig('Support', [0.5, 0.03, 0.00005])
afftool.loadObstacleModel (packageName, "downSlope", "planning", r)
#r.loadObstacleModel (packageName, "ground", "planning")
afftool.visualiseAffordances('Support', r, [0.25, 0.5, 0.5])
r.addLandmark(r.sceneName,1)
# Setting initial and goal configurations
q_init = rbprmBuilder.getCurrentConfig ();
q_init[3:7] = [1,0,0,0]
q_init[8] = -0.2
q_init [0:3] = [-1.6, 1, 1.75]; r (q_init)
#q_init[3:7] = [0.7071,0,0,0.7071]
#q_init [0:3] = [1, 1, 0.65]
rbprmBuilder.setCurrentConfig (q_init)
q_goal = q_init [::]
q_goal[3:7] = [1,0,0,0]
q_goal[8] = 0
q_goal [0:3] = [3, 1, 0.55]; r (q_goal)
r (q_goal)
#~ q_goal [0:3] = [-1.5, 0, 0.63]; r (q_goal)
# Choosing a path optimizer
ps.setInitialConfig (q_init)
ps.addGoalConfig (q_goal)
# Choosing RBPRM shooter and path validation methods.
ps.client.problem.selectConFigurationShooter("RbprmShooter")
ps.client.problem.selectPathValidation("RbprmPathValidation",0.05)
# Choosing kinodynamic methods :
ps.selectSteeringMethod("RBPRMKinodynamic")
ps.selectDistance("KinodynamicDistance")
ps.selectPathPlanner("DynamicPlanner")
#solve the problem :
r(q_init)
#r.solveAndDisplay("rm",1,0.01)
#ps.solve()
#ps.client.problem.prepareSolveStepByStep()
#ps.client.problem.finishSolveStepByStep()
q_far = q_init[::]
q_far[2] = -3
r(q_far)
"""
camera = [0.6293167471885681,
-9.560577392578125,
10.504343032836914,
0.9323806762695312,
0.36073973774909973,
0.008668755181133747,
0.02139890193939209]
r.client.gui.setCameraTransform(0,camera)
"""
"""
r.client.gui.removeFromGroup("rm",r.sceneName)
r.client.gui.removeFromGroup("rmstart",r.sceneName)
r.client.gui.removeFromGroup("rmgoal",r.sceneName)
for i in range(0,ps.numberNodes()):
r.client.gui.removeFromGroup("vecRM"+str(i),r.sceneName)
"""
# for seed 1486657707
#ps.client.problem.extractPath(0,0,2.15)
# Playing the computed path
from hpp.gepetto import PathPlayer
pp = PathPlayer (rbprmBuilder.client.basic, r)
pp.dt=0.03
pp.displayVelocityPath(1)
r.client.gui.setVisibility("path_1_root","ALWAYS_ON_TOP")
#display path
pp.speed=0.3
#pp (0)
#display path with post-optimisation
"""
q_far = q_init[::]
q_far[2] = -3
r(q_far)
"""
| pFernbach/hpp-rbprm-corba | script/scenarios/sandbox/dynamic/downSlope_hrp2_loadPath.py | Python | lgpl-3.0 | 4,915 |
# Licensed to the Apache Software Foundation (ASF) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The ASF licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
import unittest
import os
from cairis.mio.ModelImport import importModelFile, importLocationsFile
import cairis.core.BorgFactory
from cairis.core.Borg import Borg
import cairis.core.MisuseCaseFactory
__author__ = 'Shamal Faily'
class MisuseCaseFactoryTests(unittest.TestCase):
def setUp(self):
cairis.core.BorgFactory.initialise()
importModelFile(os.environ['CAIRIS_SRC'] + '/../examples/exemplars/NeuroGrid/NeuroGrid.xml',1,'test')
def testBuildMisuseCase(self):
mc = cairis.core.MisuseCaseFactory.build('Social Engineering','Certificate ubiquity')
mcEnv = mc.theEnvironmentProperties[0]
self.assertEqual(mcEnv.theEnvironmentName,'Psychosis')
self.assertEqual(mcEnv.theLikelihood,'Occasional')
self.assertEqual(mcEnv.theSeverity,'Critical')
self.assertEqual(mcEnv.theRiskRating.rating,'Undesirable')
self.assertEqual(mcEnv.theObjective,'Exploit vulnerabilities in User certificate to threaten User certificate,Client workstation.')
| nathanbjenx/cairis | cairis/test/test_MisuseCaseFactory.py | Python | apache-2.0 | 1,777 |
# -*- coding: utf-8 -*-
import itertools
import os
import random
import string
from django.test import RequestFactory
from django.conf import settings
from django.contrib.auth.models import Group, Permission
from django.contrib.auth import get_user_model
from django.contrib.messages.storage.fallback import FallbackStorage
from django.contrib.contenttypes.models import ContentType
from django.contrib.sessions.middleware import SessionMiddleware
from django.utils import timezone
from django.core.files.uploadedfile import SimpleUploadedFile
from requests import Response
from profile.models import Profile
from resume.models import (
WorkExperience,
WorkExperienceTranslation,
)
LETTERS = string.ascii_letters + string.digits
class Factory():
"""A factory class used to create the models objects for tests"""
request_factory = RequestFactory()
default_password = 'test'
def __init__(self):
self.counter = itertools.count(start=1)
def make_dates(self, start_date, end_date, delta):
"""Generate the date list
from djanog.utils import timezone
from datetime import timedelta
start_date = timezone.now() - timedelta(days=10)
end_date = timezone.now()
"""
current_date = start_date
while current_date < end_date:
yield current_date
current_date += delta
def make_unique_int(self):
        return next(self.counter)
def make_unique_string(self, prefix='string-', length=6):
        unique_string = ''.join(random.choice(LETTERS) for i in range(length))
return prefix + unique_string
def get_test_file_path(self, file_name):
return os.path.join(
os.path.dirname(__file__), 'test_data', file_name)
def make_email(self, username=None, domain='example.com'):
if username is None:
username = self.make_unique_string()
assert '@' not in domain
return '{}@{}'.format(username, domain)
def make_upload_image(self, file_name=None, test_file_name='eg_128x128.png'):
with open(self.get_test_file_path(test_file_name), 'rb') as f:
image_data = f.read()
        if file_name is None:
            file_name = test_file_name
        return SimpleUploadedFile(file_name, image_data)
def make_request(self, path='/', method='get', user=None, session=False,
message=False, **kwargs):
request = getattr(self.request_factory, method.lower())(path, **kwargs)
if user is not None:
request.user = user
if session:
middleware = SessionMiddleware()
middleware.process_request(request)
request.session.save()
if message:
request._messages = FallbackStorage(request)
return request
def make_response(self, content='', status_code=200):
response = Response()
response.status_code = status_code
response._content = content
return response
def make_permission(self, perm_name, model):
        assert model is not None
        if perm_name is None:
            perm_name = self.make_unique_string('perm-')
        ct = ContentType.objects.get_for_model(model)
        permission, _ = Permission.objects.get_or_create(
            codename=perm_name, content_type=ct,
            defaults={'name': perm_name},
        )
return permission
def make_group(self, model, name=None, permissions=None):
if name is None:
name = self.make_unique_string(prefix='group-')
if permissions is None:
permissions = []
group = Group.objects.create(name=name)
for permission in permissions:
if isinstance(permission, (str, bytes)):
permission = self.make_permission(permission, model)
            group.permissions.add(permission)
return group
def make_user(self, username=None, password=None, email=None, is_admin=False,
permissions=None, groups=None, full_name='', mobile='',
gender=None, birthday=None, country='', city='',
is_public=True, avatar=None, avatar_upload_name=None):
if email is None:
username = self.make_unique_string(prefix='email-')
email = self.make_email(username)
if password is None:
password = self.default_password
if full_name == '':
full_name = self.make_unique_string(prefix='name-')
if gender is None:
gender = 'U'
if birthday is None:
birthday = timezone.now().date()
if permissions is None:
permissions = []
if groups is None:
groups = []
users = get_user_model().objects.filter(
email=email, password=password)
if users.exists():
return users.first()
user = get_user_model().objects.create_user(
email=email, password=password)
user.full_name = full_name
if is_admin:
user.is_staff = True
user.is_superuser = True
user.save()
for permission in permissions:
if isinstance(permission, (str, bytes)):
permission = Permission.objects.get(codename=permission)
user.user_permissions.add(permission)
for group in groups:
if isinstance(group, (str, bytes)):
group = Group.objects.get(name=group)
user.groups.add(group)
profile = Profile.objects.filter(user=user).update(
gender=gender, birthday=birthday, avatar=avatar,
avatar_upload_name=avatar_upload_name, country=country,
city=city, is_public=is_public)
assert profile == 1, \
            'Exactly one profile should be associated with the user, got %s' % profile
user.profile.refresh_from_db()
return user
def make_work_experience(self, user=None, is_public=False, date_start=None,
date_end=None):
user = self.make_user() if user is None else user
date_start = timezone.now().date() if date_start is None else date_start
date_end = timezone.now().date() if date_end is None else date_end
model = WorkExperience.objects.create(
user=user, is_public=is_public,
date_start=date_start, date_end=date_end)
return model
def make_work_experience_translation(
self, related_model=None, is_public=False, user=None,
language=None, position=None, company=None, location=None,
date_start=None, date_end=None, contribution=None, keywords=None):
languages = settings.LANGUAGES
date_start = timezone.now().date() if date_start is None else date_start
date_end = timezone.now().date() if date_end is None else date_end
if related_model is None:
user = self.make_user() if user is None else user
related_model = self.make_work_experience(
user=user, is_public=is_public,
date_start=date_start, date_end=date_end)
if language is None or \
language not in [x for x, _ in languages]:
language = languages[0][0]
position = self.make_unique_string('position-') \
if position is None else position
company = self.make_unique_string('company-') \
if company is None else company
location = self.make_unique_string('location-') \
if location is None else location
        contribution = self.make_unique_string(length=20) \
            if contribution is None else contribution
        keywords = self.make_unique_string() if keywords is None else keywords
translation = WorkExperienceTranslation.objects.create(
related_model=related_model, language=language,
position=position, company=company, location=location,
contribution=contribution, keywords=keywords)
return translation
def make_multi_work_experience_translations(
self, user=None, work_experience=None, number=len(settings.LANGUAGES)):
languages = settings.LANGUAGES
if work_experience is None:
user = self.make_user() if user is None else user
work_experience = self.make_work_experience(user=user)
translation_list = []
if number > len(languages):
            print('translation number cannot be greater than the number of '
                  'languages; using the maximum number of languages instead!')
number = len(languages)
for i in range(number):
translation = self.make_work_experience_translation(
related_model=work_experience, language=languages[i][0])
translation_list.append(translation)
return translation_list
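# Illustrative usage sketch (not part of the original module); it only exercises
# the helpers defined above and requires a configured Django test database.
def _example_usage():
    factory = Factory()
    user = factory.make_user(is_public=True)
    experience = factory.make_work_experience(user=user)
    return factory.make_multi_work_experience_translations(
        work_experience=experience, number=2)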
| memodir/cv | src/testings/factory.py | Python | apache-2.0 | 8,832 |
# Copyright 2007 Matt Chaput. All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are met:
#
# 1. Redistributions of source code must retain the above copyright notice,
# this list of conditions and the following disclaimer.
#
# 2. Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
#
# THIS SOFTWARE IS PROVIDED BY MATT CHAPUT ``AS IS'' AND ANY EXPRESS OR
# IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF
# MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO
# EVENT SHALL MATT CHAPUT OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT,
# INCIDENTAL, SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA,
# OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
# LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
# NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE,
# EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
# The views and conclusions contained in the software and documentation are
# those of the authors and should not be interpreted as representing official
# policies, either expressed or implied, of Matt Chaput.
"""
This module contains objects that query the search index. These query
objects are composable to form complex query trees.
"""
from __future__ import division
import copy
import fnmatch
import re
from array import array
from whoosh.analysis import Token
from whoosh.compat import u, text_type
from whoosh.lang.morph_en import variations
from whoosh.matching import (AndMaybeMatcher, DisjunctionMaxMatcher,
ListMatcher, IntersectionMatcher, InverseMatcher,
NullMatcher, RequireMatcher, UnionMatcher,
WrappingMatcher, AndNotMatcher, NullMatcherClass)
from whoosh.reading import TermNotFound
from whoosh.support.times import datetime_to_long
from whoosh.util import make_binary_tree, make_weighted_tree, methodcaller
# Exceptions
class QueryError(Exception):
"""Error encountered while running a query.
"""
pass
# Functions
def error_query(msg, q=None):
"""Returns the query in the second argument (or a :class:`NullQuery` if the
second argument is not given) with its ``error`` attribute set to
``msg``.
"""
if q is None:
q = _NullQuery()
q.error = msg
return q
def token_lists(q, phrases=True):
"""Returns the terms in the query tree, with the query hierarchy
represented as nested lists.
"""
if q.is_leaf():
if phrases or not isinstance(q, Phrase):
return list(q.tokens())
else:
ls = []
for qq in q.children():
t = token_lists(qq, phrases=phrases)
if len(t) == 1:
t = t[0]
if t:
ls.append(t)
return ls
# Utility classes
class Lowest(object):
"""A value that is always compares lower than any other object except
itself.
"""
def __cmp__(self, other):
if other.__class__ is Lowest:
return 0
return -1
def __eq__(self, other):
return self.__class__ is type(other)
def __lt__(self, other):
return type(other) is not self.__class__
def __ne__(self, other):
return not self.__eq__(other)
def __gt__(self, other):
return not (self.__lt__(other) or self.__eq__(other))
def __le__(self, other):
return self.__eq__(other) or self.__lt__(other)
def __ge__(self, other):
return self.__eq__(other) or self.__gt__(other)
Lowest = Lowest()
class Highest(object):
"""A value that is always compares higher than any other object except
itself.
"""
def __cmp__(self, other):
if other.__class__ is Highest:
return 0
return 1
def __eq__(self, other):
return self.__class__ is type(other)
def __lt__(self, other):
return type(other) is self.__class__
def __ne__(self, other):
return not self.__eq__(other)
def __gt__(self, other):
return not (self.__lt__(other) or self.__eq__(other))
def __le__(self, other):
return self.__eq__(other) or self.__lt__(other)
def __ge__(self, other):
return self.__eq__(other) or self.__gt__(other)
Highest = Highest()
# Base classes
class Query(object):
"""Abstract base class for all queries.
Note that this base class implements __or__, __and__, and __sub__ to allow
slightly more convenient composition of query objects::
>>> Term("content", u"a") | Term("content", u"b")
Or([Term("content", u"a"), Term("content", u"b")])
>>> Term("content", u"a") & Term("content", u"b")
And([Term("content", u"a"), Term("content", u"b")])
>>> Term("content", u"a") - Term("content", u"b")
And([Term("content", u"a"), Not(Term("content", u"b"))])
"""
# For queries produced by the query parser, record where in the user
# query this object originated
startchar = endchar = None
# For queries produced by the query parser, records an error that resulted
# in this query
error = None
def __or__(self, query):
"""Allows you to use | between query objects to wrap them in an Or
query.
"""
return Or([self, query]).normalize()
def __and__(self, query):
"""Allows you to use & between query objects to wrap them in an And
query.
"""
return And([self, query]).normalize()
def __sub__(self, query):
"""Allows you to use - between query objects to add the right-hand
query as a "NOT" query.
"""
return And([self, Not(query)]).normalize()
def __hash__(self):
raise NotImplementedError
def __ne__(self, other):
return not self.__eq__(other)
def is_leaf(self):
"""Returns True if this is a leaf node in the query tree, or False if
this query has sub-queries.
"""
return True
def children(self):
"""Returns an iterator of the subqueries of this object.
"""
return iter([])
def is_range(self):
"""Returns True if this object searches for values within a range.
"""
return False
def has_terms(self):
"""Returns True if this specific object represents a search for a
specific term (as opposed to a pattern, as in Wildcard and Prefix) or
terms (i.e., whether the ``replace()`` method does something
meaningful on this instance).
"""
return False
def apply(self, fn):
"""If this query has children, calls the given function on each child
and returns a new copy of this node with the new children returned by
the function. If this is a leaf node, simply returns this object.
This is useful for writing functions that transform a query tree. For
example, this function changes all Term objects in a query tree into
Variations objects::
def term2var(q):
if isinstance(q, Term):
return Variations(q.fieldname, q.text)
else:
return q.apply(term2var)
q = And([Term("f", "alfa"),
Or([Term("f", "bravo"),
Not(Term("f", "charlie"))])])
q = term2var(q)
Note that this method does not automatically create copies of nodes.
To avoid modifying the original tree, your function should call the
:meth:`Query.copy` method on nodes before changing their attributes.
"""
return self
def accept(self, fn):
"""Applies the given function to this query's subqueries (if any) and
then to this query itself::
def boost_phrases(q):
                if isinstance(q, Phrase):
q.boost *= 2.0
return q
myquery = myquery.accept(boost_phrases)
This method automatically creates copies of the nodes in the original
tree before passing them to your function, so your function can change
attributes on nodes without altering the original tree.
This method is less flexible than using :meth:`Query.apply` (in fact
it's implemented using that method) but is often more straightforward.
"""
def fn_wrapper(q):
q = q.apply(fn_wrapper)
return fn(q)
return fn_wrapper(self)
def replace(self, fieldname, oldtext, newtext):
"""Returns a copy of this query with oldtext replaced by newtext (if
oldtext was anywhere in this query).
Note that this returns a *new* query with the given text replaced. It
*does not* modify the original query "in place".
"""
# The default implementation uses the apply method to "pass down" the
# replace() method call
if self.is_leaf():
return copy.copy(self)
else:
return self.apply(methodcaller("replace", fieldname, oldtext,
newtext))
def copy(self):
"""Deprecated, just use ``copy.deepcopy``.
"""
return copy.deepcopy(self)
def all_terms(self, termset=None, phrases=True):
"""Returns a set of all terms in this query tree.
This method exists for backwards compatibility. For more flexibility
use the :meth:`Query.iter_all_terms` method instead, which simply
yields the terms in the query.
:param phrases: Whether to add words found in Phrase queries.
:rtype: set
"""
if not termset:
termset = set()
for q in self.leaves():
if q.has_terms():
if phrases or not isinstance(q, Phrase):
termset.update(q.terms())
return termset
def _existing_terms_helper(self, ixreader, termset, reverse):
if termset is None:
termset = set()
if reverse:
test = lambda t: t not in ixreader
else:
test = lambda t: t in ixreader
return termset, test
def existing_terms(self, ixreader, termset=None, reverse=False,
phrases=True, expand=False):
"""Returns a set of all terms in this query tree that exist in the
        given ixreader.
This method exists for backwards compatibility. For more flexibility
use the :meth:`Query.iter_all_terms` method instead, which simply
yields the terms in the query.
:param ixreader: A :class:`whoosh.reading.IndexReader` object.
:param reverse: If True, this method adds *missing* terms rather than
*existing* terms to the set.
:param phrases: Whether to add words found in Phrase queries.
:param expand: If True, queries that match multiple terms
(such as :class:`Wildcard` and :class:`Prefix`) will return all
matching expansions.
:rtype: set
"""
# By default, this method calls all_terms() and then filters based on
# the contents of the reader. Subclasses that need to use the reader to
# generate the terms (i.e. MultiTerm) need to override this
# implementation
termset, test = self._existing_terms_helper(ixreader, termset, reverse)
if self.is_leaf():
gen = self.all_terms(phrases=phrases)
termset.update(t for t in gen if test(t))
else:
for q in self.children():
q.existing_terms(ixreader, termset, reverse, phrases, expand)
return termset
def leaves(self):
"""Returns an iterator of all the leaf queries in this query tree as a
flat series.
"""
if self.is_leaf():
yield self
else:
for q in self.children():
for qq in q.leaves():
yield qq
def iter_all_terms(self):
"""Returns an iterator of ("fieldname", "text") pairs for all terms in
this query tree.
>>> qp = qparser.QueryParser("text", myindex.schema)
>>> q = myparser.parse("alfa bravo title:charlie")
>>> # List the terms in a query
>>> list(q.iter_all_terms())
[("text", "alfa"), ("text", "bravo"), ("title", "charlie")]
>>> # Get a set of all terms in the query that don't exist in the index
>>> r = myindex.reader()
>>> missing = set(t for t in q.iter_all_terms() if t not in r)
set([("text", "alfa"), ("title", "charlie")])
>>> # All terms in the query that occur in fewer than 5 documents in
>>> # the index
>>> [t for t in q.iter_all_terms() if r.doc_frequency(t[0], t[1]) < 5]
[("title", "charlie")]
"""
for q in self.leaves():
if q.has_terms():
for t in q.terms():
yield t
def all_tokens(self, boost=1.0):
"""Returns an iterator of :class:`analysis.Token` objects corresponding
to all terms in this query tree. The Token objects will have the
``fieldname``, ``text``, and ``boost`` attributes set. If the query
        was built by the query parser, the Token objects will also have
``startchar`` and ``endchar`` attributes indexing into the original
user query.
"""
if self.is_leaf():
for token in self.tokens(boost):
yield token
else:
boost *= self.boost if hasattr(self, "boost") else 1.0
for child in self.children():
for token in child.all_tokens(boost):
yield token
def terms(self):
"""Yields zero or more ("fieldname", "text") pairs searched for by this
query object. You can check whether a query object targets specific
terms before you call this method using :meth:`Query.has_terms`.
To get all terms in a query tree, use :meth:`Query.iter_all_terms`.
"""
for token in self.tokens():
yield (token.fieldname, token.text)
def tokens(self, boost=1.0):
"""Yields zero or more :class:`analysis.Token` objects corresponding to
the terms searched for by this query object. You can check whether a
query object targets specific terms before you call this method using
:meth:`Query.has_terms`.
The Token objects will have the ``fieldname``, ``text``, and ``boost``
        attributes set. If the query was built by the query parser, the Token
objects will also have ``startchar`` and ``endchar`` attributes
indexing into the original user query.
To get all tokens for a query tree, use :meth:`Query.all_tokens`.
"""
return []
def requires(self):
"""Returns a set of queries that are *known* to be required to match
for the entire query to match. Note that other queries might also turn
out to be required but not be determinable by examining the static
query.
>>> a = Term("f", u"a")
>>> b = Term("f", u"b")
>>> And([a, b]).requires()
set([Term("f", u"a"), Term("f", u"b")])
>>> Or([a, b]).requires()
set([])
>>> AndMaybe(a, b).requires()
set([Term("f", u"a")])
>>> a.requires()
set([Term("f", u"a")])
"""
# Subclasses should implement the _add_required_to(qset) method
return set([self])
def field(self):
"""Returns the field this query matches in, or None if this query does
not match in a single field.
"""
return self.fieldname
def with_boost(self, boost):
"""Returns a COPY of this query with the boost set to the given value.
If a query type does not accept a boost itself, it will try to pass the
boost on to its children, if any.
"""
q = self.copy()
q.boost = boost
return q
def estimate_size(self, ixreader):
"""Returns an estimate of how many documents this query could
potentially match (for example, the estimated size of a simple term
query is the document frequency of the term). It is permissible to
overestimate, but not to underestimate.
"""
raise NotImplementedError
def estimate_min_size(self, ixreader):
"""Returns an estimate of the minimum number of documents this query
could potentially match.
"""
return self.estimate_size(ixreader)
def matcher(self, searcher):
"""Returns a :class:`~whoosh.matching.Matcher` object you can use to
retrieve documents and scores matching this query.
:rtype: :class:`whoosh.matching.Matcher`
"""
raise NotImplementedError
def docs(self, searcher):
"""Returns an iterator of docnums matching this query.
>>> searcher = my_index.searcher()
>>> list(my_query.docs(searcher))
[10, 34, 78, 103]
:param searcher: A :class:`whoosh.searching.Searcher` object.
"""
try:
return self.matcher(searcher).all_ids()
except TermNotFound:
return iter([])
def normalize(self):
"""Returns a recursively "normalized" form of this query. The
normalized form removes redundancy and empty queries. This is called
automatically on query trees created by the query parser, but you may
want to call it yourself if you're writing your own parser or building
your own queries.
>>> q = And([And([Term("f", u"a"),
... Term("f", u"b")]),
... Term("f", u"c"), Or([])])
>>> q.normalize()
And([Term("f", u"a"), Term("f", u"b"), Term("f", u"c")])
Note that this returns a *new, normalized* query. It *does not* modify
the original query "in place".
"""
return self
def simplify(self, ixreader):
"""Returns a recursively simplified form of this query, where
"second-order" queries (such as Prefix and Variations) are re-written
into lower-level queries (such as Term and Or).
"""
return self
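    # For example (illustrative; the expanded words are assumptions, not fixed
    # behaviour): Prefix("content", u"comp").simplify(reader) might become
    #   Or([Term("content", u"compile"), Term("content", u"composite")])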
class WrappingQuery(Query):
def __init__(self, child):
self.child = child
def __repr__(self):
return "%s(%r)" % (self.__class__.__name__, self.child)
def __hash__(self):
return hash(self.__class__.__name__) ^ hash(self.child)
def _rewrap(self, child):
return self.__class__(child)
def is_leaf(self):
return False
def children(self):
yield self.child
def apply(self, fn):
return self._rewrap(fn(self.child))
def requires(self):
return self.child.requires()
def field(self):
return self.child.field()
def with_boost(self, boost):
return self._rewrap(self.child.with_boost(boost))
def estimate_size(self, ixreader):
return self.child.estimate_size(ixreader)
def estimate_min_size(self, ixreader):
return self.child.estimate_min_size(ixreader)
def matcher(self, searcher):
return self.child.matcher(searcher)
class CompoundQuery(Query):
"""Abstract base class for queries that combine or manipulate the results
of multiple sub-queries .
"""
def __init__(self, subqueries, boost=1.0):
self.subqueries = subqueries
self.boost = boost
def __repr__(self):
r = "%s(%r" % (self.__class__.__name__, self.subqueries)
if hasattr(self, "boost") and self.boost != 1:
r += ", boost=%s" % self.boost
r += ")"
return r
def __unicode__(self):
r = u("(")
r += (self.JOINT).join([text_type(s) for s in self.subqueries])
r += u(")")
return r
__str__ = __unicode__
def __eq__(self, other):
return other and self.__class__ is other.__class__ and\
self.subqueries == other.subqueries and\
self.boost == other.boost
def __getitem__(self, i):
return self.subqueries.__getitem__(i)
def __len__(self):
return len(self.subqueries)
def __iter__(self):
return iter(self.subqueries)
def __hash__(self):
h = hash(self.__class__.__name__) ^ hash(self.boost)
for q in self.subqueries:
h ^= hash(q)
return h
def is_leaf(self):
return False
def children(self):
return iter(self.subqueries)
def apply(self, fn):
return self.__class__([fn(q) for q in self.subqueries],
boost=self.boost)
def field(self):
if self.subqueries:
f = self.subqueries[0].field()
if all(q.field() == f for q in self.subqueries[1:]):
return f
def estimate_size(self, ixreader):
return sum(q.estimate_size(ixreader) for q in self.subqueries)
def estimate_min_size(self, ixreader):
subs, nots = self._split_queries()
subs_min = min(q.estimate_min_size(ixreader) for q in subs)
if nots:
nots_sum = sum(q.estimate_size(ixreader) for q in nots)
subs_min = max(0, subs_min - nots_sum)
return subs_min
def normalize(self):
# Normalize subqueries and merge nested instances of this class
subqueries = []
for s in self.subqueries:
s = s.normalize()
if isinstance(s, self.__class__):
subqueries += [ss.with_boost(ss.boost * s.boost) for ss in s]
else:
subqueries.append(s)
# If every subquery is Null, this query is Null
if all(q is NullQuery for q in subqueries):
return NullQuery
# If there's an unfielded Every inside, then this query is Every
if any((isinstance(q, Every) and q.fieldname is None)
for q in subqueries):
return Every()
# Merge ranges and Everys
everyfields = set()
i = 0
while i < len(subqueries):
q = subqueries[i]
f = q.field()
if f in everyfields:
subqueries.pop(i)
continue
if isinstance(q, (TermRange, NumericRange)):
j = i + 1
while j < len(subqueries):
if q.overlaps(subqueries[j]):
qq = subqueries.pop(j)
q = q.merge(qq, intersect=self.intersect_merge)
else:
j += 1
q = subqueries[i] = q.normalize()
if isinstance(q, Every):
everyfields.add(q.fieldname)
i += 1
# Eliminate duplicate queries
subqs = []
seenqs = set()
for s in subqueries:
if (not isinstance(s, Every) and s.field() in everyfields):
continue
if s in seenqs:
continue
seenqs.add(s)
subqs.append(s)
# Remove NullQuerys
subqs = [q for q in subqs if q is not NullQuery]
if not subqs:
return NullQuery
if len(subqs) == 1:
sub = subqs[0]
if not (self.boost == 1.0 and sub.boost == 1.0):
sub = sub.with_boost(sub.boost * self.boost)
return sub
return self.__class__(subqs, boost=self.boost)
def _split_queries(self):
subs = [q for q in self.subqueries if not isinstance(q, Not)]
nots = [q.query for q in self.subqueries if isinstance(q, Not)]
return (subs, nots)
def simplify(self, ixreader):
subs, nots = self._split_queries()
if subs:
subs = self.__class__([subq.simplify(ixreader) for subq in subs],
boost=self.boost).normalize()
if nots:
nots = Or(nots).simplify().normalize()
return AndNot(subs, nots)
else:
return subs
else:
return NullQuery
def _matcher(self, matchercls, q_weight_fn, searcher, **kwargs):
# q_weight_fn is a function which is called on each query and returns a
# "weight" value which is used to build a huffman-like matcher tree. If
# q_weight_fn is None, an order-preserving binary tree is used instead.
# Pull any queries inside a Not() out into their own list
subs, nots = self._split_queries()
if not subs:
return NullMatcher()
# Create a matcher from the list of subqueries
if len(subs) == 1:
m = subs[0].matcher(searcher)
elif q_weight_fn is None:
subms = [q.matcher(searcher) for q in subs]
m = make_binary_tree(matchercls, subms)
else:
subms = [(q_weight_fn(q), q.matcher(searcher)) for q in subs]
m = make_weighted_tree(matchercls, subms)
# If there were queries inside Not(), make a matcher for them and
# wrap the matchers in an AndNotMatcher
if nots:
if len(nots) == 1:
notm = nots[0].matcher(searcher)
else:
r = searcher.reader()
notms = [(q.estimate_size(r), q.matcher(searcher))
for q in nots]
notm = make_weighted_tree(UnionMatcher, notms)
if notm.is_active():
m = AndNotMatcher(m, notm)
# If this query had a boost, add a wrapping matcher to apply the boost
if self.boost != 1.0:
m = WrappingMatcher(m, self.boost)
return m
class MultiTerm(Query):
"""Abstract base class for queries that operate on multiple terms in the
same field.
"""
TOO_MANY_CLAUSES = 1024
constantscore = False
def _words(self, ixreader):
raise NotImplementedError
def simplify(self, ixreader):
existing = [Term(self.fieldname, word, boost=self.boost)
for word in sorted(set(self._words(ixreader)))]
if len(existing) == 1:
return existing[0]
elif existing:
return Or(existing)
else:
return NullQuery
def estimate_size(self, ixreader):
return sum(ixreader.doc_frequency(self.fieldname, text)
for text in self._words(ixreader))
def estimate_min_size(self, ixreader):
return min(ixreader.doc_frequency(self.fieldname, text)
for text in self._words(ixreader))
def existing_terms(self, ixreader, termset=None, reverse=False,
phrases=True, expand=False):
termset, test = self._existing_terms_helper(ixreader, termset, reverse)
if not expand:
return termset
fieldname = self.field()
if fieldname is None:
return termset
for word in self._words(ixreader):
term = (fieldname, word)
if test(term):
termset.add(term)
return termset
def matcher(self, searcher):
fieldname = self.fieldname
reader = searcher.reader()
qs = [Term(fieldname, word) for word in self._words(reader)]
if not qs:
return NullMatcher()
if len(qs) == 1:
# If there's only one term, just use it
q = qs[0]
elif self.constantscore or len(qs) > self.TOO_MANY_CLAUSES:
# If there's so many clauses that an Or search would take forever,
# trade memory for time and just put all the matching docs in a set
# and serve it up as a ListMatcher
docset = set()
for q in qs:
docset.update(q.matcher(searcher).all_ids())
return ListMatcher(sorted(docset), all_weights=self.boost)
else:
# The default case: Or the terms together
q = Or(qs)
return q.matcher(searcher)
# Concrete classes
class Term(Query):
"""Matches documents containing the given term (fieldname+text pair).
>>> Term("content", u"render")
"""
__inittypes__ = dict(fieldname=str, text=text_type, boost=float)
def __init__(self, fieldname, text, boost=1.0):
self.fieldname = fieldname
self.text = text
self.boost = boost
def __eq__(self, other):
return (other
and self.__class__ is other.__class__
and self.fieldname == other.fieldname
and self.text == other.text
and self.boost == other.boost)
def __repr__(self):
r = "%s(%r, %r" % (self.__class__.__name__, self.fieldname, self.text)
if self.boost != 1.0:
r += ", boost=%s" % self.boost
r += ")"
return r
def __unicode__(self):
t = u("%s:%s") % (self.fieldname, self.text)
if self.boost != 1:
t += u("^") + text_type(self.boost)
return t
__str__ = __unicode__
def __hash__(self):
return hash(self.fieldname) ^ hash(self.text) ^ hash(self.boost)
def has_terms(self):
return True
def tokens(self, boost=1.0):
yield Token(fieldname=self.fieldname, text=self.text,
boost=boost * self.boost, startchar=self.startchar,
endchar=self.endchar, chars=True)
def replace(self, fieldname, oldtext, newtext):
q = copy.copy(self)
if q.fieldname == fieldname and q.text == oldtext:
q.text = newtext
return q
def estimate_size(self, ixreader):
return ixreader.doc_frequency(self.fieldname, self.text)
def matcher(self, searcher):
if (self.fieldname, self.text) in searcher.reader():
m = searcher.postings(self.fieldname, self.text)
if self.boost != 1.0:
m = WrappingMatcher(m, boost=self.boost)
return m
else:
return NullMatcher()
class And(CompoundQuery):
"""Matches documents that match ALL of the subqueries.
>>> And([Term("content", u"render"),
... Term("content", u"shade"),
... Not(Term("content", u"texture"))])
>>> # You can also do this
>>> Term("content", u"render") & Term("content", u"shade")
"""
# This is used by the superclass's __unicode__ method.
JOINT = " AND "
intersect_merge = True
def requires(self):
s = set()
for q in self.subqueries:
s |= q.requires()
return s
def estimate_size(self, ixreader):
return min(q.estimate_size(ixreader) for q in self.subqueries)
def matcher(self, searcher):
r = searcher.reader()
return self._matcher(IntersectionMatcher,
lambda q: 0 - q.estimate_size(r), searcher)
class Or(CompoundQuery):
"""Matches documents that match ANY of the subqueries.
>>> Or([Term("content", u"render"),
... And([Term("content", u"shade"), Term("content", u"texture")]),
... Not(Term("content", u"network"))])
>>> # You can also do this
>>> Term("content", u"render") | Term("content", u"shade")
"""
# This is used by the superclass's __unicode__ method.
JOINT = " OR "
intersect_merge = False
matcher_class = UnionMatcher
def __init__(self, subqueries, boost=1.0, minmatch=0):
CompoundQuery.__init__(self, subqueries, boost=boost)
self.minmatch = minmatch
def __unicode__(self):
r = u("(")
r += (self.JOINT).join([text_type(s) for s in self.subqueries])
r += u(")")
if self.minmatch:
r += u(">%s") % self.minmatch
return r
__str__ = __unicode__
def normalize(self):
norm = CompoundQuery.normalize(self)
if norm.__class__ is self.__class__:
norm.minmatch = self.minmatch
return norm
def requires(self):
if len(self.subqueries) == 1:
return self.subqueries[0].requires()
else:
return set()
def matcher(self, searcher):
r = searcher.reader()
return self._matcher(self.matcher_class, lambda q: q.estimate_size(r),
searcher)
class DisjunctionMax(CompoundQuery):
"""Matches all documents that match any of the subqueries, but scores each
document using the maximum score from the subqueries.
"""
def __init__(self, subqueries, boost=1.0, tiebreak=0.0):
CompoundQuery.__init__(self, subqueries, boost=boost)
self.tiebreak = tiebreak
def __unicode__(self):
r = u("DisMax(")
r += " ".join([text_type(s) for s in self.subqueries])
r += u(")")
if self.tiebreak:
s += u("~") + text_type(self.tiebreak)
return r
__str__ = __unicode__
def normalize(self):
norm = CompoundQuery.normalize(self)
if norm.__class__ is self.__class__:
norm.tiebreak = self.tiebreak
return norm
def requires(self):
if len(self.subqueries) == 1:
return self.subqueries[0].requires()
else:
return set()
def matcher(self, searcher):
r = searcher.reader()
return self._matcher(DisjunctionMaxMatcher,
lambda q: q.estimate_size(r), searcher,
tiebreak=self.tiebreak)
class Not(Query):
"""Excludes any documents that match the subquery.
>>> # Match documents that contain 'render' but not 'texture'
>>> And([Term("content", u"render"),
... Not(Term("content", u"texture"))])
>>> # You can also do this
>>> Term("content", u"render") - Term("content", u"texture")
"""
__inittypes__ = dict(query=Query)
def __init__(self, query, boost=1.0):
"""
:param query: A :class:`Query` object. The results of this query
are *excluded* from the parent query.
:param boost: Boost is meaningless for excluded documents but this
keyword argument is accepted for the sake of a consistent
interface.
"""
self.query = query
self.boost = boost
def __eq__(self, other):
return other and self.__class__ is other.__class__ and\
self.query == other.query
def __repr__(self):
return "%s(%s)" % (self.__class__.__name__, repr(self.query))
def __unicode__(self):
return u("NOT ") + text_type(self.query)
__str__ = __unicode__
def __hash__(self):
return (hash(self.__class__.__name__)
^ hash(self.query)
^ hash(self.boost))
def is_leaf(self):
return False
def children(self):
yield self.query
def apply(self, fn):
return self.__class__(fn(self.query))
def normalize(self):
query = self.query.normalize()
if query is NullQuery:
return NullQuery
else:
return self.__class__(query, boost=self.boost)
def field(self):
return None
def estimate_size(self, ixreader):
return ixreader.doc_count()
def estimate_min_size(self, ixreader):
return 1 if ixreader.doc_count() else 0
def matcher(self, searcher):
# Usually only called if Not is the root query. Otherwise, queries such
# as And and Or do special handling of Not subqueries.
reader = searcher.reader()
child = self.query.matcher(searcher)
return InverseMatcher(child, searcher.doc_count_all(),
missing=reader.is_deleted)
class PatternQuery(MultiTerm):
"""An intermediate base class for common methods of Prefix and Wildcard.
"""
__inittypes__ = dict(fieldname=str, text=text_type, boost=float)
def __init__(self, fieldname, text, boost=1.0, constantscore=True):
self.fieldname = fieldname
self.text = text
self.boost = boost
self.constantscore = constantscore
def __eq__(self, other):
return (other and self.__class__ is other.__class__
and self.fieldname == other.fieldname
and self.text == other.text and self.boost == other.boost
and self.constantscore == other.constantscore)
def __repr__(self):
r = "%s(%r, %r" % (self.__class__.__name__, self.fieldname, self.text)
if self.boost != 1:
r += ", boost=%s" % self.boost
r += ")"
return r
def __hash__(self):
return (hash(self.fieldname) ^ hash(self.text) ^ hash(self.boost)
^ hash(self.constantscore))
def _get_pattern(self):
raise NotImplementedError
def _find_prefix(self, text):
# Subclasses/instances should set the SPECIAL_CHARS attribute to a set
# of characters that mark the end of the literal prefix
specialchars = self.SPECIAL_CHARS
        # If no special character is found, the whole text is the prefix.
        prefix = text
        for i, char in enumerate(text):
            if char in specialchars:
                prefix = text[:i]
                break
        return prefix
def _words(self, ixreader):
exp = re.compile(self._get_pattern())
prefix = self._find_prefix(self.text)
if prefix:
candidates = ixreader.expand_prefix(self.fieldname, prefix)
else:
candidates = ixreader.lexicon(self.fieldname)
for text in candidates:
if exp.match(text):
yield text
class Prefix(PatternQuery):
"""Matches documents that contain any terms that start with the given text.
>>> # Match documents containing words starting with 'comp'
>>> Prefix("content", u"comp")
"""
def __unicode__(self):
return "%s:%s*" % (self.fieldname, self.text)
__str__ = __unicode__
def _words(self, ixreader):
return ixreader.expand_prefix(self.fieldname, self.text)
class Wildcard(PatternQuery):
"""Matches documents that contain any terms that match a "glob" pattern.
See the Python ``fnmatch`` module for information about globs.
>>> Wildcard("content", u"in*f?x")
"""
SPECIAL_CHARS = frozenset("*?")
def __unicode__(self):
return "%s:%s" % (self.fieldname, self.text)
__str__ = __unicode__
def _get_pattern(self):
return fnmatch.translate(self.text)
def normalize(self):
# If there are no wildcard characters in this "wildcard", turn it into
# a simple Term
text = self.text
if text == "*":
return Every(self.fieldname, boost=self.boost)
if "*" not in text and "?" not in text:
# If no wildcard chars, convert to a normal term.
return Term(self.fieldname, self.text, boost=self.boost)
elif ("?" not in text and text.endswith("*")
and text.find("*") == len(text) - 1):
# If the only wildcard char is an asterisk at the end, convert to a
# Prefix query.
return Prefix(self.fieldname, self.text[:-1], boost=self.boost)
else:
return self
# _words() implemented in PatternQuery
class Regex(PatternQuery):
"""Matches documents that contain any terms that match a regular
expression. See the Python ``re`` module for information about regular
expressions.
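    For example (the field name and pattern below are only illustrative):
    >>> Regex("content", u"r[ae]nde?rs?")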
"""
SPECIAL_CHARS = frozenset("{}()[].?+^$\\")
def __unicode__(self):
return '%s:r"%s"' % (self.fieldname, self.text)
__str__ = __unicode__
def _get_pattern(self):
return self.text
def _find_prefix(self, text):
if "|" in text:
return ""
if text.startswith("^"):
text = text[1:]
elif text.startswith("\\A"):
text = text[2:]
return PatternQuery._find_prefix(self, text)
# _words() implemented in PatternQuery
class ExpandingTerm(MultiTerm):
"""Intermediate base class for queries such as FuzzyTerm and Variations
that expand into multiple queries, but come from a single term.
"""
def has_terms(self):
return True
def tokens(self, boost=1.0):
yield Token(fieldname=self.fieldname, text=self.text,
boost=boost * self.boost, startchar=self.startchar,
endchar=self.endchar, chars=True)
class FuzzyTerm(ExpandingTerm):
"""Matches documents containing words similar to the given term.
"""
__inittypes__ = dict(fieldname=str, text=text_type, boost=float,
maxdist=float, prefixlength=int)
def __init__(self, fieldname, text, boost=1.0, maxdist=1,
prefixlength=1, constantscore=True):
"""
:param fieldname: The name of the field to search.
:param text: The text to search for.
:param boost: A boost factor to apply to scores of documents matching
this query.
:param maxdist: The maximum edit distance from the given text.
:param prefixlength: The matched terms must share this many initial
characters with 'text'. For example, if text is "light" and
prefixlength is 2, then only terms starting with "li" are checked
for similarity.
"""
self.fieldname = fieldname
self.text = text
self.boost = boost
self.maxdist = maxdist
self.prefixlength = prefixlength
self.constantscore = constantscore
def __eq__(self, other):
return (other and self.__class__ is other.__class__
and self.fieldname == other.fieldname
and self.text == other.text
and self.maxdist == other.maxdist
and self.prefixlength == other.prefixlength
and self.boost == other.boost
and self.constantscore == other.constantscore)
def __repr__(self):
r = "%s(%r, %r, boost=%f, maxdist=%d, prefixlength=%d)"
return r % (self.__class__.__name__, self.fieldname, self.text,
self.boost, self.maxdist, self.prefixlength)
def __unicode__(self):
r = self.text + u("~")
if self.maxdist > 1:
r += u("%d") % self.maxdist
if self.boost != 1.0:
r += u("^%f") % self.boost
return r
__str__ = __unicode__
def __hash__(self):
return (hash(self.fieldname) ^ hash(self.text) ^ hash(self.boost)
^ hash(self.maxdist) ^ hash(self.prefixlength)
^ hash(self.constantscore))
def _words(self, ixreader):
return ixreader.terms_within(self.fieldname, self.text, self.maxdist,
prefix=self.prefixlength)
class Variations(ExpandingTerm):
"""Query that automatically searches for morphological variations of the
given word in the same field.
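    For example (the field name and word below are only illustrative):
    >>> Variations("content", u"render")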
"""
def __init__(self, fieldname, text, boost=1.0):
self.fieldname = fieldname
self.text = text
self.boost = boost
def __repr__(self):
r = "%s(%r, %r" % (self.__class__.__name__, self.fieldname, self.text)
if self.boost != 1:
r += ", boost=%s" % self.boost
r += ")"
return r
def __eq__(self, other):
return (other and self.__class__ is other.__class__
and self.fieldname == other.fieldname
and self.text == other.text and self.boost == other.boost)
def __hash__(self):
return hash(self.fieldname) ^ hash(self.text) ^ hash(self.boost)
def _words(self, ixreader):
fieldname = self.fieldname
return [word for word in variations(self.text)
if (fieldname, word) in ixreader]
def __unicode__(self):
return u("%s:<%s>") % (self.fieldname, self.text)
__str__ = __unicode__
def replace(self, fieldname, oldtext, newtext):
q = copy.copy(self)
if q.fieldname == fieldname and q.text == oldtext:
q.text = newtext
return q
class RangeMixin(object):
# Contains methods shared by TermRange and NumericRange
def __repr__(self):
return ('%s(%r, %r, %r, %s, %s, boost=%s, constantscore=%s)'
% (self.__class__.__name__, self.fieldname, self.start,
self.end, self.startexcl, self.endexcl, self.boost,
self.constantscore))
def __unicode__(self):
startchar = "{" if self.startexcl else "["
endchar = "}" if self.endexcl else "]"
start = '' if self.start is None else self.start
end = '' if self.end is None else self.end
return u("%s:%s%s TO %s%s") % (self.fieldname, startchar, start, end,
endchar)
__str__ = __unicode__
def __eq__(self, other):
return (other and self.__class__ is other.__class__
and self.fieldname == other.fieldname
and self.start == other.start and self.end == other.end
and self.startexcl == other.startexcl
and self.endexcl == other.endexcl
and self.boost == other.boost
and self.constantscore == other.constantscore)
def __hash__(self):
return (hash(self.fieldname) ^ hash(self.start) ^ hash(self.startexcl)
^ hash(self.end) ^ hash(self.endexcl) ^ hash(self.boost))
def is_range(self):
return True
def _comparable_start(self):
if self.start is None:
return (Lowest, 0)
else:
second = 1 if self.startexcl else 0
return (self.start, second)
def _comparable_end(self):
if self.end is None:
return (Highest, 0)
else:
second = -1 if self.endexcl else 0
return (self.end, second)
def overlaps(self, other):
if not isinstance(other, TermRange):
return False
if self.fieldname != other.fieldname:
return False
start1 = self._comparable_start()
start2 = other._comparable_start()
end1 = self._comparable_end()
end2 = other._comparable_end()
return ((start1 >= start2 and start1 <= end2)
or (end1 >= start2 and end1 <= end2)
or (start2 >= start1 and start2 <= end1)
or (end2 >= start1 and end2 <= end1))
def merge(self, other, intersect=True):
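        # Illustrative behaviour (boundary values are hypothetical): merging
        # [a TO c] with [b TO d] gives [b TO c] when intersect=True and
        # [a TO d] when intersect=False.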
assert self.fieldname == other.fieldname
start1 = self._comparable_start()
start2 = other._comparable_start()
end1 = self._comparable_end()
end2 = other._comparable_end()
if start1 >= start2 and end1 <= end2:
start = start2
end = end2
elif start2 >= start1 and end2 <= end1:
start = start1
end = end1
elif intersect:
start = max(start1, start2)
end = min(end1, end2)
else:
start = min(start1, start2)
end = max(end1, end2)
startval = None if start[0] is Lowest else start[0]
startexcl = start[1] == 1
endval = None if end[0] is Highest else end[0]
endexcl = end[1] == -1
boost = max(self.boost, other.boost)
constantscore = self.constantscore or other.constantscore
return self.__class__(self.fieldname, startval, endval, startexcl,
endexcl, boost=boost,
constantscore=constantscore)
class TermRange(RangeMixin, MultiTerm):
"""Matches documents containing any terms in a given range.
>>> # Match documents where the indexed "id" field is greater than or equal
>>> # to 'apple' and less than or equal to 'pear'.
>>> TermRange("id", u"apple", u"pear")
"""
def __init__(self, fieldname, start, end, startexcl=False, endexcl=False,
boost=1.0, constantscore=True):
"""
:param fieldname: The name of the field to search.
:param start: Match terms equal to or greater than this.
:param end: Match terms equal to or less than this.
:param startexcl: If True, the range start is exclusive. If False, the
range start is inclusive.
:param endexcl: If True, the range end is exclusive. If False, the
range end is inclusive.
:param boost: Boost factor that should be applied to the raw score of
results matched by this query.
"""
self.fieldname = fieldname
self.start = start
self.end = end
self.startexcl = startexcl
self.endexcl = endexcl
self.boost = boost
self.constantscore = constantscore
def normalize(self):
if self.start in ('', None) and self.end in (u('\uffff'), None):
return Every(self.fieldname, boost=self.boost)
elif self.start == self.end:
if self.startexcl or self.endexcl:
return NullQuery
return Term(self.fieldname, self.start, boost=self.boost)
else:
return TermRange(self.fieldname, self.start, self.end,
self.startexcl, self.endexcl,
boost=self.boost)
#def replace(self, fieldname, oldtext, newtext):
# q = self.copy()
# if q.fieldname == fieldname:
# if q.start == oldtext:
# q.start = newtext
# if q.end == oldtext:
# q.end = newtext
# return q
def _words(self, ixreader):
fieldname = self.fieldname
start = '' if self.start is None else self.start
end = u('\uFFFF') if self.end is None else self.end
startexcl = self.startexcl
endexcl = self.endexcl
for fname, t in ixreader.terms_from(fieldname, start):
if fname != fieldname:
break
if t == start and startexcl:
continue
if t == end and endexcl:
break
if t > end:
break
yield t
class NumericRange(RangeMixin, Query):
"""A range query for NUMERIC fields. Takes advantage of tiered indexing
to speed up large ranges by matching at a high resolution at the edges of
the range and a low resolution in the middle.
>>> # Match numbers from 10 to 5925 in the "number" field.
>>> nr = NumericRange("number", 10, 5925)
"""
def __init__(self, fieldname, start, end, startexcl=False, endexcl=False,
boost=1.0, constantscore=True):
"""
:param fieldname: The name of the field to search.
:param start: Match terms equal to or greater than this number. This
should be a number type, not a string.
:param end: Match terms equal to or less than this number. This should
be a number type, not a string.
:param startexcl: If True, the range start is exclusive. If False, the
range start is inclusive.
:param endexcl: If True, the range end is exclusive. If False, the
range end is inclusive.
:param boost: Boost factor that should be applied to the raw score of
results matched by this query.
:param constantscore: If True, the compiled query returns a constant
score (the value of the ``boost`` keyword argument) instead of
actually scoring the matched terms. This gives a nice speed boost
and won't affect the results in most cases since numeric ranges
will almost always be used as a filter.
"""
self.fieldname = fieldname
self.start = start
self.end = end
self.startexcl = startexcl
self.endexcl = endexcl
self.boost = boost
self.constantscore = constantscore
def simplify(self, ixreader):
return self._compile_query(ixreader).simplify(ixreader)
def estimate_size(self, ixreader):
return self._compile_query(ixreader).estimate_size(ixreader)
def estimate_min_size(self, ixreader):
return self._compile_query(ixreader).estimate_min_size(ixreader)
def docs(self, searcher):
q = self._compile_query(searcher.reader())
return q.docs(searcher)
def _compile_query(self, ixreader):
from whoosh.fields import NUMERIC
from whoosh.support.numeric import tiered_ranges
field = ixreader.schema[self.fieldname]
if not isinstance(field, NUMERIC):
raise Exception("NumericRange: field %r is not numeric"
% self.fieldname)
start = field.prepare_number(self.start)
end = field.prepare_number(self.end)
subqueries = []
# Get the term ranges for the different resolutions
for starttext, endtext in tiered_ranges(field.type, field.signed,
start, end, field.shift_step,
self.startexcl, self.endexcl):
if starttext == endtext:
subq = Term(self.fieldname, starttext)
else:
subq = TermRange(self.fieldname, starttext, endtext)
subqueries.append(subq)
if len(subqueries) == 1:
q = subqueries[0]
elif subqueries:
q = Or(subqueries, boost=self.boost)
else:
return NullQuery
if self.constantscore:
q = ConstantScoreQuery(q, self.boost)
return q
def matcher(self, searcher):
q = self._compile_query(searcher.reader())
return q.matcher(searcher)
class DateRange(NumericRange):
"""This is a very thin subclass of :class:`NumericRange` that only
overrides the initializer and ``__repr__()`` methods to work with datetime
objects instead of numbers. Internally this object converts the datetime
objects it's created with to numbers and otherwise acts like a
``NumericRange`` query.
>>> DateRange("date", datetime(2010, 11, 3, 3, 0),
... datetime(2010, 11, 3, 17, 59))
"""
def __init__(self, fieldname, start, end, startexcl=False, endexcl=False,
boost=1.0, constantscore=True):
self.startdate = start
self.enddate = end
if start:
start = datetime_to_long(start)
if end:
end = datetime_to_long(end)
super(DateRange, self).__init__(fieldname, start, end,
startexcl=startexcl, endexcl=endexcl,
boost=boost,
constantscore=constantscore)
def __repr__(self):
return '%s(%r, %r, %r, %s, %s, boost=%s)' % (self.__class__.__name__,
self.fieldname,
self.startdate, self.enddate,
self.startexcl, self.endexcl,
self.boost)
class Phrase(Query):
"""Matches documents containing a given phrase."""
def __init__(self, fieldname, words, slop=1, boost=1.0, char_ranges=None):
"""
:param fieldname: the field to search.
:param words: a list of words (unicode strings) in the phrase.
:param slop: the number of words allowed between each "word" in the
phrase; the default of 1 means the phrase must match exactly.
        :param boost: a boost factor to apply to the raw score of
documents matched by this query.
:param char_ranges: if a Phrase object is created by the query parser,
it will set this attribute to a list of (startchar, endchar) pairs
corresponding to the words in the phrase
"""
self.fieldname = fieldname
self.words = words
self.slop = slop
self.boost = boost
self.char_ranges = char_ranges
def __eq__(self, other):
return (other and self.__class__ is other.__class__ and
self.fieldname == other.fieldname and self.words == other.words
and self.slop == other.slop and self.boost == other.boost)
def __repr__(self):
return "%s(%r, %r, slop=%s, boost=%f)" % (self.__class__.__name__,
self.fieldname, self.words,
self.slop, self.boost)
def __unicode__(self):
return u('%s:"%s"') % (self.fieldname, u(" ").join(self.words))
__str__ = __unicode__
def __hash__(self):
h = hash(self.fieldname) ^ hash(self.slop) ^ hash(self.boost)
for w in self.words:
h ^= hash(w)
return h
def has_terms(self):
return True
def tokens(self, boost=1.0):
char_ranges = self.char_ranges
startchar = endchar = None
for i, word in enumerate(self.words):
if char_ranges:
startchar, endchar = char_ranges[i]
yield Token(fieldname=self.fieldname, text=word,
boost=boost * self.boost, startchar=startchar,
endchar=endchar, chars=True)
def normalize(self):
if not self.words:
return NullQuery
if len(self.words) == 1:
return Term(self.fieldname, self.words[0])
words = [w for w in self.words if w is not None]
return self.__class__(self.fieldname, words, slop=self.slop,
boost=self.boost, char_ranges=self.char_ranges)
def replace(self, fieldname, oldtext, newtext):
q = copy.copy(self)
if q.fieldname == fieldname:
for i, word in enumerate(q.words):
if word == oldtext:
q.words[i] = newtext
return q
def _and_query(self):
return And([Term(self.fieldname, word) for word in self.words])
def estimate_size(self, ixreader):
return self._and_query().estimate_size(ixreader)
def estimate_min_size(self, ixreader):
return self._and_query().estimate_min_size(ixreader)
def matcher(self, searcher):
fieldname = self.fieldname
reader = searcher.reader()
# Shortcut the query if one of the words doesn't exist.
for word in self.words:
if (fieldname, word) not in reader:
return NullMatcher()
field = searcher.schema[fieldname]
if not field.format or not field.format.supports("positions"):
raise QueryError("Phrase search: %r field has no positions"
% self.fieldname)
# Construct a tree of SpanNear queries representing the words in the
# phrase and return its matcher
from whoosh.spans import SpanNear
q = SpanNear.phrase(fieldname, self.words, slop=self.slop)
m = q.matcher(searcher)
if self.boost != 1.0:
m = WrappingMatcher(m, boost=self.boost)
return m
class Ordered(And):
"""Matches documents containing a list of sub-queries in the given order.
"""
JOINT = " BEFORE "
def matcher(self, searcher):
from whoosh.spans import SpanBefore
return self._matcher(SpanBefore._Matcher, None, searcher)
class Every(Query):
"""A query that matches every document containing any term in a given
field. If you don't specify a field, the query matches every document.
>>> # Match any documents with something in the "path" field
>>> q = Every("path")
    >>> # Match every document
>>> q = Every()
The unfielded form (matching every document) is efficient.
    The fielded form is more efficient than a prefix query with an empty prefix or a
'*' wildcard, but it can still be very slow on large indexes. It requires
the searcher to read the full posting list of every term in the given
field.
Instead of using this query it is much more efficient when you create the
index to include a single term that appears in all documents that have the
field you want to match.
For example, instead of this::
# Match all documents that have something in the "path" field
q = Every("path")
Do this when indexing::
# Add an extra field that indicates whether a document has a path
schema = fields.Schema(path=fields.ID, has_path=fields.ID)
# When indexing, set the "has_path" field based on whether the document
# has anything in the "path" field
writer.add_document(text=text_value1)
writer.add_document(text=text_value2, path=path_value2, has_path="t")
Then to find all documents with a path::
q = Term("has_path", "t")
"""
def __init__(self, fieldname=None, boost=1.0):
"""
:param fieldname: the name of the field to match, or ``None`` or ``*``
to match all documents.
"""
if not fieldname or fieldname == "*":
fieldname = None
self.fieldname = fieldname
self.boost = boost
def __repr__(self):
return "%s(%r, boost=%s)" % (self.__class__.__name__, self.fieldname,
self.boost)
def __eq__(self, other):
return (other and self.__class__ is other.__class__
and self.fieldname == other.fieldname
and self.boost == other.boost)
def __unicode__(self):
return u("%s:*") % self.fieldname
__str__ = __unicode__
def __hash__(self):
return hash(self.fieldname)
def estimate_size(self, ixreader):
return ixreader.doc_count()
def matcher(self, searcher):
fieldname = self.fieldname
reader = searcher.reader()
if fieldname in (None, "", "*"):
# This takes into account deletions
doclist = list(reader.all_doc_ids())
elif (reader.supports_caches()
and reader.fieldcache_available(fieldname)):
# If the reader has a field cache, use it to quickly get the list
# of documents that have a value for this field
fc = reader.fieldcache(self.fieldname)
doclist = [docnum for docnum, ord in fc.ords() if ord != 0]
else:
# This is a hacky hack, but just create an in-memory set of all the
# document numbers of every term in the field. This is SLOOOW for
# large indexes
doclist = set()
for text in searcher.lexicon(fieldname):
pr = searcher.postings(fieldname, text)
doclist.update(pr.all_ids())
doclist = sorted(doclist)
return ListMatcher(doclist, all_weights=self.boost)
class _NullQuery(Query):
"Represents a query that won't match anything."
boost = 1.0
def __call__(self):
return self
def __repr__(self):
return "<%s>" % (self.__class__.__name__,)
def __eq__(self, other):
return isinstance(other, _NullQuery)
def __ne__(self, other):
return not self.__eq__(other)
def __hash__(self):
return id(self)
def __copy__(self):
return self
def __deepcopy__(self, memo):
return self
def field(self):
return None
def estimate_size(self, ixreader):
return 0
def normalize(self):
return self
def simplify(self, ixreader):
return self
def docs(self, searcher):
return []
def matcher(self, searcher):
return NullMatcher()
NullQuery = _NullQuery()
class ConstantScoreQuery(WrappingQuery):
"""Wraps a query and uses a matcher that always gives a constant score
to all matching documents. This is a useful optimization when you don't
care about scores from a certain branch of the query tree because it is
simply acting as a filter. See also the :class:`AndMaybe` query.
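    For example (the wrapped term below is only illustrative):
    >>> ConstantScoreQuery(Term("tag", u"archived"), score=1.0)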
"""
def __init__(self, child, score=1.0):
super(ConstantScoreQuery, self).__init__(child)
self.score = score
def __eq__(self, other):
return (other and self.__class__ is other.__class__
and self.child == other.child and self.score == other.score)
def __hash__(self):
return hash(self.child) ^ hash(self.score)
def _rewrap(self, child):
return self.__class__(child, self.score)
def matcher(self, searcher):
m = self.child.matcher(searcher)
if isinstance(m, NullMatcherClass):
return m
else:
ids = array("I", m.all_ids())
return ListMatcher(ids, all_weights=self.score, term=m.term())
class BinaryQuery(CompoundQuery):
"""Base class for binary queries (queries which are composed of two
sub-queries). Subclasses should set the ``matcherclass`` attribute or
override ``matcher()``, and may also need to override ``normalize()``,
``estimate_size()``, and/or ``estimate_min_size()``.
"""
boost = 1.0
def __init__(self, a, b):
self.a = a
self.b = b
self.subqueries = (a, b)
def __eq__(self, other):
return (other and self.__class__ is other.__class__
and self.a == other.a and self.b == other.b)
def __hash__(self):
return (hash(self.__class__.__name__) ^ hash(self.a) ^ hash(self.b))
def apply(self, fn):
return self.__class__(fn(self.a), fn(self.b))
def field(self):
f = self.a.field()
if self.b.field() == f:
return f
def with_boost(self, boost):
return self.__class__(self.a.with_boost(boost),
self.b.with_boost(boost))
def normalize(self):
a = self.a.normalize()
b = self.b.normalize()
if a is NullQuery and b is NullQuery:
return NullQuery
elif a is NullQuery:
return b
elif b is NullQuery:
return a
return self.__class__(a, b)
def matcher(self, searcher):
return self.matcherclass(self.a.matcher(searcher),
self.b.matcher(searcher))
class Require(BinaryQuery):
"""Binary query returns results from the first query that also appear in
the second query, but only uses the scores from the first query. This lets
you filter results without affecting scores.
"""
JOINT = " REQUIRE "
matcherclass = RequireMatcher
def requires(self):
return self.a.requires() | self.b.requires()
def estimate_size(self, ixreader):
return self.b.estimate_size(ixreader)
def estimate_min_size(self, ixreader):
return self.b.estimate_min_size(ixreader)
def with_boost(self, boost):
return self.__class__(self.a.with_boost(boost), self.b)
def normalize(self):
a = self.a.normalize()
b = self.b.normalize()
if a is NullQuery or b is NullQuery:
return NullQuery
return self.__class__(a, b)
def docs(self, searcher):
return And(self.subqueries).docs(searcher)
class AndMaybe(BinaryQuery):
"""Binary query takes results from the first query. If and only if the
same document also appears in the results from the second query, the score
from the second query will be added to the score from the first query.
"""
JOINT = " ANDMAYBE "
matcherclass = AndMaybeMatcher
def normalize(self):
a = self.a.normalize()
b = self.b.normalize()
if a is NullQuery:
return NullQuery
if b is NullQuery:
return a
return self.__class__(a, b)
def requires(self):
return self.a.requires()
def estimate_min_size(self, ixreader):
return self.subqueries[0].estimate_min_size(ixreader)
def docs(self, searcher):
return self.subqueries[0].docs(searcher)
class AndNot(BinaryQuery):
"""Binary boolean query of the form 'a ANDNOT b', where documents that
match b are removed from the matches for a.
"""
JOINT = " ANDNOT "
matcherclass = AndNotMatcher
def with_boost(self, boost):
return self.__class__(self.a.with_boost(boost), self.b)
def normalize(self):
a = self.a.normalize()
b = self.b.normalize()
if a is NullQuery:
return NullQuery
elif b is NullQuery:
return a
return self.__class__(a, b)
def requires(self):
return self.a.requires()
class Otherwise(BinaryQuery):
"""A binary query that only matches the second clause if the first clause
doesn't match any documents.
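    For example (the terms below are only illustrative), prefer exact matches
    and fall back to a prefix search when there are none:
    >>> Otherwise(Term("content", u"render"), Prefix("content", u"rend"))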
"""
JOINT = " OTHERWISE "
def matcher(self, searcher):
m = self.a.matcher(searcher)
if not m.is_active():
m = self.b.matcher(searcher)
return m
def BooleanQuery(required, should, prohibited):
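    # Illustrative call (the Term values below are hypothetical):
    #   BooleanQuery(required=[Term("content", u"render")],
    #                should=[Term("content", u"shade")],
    #                prohibited=[Term("content", u"network")])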
return AndNot(AndMaybe(And(required), Or(should)),
Or(prohibited)).normalize()
| cscott/wikiserver | whoosh/query.py | Python | gpl-2.0 | 69,633 |
# Copyright (C) 2020 KMEE Informática LTDA
# License AGPL-3 - See http://www.gnu.org/licenses/agpl-3.0.html
SIT_MANIF_PENDENTE = ('pendente', 'Pendente')
SIT_MANIF_CIENTE = ('ciente', 'Ciente da Operação')
SIT_MANIF_CONFIRMADO = ("confirmado", 'Confirmada operação')
SIT_MANIF_DESCONHECIDO = ("desconhecido", "Desconhecimento")
SIT_MANIF_NAO_REALIZADO = ("nao_realizado", 'Não realizado')
SITUACAO_MANIFESTACAO = [
SIT_MANIF_PENDENTE,
SIT_MANIF_CIENTE,
SIT_MANIF_CONFIRMADO,
SIT_MANIF_DESCONHECIDO,
SIT_MANIF_NAO_REALIZADO
]
SIT_NFE_AUTORIZADA = ("1", "Autorizada")
SIT_NFE_CANCELADA = ("2", "Cancelada")
SIT_NFE_DENEGADA = ("3", "Denegada")
SITUACAO_NFE = [
SIT_NFE_AUTORIZADA,
SIT_NFE_CANCELADA,
SIT_NFE_DENEGADA
]
OP_TYPE_ENTRADA = ("0", "Entrada")
OP_TYPE_SAIDA = ("1", "Saída")
OPERATION_TYPE = [
OP_TYPE_ENTRADA,
OP_TYPE_SAIDA
]
| kmee/l10n-brazil | l10n_br_fiscal/constants/mdfe.py | Python | agpl-3.0 | 889 |
#!/usr/bin/env python
import errno
import os
import platform
import sys
BASE_URL = os.getenv('LIBCHROMIUMCONTENT_MIRROR') or \
'https://s3.amazonaws.com/brave-laptop-binaries/libchromiumcontent'
LIBCHROMIUMCONTENT_COMMIT = os.getenv('LIBCHROMIUMCONTENT_COMMIT') or \
'd715734c03b0c892ea66695ae63fc0db9c3fc027'
PLATFORM = {
'cygwin': 'win32',
'darwin': 'darwin',
'linux2': 'linux',
'win32': 'win32',
}[sys.platform]
verbose_mode = False
def get_platform_key():
if os.environ.has_key('MAS_BUILD'):
return 'mas'
else:
return PLATFORM
def get_target_arch():
try:
target_arch_path = os.path.join(__file__, '..', '..', '..', 'vendor',
'brightray', 'vendor', 'download',
'libchromiumcontent', '.target_arch')
with open(os.path.normpath(target_arch_path)) as f:
return f.read().strip()
except IOError as e:
if e.errno != errno.ENOENT:
raise
return 'x64'
def get_chromedriver_version():
return 'v2.21'
def get_env_var(name):
value = os.environ.get('ELECTRON_' + name, '')
if not value:
# TODO Remove ATOM_SHELL_* fallback values
value = os.environ.get('ATOM_SHELL_' + name, '')
if value:
print 'Warning: Use $ELECTRON_' + name + ' instead of $ATOM_SHELL_' + name
return value
def s3_config():
config = (get_env_var('S3_BUCKET'),
get_env_var('S3_ACCESS_KEY'),
get_env_var('S3_SECRET_KEY'))
message = ('Error: Please set the $ELECTRON_S3_BUCKET, '
'$ELECTRON_S3_ACCESS_KEY, and '
'$ELECTRON_S3_SECRET_KEY environment variables')
assert all(len(c) for c in config), message
return config
def enable_verbose_mode():
print 'Running in verbose mode'
global verbose_mode
verbose_mode = True
def is_verbose_mode():
return verbose_mode
def get_zip_name(name, version, suffix=''):
arch = get_target_arch()
if arch == 'arm':
arch += 'v7l'
zip_name = '{0}-{1}-{2}-{3}'.format(name, version, get_platform_key(), arch)
if suffix:
zip_name += '-' + suffix
return zip_name + '.zip'
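# Illustrative result (assumes a 64-bit non-MAS macOS build machine, where
# get_platform_key() is 'darwin' and get_target_arch() falls back to 'x64'):
# get_zip_name('electron', 'v1.0.0') -> 'electron-v1.0.0-darwin-x64.zip'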
| posix4e/electron | script/lib/config.py | Python | mit | 2,118 |
# coding: utf-8
import filecmp
import numbers
import re
from pathlib import Path
import numpy as np
import pytest
from scipy import sparse
from sklearn.datasets import dump_svmlight_file, load_svmlight_file
from sklearn.model_selection import train_test_split
import lightgbm as lgb
from lightgbm.compat import PANDAS_INSTALLED, pd_DataFrame, pd_Series
from .utils import load_breast_cancer
def test_basic(tmp_path):
X_train, X_test, y_train, y_test = train_test_split(*load_breast_cancer(return_X_y=True),
test_size=0.1, random_state=2)
feature_names = [f"Column_{i}" for i in range(X_train.shape[1])]
feature_names[1] = "a" * 1000 # set one name to a value longer than default buffer size
train_data = lgb.Dataset(X_train, label=y_train, feature_name=feature_names)
valid_data = train_data.create_valid(X_test, label=y_test)
params = {
"objective": "binary",
"metric": "auc",
"min_data": 10,
"num_leaves": 15,
"verbose": -1,
"num_threads": 1,
"max_bin": 255,
"gpu_use_dp": True
}
bst = lgb.Booster(params, train_data)
bst.add_valid(valid_data, "valid_1")
for i in range(20):
bst.update()
if i % 10 == 0:
print(bst.eval_train(), bst.eval_valid())
assert train_data.get_feature_name() == feature_names
assert bst.current_iteration() == 20
assert bst.num_trees() == 20
assert bst.num_model_per_iteration() == 1
assert bst.lower_bound() == pytest.approx(-2.9040190126976606)
assert bst.upper_bound() == pytest.approx(3.3182142872462883)
tname = tmp_path / "svm_light.dat"
model_file = tmp_path / "model.txt"
bst.save_model(model_file)
pred_from_matr = bst.predict(X_test)
with open(tname, "w+b") as f:
dump_svmlight_file(X_test, y_test, f)
pred_from_file = bst.predict(tname)
np.testing.assert_allclose(pred_from_matr, pred_from_file)
# check saved model persistence
bst = lgb.Booster(params, model_file=model_file)
assert bst.feature_name() == feature_names
pred_from_model_file = bst.predict(X_test)
# we need to check the consistency of model file here, so test for exact equal
np.testing.assert_array_equal(pred_from_matr, pred_from_model_file)
# check early stopping is working. Make it stop very early, so the scores should be very close to zero
pred_parameter = {"pred_early_stop": True, "pred_early_stop_freq": 5, "pred_early_stop_margin": 1.5}
pred_early_stopping = bst.predict(X_test, **pred_parameter)
# scores likely to be different, but prediction should still be the same
np.testing.assert_array_equal(np.sign(pred_from_matr), np.sign(pred_early_stopping))
# test that shape is checked during prediction
bad_X_test = X_test[:, 1:]
bad_shape_error_msg = "The number of features in data*"
np.testing.assert_raises_regex(lgb.basic.LightGBMError, bad_shape_error_msg,
bst.predict, bad_X_test)
np.testing.assert_raises_regex(lgb.basic.LightGBMError, bad_shape_error_msg,
bst.predict, sparse.csr_matrix(bad_X_test))
np.testing.assert_raises_regex(lgb.basic.LightGBMError, bad_shape_error_msg,
bst.predict, sparse.csc_matrix(bad_X_test))
with open(tname, "w+b") as f:
dump_svmlight_file(bad_X_test, y_test, f)
np.testing.assert_raises_regex(lgb.basic.LightGBMError, bad_shape_error_msg,
bst.predict, tname)
with open(tname, "w+b") as f:
dump_svmlight_file(X_test, y_test, f, zero_based=False)
np.testing.assert_raises_regex(lgb.basic.LightGBMError, bad_shape_error_msg,
bst.predict, tname)
class NumpySequence(lgb.Sequence):
def __init__(self, ndarray, batch_size):
self.ndarray = ndarray
self.batch_size = batch_size
def __getitem__(self, idx):
# The simple implementation is just a single "return self.ndarray[idx]"
# The following is for demo and testing purpose.
if isinstance(idx, numbers.Integral):
return self.ndarray[idx]
elif isinstance(idx, slice):
if not (idx.step is None or idx.step == 1):
raise NotImplementedError("No need to implement, caller will not set step by now")
return self.ndarray[idx.start:idx.stop]
elif isinstance(idx, list):
return self.ndarray[idx]
else:
raise TypeError(f"Sequence Index must be an integer/list/slice, got {type(idx).__name__}")
def __len__(self):
return len(self.ndarray)
def _create_sequence_from_ndarray(data, num_seq, batch_size):
if num_seq == 1:
return NumpySequence(data, batch_size)
nrow = data.shape[0]
seqs = []
seq_size = nrow // num_seq
for start in range(0, nrow, seq_size):
end = min(start + seq_size, nrow)
seq = NumpySequence(data[start:end], batch_size)
seqs.append(seq)
return seqs
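# Illustrative usage of the helpers above (shapes and parameters are arbitrary):
#   X = np.arange(500, dtype=np.float64).reshape((50, 10))
#   seqs = _create_sequence_from_ndarray(X, num_seq=2, batch_size=8)
#   ds = lgb.Dataset(seqs, label=np.arange(50, dtype=np.float64)).construct()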
@pytest.mark.parametrize('sample_count', [11, 100, None])
@pytest.mark.parametrize('batch_size', [3, None])
@pytest.mark.parametrize('include_0_and_nan', [False, True])
@pytest.mark.parametrize('num_seq', [1, 3])
def test_sequence(tmpdir, sample_count, batch_size, include_0_and_nan, num_seq):
params = {'bin_construct_sample_cnt': sample_count}
nrow = 50
half_nrow = nrow // 2
ncol = 11
data = np.arange(nrow * ncol, dtype=np.float64).reshape((nrow, ncol))
if include_0_and_nan:
# whole col
data[:, 0] = 0
data[:, 1] = np.nan
# half col
data[:half_nrow, 3] = 0
data[:half_nrow, 2] = np.nan
data[half_nrow:-2, 4] = 0
data[:half_nrow, 4] = np.nan
X = data[:, :-1]
Y = data[:, -1]
npy_bin_fname = tmpdir / 'data_from_npy.bin'
seq_bin_fname = tmpdir / 'data_from_seq.bin'
# Create dataset from numpy array directly.
ds = lgb.Dataset(X, label=Y, params=params)
ds.save_binary(npy_bin_fname)
# Create dataset using Sequence.
seqs = _create_sequence_from_ndarray(X, num_seq, batch_size)
seq_ds = lgb.Dataset(seqs, label=Y, params=params)
seq_ds.save_binary(seq_bin_fname)
assert filecmp.cmp(npy_bin_fname, seq_bin_fname)
# Test for validation set.
# Select some random rows as valid data.
rng = np.random.default_rng() # Pass integer to set seed when needed.
valid_idx = (rng.random(10) * nrow).astype(np.int32)
valid_data = data[valid_idx, :]
valid_X = valid_data[:, :-1]
valid_Y = valid_data[:, -1]
valid_npy_bin_fname = tmpdir / 'valid_data_from_npy.bin'
valid_seq_bin_fname = tmpdir / 'valid_data_from_seq.bin'
valid_seq2_bin_fname = tmpdir / 'valid_data_from_seq2.bin'
valid_ds = lgb.Dataset(valid_X, label=valid_Y, params=params, reference=ds)
valid_ds.save_binary(valid_npy_bin_fname)
# From Dataset constructor, with dataset from numpy array.
valid_seqs = _create_sequence_from_ndarray(valid_X, num_seq, batch_size)
valid_seq_ds = lgb.Dataset(valid_seqs, label=valid_Y, params=params, reference=ds)
valid_seq_ds.save_binary(valid_seq_bin_fname)
assert filecmp.cmp(valid_npy_bin_fname, valid_seq_bin_fname)
# From Dataset.create_valid, with dataset from sequence.
valid_seq_ds2 = seq_ds.create_valid(valid_seqs, label=valid_Y, params=params)
valid_seq_ds2.save_binary(valid_seq2_bin_fname)
assert filecmp.cmp(valid_npy_bin_fname, valid_seq2_bin_fname)
@pytest.mark.parametrize('num_seq', [1, 2])
def test_sequence_get_data(num_seq):
nrow = 20
ncol = 11
data = np.arange(nrow * ncol, dtype=np.float64).reshape((nrow, ncol))
X = data[:, :-1]
Y = data[:, -1]
seqs = _create_sequence_from_ndarray(data=X, num_seq=num_seq, batch_size=6)
seq_ds = lgb.Dataset(seqs, label=Y, params=None, free_raw_data=False).construct()
assert seq_ds.get_data() == seqs
used_indices = np.random.choice(np.arange(nrow), nrow // 3, replace=False)
subset_data = seq_ds.subset(used_indices).construct()
np.testing.assert_array_equal(subset_data.get_data(), X[sorted(used_indices)])
def test_chunked_dataset():
X_train, X_test, y_train, y_test = train_test_split(*load_breast_cancer(return_X_y=True), test_size=0.1,
random_state=2)
chunk_size = X_train.shape[0] // 10 + 1
X_train = [X_train[i * chunk_size:(i + 1) * chunk_size, :] for i in range(X_train.shape[0] // chunk_size + 1)]
X_test = [X_test[i * chunk_size:(i + 1) * chunk_size, :] for i in range(X_test.shape[0] // chunk_size + 1)]
train_data = lgb.Dataset(X_train, label=y_train, params={"bin_construct_sample_cnt": 100})
valid_data = train_data.create_valid(X_test, label=y_test, params={"bin_construct_sample_cnt": 100})
train_data.construct()
valid_data.construct()
def test_chunked_dataset_linear():
X_train, X_test, y_train, y_test = train_test_split(*load_breast_cancer(return_X_y=True), test_size=0.1,
random_state=2)
chunk_size = X_train.shape[0] // 10 + 1
X_train = [X_train[i * chunk_size:(i + 1) * chunk_size, :] for i in range(X_train.shape[0] // chunk_size + 1)]
X_test = [X_test[i * chunk_size:(i + 1) * chunk_size, :] for i in range(X_test.shape[0] // chunk_size + 1)]
params = {"bin_construct_sample_cnt": 100, 'linear_tree': True}
train_data = lgb.Dataset(X_train, label=y_train, params=params)
valid_data = train_data.create_valid(X_test, label=y_test, params=params)
train_data.construct()
valid_data.construct()
def test_subset_group():
rank_example_dir = Path(__file__).absolute().parents[2] / 'examples' / 'lambdarank'
X_train, y_train = load_svmlight_file(str(rank_example_dir / 'rank.train'))
q_train = np.loadtxt(str(rank_example_dir / 'rank.train.query'))
lgb_train = lgb.Dataset(X_train, y_train, group=q_train)
assert len(lgb_train.get_group()) == 201
subset = lgb_train.subset(list(range(10))).construct()
subset_group = subset.get_group()
assert len(subset_group) == 2
assert subset_group[0] == 1
assert subset_group[1] == 9
def test_add_features_throws_if_num_data_unequal():
X1 = np.random.random((100, 1))
X2 = np.random.random((10, 1))
d1 = lgb.Dataset(X1).construct()
d2 = lgb.Dataset(X2).construct()
with pytest.raises(lgb.basic.LightGBMError):
d1.add_features_from(d2)
def test_add_features_throws_if_datasets_unconstructed():
X1 = np.random.random((100, 1))
X2 = np.random.random((100, 1))
with pytest.raises(ValueError):
d1 = lgb.Dataset(X1)
d2 = lgb.Dataset(X2)
d1.add_features_from(d2)
with pytest.raises(ValueError):
d1 = lgb.Dataset(X1).construct()
d2 = lgb.Dataset(X2)
d1.add_features_from(d2)
with pytest.raises(ValueError):
d1 = lgb.Dataset(X1)
d2 = lgb.Dataset(X2).construct()
d1.add_features_from(d2)
def test_add_features_equal_data_on_alternating_used_unused(tmp_path):
X = np.random.random((100, 5))
X[:, [1, 3]] = 0
names = [f'col_{i}' for i in range(5)]
for j in range(1, 5):
d1 = lgb.Dataset(X[:, :j], feature_name=names[:j]).construct()
d2 = lgb.Dataset(X[:, j:], feature_name=names[j:]).construct()
d1.add_features_from(d2)
d1name = tmp_path / "d1.txt"
d1._dump_text(d1name)
d = lgb.Dataset(X, feature_name=names).construct()
dname = tmp_path / "d.txt"
d._dump_text(dname)
with open(d1name, 'rt') as d1f:
d1txt = d1f.read()
with open(dname, 'rt') as df:
dtxt = df.read()
assert dtxt == d1txt
def test_add_features_same_booster_behaviour(tmp_path):
X = np.random.random((100, 5))
X[:, [1, 3]] = 0
names = [f'col_{i}' for i in range(5)]
for j in range(1, 5):
d1 = lgb.Dataset(X[:, :j], feature_name=names[:j]).construct()
d2 = lgb.Dataset(X[:, j:], feature_name=names[j:]).construct()
d1.add_features_from(d2)
d = lgb.Dataset(X, feature_name=names).construct()
y = np.random.random(100)
d1.set_label(y)
d.set_label(y)
b1 = lgb.Booster(train_set=d1)
b = lgb.Booster(train_set=d)
for k in range(10):
b.update()
b1.update()
dname = tmp_path / "d.txt"
d1name = tmp_path / "d1.txt"
b1.save_model(d1name)
b.save_model(dname)
with open(dname, 'rt') as df:
dtxt = df.read()
with open(d1name, 'rt') as d1f:
d1txt = d1f.read()
assert dtxt == d1txt
def test_add_features_from_different_sources():
pd = pytest.importorskip("pandas")
n_row = 100
n_col = 5
X = np.random.random((n_row, n_col))
xxs = [X, sparse.csr_matrix(X), pd.DataFrame(X)]
names = [f'col_{i}' for i in range(n_col)]
seq = _create_sequence_from_ndarray(X, 1, 30)
seq_ds = lgb.Dataset(seq, feature_name=names, free_raw_data=False).construct()
npy_list_ds = lgb.Dataset([X[:n_row // 2, :], X[n_row // 2:, :]],
feature_name=names, free_raw_data=False).construct()
immergeable_dds = [seq_ds, npy_list_ds]
for x_1 in xxs:
# test that method works even with free_raw_data=True
d1 = lgb.Dataset(x_1, feature_name=names, free_raw_data=True).construct()
d2 = lgb.Dataset(x_1, feature_name=names, free_raw_data=True).construct()
d1.add_features_from(d2)
assert d1.data is None
# test that method works but sets raw data to None in case of immergeable data types
d1 = lgb.Dataset(x_1, feature_name=names, free_raw_data=False).construct()
for d2 in immergeable_dds:
d1.add_features_from(d2)
assert d1.data is None
# test that method works for different data types
d1 = lgb.Dataset(x_1, feature_name=names, free_raw_data=False).construct()
res_feature_names = [name for name in names]
for idx, x_2 in enumerate(xxs, 2):
original_type = type(d1.get_data())
d2 = lgb.Dataset(x_2, feature_name=names, free_raw_data=False).construct()
d1.add_features_from(d2)
assert isinstance(d1.get_data(), original_type)
assert d1.get_data().shape == (n_row, n_col * idx)
res_feature_names += [f'D{idx}_{name}' for name in names]
assert d1.feature_name == res_feature_names
def test_cegb_affects_behavior(tmp_path):
X = np.random.random((100, 5))
X[:, [1, 3]] = 0
y = np.random.random(100)
names = [f'col_{i}' for i in range(5)]
ds = lgb.Dataset(X, feature_name=names).construct()
ds.set_label(y)
base = lgb.Booster(train_set=ds)
for k in range(10):
base.update()
basename = tmp_path / "basename.txt"
base.save_model(basename)
with open(basename, 'rt') as f:
basetxt = f.read()
# Set extremely harsh penalties, so CEGB will block most splits.
cases = [{'cegb_penalty_feature_coupled': [50, 100, 10, 25, 30]},
{'cegb_penalty_feature_lazy': [1, 2, 3, 4, 5]},
{'cegb_penalty_split': 1}]
for case in cases:
booster = lgb.Booster(train_set=ds, params=case)
for k in range(10):
booster.update()
casename = tmp_path / "casename.txt"
booster.save_model(casename)
with open(casename, 'rt') as f:
casetxt = f.read()
assert basetxt != casetxt
def test_cegb_scaling_equalities(tmp_path):
X = np.random.random((100, 5))
X[:, [1, 3]] = 0
y = np.random.random(100)
names = [f'col_{i}' for i in range(5)]
ds = lgb.Dataset(X, feature_name=names).construct()
ds.set_label(y)
# Compare pairs of penalties, to ensure scaling works as intended
pairs = [({'cegb_penalty_feature_coupled': [1, 2, 1, 2, 1]},
{'cegb_penalty_feature_coupled': [0.5, 1, 0.5, 1, 0.5], 'cegb_tradeoff': 2}),
({'cegb_penalty_feature_lazy': [0.01, 0.02, 0.03, 0.04, 0.05]},
{'cegb_penalty_feature_lazy': [0.005, 0.01, 0.015, 0.02, 0.025], 'cegb_tradeoff': 2}),
({'cegb_penalty_split': 1},
{'cegb_penalty_split': 2, 'cegb_tradeoff': 0.5})]
for (p1, p2) in pairs:
booster1 = lgb.Booster(train_set=ds, params=p1)
booster2 = lgb.Booster(train_set=ds, params=p2)
for k in range(10):
booster1.update()
booster2.update()
p1name = tmp_path / "p1.txt"
# Reset booster1's parameters to p2, so the parameter section of the file matches.
booster1.reset_parameter(p2)
booster1.save_model(p1name)
with open(p1name, 'rt') as f:
p1txt = f.read()
p2name = tmp_path / "p2.txt"
booster2.save_model(p2name)
with open(p2name, 'rt') as f:
p2txt = f.read()
assert p1txt == p2txt
def test_consistent_state_for_dataset_fields():
def check_asserts(data):
np.testing.assert_allclose(data.label, data.get_label())
np.testing.assert_allclose(data.label, data.get_field('label'))
assert not np.isnan(data.label[0])
assert not np.isinf(data.label[1])
np.testing.assert_allclose(data.weight, data.get_weight())
np.testing.assert_allclose(data.weight, data.get_field('weight'))
assert not np.isnan(data.weight[0])
assert not np.isinf(data.weight[1])
np.testing.assert_allclose(data.init_score, data.get_init_score())
np.testing.assert_allclose(data.init_score, data.get_field('init_score'))
assert not np.isnan(data.init_score[0])
assert not np.isinf(data.init_score[1])
assert np.all(np.isclose([data.label[0], data.weight[0], data.init_score[0]],
data.label[0]))
assert data.label[1] == pytest.approx(data.weight[1])
assert data.feature_name == data.get_feature_name()
X, y = load_breast_cancer(return_X_y=True)
sequence = np.ones(y.shape[0])
sequence[0] = np.nan
sequence[1] = np.inf
    feature_names = [f'f{i}' for i in range(X.shape[1])]
lgb_data = lgb.Dataset(X, sequence,
weight=sequence, init_score=sequence,
feature_name=feature_names).construct()
check_asserts(lgb_data)
lgb_data = lgb.Dataset(X, y).construct()
lgb_data.set_label(sequence)
lgb_data.set_weight(sequence)
lgb_data.set_init_score(sequence)
lgb_data.set_feature_name(feature_names)
check_asserts(lgb_data)
def test_choose_param_value():
original_params = {
"local_listen_port": 1234,
"port": 2222,
"metric": "auc",
"num_trees": 81
}
# should resolve duplicate aliases, and prefer the main parameter
params = lgb.basic._choose_param_value(
main_param_name="local_listen_port",
params=original_params,
default_value=5555
)
assert params["local_listen_port"] == 1234
assert "port" not in params
# should choose a value from an alias and set that value on main param
# if only an alias is used
params = lgb.basic._choose_param_value(
main_param_name="num_iterations",
params=params,
default_value=17
)
assert params["num_iterations"] == 81
assert "num_trees" not in params
# should use the default if main param and aliases are missing
params = lgb.basic._choose_param_value(
main_param_name="learning_rate",
params=params,
default_value=0.789
)
assert params["learning_rate"] == 0.789
# all changes should be made on copies and not modify the original
expected_params = {
"local_listen_port": 1234,
"port": 2222,
"metric": "auc",
"num_trees": 81
}
assert original_params == expected_params
@pytest.mark.parametrize('collection', ['1d_np', '2d_np', 'pd_float', 'pd_str', '1d_list', '2d_list'])
@pytest.mark.parametrize('dtype', [np.float32, np.float64])
def test_list_to_1d_numpy(collection, dtype):
collection2y = {
'1d_np': np.random.rand(10),
'2d_np': np.random.rand(10, 1),
'pd_float': np.random.rand(10),
'pd_str': ['a', 'b'],
'1d_list': [1] * 10,
'2d_list': [[1], [2]],
}
y = collection2y[collection]
if collection.startswith('pd'):
if not PANDAS_INSTALLED:
pytest.skip('pandas is not installed')
else:
y = pd_Series(y)
if isinstance(y, np.ndarray) and len(y.shape) == 2:
with pytest.warns(UserWarning, match='column-vector'):
lgb.basic.list_to_1d_numpy(y)
return
elif isinstance(y, list) and isinstance(y[0], list):
with pytest.raises(TypeError):
lgb.basic.list_to_1d_numpy(y)
return
elif isinstance(y, pd_Series) and y.dtype == object:
with pytest.raises(ValueError):
lgb.basic.list_to_1d_numpy(y)
return
result = lgb.basic.list_to_1d_numpy(y, dtype=dtype)
assert result.size == 10
assert result.dtype == dtype
@pytest.mark.parametrize('init_score_type', ['array', 'dataframe', 'list'])
def test_init_score_for_multiclass_classification(init_score_type):
init_score = [[i * 10 + j for j in range(3)] for i in range(10)]
if init_score_type == 'array':
init_score = np.array(init_score)
elif init_score_type == 'dataframe':
if not PANDAS_INSTALLED:
pytest.skip('Pandas is not installed.')
init_score = pd_DataFrame(init_score)
data = np.random.rand(10, 2)
ds = lgb.Dataset(data, init_score=init_score).construct()
np.testing.assert_equal(ds.get_field('init_score'), init_score)
np.testing.assert_equal(ds.init_score, init_score)
def test_smoke_custom_parser(tmp_path):
data_path = Path(__file__).absolute().parents[2] / 'examples' / 'binary_classification' / 'binary.train'
parser_config_file = tmp_path / 'parser.ini'
with open(parser_config_file, 'w') as fout:
fout.write('{"className": "dummy", "id": "1"}')
data = lgb.Dataset(data_path, params={"parser_config_file": parser_config_file})
with pytest.raises(lgb.basic.LightGBMError,
match="Cannot find parser class 'dummy', please register first or check config format"):
data.construct()
def test_param_aliases():
aliases = lgb.basic._ConfigAliases.aliases
assert isinstance(aliases, dict)
assert len(aliases) > 100
assert all(isinstance(i, set) for i in aliases.values())
assert all(len(i) >= 1 for i in aliases.values())
assert all(k in v for k, v in aliases.items())
assert lgb.basic._ConfigAliases.get('config', 'task') == {'config', 'config_file', 'task', 'task_type'}
def _bad_gradients(preds, _):
return np.random.randn(len(preds) + 1), np.random.rand(len(preds) + 1)
def _good_gradients(preds, _):
return np.random.randn(*preds.shape), np.random.rand(*preds.shape)
def test_custom_objective_safety():
nrows = 100
X = np.random.randn(nrows, 5)
y_binary = np.arange(nrows) % 2
classes = [0, 1, 2]
nclass = len(classes)
y_multiclass = np.arange(nrows) % nclass
ds_binary = lgb.Dataset(X, y_binary).construct()
ds_multiclass = lgb.Dataset(X, y_multiclass).construct()
bad_bst_binary = lgb.Booster({'objective': "none"}, ds_binary)
good_bst_binary = lgb.Booster({'objective': "none"}, ds_binary)
bad_bst_multi = lgb.Booster({'objective': "none", "num_class": nclass}, ds_multiclass)
good_bst_multi = lgb.Booster({'objective': "none", "num_class": nclass}, ds_multiclass)
good_bst_binary.update(fobj=_good_gradients)
with pytest.raises(ValueError, match=re.escape("number of models per one iteration (1)")):
bad_bst_binary.update(fobj=_bad_gradients)
good_bst_multi.update(fobj=_good_gradients)
with pytest.raises(ValueError, match=re.escape(f"number of models per one iteration ({nclass})")):
bad_bst_multi.update(fobj=_bad_gradients)
@pytest.mark.parametrize('dtype', [np.float32, np.float64])
def test_no_copy_when_single_float_dtype_dataframe(dtype):
pd = pytest.importorskip('pandas')
X = np.random.rand(10, 2).astype(dtype)
df = pd.DataFrame(X)
# feature names are required to not make a copy (rename makes a copy)
feature_name = ['x1', 'x2']
built_data = lgb.basic._data_from_pandas(df, feature_name, None, None)[0]
assert built_data.dtype == dtype
assert np.shares_memory(X, built_data)
| microsoft/LightGBM | tests/python_package_test/test_basic.py | Python | mit | 24,734 |
import pandas as pd
import matplotlib.pyplot as plt
batch_result = [[13.36782193183899, 0.33670586840811867, 0.91220000000000001], [10.780983924865723, 0.3779737023271133, 0.90410000000000001]]
cols = ['Time', 'Loss', 'Accuracy']
idx = ['128', '256']
df = pd.DataFrame(batch_result, index=idx, columns=cols)
print(df)
df.Time.plot(kind='bar', color='r')
df.Loss.plot(kind='bar', color='b')
df.Accuracy.plot(kind='bar', color='g')
#plt.plot(idx, df['Time'], 'bo', label='Real data')
#plt.plot(X, pred_Y, 'ro', label='Predicted data')
#plt.xlabel('Standardized X')
#plt.ylabel('Y')
plt.legend()
plt.show()
'''
cols = ['Time', 'Avg. Loss', 'Accuracy']
idx = ['128', '256']
df = pd.DataFrame(batch_result, index=idx, columns=cols)
print(df)
X, Y = all_xs, all_ys
plt.plot(X, Y, 'bo', label='Real data')
plt.plot(X, pred_Y, 'ro', label='Predicted data')
plt.xlabel('Standardized X')
plt.ylabel('Y')
''' | shanaka-desoysa/tensorflow | hello_world/plot.py | Python | apache-2.0 | 900 |
from __future__ import absolute_import
from Components.Converter.Converter import Converter
from Components.Element import cached
class ValueRange(Converter, object):
def __init__(self, arg):
Converter.__init__(self, arg)
(self.lower, self.upper) = [int(x) for x in arg.split(',')]
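		# arg is expected to be a string of the form "lower,upper", e.g. "0,100"
		# (the example values here are illustrative).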
@cached
def getBoolean(self):
try:
sourcevalue = int(self.source.value)
except:
sourcevalue = self.source.value
if self.lower <= self.upper:
return self.lower <= sourcevalue <= self.upper
else:
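			# Inverted bounds (lower > upper) describe a wrap-around range: match
			# everything except values strictly between upper and lower.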
return not (self.upper < sourcevalue < self.lower)
boolean = property(getBoolean)
| atvcaptain/enigma2 | lib/python/Components/Converter/ValueRange.py | Python | gpl-2.0 | 589 |
from pygame import Surface
from thorpy.miscgui import constants, functions, style
from thorpy.painting.painters.optionnal.illuminer import IlluminerAlphaExcept as Illuminer
from thorpy.painting import pilgraphics
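# Helper functions used by hoverable elements to build their "hovered" image:
# either by re-rendering the element's title with a highlight/illumination
# color, or by redrawing the element with another painter.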
def get_already_illuminated_title(hoverable, state, color=None):
color = style.COLOR_TXT_HOVER if color is None else color
fusionner = hoverable._states[state].fusionner
old_color_target = fusionner.painter.color_target
old_color_bulk = fusionner.painter.color_bulk
fusionner.painter.color_target = color
fusionner.painter.color_bulk = color
img = fusionner.get_hover_fusion()
fusionner.painter.color_target = old_color_target
fusionner.painter.color_bulk = old_color_bulk
return img
def get_not_already_illuminated_title(hoverable, state, color=None):
color = style.COLOR_TXT_HOVER if color is None else color
    # Produce the illumination, then blit it onto the non-fused painter image.
fusionner = hoverable._states[state].fusionner
target_img = hoverable.get_image(state)
r = target_img.get_rect()
#the shadow will be larger in order to make free space for fadeout.
shadow_radius = 2
r.inflate_ip(2*shadow_radius, 2*shadow_radius)
img = Surface(r.size)
img.fill((255, 255, 255, 255))
img.blit(target_img, (shadow_radius, shadow_radius))
shadow = pilgraphics.get_shadow(img,
radius=shadow_radius,
black=255,
alpha_factor=0.95,
decay_mode="exponential",
color=color)
shadow = shadow.subsurface(shadow.get_rect().inflate((-2*shadow_radius, -2*shadow_radius)))
return shadow
def get_illuminated_title(hoverable, state, color=None):
if is_illuminated(hoverable, state):
return get_already_illuminated_title(hoverable, state, color)
else:
return get_not_already_illuminated_title(hoverable, state, color)
def get_highlighted_title(hoverable, state, color=None):
color = style.COLOR_TXT_HOVER if color is None else color
return hoverable._states[state].fusionner.get_hover_fusion(color=color)
def get_all_highlighted_title(hoverable, state, colors):
color_text, color_bulk = colors
if not color_text:
color_text=style.COLOR_TXT_HOVER
if not color_bulk:
color_bulk=style.COLOR_BULK_HOVER
fusionner = hoverable._states[state].fusionner
old_color_painter = None
if hasattr(fusionner, "painter"):
if hasattr(fusionner.painter, "color"):
old_color_painter = fusionner.painter.color
fusionner.painter.color = color_bulk
illuminer = is_illuminated(hoverable, state)
if illuminer:
old_color_target = fusionner.painter.color_target
old_color_bulk = fusionner.painter.color_bulk
fusionner.painter.color_target = color_text
fusionner.painter.color_bulk = color_text
img = fusionner.get_hover_fusion()
if old_color_painter:
fusionner.painter.color = old_color_painter
if illuminer:
fusionner.painter.color_target = old_color_target
fusionner.painter.color_bulk = old_color_bulk
return img
def is_illuminated(hoverable, state):
fusionner = hoverable._states[state].fusionner
if hasattr(fusionner, "painter"):
if isinstance(fusionner.painter, Illuminer):
return True
return False
def get_img_highlighted(hoverable, state, color=None):
if is_illuminated(hoverable, state):
return get_illuminated_title(hoverable, state, color)
else:
return get_highlighted_title(hoverable, state, color)
def get_img_painter(hoverable, state, color=None):
color = style.COLOR_TXT_HOVER if color is None else color
fusionner = hoverable._states[state].fusionner
if hasattr(fusionner, "painter"):
fusionner.painter.hovered=True
illuminer = is_illuminated(hoverable, state)
if illuminer:
old_color_target = fusionner.painter.color_target
old_color_bulk = fusionner.painter.color_bulk
fusionner.painter.color_target = color
fusionner.painter.color_bulk = color
img = fusionner.get_hover_fusion()
if illuminer:
fusionner.painter.color_target = old_color_target
fusionner.painter.color_bulk = old_color_bulk
if hasattr(fusionner, "painter"):
fusionner.painter.hovered=False
return img
def get_img_redraw(hoverable, state, params):
paint_params = params["params"]
paint_params["size"] = hoverable.get_ghost_size()
painter = functions.obtain_valid_painter(params["painter"],
**paint_params)
return painter.get_fusion(hoverable.get_title(), True)
| YannThorimbert/ThorPy-1.4.3 | thorpy/elements/_hoverutils/_hovergetter.py | Python | mit | 4,792 |
# (C) British Crown Copyright 2016, Met Office
#
# This file is part of Iris.
#
# Iris is free software: you can redistribute it and/or modify it under
# the terms of the GNU Lesser General Public License as published by the
# Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Iris is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with Iris. If not, see <http://www.gnu.org/licenses/>.
from __future__ import (absolute_import, division, print_function)
from six.moves import (filter, input, map, range, zip) # noqa
# Import Iris tests first so that some things can be initialised before
# importing anything else.
import iris.tests as tests
from . import extest_util
with extest_util.add_examples_to_path():
import coriolis_plot
class TestCoriolisPlot(tests.GraphicsTest):
"""Test the Coriolis Plot example code."""
def test_coriolis_plot(self):
with extest_util.show_replaced_by_check_graphic(self):
coriolis_plot.main()
if __name__ == '__main__':
tests.main()
| LukeC92/iris | docs/iris/example_tests/test_coriolis_plot.py | Python | lgpl-3.0 | 1,333 |
from django.test import TestCase
from django.core.urlresolvers import reverse
from common.factories import UserFactory
import contacts as contact_constants
from contacts import factories
from contacts import models
class ContactModelTests(TestCase):
def setUp(self):
self.book = factories.BookFactory.create()
self.contact = factories.ContactFactory.create(
name="Philip James",
book=self.book,
)
def test_contact_name(self):
"""String repr of contact should be name."""
self.assertEqual(self.contact.name, str(self.contact))
def test_contact_url(self):
expected_url = reverse('contacts-view', kwargs={
'pk': self.contact.id,
'book': self.book.id,
})
self.assertEqual(self.contact.get_absolute_url(), expected_url)
def test_contact_last_contacted(self):
log = factories.LogFactory.create(contact=self.contact)
self.contact.update_last_contact_from_log(log)
self.assertEqual(self.contact.last_contacted(), log.created)
def test_contact_can_be_viewed_by(self):
bookowner = factories.BookOwnerFactory.create(book=self.book)
user = bookowner.user
self.assertTrue(self.contact.can_be_viewed_by(user))
def test_contact_can_be_edited_by(self):
bookowner = factories.BookOwnerFactory.create(book=self.book)
user = bookowner.user
self.assertTrue(self.contact.can_be_edited_by(user))
def test_contact_cant_be_viewed_by_bad(self):
user = UserFactory.create(username='asheesh')
self.assertFalse(self.contact.can_be_viewed_by(user))
def test_contact_cant_be_edited_by_bad(self):
user = UserFactory.create(username='asheesh')
self.assertFalse(self.contact.can_be_edited_by(user))
def test_get_contacts_for_user(self):
bookowner = factories.BookOwnerFactory.create(book=self.book)
user = bookowner.user
self.assertEqual(
[self.contact],
list(models.Contact.objects.get_contacts_for_user(user)),
)
def test_get_contacts_for_user_bad_user(self):
user = UserFactory.create(username="nicholle")
self.assertFalse(
list(models.Contact.objects.get_contacts_for_user(user)),
)
def test_preferred_address_with_preferred(self):
field = factories.ContactFieldFactory(
contact=self.contact,
kind=contact_constants.FIELD_TYPE_ADDRESS,
value='1600 Pennsylvania Ave.',
preferred=True,
)
self.assertEqual(self.contact.preferred_address, field.value)
def test_preferred_address_without_preferred(self):
field = factories.ContactFieldFactory(
contact=self.contact,
kind=contact_constants.FIELD_TYPE_ADDRESS,
value='1600 Pennsylvania Ave.',
)
self.assertEqual(self.contact.preferred_address, field.value)
def test_preferred_address_no_address(self):
self.assertEqual(self.contact.preferred_address, '')
def test_preferred_email_with_preferred(self):
field = factories.ContactFieldFactory(
contact=self.contact,
kind=contact_constants.FIELD_TYPE_EMAIL,
value='1600 Pennsylvania Ave.',
preferred=True,
)
self.assertEqual(self.contact.preferred_email, field.value)
def test_preferred_email_without_preferred(self):
field = factories.ContactFieldFactory(
contact=self.contact,
kind=contact_constants.FIELD_TYPE_EMAIL,
value='1600 Pennsylvania Ave.',
)
self.assertEqual(self.contact.preferred_email, field.value)
def test_preferred_email_no_email(self):
self.assertEqual(self.contact.preferred_email, '')
def test_preferred_phone_with_preferred(self):
field = factories.ContactFieldFactory(
contact=self.contact,
kind=contact_constants.FIELD_TYPE_PHONE,
value='1600 Pennsylvania Ave.',
preferred=True,
)
self.assertEqual(self.contact.preferred_phone, field.value)
def test_preferred_phone_without_preferred(self):
field = factories.ContactFieldFactory(
contact=self.contact,
kind=contact_constants.FIELD_TYPE_PHONE,
value='1600 Pennsylvania Ave.',
)
self.assertEqual(self.contact.preferred_phone, field.value)
def test_preferred_phone_no_phone(self):
self.assertEqual(self.contact.preferred_phone, '')
class TagModelTests(TestCase):
def setUp(self):
self.book = factories.BookFactory.create()
self.contact = factories.ContactFactory.create(
name="Philip James",
book=self.book,
)
self.tag = factories.TagFactory.create(
tag='Family',
book=self.book,
)
def test_tag_name(self):
self.assertEqual(self.tag.tag, str(self.tag))
def test_get_tags_for_user(self):
bookowner = factories.BookOwnerFactory.create(book=self.book)
user = bookowner.user
self.assertEqual(
[self.tag],
list(models.Tag.objects.get_tags_for_user(user)),
)
def test_tag_can_be_viewed_by(self):
bookowner = factories.BookOwnerFactory.create(book=self.book)
user = bookowner.user
self.assertTrue(self.tag.can_be_viewed_by(user))
def test_tag_can_be_edited_by(self):
bookowner = factories.BookOwnerFactory.create(book=self.book)
user = bookowner.user
self.assertTrue(self.tag.can_be_edited_by(user))
def test_tag_cant_be_viewed_by_bad(self):
user = UserFactory.create(username='asheesh')
self.assertFalse(self.tag.can_be_viewed_by(user))
def test_tag_cant_be_edited_by_bad(self):
user = UserFactory.create(username='asheesh')
self.assertFalse(self.tag.can_be_edited_by(user))
def test_corrected_color(self):
self.assertEqual(self.tag.corrected_color, '#123456')
self.tag.color = '#c0ffee'
self.assertEqual(self.tag.corrected_color, '#c0ffee')
self.tag.color = 'c0ffee'
self.assertEqual(self.tag.corrected_color, '#c0ffee')
class BookModelTests(TestCase):
def setUp(self):
self.book = factories.BookFactory.create(name="James Family")
def test_book_name(self):
self.assertEqual(self.book.name, str(self.book))
def test_book_can_be_viewed_by(self):
bookowner = factories.BookOwnerFactory.create(book=self.book)
user = bookowner.user
self.assertTrue(self.book.can_be_viewed_by(user))
def test_book_can_be_edited_by(self):
bookowner = factories.BookOwnerFactory.create(book=self.book)
user = bookowner.user
self.assertTrue(self.book.can_be_edited_by(user))
def test_book_cant_be_viewed_by_bad(self):
user = UserFactory.create(username='asheesh')
self.assertFalse(self.book.can_be_viewed_by(user))
def test_book_cant_be_edited_by_bad(self):
user = UserFactory.create(username='asheesh')
self.assertFalse(self.book.can_be_edited_by(user))
class BookOwnerModelTests(TestCase):
def setUp(self):
self.book = factories.BookFactory.create(name="James Family")
self.user = UserFactory(username="phildini")
def test_book_owner_repr(self):
bookowner = factories.BookOwnerFactory(book=self.book, user=self.user)
expected = "{} is an owner of {}".format(self.user, self.book)
self.assertEqual(str(bookowner), expected)
class LogEntryModelTests(TestCase):
def setUp(self):
self.book = factories.BookFactory.create(name="James Family")
self.user = UserFactory(username="phildini")
self.bookowner = factories.BookOwnerFactory(book=self.book, user=self.user)
self.contact = factories.ContactFactory.create(
name="Philip James",
book=self.book,
)
self.log = factories.LogFactory.create(contact=self.contact)
self.contact.update_last_contact_from_log(self.log)
def test_tag_repr(self):
expected = "Log on %s" % (self.contact)
self.assertEqual(str(self.log), expected)
def test_log_can_be_viewed_by(self):
bookowner = factories.BookOwnerFactory.create(book=self.book)
user = bookowner.user
self.assertTrue(self.log.can_be_viewed_by(user))
def test_log_can_be_edited_by(self):
bookowner = factories.BookOwnerFactory.create(book=self.book)
user = bookowner.user
self.assertTrue(self.log.can_be_edited_by(user))
def test_log_cant_be_viewed_by_bad(self):
user = UserFactory.create(username='asheesh')
self.assertFalse(self.log.can_be_viewed_by(user))
def test_log_cant_be_edited_by_bad(self):
user = UserFactory.create(username='asheesh')
self.assertFalse(self.log.can_be_edited_by(user))
def test_creating_log_updates_contact(self):
self.assertTrue(self.contact.last_contact)
self.contact.update_last_contact_from_log(self.log)
self.assertEqual(self.log.created, self.contact.last_contact)
class ContactFieldModelTests(TestCase):
def test_for_user(self):
book = factories.BookFactory.create()
user = UserFactory.create()
contact = factories.ContactFactory.create(book=book)
bookowner = factories.BookOwnerFactory.create(user=user,book=book)
contactField1 = factories.ContactFieldFactory.create(contact=contact)
contactField2 = factories.ContactFieldFactory.create()
fields = models.ContactField.objects.for_user(user=user)
self.assertEqual(1, len(fields))
| phildini/logtacts | contacts/tests/test_models.py | Python | mit | 9,784 |
# -*- coding: utf-8 -*-
import subprocess
import signal
import time
def handler(signum, frame):
print 'Start kill "run.py" ...'
subprocess.Popen(['pkill', '-SIGINT', 'run.py'])
if __name__ == "__main__":
process_count = 10
for i in range(process_count):
subprocess.Popen(['python', 'run.py'])
signal.signal(signal.SIGINT, handler)
while True:
time.sleep(1)
| Lamzin/wiki-parser | launcher.py | Python | gpl-3.0 | 406 |
# Copyright (C) 2007, One Laptop Per Child
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
import os
import logging
import urllib.parse
from gi.repository import Gio
from gi.repository import Gtk
from gettext import gettext as _
from sugar3 import mime
from sugar3.bundle.activitybundle import ActivityBundle
class ClipboardObject(object):
def __init__(self, object_path, name):
self._id = object_path
self._name = name
self._percent = 0
self._formats = {}
def destroy(self):
for format_ in list(self._formats.values()):
format_.destroy()
def get_id(self):
return self._id
def get_name(self):
name = self._name
if not name:
mime_type = mime.get_mime_description(self.get_mime_type())
if not mime_type:
mime_type = 'Data'
name = _('%s clipping') % mime_type
return name
def get_icon(self):
mime_type = self.get_mime_type()
generic_types = mime.get_all_generic_types()
for generic_type in generic_types:
if mime_type in generic_type.mime_types:
return generic_type.icon
icons = Gio.content_type_get_icon(mime_type)
icon_name = None
if icons is not None:
icon_theme = Gtk.IconTheme.get_default()
for icon_name in icons.props.names:
icon_info = (
icon_theme.lookup_icon(icon_name,
Gtk.IconSize.LARGE_TOOLBAR, 0))
if icon_info is not None:
del icon_info
return icon_name
return 'application-octet-stream'
def get_preview(self):
for mime_type in ['UTF8_STRING']:
if mime_type in self._formats:
return self._formats[mime_type].get_data()
return ''
def is_bundle(self):
# A bundle will have only one format.
if not self._formats:
return False
else:
return list(self._formats.keys())[0] == ActivityBundle.MIME_TYPE
def get_percent(self):
return self._percent
def set_percent(self, percent):
self._percent = percent
def add_format(self, format_):
self._formats[format_.get_type()] = format_
def get_formats(self):
return self._formats
def get_mime_type(self):
if not self._formats:
return ''
format_ = mime.choose_most_significant(list(self._formats.keys()))
if format_ == 'text/uri-list':
uri_data = self._formats[format_].get_data()
uri = urllib.parse.urlparse(uri_data, 'file')
scheme = uri.scheme # pylint: disable=E1101
if scheme == 'file':
path = uri.path # pylint: disable=E1101
if os.path.exists(path):
format_ = mime.get_for_file(path)
else:
format_ = mime.get_from_file_name(path)
logging.debug('Chose %r!', format_)
return format_
class Format(object):
def __init__(self, mime_type, data, on_disk):
self.owns_disk_data = False
self._type = mime_type
self._data = data
self._on_disk = on_disk
def destroy(self):
if self._on_disk:
uri = urllib.parse.urlparse(self._data)
path = uri.path # pylint: disable=E1101
if os.path.isfile(path):
os.remove(path)
def get_type(self):
return self._type
def get_data(self):
return self._data
def set_data(self, data):
self._data = data
def is_on_disk(self):
return self._on_disk
| quozl/sugar | src/jarabe/frame/clipboardobject.py | Python | gpl-3.0 | 4,332 |
import json
import os
from marvin.cloudstackException import printException
from marvin.codes import *
from optparse import OptionParser
import jsonHelper
from config.test_data import test_data
class managementServer(object):
def __init__(self):
self.mgtSvrIp = None
self.port = 8096
self.apiKey = None
self.securityKey = None
self.useHttps = None
self.certCAPath = None
self.certPath = None
class dbServer(object):
def __init__(self):
self.dbSvr = None
self.port = 3306
self.user = "cloud"
self.passwd = "cloud"
self.db = "cloud"
class configuration(object):
def __init__(self):
self.name = None
self.value = None
class logger(object):
def __init__(self):
self.LogFolderPath = None
class cloudstackConfiguration(object):
def __init__(self):
self.zones = []
self.mgtSvr = []
self.dbSvr = None
self.globalConfig = []
self.logger = None
self.TestData = None
class zone(object):
def __init__(self):
self.dns1 = None
self.internaldns1 = None
self.name = None
'''Basic or Advanced'''
self.networktype = None
self.dns2 = None
self.internaldns2 = None
self.localstorageenabled = None
'''default public network, in advanced mode'''
self.ipranges = []
self.physical_networks = []
self.pods = []
self.secondaryStorages = []
self.cacheStorages = []
self.domain = None
class trafficType(object):
def __init__(self, typ, labeldict=None):
self.typ = typ # Guest/Management/Public
if labeldict:
self.xen = labeldict['xen'] if 'xen' in labeldict.keys() else None
self.kvm = labeldict['kvm'] if 'kvm' in labeldict.keys() else None
self.simulator = labeldict['simulator'] \
if 'simulator' in labeldict.keys() else None
class pod(object):
def __init__(self):
self.gateway = None
self.name = None
self.netmask = None
self.startip = None
self.endip = None
self.zoneid = None
self.clusters = []
'''Used in basic network mode'''
self.guestIpRanges = []
class cluster(object):
def __init__(self):
self.clustername = None
self.clustertype = None
self.hypervisor = None
self.zoneid = None
self.podid = None
self.password = None
self.url = None
self.username = None
self.hosts = []
self.primaryStorages = []
class host(object):
def __init__(self):
self.hypervisor = None
self.password = None
self.url = None
self.username = None
self.zoneid = None
self.podid = None
self.clusterid = None
self.clustername = None
self.hosttags = None
self.allocationstate = None
class physicalNetwork(object):
def __init__(self):
self.name = None
self.tags = []
self.traffictypes = []
self.broadcastdomainrange = 'Zone'
self.vlan = None
self.isolationmethods = []
'''enable default virtual router provider'''
vrouter = provider()
vrouter.name = 'VirtualRouter'
self.providers = [vrouter]
class provider(object):
def __init__(self, name=None):
self.name = name
self.state = None
self.broadcastdomainrange = 'ZONE'
self.zoneid = None
self.servicelist = []
self.devices = []
class network(object):
def __init__(self):
self.displaytext = None
self.name = None
self.zoneid = None
self.acltype = None
self.domainid = None
self.networkdomain = None
self.networkofferingid = None
self.ipranges = []
class iprange(object):
def __init__(self):
'''tagged/untagged'''
self.gateway = None
self.netmask = None
self.startip = None
self.endip = None
self.vlan = None
'''for account specific '''
self.account = None
self.domain = None
class primaryStorage(object):
def __init__(self):
self.name = None
self.url = None
self.details = None
class secondaryStorage(object):
def __init__(self):
self.url = None
self.provider = None
self.details = None
class cacheStorage(object):
def __init__(self):
self.url = None
self.provider = None
self.details = None
class srx(object):
def __init__(self, hostname=None, username='root', password='admin'):
self.hostname = hostname
self.username = username
self.password = password
self.networkdevicetype = 'JuniperSRXFirewall'
self.publicinterface = '1/1'
self.privateinterface = '1/1'
self.numretries = '2'
self.fwdevicededicated = 'false'
self.timeout = '300'
self.publicnetwork = 'untrusted'
self.privatenetwork = 'trusted'
def getUrl(self):
return repr(self)
def __repr__(self):
req = zip(self.__dict__.keys(), self.__dict__.values())
return self.hostname + "?" + "&".join(["=".join([r[0], r[1]])
for r in req])
class bigip(object):
def __init__(self, hostname=None, username='root', password='default'):
self.hostname = hostname
self.username = username
self.password = password
self.networkdevicetype = 'F5BigIpLoadBalancer'
self.publicinterface = '1/1'
self.privateinterface = '1/1'
self.numretries = '2'
self.lbdevicededicated = 'false'
self.lbdevicecapacity = '50'
def getUrl(self):
return repr(self)
def __repr__(self):
req = zip(self.__dict__.keys(), self.__dict__.values())
return self.hostname + "?" + "&".join(["=".join([r[0], r[1]])
for r in req])
class ConfigManager(object):
'''
@Name: ConfigManager
    @Desc: 1. It provides the basic configuration facilities to marvin.
           2. Users can add configuration files for their tests, deployment,
              etc. under one config folder before running their tests:
              cs/tools/marvin/marvin/config.
              They can remove all hard-coded values from code and separate
              them out as config at this location.
              Either add this to the existing setup.cfg as a separate section
              or add a new configuration file.
           3. This removes hard-coded values from tests and separates
              data from tests.
           4. This API is provided as an additional facility under
              cloudstackTestClient; users can get the configuration object,
              just like apiclient, dbconnection, etc., to drive their tests.
           5. Users add their configuration for a test, setup, etc. at one
              single place under the configuration dir and use the
              "getConfigParser" API of cloudstackTestClient.
              It will give them a "configObj". They can either pass their own
              config file for parsing to "getConfig" or it will use the
              default config file @ config/setup.cfg.
           6. They will then get the dictionary of parsed configuration
              and can use it further to drive their tests or configuration.
           7. Test features can drive their setups, thus removing hard-coded
              values. The default configuration file will be under config as
              setup.cfg.
           8. Users can use their own configuration file, passed to the
              "getConfig" API, once configObj is returned.
'''
def __init__(self, cfg_file=None):
self.__filePath = cfg_file
self.__parsedCfgDict = None
'''
Set the Configuration
'''
self.__setConfig()
def __setConfig(self):
if not self.__verifyFile():
dirPath = os.path.dirname(__file__)
self.__filePath = str(os.path.join(dirPath, "config/test_data.py"))
self.__parsedCfgDict = self.__parseConfig()
def __parseConfig(self):
'''
@Name : __parseConfig
@Description: Parses the Input configuration Json file
and returns a dictionary from the file.
@Input : NA
@Output : Returns the parsed dictionary from json file
Returns None for invalid input or if parsing failed
'''
config_dict = None
try:
if self.__filePath.endswith(".py"):
config_dict = test_data
else:
configLines = []
with open(self.__filePath, 'r') as fp:
for line in fp:
ws = line.strip()
if not ws.startswith("#"):
configLines.append(ws)
config = json.loads("\n".join(configLines))
config_dict = config
except Exception as e:
printException(e)
finally:
return config_dict
def __verifyFile(self):
'''
        @Name : __verifyFile
        @Description: Verifies that the configuration file path is set
                      and points to an existing file.
@Input : NA
@Output : True or False based upon file input validity
and availability
'''
if self.__filePath is None or self.__filePath == '':
return False
return os.path.exists(self.__filePath)
def getSectionData(self, section=None):
'''
@Name: getSectionData
@Desc: Gets the Section data of a particular section
under parsed dictionary
@Input: Parsed Dictionary from configuration file
section to be returned from this dict
@Output:Section matching inside the parsed data
'''
if self.__parsedCfgDict is None or section is None:
print "\nEither Parsed Dictionary is None or Section is None"
return INVALID_INPUT
if section is not None:
return self.__parsedCfgDict.get(section)
def getConfig(self):
'''
@Name : getConfig
@Desc : Returns the Parsed Dictionary of Config Provided
@Input : NA
@Output: ParsedDict if successful if cfg file provided is valid
None if cfg file is invalid or not able to be parsed
'''
out = self.__parsedCfgDict
return out
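def _exampleConfigManagerUsage(cfgFile=None):
    # Illustrative sketch only (not part of the original Marvin API): the flow
    # described in the ConfigManager docstring. The section name "zones" and
    # the optional cfgFile path are assumptions for the example.
    cfgMgr = ConfigManager(cfgFile)  # falls back to config/test_data.py if cfgFile is missing or invalid
    parsedDict = cfgMgr.getConfig()  # full parsed configuration dictionary (None on parse failure)
    zonesSection = cfgMgr.getSectionData("zones")  # a single section of the parsed dictionary
    return parsedDict, zonesSection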
def getDeviceUrl(obj):
req = zip(obj.__dict__.keys(), obj.__dict__.values())
if obj.hostname:
return "http://" + obj.hostname + "?" + "&".join(["=".join([r[0],
r[1]])
for r in req])
else:
return None
def descSetupInAdvancedMode():
'''sample code to generate setup configuration file'''
zs = cloudstackConfiguration()
for l in range(1):
z = zone()
z.dns1 = "8.8.8.8"
z.dns2 = "8.8.4.4"
z.internaldns1 = "192.168.110.254"
z.internaldns2 = "192.168.110.253"
z.name = "test" + str(l)
z.networktype = 'Advanced'
z.guestcidraddress = "10.1.1.0/24"
z.vlan = "100-2000"
pn = physicalNetwork()
pn.name = "test-network"
pn.traffictypes = [trafficType("Guest"), trafficType("Management"),
trafficType("Public")]
vpcprovider = provider('VpcVirtualRouter')
srxprovider = provider('JuniperSRX')
srxprovider.devices.append(srx(hostname='10.147.40.3'))
f5provider = provider('F5BigIp')
f5provider.devices.append(bigip(hostname='10.147.40.3'))
        pn.providers.extend([vpcprovider, srxprovider, f5provider])
z.physical_networks.append(pn)
'''create 10 pods'''
for i in range(2):
p = pod()
p.name = "test" + str(l) + str(i)
p.gateway = "192.168.%d.1" % i
p.netmask = "255.255.255.0"
p.startip = "192.168.%d.200" % i
p.endip = "192.168.%d.220" % i
'''add 10 clusters'''
for j in range(2):
c = cluster()
c.clustername = "test" + str(l) + str(i) + str(j)
c.clustertype = "CloudManaged"
c.hypervisor = "Simulator"
'''add 10 hosts'''
for k in range(2):
h = host()
h.username = "root"
h.password = "password"
memory = 8 * 1024 * 1024 * 1024
localstorage = 1 * 1024 * 1024 * 1024 * 1024
# h.url = "http://sim/%d%d%d%d/cpucore=1&cpuspeed=8000&\
# memory=%d&localstorage=%d"%(l, i, j, k, memory,
# localstorage)
h.url = "http://sim/%d%d%d%d" % (l, i, j, k)
c.hosts.append(h)
'''add 2 primary storages'''
for m in range(2):
primary = primaryStorage()
primary.name = "primary" + \
str(l) + str(i) + str(j) + str(m)
# primary.url = "nfs://localhost/path%s/size=%d" %
# (str(l) + str(i) + str(j) + str(m), size)
primary.url = "nfs://localhost/path%s" % (str(l) + str(i)
+ str(j)
+ str(m))
c.primaryStorages.append(primary)
p.clusters.append(c)
z.pods.append(p)
'''add two secondary'''
for i in range(5):
secondary = secondaryStorage()
secondary.url = "nfs://localhost/path" + str(l) + str(i)
z.secondaryStorages.append(secondary)
'''add default public network'''
ips = iprange()
ips.vlan = "26"
ips.startip = "172.16.26.2"
ips.endip = "172.16.26.100"
ips.gateway = "172.16.26.1"
ips.netmask = "255.255.255.0"
z.ipranges.append(ips)
zs.zones.append(z)
'''Add one mgt server'''
mgt = managementServer()
mgt.mgtSvrIp = "localhost"
zs.mgtSvr.append(mgt)
'''Add a database'''
db = dbServer()
db.dbSvr = "localhost"
zs.dbSvr = db
'''add global configuration'''
global_settings = { 'expunge.delay': '60',
'expunge.interval': '60',
'expunge.workers': '3',
}
for k, v in global_settings.iteritems():
cfg = configuration()
cfg.name = k
cfg.value = v
zs.globalConfig.append(cfg)
return zs
def generate_setup_config(config, file=None):
describe = config
if file is None:
return json.dumps(jsonHelper.jsonDump.dump(describe))
else:
fp = open(file, 'w')
json.dump(jsonHelper.jsonDump.dump(describe), fp, indent=4)
fp.close()
def getSetupConfig(file):
try:
config = cloudstackConfiguration()
configLines = []
with open(file, 'r') as fp:
for line in fp:
ws = line.strip()
if not ws.startswith("#"):
configLines.append(ws)
config = json.loads("\n".join(configLines))
return jsonHelper.jsonLoader(config)
except Exception as e:
printException(e)
if __name__ == "__main__":
parser = OptionParser()
parser.add_option("-i", "--input", action="store", default=None,
dest="inputfile", help="input file")
parser.add_option("-a", "--advanced", action="store_true", default=False,
dest="advanced", help="use advanced networking")
parser.add_option("-o", "--output", action="store",
default="./datacenterCfg", dest="output",
help="the path where the json config file generated, \
by default is ./datacenterCfg")
(options, args) = parser.parse_args()
if options.inputfile:
config = getSetupConfig(options.inputfile)
if options.advanced:
config = descSetupInAdvancedMode()
generate_setup_config(config, options.output)
| MissionCriticalCloud/cosmic | cosmic-marvin/marvin/configGenerator.py | Python | apache-2.0 | 16,647 |
"""
Tests covering the Open edX Paver prequisites installation workflow
"""
from __future__ import absolute_import
import os
import unittest
from mock import patch
from paver.easy import BuildFailure
import pavelib.prereqs
from pavelib.paver_tests.utils import PaverTestCase, fail_on_npm_install, unexpected_fail_on_npm_install
class TestPaverPrereqInstall(unittest.TestCase):
"""
Test the status of the NO_PREREQ_INSTALL variable, its presence and how
paver handles it.
"""
def check_val(self, set_val, expected_val):
"""
Verify that setting the variable to a certain value returns
the expected boolean for it.
        As environment variables are only stored as strings, we have to cast
        whatever value the variable is set to into the expected boolean.
"""
_orig_environ = dict(os.environ)
os.environ['NO_PREREQ_INSTALL'] = set_val
self.assertEqual(
pavelib.prereqs.no_prereq_install(),
expected_val,
u'NO_PREREQ_INSTALL is set to {}, but we read it as {}'.format(
set_val, expected_val),
)
# Reset Environment back to original state
os.environ.clear()
os.environ.update(_orig_environ)
def test_no_prereq_install_true_lowercase(self):
"""
Ensure that 'true' will be True.
"""
self.check_val('true', True)
def test_no_prereq_install_false_lowercase(self):
"""
Ensure that 'false' will be False.
"""
self.check_val('false', False)
def test_no_prereq_install_true(self):
"""
Ensure that 'True' will be True.
"""
self.check_val('True', True)
def test_no_prereq_install_false(self):
"""
Ensure that 'False' will be False.
"""
self.check_val('False', False)
def test_no_prereq_install_0(self):
"""
Ensure that '0' will be False.
"""
self.check_val('0', False)
def test_no_prereq_install_1(self):
"""
Ensure that '1' will be True.
"""
self.check_val('1', True)
class TestPaverNodeInstall(PaverTestCase):
"""
Test node install logic
"""
def setUp(self):
super(TestPaverNodeInstall, self).setUp()
# Ensure prereqs will be run
os.environ['NO_PREREQ_INSTALL'] = 'false'
def test_npm_install_with_subprocess_error(self):
"""
        A subprocess exit code of 1 is what paver receives when there is
an npm install error ("cb() never called!"). Test that we can handle
this kind of failure. For more info see TE-1767.
"""
with patch('subprocess.Popen') as _mock_popen:
_mock_popen.side_effect = fail_on_npm_install
with self.assertRaises(BuildFailure):
pavelib.prereqs.node_prereqs_installation()
# npm install will be called twice
self.assertEquals(_mock_popen.call_count, 2)
def test_npm_install_called_once_when_successful(self):
"""
Vanilla npm install should only be calling npm install one time
"""
with patch('subprocess.Popen') as _mock_popen:
pavelib.prereqs.node_prereqs_installation()
# when there's no failure, npm install is only called once
self.assertEquals(_mock_popen.call_count, 1)
def test_npm_install_with_unexpected_subprocess_error(self):
"""
If there's some other error, only call npm install once, and raise a failure
"""
with patch('subprocess.Popen') as _mock_popen:
_mock_popen.side_effect = unexpected_fail_on_npm_install
with self.assertRaises(BuildFailure):
pavelib.prereqs.node_prereqs_installation()
self.assertEquals(_mock_popen.call_count, 1)
| ESOedX/edx-platform | pavelib/paver_tests/test_prereqs.py | Python | agpl-3.0 | 3,866 |
#problem 14
def collatz(x):
    # Return the length of the Collatz chain starting at x (counting x and the final 1).
    sum = 1
while x != 1:
if x % 2 == 0:
x = x / 2
else:
x = 3 * x + 1
sum += 1
return sum
maxlength = 0
value = 0
for i in range(1,1000000):
length = collatz(i)
if length > maxlength:
maxlength = length
value = i
print value
print maxlength
| jhuang314/euler | p14.py | Python | mit | 355 |
import numpy
import theano
from fuel.datasets import IterableDataset
from numpy.testing import assert_allclose
from theano import tensor
from blocks.extensions import TrainingExtension, FinishAfter
from blocks.extensions.monitoring import TrainingDataMonitoring
from blocks.monitoring import aggregation
from blocks.algorithms import GradientDescent, Scale
from blocks.utils import shared_floatx
from blocks.main_loop import MainLoop
def test_training_data_monitoring():
weights = numpy.array([-1, 1], dtype=theano.config.floatX)
features = [numpy.array(f, dtype=theano.config.floatX)
for f in [[1, 2], [3, 4], [5, 6]]]
targets = [(weights * f).sum() for f in features]
n_batches = 3
dataset = IterableDataset(dict(features=features, targets=targets))
x = tensor.vector('features')
y = tensor.scalar('targets')
W = shared_floatx([0, 0], name='W')
V = shared_floatx(7, name='V')
W_sum = W.sum().copy(name='W_sum')
cost = ((x * W).sum() - y) ** 2
cost.name = 'cost'
class TrueCostExtension(TrainingExtension):
def before_batch(self, data):
self.main_loop.log.current_row['true_cost'] = (
((W.get_value() * data["features"]).sum() -
data["targets"]) ** 2)
main_loop = MainLoop(
model=None, data_stream=dataset.get_example_stream(),
algorithm=GradientDescent(cost=cost, parameters=[W],
step_rule=Scale(0.001)),
extensions=[
FinishAfter(after_n_epochs=1),
TrainingDataMonitoring([W_sum, cost, V], prefix="train1",
after_batch=True),
TrainingDataMonitoring([aggregation.mean(W_sum), cost],
prefix="train2", after_epoch=True),
TrueCostExtension()])
main_loop.run()
    # Check monitoring of a shared variable
assert_allclose(main_loop.log.current_row['train1_V'], 7.0)
for i in range(n_batches):
# The ground truth is written to the log before the batch is
        # processed, whereas the extension writes after the batch is
        # processed. This is why the iteration numbers differ here.
assert_allclose(main_loop.log[i]['true_cost'],
main_loop.log[i + 1]['train1_cost'])
assert_allclose(
main_loop.log[n_batches]['train2_cost'],
sum([main_loop.log[i]['true_cost']
for i in range(n_batches)]) / n_batches)
assert_allclose(
main_loop.log[n_batches]['train2_W_sum'],
sum([main_loop.log[i]['train1_W_sum']
for i in range(1, n_batches + 1)]) / n_batches)
| rizar/attention-lvcsr | libs/blocks/tests/extensions/test_monitoring.py | Python | mit | 2,680 |
# Portions Copyright (c) Facebook, Inc. and its affiliates.
#
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2.
# Copyright 2013 Mercurial Contributors
#
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2 or any later version.
from __future__ import absolute_import
import array
import errno
import fcntl
import os
import sys
from . import encoding, pycompat, util
# BSD 'more' escapes ANSI color sequences by default. This can be disabled by
# $MORE variable, but there's no compatible option with Linux 'more'. Given
# OS X is widely used and most modern Unix systems would have 'less', setting
# 'less' as the default seems reasonable.
fallbackpager = "less"
def _rcfiles(path):
rcs = [os.path.join(path, "hgrc")]
rcdir = os.path.join(path, "hgrc.d")
try:
rcs.extend(
[
os.path.join(rcdir, f)
for f, kind in util.listdir(rcdir)
if f.endswith(".rc")
]
)
except OSError:
pass
return rcs
def systemrcpath():
path = []
if pycompat.sysplatform == "plan9":
root = "lib/mercurial"
else:
root = "etc/mercurial"
# old mod_python does not set sys.argv
if len(getattr(sys, "argv", [])) > 0:
p = os.path.dirname(os.path.dirname(pycompat.sysargv[0]))
if p != "/":
path.extend(_rcfiles(os.path.join(p, root)))
path.extend(_rcfiles("/" + root))
return path
def userrcpath():
if pycompat.sysplatform == "plan9":
return [encoding.environ["home"] + "/lib/hgrc"]
elif pycompat.isdarwin:
return [os.path.expanduser("~/.hgrc")]
else:
confighome = encoding.environ.get("XDG_CONFIG_HOME")
if confighome is None or not os.path.isabs(confighome):
confighome = os.path.expanduser("~/.config")
return [os.path.expanduser("~/.hgrc"), os.path.join(confighome, "hg", "hgrc")]
def termsize(ui):
try:
import termios
TIOCGWINSZ = termios.TIOCGWINSZ # unavailable on IRIX (issue3449)
except (AttributeError, ImportError):
return 80, 24
for dev in (ui.ferr, ui.fout, ui.fin):
try:
try:
fd = dev.fileno()
except AttributeError:
continue
if not os.isatty(fd):
continue
arri = fcntl.ioctl(fd, TIOCGWINSZ, "\0" * 8)
height, width = array.array(r"h", arri)[:2]
if width > 0 and height > 0:
return width, height
except ValueError:
pass
except IOError as e:
            if e.errno == errno.EINVAL:
pass
else:
raise
return 80, 24
| facebookexperimental/eden | eden/hg-server/edenscm/mercurial/scmposix.py | Python | gpl-2.0 | 2,839 |
import pkgutil
import importlib
import logging
import os
from flask import Blueprint
def register_blueprints(app, package_name, package_path):
"""Register all Blueprint instances on the specified Flask application found
in all modules for the specified package.
:param app: the Flask application
:param package_name: the package name
:param package_path: the package path
"""
rv = []
for _, name, _ in pkgutil.iter_modules(package_path):
m = importlib.import_module('%s.%s' % (package_name, name))
for item in dir(m):
item = getattr(m, item)
if isinstance(item, Blueprint):
app.register_blueprint(item)
rv.append(item)
return rv
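def _example_register_blueprints(app, package):
    """A minimal usage sketch (not part of the original module): how an
    application factory might call register_blueprints(), where ``package`` is
    an already-imported package object whose sub-modules define Blueprints.
    """
    return register_blueprints(app, package.__name__, package.__path__)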
def init_logging(app):
if app.config:
handler = logging.FileHandler(os.path.join(app.config['FLASK_LOG_DIR'], 'flask.log'))
formater = logging.Formatter(
fmt='[%(asctime)s](%(pathname)s):%(levelname)s - %(message)s',
datefmt='%d-%m-%Y %H:%M:%S')
handler.setFormatter(formater)
handler.setLevel(logging.WARN)
if app.debug:
handler.setLevel(logging.NOTSET)
if app.config['TESTING']:
handler.setLevel(logging.DEBUG)
app.logger.setLevel(logging.DEBUG)
app.logger.addHandler(handler)
else:
raise AttributeError('config not found')
def print_params(params):
return ''.join(['%s-> %s' % (key, value) for (key, value) in params.items()])
| stepanov-valentin/lpschedule | schedule/helpers.py | Python | mit | 1,506 |
# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT!
import grpc
from google.cloud.kms_v1.proto import (
resources_pb2 as google_dot_cloud_dot_kms__v1_dot_proto_dot_resources__pb2,
)
from google.cloud.kms_v1.proto import (
service_pb2 as google_dot_cloud_dot_kms__v1_dot_proto_dot_service__pb2,
)
class KeyManagementServiceStub(object):
"""Google Cloud Key Management Service
Manages cryptographic keys and operations using those keys. Implements a REST
model with the following objects:
* [KeyRing][google.cloud.kms.v1.KeyRing]
* [CryptoKey][google.cloud.kms.v1.CryptoKey]
* [CryptoKeyVersion][google.cloud.kms.v1.CryptoKeyVersion]
If you are using manual gRPC libraries, see
[Using gRPC with Cloud KMS](https://cloud.google.com/kms/docs/grpc).
"""
def __init__(self, channel):
"""Constructor.
Args:
channel: A grpc.Channel.
"""
self.ListKeyRings = channel.unary_unary(
"/google.cloud.kms.v1.KeyManagementService/ListKeyRings",
request_serializer=google_dot_cloud_dot_kms__v1_dot_proto_dot_service__pb2.ListKeyRingsRequest.SerializeToString,
response_deserializer=google_dot_cloud_dot_kms__v1_dot_proto_dot_service__pb2.ListKeyRingsResponse.FromString,
)
self.ListCryptoKeys = channel.unary_unary(
"/google.cloud.kms.v1.KeyManagementService/ListCryptoKeys",
request_serializer=google_dot_cloud_dot_kms__v1_dot_proto_dot_service__pb2.ListCryptoKeysRequest.SerializeToString,
response_deserializer=google_dot_cloud_dot_kms__v1_dot_proto_dot_service__pb2.ListCryptoKeysResponse.FromString,
)
self.ListCryptoKeyVersions = channel.unary_unary(
"/google.cloud.kms.v1.KeyManagementService/ListCryptoKeyVersions",
request_serializer=google_dot_cloud_dot_kms__v1_dot_proto_dot_service__pb2.ListCryptoKeyVersionsRequest.SerializeToString,
response_deserializer=google_dot_cloud_dot_kms__v1_dot_proto_dot_service__pb2.ListCryptoKeyVersionsResponse.FromString,
)
self.GetKeyRing = channel.unary_unary(
"/google.cloud.kms.v1.KeyManagementService/GetKeyRing",
request_serializer=google_dot_cloud_dot_kms__v1_dot_proto_dot_service__pb2.GetKeyRingRequest.SerializeToString,
response_deserializer=google_dot_cloud_dot_kms__v1_dot_proto_dot_resources__pb2.KeyRing.FromString,
)
self.GetCryptoKey = channel.unary_unary(
"/google.cloud.kms.v1.KeyManagementService/GetCryptoKey",
request_serializer=google_dot_cloud_dot_kms__v1_dot_proto_dot_service__pb2.GetCryptoKeyRequest.SerializeToString,
response_deserializer=google_dot_cloud_dot_kms__v1_dot_proto_dot_resources__pb2.CryptoKey.FromString,
)
self.GetCryptoKeyVersion = channel.unary_unary(
"/google.cloud.kms.v1.KeyManagementService/GetCryptoKeyVersion",
request_serializer=google_dot_cloud_dot_kms__v1_dot_proto_dot_service__pb2.GetCryptoKeyVersionRequest.SerializeToString,
response_deserializer=google_dot_cloud_dot_kms__v1_dot_proto_dot_resources__pb2.CryptoKeyVersion.FromString,
)
self.GetPublicKey = channel.unary_unary(
"/google.cloud.kms.v1.KeyManagementService/GetPublicKey",
request_serializer=google_dot_cloud_dot_kms__v1_dot_proto_dot_service__pb2.GetPublicKeyRequest.SerializeToString,
response_deserializer=google_dot_cloud_dot_kms__v1_dot_proto_dot_resources__pb2.PublicKey.FromString,
)
self.CreateKeyRing = channel.unary_unary(
"/google.cloud.kms.v1.KeyManagementService/CreateKeyRing",
request_serializer=google_dot_cloud_dot_kms__v1_dot_proto_dot_service__pb2.CreateKeyRingRequest.SerializeToString,
response_deserializer=google_dot_cloud_dot_kms__v1_dot_proto_dot_resources__pb2.KeyRing.FromString,
)
self.CreateCryptoKey = channel.unary_unary(
"/google.cloud.kms.v1.KeyManagementService/CreateCryptoKey",
request_serializer=google_dot_cloud_dot_kms__v1_dot_proto_dot_service__pb2.CreateCryptoKeyRequest.SerializeToString,
response_deserializer=google_dot_cloud_dot_kms__v1_dot_proto_dot_resources__pb2.CryptoKey.FromString,
)
self.CreateCryptoKeyVersion = channel.unary_unary(
"/google.cloud.kms.v1.KeyManagementService/CreateCryptoKeyVersion",
request_serializer=google_dot_cloud_dot_kms__v1_dot_proto_dot_service__pb2.CreateCryptoKeyVersionRequest.SerializeToString,
response_deserializer=google_dot_cloud_dot_kms__v1_dot_proto_dot_resources__pb2.CryptoKeyVersion.FromString,
)
self.UpdateCryptoKey = channel.unary_unary(
"/google.cloud.kms.v1.KeyManagementService/UpdateCryptoKey",
request_serializer=google_dot_cloud_dot_kms__v1_dot_proto_dot_service__pb2.UpdateCryptoKeyRequest.SerializeToString,
response_deserializer=google_dot_cloud_dot_kms__v1_dot_proto_dot_resources__pb2.CryptoKey.FromString,
)
self.UpdateCryptoKeyVersion = channel.unary_unary(
"/google.cloud.kms.v1.KeyManagementService/UpdateCryptoKeyVersion",
request_serializer=google_dot_cloud_dot_kms__v1_dot_proto_dot_service__pb2.UpdateCryptoKeyVersionRequest.SerializeToString,
response_deserializer=google_dot_cloud_dot_kms__v1_dot_proto_dot_resources__pb2.CryptoKeyVersion.FromString,
)
self.Encrypt = channel.unary_unary(
"/google.cloud.kms.v1.KeyManagementService/Encrypt",
request_serializer=google_dot_cloud_dot_kms__v1_dot_proto_dot_service__pb2.EncryptRequest.SerializeToString,
response_deserializer=google_dot_cloud_dot_kms__v1_dot_proto_dot_service__pb2.EncryptResponse.FromString,
)
self.Decrypt = channel.unary_unary(
"/google.cloud.kms.v1.KeyManagementService/Decrypt",
request_serializer=google_dot_cloud_dot_kms__v1_dot_proto_dot_service__pb2.DecryptRequest.SerializeToString,
response_deserializer=google_dot_cloud_dot_kms__v1_dot_proto_dot_service__pb2.DecryptResponse.FromString,
)
self.AsymmetricSign = channel.unary_unary(
"/google.cloud.kms.v1.KeyManagementService/AsymmetricSign",
request_serializer=google_dot_cloud_dot_kms__v1_dot_proto_dot_service__pb2.AsymmetricSignRequest.SerializeToString,
response_deserializer=google_dot_cloud_dot_kms__v1_dot_proto_dot_service__pb2.AsymmetricSignResponse.FromString,
)
self.AsymmetricDecrypt = channel.unary_unary(
"/google.cloud.kms.v1.KeyManagementService/AsymmetricDecrypt",
request_serializer=google_dot_cloud_dot_kms__v1_dot_proto_dot_service__pb2.AsymmetricDecryptRequest.SerializeToString,
response_deserializer=google_dot_cloud_dot_kms__v1_dot_proto_dot_service__pb2.AsymmetricDecryptResponse.FromString,
)
self.UpdateCryptoKeyPrimaryVersion = channel.unary_unary(
"/google.cloud.kms.v1.KeyManagementService/UpdateCryptoKeyPrimaryVersion",
request_serializer=google_dot_cloud_dot_kms__v1_dot_proto_dot_service__pb2.UpdateCryptoKeyPrimaryVersionRequest.SerializeToString,
response_deserializer=google_dot_cloud_dot_kms__v1_dot_proto_dot_resources__pb2.CryptoKey.FromString,
)
self.DestroyCryptoKeyVersion = channel.unary_unary(
"/google.cloud.kms.v1.KeyManagementService/DestroyCryptoKeyVersion",
request_serializer=google_dot_cloud_dot_kms__v1_dot_proto_dot_service__pb2.DestroyCryptoKeyVersionRequest.SerializeToString,
response_deserializer=google_dot_cloud_dot_kms__v1_dot_proto_dot_resources__pb2.CryptoKeyVersion.FromString,
)
self.RestoreCryptoKeyVersion = channel.unary_unary(
"/google.cloud.kms.v1.KeyManagementService/RestoreCryptoKeyVersion",
request_serializer=google_dot_cloud_dot_kms__v1_dot_proto_dot_service__pb2.RestoreCryptoKeyVersionRequest.SerializeToString,
response_deserializer=google_dot_cloud_dot_kms__v1_dot_proto_dot_resources__pb2.CryptoKeyVersion.FromString,
)
class KeyManagementServiceServicer(object):
"""Google Cloud Key Management Service
Manages cryptographic keys and operations using those keys. Implements a REST
model with the following objects:
* [KeyRing][google.cloud.kms.v1.KeyRing]
* [CryptoKey][google.cloud.kms.v1.CryptoKey]
* [CryptoKeyVersion][google.cloud.kms.v1.CryptoKeyVersion]
If you are using manual gRPC libraries, see
[Using gRPC with Cloud KMS](https://cloud.google.com/kms/docs/grpc).
"""
def ListKeyRings(self, request, context):
"""Lists [KeyRings][google.cloud.kms.v1.KeyRing].
"""
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details("Method not implemented!")
raise NotImplementedError("Method not implemented!")
def ListCryptoKeys(self, request, context):
"""Lists [CryptoKeys][google.cloud.kms.v1.CryptoKey].
"""
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details("Method not implemented!")
raise NotImplementedError("Method not implemented!")
def ListCryptoKeyVersions(self, request, context):
"""Lists [CryptoKeyVersions][google.cloud.kms.v1.CryptoKeyVersion].
"""
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details("Method not implemented!")
raise NotImplementedError("Method not implemented!")
def GetKeyRing(self, request, context):
"""Returns metadata for a given [KeyRing][google.cloud.kms.v1.KeyRing].
"""
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details("Method not implemented!")
raise NotImplementedError("Method not implemented!")
def GetCryptoKey(self, request, context):
"""Returns metadata for a given [CryptoKey][google.cloud.kms.v1.CryptoKey], as well as its
[primary][google.cloud.kms.v1.CryptoKey.primary] [CryptoKeyVersion][google.cloud.kms.v1.CryptoKeyVersion].
"""
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details("Method not implemented!")
raise NotImplementedError("Method not implemented!")
def GetCryptoKeyVersion(self, request, context):
"""Returns metadata for a given [CryptoKeyVersion][google.cloud.kms.v1.CryptoKeyVersion].
"""
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details("Method not implemented!")
raise NotImplementedError("Method not implemented!")
def GetPublicKey(self, request, context):
"""Returns the public key for the given [CryptoKeyVersion][google.cloud.kms.v1.CryptoKeyVersion]. The
[CryptoKey.purpose][google.cloud.kms.v1.CryptoKey.purpose] must be
[ASYMMETRIC_SIGN][google.cloud.kms.v1.CryptoKey.CryptoKeyPurpose.ASYMMETRIC_SIGN] or
[ASYMMETRIC_DECRYPT][google.cloud.kms.v1.CryptoKey.CryptoKeyPurpose.ASYMMETRIC_DECRYPT].
"""
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details("Method not implemented!")
raise NotImplementedError("Method not implemented!")
def CreateKeyRing(self, request, context):
"""Create a new [KeyRing][google.cloud.kms.v1.KeyRing] in a given Project and Location.
"""
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details("Method not implemented!")
raise NotImplementedError("Method not implemented!")
def CreateCryptoKey(self, request, context):
"""Create a new [CryptoKey][google.cloud.kms.v1.CryptoKey] within a [KeyRing][google.cloud.kms.v1.KeyRing].
[CryptoKey.purpose][google.cloud.kms.v1.CryptoKey.purpose] and
[CryptoKey.version_template.algorithm][google.cloud.kms.v1.CryptoKeyVersionTemplate.algorithm]
are required.
"""
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details("Method not implemented!")
raise NotImplementedError("Method not implemented!")
def CreateCryptoKeyVersion(self, request, context):
"""Create a new [CryptoKeyVersion][google.cloud.kms.v1.CryptoKeyVersion] in a [CryptoKey][google.cloud.kms.v1.CryptoKey].
The server will assign the next sequential id. If unset,
[state][google.cloud.kms.v1.CryptoKeyVersion.state] will be set to
[ENABLED][google.cloud.kms.v1.CryptoKeyVersion.CryptoKeyVersionState.ENABLED].
"""
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details("Method not implemented!")
raise NotImplementedError("Method not implemented!")
def UpdateCryptoKey(self, request, context):
"""Update a [CryptoKey][google.cloud.kms.v1.CryptoKey].
"""
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details("Method not implemented!")
raise NotImplementedError("Method not implemented!")
def UpdateCryptoKeyVersion(self, request, context):
"""Update a [CryptoKeyVersion][google.cloud.kms.v1.CryptoKeyVersion]'s metadata.
[state][google.cloud.kms.v1.CryptoKeyVersion.state] may be changed between
[ENABLED][google.cloud.kms.v1.CryptoKeyVersion.CryptoKeyVersionState.ENABLED] and
[DISABLED][google.cloud.kms.v1.CryptoKeyVersion.CryptoKeyVersionState.DISABLED] using this
method. See [DestroyCryptoKeyVersion][google.cloud.kms.v1.KeyManagementService.DestroyCryptoKeyVersion] and [RestoreCryptoKeyVersion][google.cloud.kms.v1.KeyManagementService.RestoreCryptoKeyVersion] to
move between other states.
"""
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details("Method not implemented!")
raise NotImplementedError("Method not implemented!")
def Encrypt(self, request, context):
"""Encrypts data, so that it can only be recovered by a call to [Decrypt][google.cloud.kms.v1.KeyManagementService.Decrypt].
The [CryptoKey.purpose][google.cloud.kms.v1.CryptoKey.purpose] must be
[ENCRYPT_DECRYPT][google.cloud.kms.v1.CryptoKey.CryptoKeyPurpose.ENCRYPT_DECRYPT].
"""
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details("Method not implemented!")
raise NotImplementedError("Method not implemented!")
def Decrypt(self, request, context):
"""Decrypts data that was protected by [Encrypt][google.cloud.kms.v1.KeyManagementService.Encrypt]. The [CryptoKey.purpose][google.cloud.kms.v1.CryptoKey.purpose]
must be [ENCRYPT_DECRYPT][google.cloud.kms.v1.CryptoKey.CryptoKeyPurpose.ENCRYPT_DECRYPT].
"""
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details("Method not implemented!")
raise NotImplementedError("Method not implemented!")
def AsymmetricSign(self, request, context):
"""Signs data using a [CryptoKeyVersion][google.cloud.kms.v1.CryptoKeyVersion] with [CryptoKey.purpose][google.cloud.kms.v1.CryptoKey.purpose]
ASYMMETRIC_SIGN, producing a signature that can be verified with the public
key retrieved from [GetPublicKey][google.cloud.kms.v1.KeyManagementService.GetPublicKey].
"""
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details("Method not implemented!")
raise NotImplementedError("Method not implemented!")
def AsymmetricDecrypt(self, request, context):
"""Decrypts data that was encrypted with a public key retrieved from
[GetPublicKey][google.cloud.kms.v1.KeyManagementService.GetPublicKey] corresponding to a [CryptoKeyVersion][google.cloud.kms.v1.CryptoKeyVersion] with
[CryptoKey.purpose][google.cloud.kms.v1.CryptoKey.purpose] ASYMMETRIC_DECRYPT.
"""
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details("Method not implemented!")
raise NotImplementedError("Method not implemented!")
def UpdateCryptoKeyPrimaryVersion(self, request, context):
"""Update the version of a [CryptoKey][google.cloud.kms.v1.CryptoKey] that will be used in [Encrypt][google.cloud.kms.v1.KeyManagementService.Encrypt].
Returns an error if called on an asymmetric key.
"""
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details("Method not implemented!")
raise NotImplementedError("Method not implemented!")
def DestroyCryptoKeyVersion(self, request, context):
"""Schedule a [CryptoKeyVersion][google.cloud.kms.v1.CryptoKeyVersion] for destruction.
Upon calling this method, [CryptoKeyVersion.state][google.cloud.kms.v1.CryptoKeyVersion.state] will be set to
[DESTROY_SCHEDULED][google.cloud.kms.v1.CryptoKeyVersion.CryptoKeyVersionState.DESTROY_SCHEDULED]
and [destroy_time][google.cloud.kms.v1.CryptoKeyVersion.destroy_time] will be set to a time 24
hours in the future, at which point the [state][google.cloud.kms.v1.CryptoKeyVersion.state]
will be changed to
[DESTROYED][google.cloud.kms.v1.CryptoKeyVersion.CryptoKeyVersionState.DESTROYED], and the key
material will be irrevocably destroyed.
Before the [destroy_time][google.cloud.kms.v1.CryptoKeyVersion.destroy_time] is reached,
[RestoreCryptoKeyVersion][google.cloud.kms.v1.KeyManagementService.RestoreCryptoKeyVersion] may be called to reverse the process.
"""
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details("Method not implemented!")
raise NotImplementedError("Method not implemented!")
def RestoreCryptoKeyVersion(self, request, context):
"""Restore a [CryptoKeyVersion][google.cloud.kms.v1.CryptoKeyVersion] in the
[DESTROY_SCHEDULED][google.cloud.kms.v1.CryptoKeyVersion.CryptoKeyVersionState.DESTROY_SCHEDULED]
state.
Upon restoration of the CryptoKeyVersion, [state][google.cloud.kms.v1.CryptoKeyVersion.state]
will be set to [DISABLED][google.cloud.kms.v1.CryptoKeyVersion.CryptoKeyVersionState.DISABLED],
and [destroy_time][google.cloud.kms.v1.CryptoKeyVersion.destroy_time] will be cleared.
"""
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details("Method not implemented!")
raise NotImplementedError("Method not implemented!")
def add_KeyManagementServiceServicer_to_server(servicer, server):
rpc_method_handlers = {
"ListKeyRings": grpc.unary_unary_rpc_method_handler(
servicer.ListKeyRings,
request_deserializer=google_dot_cloud_dot_kms__v1_dot_proto_dot_service__pb2.ListKeyRingsRequest.FromString,
response_serializer=google_dot_cloud_dot_kms__v1_dot_proto_dot_service__pb2.ListKeyRingsResponse.SerializeToString,
),
"ListCryptoKeys": grpc.unary_unary_rpc_method_handler(
servicer.ListCryptoKeys,
request_deserializer=google_dot_cloud_dot_kms__v1_dot_proto_dot_service__pb2.ListCryptoKeysRequest.FromString,
response_serializer=google_dot_cloud_dot_kms__v1_dot_proto_dot_service__pb2.ListCryptoKeysResponse.SerializeToString,
),
"ListCryptoKeyVersions": grpc.unary_unary_rpc_method_handler(
servicer.ListCryptoKeyVersions,
request_deserializer=google_dot_cloud_dot_kms__v1_dot_proto_dot_service__pb2.ListCryptoKeyVersionsRequest.FromString,
response_serializer=google_dot_cloud_dot_kms__v1_dot_proto_dot_service__pb2.ListCryptoKeyVersionsResponse.SerializeToString,
),
"GetKeyRing": grpc.unary_unary_rpc_method_handler(
servicer.GetKeyRing,
request_deserializer=google_dot_cloud_dot_kms__v1_dot_proto_dot_service__pb2.GetKeyRingRequest.FromString,
response_serializer=google_dot_cloud_dot_kms__v1_dot_proto_dot_resources__pb2.KeyRing.SerializeToString,
),
"GetCryptoKey": grpc.unary_unary_rpc_method_handler(
servicer.GetCryptoKey,
request_deserializer=google_dot_cloud_dot_kms__v1_dot_proto_dot_service__pb2.GetCryptoKeyRequest.FromString,
response_serializer=google_dot_cloud_dot_kms__v1_dot_proto_dot_resources__pb2.CryptoKey.SerializeToString,
),
"GetCryptoKeyVersion": grpc.unary_unary_rpc_method_handler(
servicer.GetCryptoKeyVersion,
request_deserializer=google_dot_cloud_dot_kms__v1_dot_proto_dot_service__pb2.GetCryptoKeyVersionRequest.FromString,
response_serializer=google_dot_cloud_dot_kms__v1_dot_proto_dot_resources__pb2.CryptoKeyVersion.SerializeToString,
),
"GetPublicKey": grpc.unary_unary_rpc_method_handler(
servicer.GetPublicKey,
request_deserializer=google_dot_cloud_dot_kms__v1_dot_proto_dot_service__pb2.GetPublicKeyRequest.FromString,
response_serializer=google_dot_cloud_dot_kms__v1_dot_proto_dot_resources__pb2.PublicKey.SerializeToString,
),
"CreateKeyRing": grpc.unary_unary_rpc_method_handler(
servicer.CreateKeyRing,
request_deserializer=google_dot_cloud_dot_kms__v1_dot_proto_dot_service__pb2.CreateKeyRingRequest.FromString,
response_serializer=google_dot_cloud_dot_kms__v1_dot_proto_dot_resources__pb2.KeyRing.SerializeToString,
),
"CreateCryptoKey": grpc.unary_unary_rpc_method_handler(
servicer.CreateCryptoKey,
request_deserializer=google_dot_cloud_dot_kms__v1_dot_proto_dot_service__pb2.CreateCryptoKeyRequest.FromString,
response_serializer=google_dot_cloud_dot_kms__v1_dot_proto_dot_resources__pb2.CryptoKey.SerializeToString,
),
"CreateCryptoKeyVersion": grpc.unary_unary_rpc_method_handler(
servicer.CreateCryptoKeyVersion,
request_deserializer=google_dot_cloud_dot_kms__v1_dot_proto_dot_service__pb2.CreateCryptoKeyVersionRequest.FromString,
response_serializer=google_dot_cloud_dot_kms__v1_dot_proto_dot_resources__pb2.CryptoKeyVersion.SerializeToString,
),
"UpdateCryptoKey": grpc.unary_unary_rpc_method_handler(
servicer.UpdateCryptoKey,
request_deserializer=google_dot_cloud_dot_kms__v1_dot_proto_dot_service__pb2.UpdateCryptoKeyRequest.FromString,
response_serializer=google_dot_cloud_dot_kms__v1_dot_proto_dot_resources__pb2.CryptoKey.SerializeToString,
),
"UpdateCryptoKeyVersion": grpc.unary_unary_rpc_method_handler(
servicer.UpdateCryptoKeyVersion,
request_deserializer=google_dot_cloud_dot_kms__v1_dot_proto_dot_service__pb2.UpdateCryptoKeyVersionRequest.FromString,
response_serializer=google_dot_cloud_dot_kms__v1_dot_proto_dot_resources__pb2.CryptoKeyVersion.SerializeToString,
),
"Encrypt": grpc.unary_unary_rpc_method_handler(
servicer.Encrypt,
request_deserializer=google_dot_cloud_dot_kms__v1_dot_proto_dot_service__pb2.EncryptRequest.FromString,
response_serializer=google_dot_cloud_dot_kms__v1_dot_proto_dot_service__pb2.EncryptResponse.SerializeToString,
),
"Decrypt": grpc.unary_unary_rpc_method_handler(
servicer.Decrypt,
request_deserializer=google_dot_cloud_dot_kms__v1_dot_proto_dot_service__pb2.DecryptRequest.FromString,
response_serializer=google_dot_cloud_dot_kms__v1_dot_proto_dot_service__pb2.DecryptResponse.SerializeToString,
),
"AsymmetricSign": grpc.unary_unary_rpc_method_handler(
servicer.AsymmetricSign,
request_deserializer=google_dot_cloud_dot_kms__v1_dot_proto_dot_service__pb2.AsymmetricSignRequest.FromString,
response_serializer=google_dot_cloud_dot_kms__v1_dot_proto_dot_service__pb2.AsymmetricSignResponse.SerializeToString,
),
"AsymmetricDecrypt": grpc.unary_unary_rpc_method_handler(
servicer.AsymmetricDecrypt,
request_deserializer=google_dot_cloud_dot_kms__v1_dot_proto_dot_service__pb2.AsymmetricDecryptRequest.FromString,
response_serializer=google_dot_cloud_dot_kms__v1_dot_proto_dot_service__pb2.AsymmetricDecryptResponse.SerializeToString,
),
"UpdateCryptoKeyPrimaryVersion": grpc.unary_unary_rpc_method_handler(
servicer.UpdateCryptoKeyPrimaryVersion,
request_deserializer=google_dot_cloud_dot_kms__v1_dot_proto_dot_service__pb2.UpdateCryptoKeyPrimaryVersionRequest.FromString,
response_serializer=google_dot_cloud_dot_kms__v1_dot_proto_dot_resources__pb2.CryptoKey.SerializeToString,
),
"DestroyCryptoKeyVersion": grpc.unary_unary_rpc_method_handler(
servicer.DestroyCryptoKeyVersion,
request_deserializer=google_dot_cloud_dot_kms__v1_dot_proto_dot_service__pb2.DestroyCryptoKeyVersionRequest.FromString,
response_serializer=google_dot_cloud_dot_kms__v1_dot_proto_dot_resources__pb2.CryptoKeyVersion.SerializeToString,
),
"RestoreCryptoKeyVersion": grpc.unary_unary_rpc_method_handler(
servicer.RestoreCryptoKeyVersion,
request_deserializer=google_dot_cloud_dot_kms__v1_dot_proto_dot_service__pb2.RestoreCryptoKeyVersionRequest.FromString,
response_serializer=google_dot_cloud_dot_kms__v1_dot_proto_dot_resources__pb2.CryptoKeyVersion.SerializeToString,
),
}
generic_handler = grpc.method_handlers_generic_handler(
"google.cloud.kms.v1.KeyManagementService", rpc_method_handlers
)
server.add_generic_rpc_handlers((generic_handler,))
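# Illustrative wiring only (not part of the generated code): a concrete
# servicer implementation is normally attached to a gRPC server through the
# add_KeyManagementServiceServicer_to_server() helper above. MyKmsServicer
# and the port are placeholder names, not symbols from this package.
#
#   from concurrent import futures
#   server = grpc.server(futures.ThreadPoolExecutor(max_workers=10))
#   add_KeyManagementServiceServicer_to_server(MyKmsServicer(), server)
#   server.add_insecure_port("[::]:50051")
#   server.start()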
| dhermes/gcloud-python | kms/google/cloud/kms_v1/proto/service_pb2_grpc.py | Python | apache-2.0 | 25,508 |
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from ducktape.mark import parametrize
from ducktape.mark.resource import cluster
from kafkatest.services.console_consumer import ConsoleConsumer
from kafkatest.services.kafka import KafkaService
from kafkatest.services.kafka import config_property
from kafkatest.services.verifiable_producer import VerifiableProducer
from kafkatest.services.zookeeper import ZookeeperService
from kafkatest.tests.produce_consume_validate import ProduceConsumeValidateTest
from kafkatest.utils import is_int
from kafkatest.version import LATEST_0_8_2, LATEST_0_9, LATEST_0_10, LATEST_0_10_0, LATEST_0_10_1, LATEST_0_10_2, LATEST_0_11_0, LATEST_1_0, LATEST_1_1, LATEST_2_0, LATEST_2_1, LATEST_2_2, V_0_9_0_0, DEV_BRANCH, KafkaVersion
class TestUpgrade(ProduceConsumeValidateTest):
def __init__(self, test_context):
super(TestUpgrade, self).__init__(test_context=test_context)
def setUp(self):
self.topic = "test_topic"
self.zk = ZookeeperService(self.test_context, num_nodes=1)
self.zk.start()
# Producer and consumer
self.producer_throughput = 1000
self.num_producers = 1
self.num_consumers = 1
def perform_upgrade(self, from_kafka_version, to_message_format_version=None):
self.logger.info("First pass bounce - rolling upgrade")
for node in self.kafka.nodes:
self.kafka.stop_node(node)
node.version = DEV_BRANCH
node.config[config_property.INTER_BROKER_PROTOCOL_VERSION] = from_kafka_version
node.config[config_property.MESSAGE_FORMAT_VERSION] = from_kafka_version
self.kafka.start_node(node)
self.logger.info("Second pass bounce - remove inter.broker.protocol.version config")
for node in self.kafka.nodes:
self.kafka.stop_node(node)
del node.config[config_property.INTER_BROKER_PROTOCOL_VERSION]
if to_message_format_version is None:
del node.config[config_property.MESSAGE_FORMAT_VERSION]
else:
node.config[config_property.MESSAGE_FORMAT_VERSION] = to_message_format_version
self.kafka.start_node(node)
@cluster(num_nodes=6)
@parametrize(from_kafka_version=str(LATEST_2_2), to_message_format_version=None, compression_types=["none"])
@parametrize(from_kafka_version=str(LATEST_2_2), to_message_format_version=None, compression_types=["zstd"])
@parametrize(from_kafka_version=str(LATEST_2_1), to_message_format_version=None, compression_types=["none"])
@parametrize(from_kafka_version=str(LATEST_2_1), to_message_format_version=None, compression_types=["lz4"])
@parametrize(from_kafka_version=str(LATEST_2_0), to_message_format_version=None, compression_types=["none"])
@parametrize(from_kafka_version=str(LATEST_2_0), to_message_format_version=None, compression_types=["snappy"])
@parametrize(from_kafka_version=str(LATEST_1_1), to_message_format_version=None, compression_types=["none"])
@parametrize(from_kafka_version=str(LATEST_1_1), to_message_format_version=None, compression_types=["lz4"])
@parametrize(from_kafka_version=str(LATEST_1_0), to_message_format_version=None, compression_types=["none"])
@parametrize(from_kafka_version=str(LATEST_1_0), to_message_format_version=None, compression_types=["snappy"])
@parametrize(from_kafka_version=str(LATEST_0_11_0), to_message_format_version=None, compression_types=["gzip"])
@parametrize(from_kafka_version=str(LATEST_0_11_0), to_message_format_version=None, compression_types=["lz4"])
@parametrize(from_kafka_version=str(LATEST_0_10_2), to_message_format_version=str(LATEST_0_9), compression_types=["none"])
@parametrize(from_kafka_version=str(LATEST_0_10_2), to_message_format_version=str(LATEST_0_10), compression_types=["snappy"])
@parametrize(from_kafka_version=str(LATEST_0_10_2), to_message_format_version=None, compression_types=["none"])
@parametrize(from_kafka_version=str(LATEST_0_10_2), to_message_format_version=None, compression_types=["lz4"])
@parametrize(from_kafka_version=str(LATEST_0_10_1), to_message_format_version=None, compression_types=["lz4"])
@parametrize(from_kafka_version=str(LATEST_0_10_1), to_message_format_version=None, compression_types=["snappy"])
@parametrize(from_kafka_version=str(LATEST_0_10_0), to_message_format_version=None, compression_types=["snappy"])
@parametrize(from_kafka_version=str(LATEST_0_10_0), to_message_format_version=None, compression_types=["lz4"])
@cluster(num_nodes=7)
@parametrize(from_kafka_version=str(LATEST_0_9), to_message_format_version=None, compression_types=["none"], security_protocol="SASL_SSL")
@cluster(num_nodes=6)
@parametrize(from_kafka_version=str(LATEST_0_9), to_message_format_version=None, compression_types=["snappy"])
@parametrize(from_kafka_version=str(LATEST_0_9), to_message_format_version=None, compression_types=["lz4"])
@parametrize(from_kafka_version=str(LATEST_0_9), to_message_format_version=str(LATEST_0_9), compression_types=["none"])
@parametrize(from_kafka_version=str(LATEST_0_9), to_message_format_version=str(LATEST_0_9), compression_types=["lz4"])
@cluster(num_nodes=7)
@parametrize(from_kafka_version=str(LATEST_0_8_2), to_message_format_version=None, compression_types=["none"])
@parametrize(from_kafka_version=str(LATEST_0_8_2), to_message_format_version=None, compression_types=["snappy"])
def test_upgrade(self, from_kafka_version, to_message_format_version, compression_types,
security_protocol="PLAINTEXT"):
"""Test upgrade of Kafka broker cluster from various versions to the current version
from_kafka_version is a Kafka version to upgrade from
If to_message_format_version is None, it means that we will upgrade to default (latest)
message format version. It is possible to upgrade to 0.10 brokers but still use message
format version 0.9
- Start 3 node broker cluster on version 'from_kafka_version'
- Start producer and consumer in the background
- Perform two-phase rolling upgrade
            - First phase: upgrade brokers to the latest (DEV) version with inter.broker.protocol.version set to
from_kafka_version and log.message.format.version set to from_kafka_version
- Second phase: remove inter.broker.protocol.version config with rolling bounce; if
to_message_format_version is set to 0.9, set log.message.format.version to
to_message_format_version, otherwise remove log.message.format.version config
- Finally, validate that every message acked by the producer was consumed by the consumer
"""
self.kafka = KafkaService(self.test_context, num_nodes=3, zk=self.zk,
version=KafkaVersion(from_kafka_version),
topics={self.topic: {"partitions": 3, "replication-factor": 3,
'configs': {"min.insync.replicas": 2}}})
self.kafka.security_protocol = security_protocol
self.kafka.interbroker_security_protocol = security_protocol
self.kafka.start()
self.producer = VerifiableProducer(self.test_context, self.num_producers, self.kafka,
self.topic, throughput=self.producer_throughput,
message_validator=is_int,
compression_types=compression_types,
version=KafkaVersion(from_kafka_version))
if from_kafka_version <= LATEST_0_10_0:
assert self.kafka.cluster_id() is None
new_consumer = from_kafka_version >= V_0_9_0_0
# TODO - reduce the timeout
self.consumer = ConsoleConsumer(self.test_context, self.num_consumers, self.kafka,
self.topic, new_consumer=new_consumer, consumer_timeout_ms=30000,
message_validator=is_int, version=KafkaVersion(from_kafka_version))
self.run_produce_consume_validate(core_test_action=lambda: self.perform_upgrade(from_kafka_version,
to_message_format_version))
cluster_id = self.kafka.cluster_id()
assert cluster_id is not None
assert len(cluster_id) == 22
| KevinLiLu/kafka | tests/kafkatest/tests/core/upgrade_test.py | Python | apache-2.0 | 9,230 |
# (c) 2012-2014, Michael DeHaan <[email protected]>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
import os
from ansible import constants as C
from ansible.errors import AnsibleParserError, AnsibleUndefinedVariable, AnsibleFileNotFound
from ansible.module_utils.six import string_types
try:
from __main__ import display
except ImportError:
from ansible.utils.display import Display
display = Display()
def load_list_of_blocks(ds, play, parent_block=None, role=None, task_include=None, use_handlers=False, variable_manager=None, loader=None):
'''
Given a list of mixed task/block data (parsed from YAML),
return a list of Block() objects, where implicit blocks
are created for each bare Task.
'''
# we import here to prevent a circular dependency with imports
from ansible.playbook.block import Block
from ansible.playbook.task_include import TaskInclude
from ansible.playbook.role_include import IncludeRole
assert isinstance(ds, (list, type(None)))
block_list = []
if ds:
for block_ds in ds:
b = Block.load(
block_ds,
play=play,
parent_block=parent_block,
role=role,
task_include=task_include,
use_handlers=use_handlers,
variable_manager=variable_manager,
loader=loader,
)
# Implicit blocks are created by bare tasks listed in a play without
# an explicit block statement. If we have two implicit blocks in a row,
# squash them down to a single block to save processing time later.
if b._implicit and len(block_list) > 0 and block_list[-1]._implicit:
for t in b.block:
if isinstance(t._parent, (TaskInclude, IncludeRole)):
t._parent._parent = block_list[-1]
else:
t._parent = block_list[-1]
block_list[-1].block.extend(b.block)
else:
block_list.append(b)
return block_list
def load_list_of_tasks(ds, play, block=None, role=None, task_include=None, use_handlers=False, variable_manager=None, loader=None):
'''
Given a list of task datastructures (parsed from YAML),
return a list of Task() or TaskInclude() objects.
'''
# we import here to prevent a circular dependency with imports
from ansible.playbook.block import Block
from ansible.playbook.handler import Handler
from ansible.playbook.task import Task
from ansible.playbook.task_include import TaskInclude
from ansible.playbook.role_include import IncludeRole
from ansible.playbook.handler_task_include import HandlerTaskInclude
from ansible.template import Templar
assert isinstance(ds, list)
task_list = []
for task_ds in ds:
assert isinstance(task_ds, dict)
if 'block' in task_ds:
t = Block.load(
task_ds,
play=play,
parent_block=block,
role=role,
task_include=task_include,
use_handlers=use_handlers,
variable_manager=variable_manager,
loader=loader,
)
task_list.append(t)
else:
if 'include' in task_ds:
if use_handlers:
include_class = HandlerTaskInclude
else:
include_class = TaskInclude
t = include_class.load(
task_ds,
block=block,
role=role,
task_include=None,
variable_manager=variable_manager,
loader=loader
)
all_vars = variable_manager.get_vars(loader=loader, play=play, task=t)
templar = Templar(loader=loader, variables=all_vars)
# check to see if this include is dynamic or static:
# 1. the user has set the 'static' option to false or true
# 2. one of the appropriate config options was set
if t.static is not None:
is_static = t.static
else:
is_static = C.DEFAULT_TASK_INCLUDES_STATIC or \
(use_handlers and C.DEFAULT_HANDLER_INCLUDES_STATIC) or \
(not templar._contains_vars(t.args['_raw_params']) and t.all_parents_static() and not t.loop)
if is_static:
if t.loop is not None:
raise AnsibleParserError("You cannot use 'static' on an include with a loop", obj=task_ds)
# we set a flag to indicate this include was static
t.statically_loaded = True
# handle relative includes by walking up the list of parent include
# tasks and checking the relative result to see if it exists
parent_include = block
cumulative_path = None
found = False
subdir = 'tasks'
if use_handlers:
subdir = 'handlers'
while parent_include is not None:
if not isinstance(parent_include, TaskInclude):
parent_include = parent_include._parent
continue
parent_include_dir = templar.template(os.path.dirname(parent_include.args.get('_raw_params')))
if cumulative_path is None:
cumulative_path = parent_include_dir
elif not os.path.isabs(cumulative_path):
cumulative_path = os.path.join(parent_include_dir, cumulative_path)
include_target = templar.template(t.args['_raw_params'])
if t._role:
new_basedir = os.path.join(t._role._role_path, subdir, cumulative_path)
include_file = loader.path_dwim_relative(new_basedir, subdir, include_target)
else:
include_file = loader.path_dwim_relative(loader.get_basedir(), cumulative_path, include_target)
if os.path.exists(include_file):
found = True
break
else:
parent_include = parent_include._parent
if not found:
try:
include_target = templar.template(t.args['_raw_params'])
except AnsibleUndefinedVariable:
raise AnsibleParserError(
"Error when evaluating variable in include name: %s.\n\n" \
"When using static includes, ensure that any variables used in their names are defined in vars/vars_files\n" \
"or extra-vars passed in from the command line. Static includes cannot use variables from inventory\n" \
"sources like group or host vars." % t.args['_raw_params'],
obj=task_ds,
suppress_extended_error=True,
)
if t._role:
include_file = loader.path_dwim_relative(t._role._role_path, subdir, include_target)
else:
include_file = loader.path_dwim(include_target)
try:
data = loader.load_from_file(include_file)
if data is None:
return []
elif not isinstance(data, list):
raise AnsibleParserError("included task files must contain a list of tasks", obj=data)
# since we can't send callbacks here, we display a message directly in
# the same fashion used by the on_include callback. We also do it here,
# because the recursive nature of helper methods means we may be loading
# nested includes, and we want the include order printed correctly
display.vv("statically included: %s" % include_file)
except AnsibleFileNotFound:
if t.static or \
C.DEFAULT_TASK_INCLUDES_STATIC or \
C.DEFAULT_HANDLER_INCLUDES_STATIC and use_handlers:
raise
display.deprecated(
"Included file '%s' not found, however since this include is not " \
"explicitly marked as 'static: yes', we will try and include it dynamically " \
"later. In the future, this will be an error unless 'static: no' is used " \
"on the include task. If you do not want missing includes to be considered " \
"dynamic, use 'static: yes' on the include or set the global ansible.cfg " \
"options to make all inclues static for tasks and/or handlers" % include_file,
)
task_list.append(t)
continue
ti_copy = t.copy(exclude_parent=True)
ti_copy._parent = block
included_blocks = load_list_of_blocks(
data,
play=play,
parent_block=None,
task_include=ti_copy,
role=role,
use_handlers=use_handlers,
loader=loader,
variable_manager=variable_manager,
)
# pop tags out of the include args, if they were specified there, and assign
# them to the include. If the include already had tags specified, we raise an
# error so that users know not to specify them both ways
tags = ti_copy.vars.pop('tags', [])
if isinstance(tags, string_types):
tags = tags.split(',')
if len(tags) > 0:
if len(ti_copy.tags) > 0:
raise AnsibleParserError(
"Include tasks should not specify tags in more than one way (both via args and directly on the task). " \
"Mixing styles in which tags are specified is prohibited for whole import hierarchy, not only for single import statement",
obj=task_ds,
suppress_extended_error=True,
)
display.deprecated("You should not specify tags in the include parameters. All tags should be specified using the task-level option")
else:
tags = ti_copy.tags[:]
# now we extend the tags on each of the included blocks
for b in included_blocks:
b.tags = list(set(b.tags).union(tags))
# END FIXME
# FIXME: handlers shouldn't need this special handling, but do
# right now because they don't iterate blocks correctly
if use_handlers:
for b in included_blocks:
task_list.extend(b.block)
else:
task_list.extend(included_blocks)
else:
task_list.append(t)
elif 'include_role' in task_ds:
ir = IncludeRole.load(
task_ds,
block=block,
role=role,
task_include=None,
variable_manager=variable_manager,
loader=loader
)
# 1. the user has set the 'static' option to false or true
# 2. one of the appropriate config options was set
if ir.static is not None:
is_static = ir.static
else:
display.debug('Determine if include_role is static')
# Check to see if this include is dynamic or static:
all_vars = variable_manager.get_vars(loader=loader, play=play, task=ir)
templar = Templar(loader=loader, variables=all_vars)
needs_templating = False
for param in ir.args:
if templar._contains_vars(ir.args[param]):
if not templar.templatable(ir.args[param]):
needs_templating = True
break
is_static = C.DEFAULT_TASK_INCLUDES_STATIC or \
(use_handlers and C.DEFAULT_HANDLER_INCLUDES_STATIC) or \
(not needs_templating and ir.all_parents_static() and not ir.loop)
display.debug('Determined that if include_role static is %s' % str(is_static))
if is_static:
# uses compiled list from object
t = task_list.extend(ir.get_block_list(variable_manager=variable_manager, loader=loader))
else:
# passes task object itself for latter generation of list
t = task_list.append(ir)
else:
if use_handlers:
t = Handler.load(task_ds, block=block, role=role, task_include=task_include, variable_manager=variable_manager, loader=loader)
else:
t = Task.load(task_ds, block=block, role=role, task_include=task_include, variable_manager=variable_manager, loader=loader)
task_list.append(t)
return task_list
def load_list_of_roles(ds, play, current_role_path=None, variable_manager=None, loader=None):
'''
Loads and returns a list of RoleInclude objects from the datastructure
list of role definitions
'''
# we import here to prevent a circular dependency with imports
from ansible.playbook.role.include import RoleInclude
assert isinstance(ds, list)
roles = []
for role_def in ds:
i = RoleInclude.load(role_def, play=play, current_role_path=current_role_path, variable_manager=variable_manager, loader=loader)
roles.append(i)
return roles
| j00bar/ansible | lib/ansible/playbook/helpers.py | Python | gpl-3.0 | 15,612 |
from flask import make_response
from flask import jsonify
from flask import Blueprint
from flask import request
from pymongo import MongoClient
from bson.objectid import ObjectId
from datetime import datetime, time
from twilio.rest import TwilioRestClient
from twilio.twiml import Response
import re
#ACCOUNT_SID = key
#AUTH_TOKEN = key
#ORIGIN = phone num
client = MongoClient()
db = client.menudb
menu_items = db.menu_items
order_history = db.order_history
order = Blueprint('order', __name__, template_folder = 'templates')
def send_text(destination,origin,message):
try:
TwilioClient = TwilioRestClient(ACCOUNT_SID, AUTH_TOKEN)
TwilioClient.messages.create(
to = destination,
from_= origin,
body = message
)
return True
except:
return False
def write_message(name,id_card, movie):
    return "Thanks for using WesFlix! " + name + ", we hope you enjoy watching " + movie + ". " + "ID CARD NUMBER: " + id_card
@order.route('', methods=['GET'])
def index():
dest = request.form.get('dest')
origin = request.form.get('origin')
name = request.form.get('name')
id_card = request.form.get('id_card')
movie = request.form.get('movie')
msg = write_message(name, id_card, movie)
send_text(dest, origin, msg)
| barca/WesFlix | WesFlix/src/order/order.py | Python | mit | 1,271 |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Copyright 2014-2017 Vincent Noel ([email protected])
#
# This file is part of libSigNetSim.
#
# libSigNetSim is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# libSigNetSim is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with libSigNetSim. If not, see <http://www.gnu.org/licenses/>.
""" test_event.py
This file...
"""
from django.conf import settings
from django.test import TestCase, Client
from signetsim.models import User, Project, SbmlModel
from libsignetsim import SbmlDocument, MathFormula
from os import mkdir
from os.path import dirname, join, isdir
from json import loads
from sympy import simplify
from shutil import rmtree
class TestEvent(TestCase):
fixtures = ["user_with_project.json"]
def testEvents(self):
user = User.objects.filter(username='test_user')[0]
self.assertEqual(len(Project.objects.filter(user=user)), 1)
project = Project.objects.filter(user=user)[0]
self.assertEqual(len(SbmlModel.objects.filter(project=project)), 0)
if isdir(join(settings.MEDIA_ROOT, project.folder)):
rmtree(join(settings.MEDIA_ROOT, project.folder))
mkdir(join(settings.MEDIA_ROOT, project.folder))
c = Client()
self.assertTrue(c.login(username='test_user', password='password'))
response_choose_project = c.get('/project/%s/' % project.folder)
self.assertRedirects(response_choose_project, '/models/', status_code=302, target_status_code=200)
files_folder = join(dirname(__file__), "files")
model_filename = join(files_folder, "modelqlzB7i.xml")
response_load_model = c.post('/models/', {
'action': 'load_model',
'docfile': open(model_filename, 'rb')
})
self.assertEqual(response_load_model.status_code, 200)
self.assertEqual(len(SbmlModel.objects.filter(project=project)), 1)
model = SbmlModel.objects.filter(project=project)[0]
sbml_doc = SbmlDocument()
sbml_doc.readSbmlFromFile(join(settings.MEDIA_ROOT, str(model.sbml_file)))
sbml_model = sbml_doc.getModelInstance()
listOfVariables = []
for variable in sbml_model.listOfVariables:
if ((variable.isParameter() and variable.isGlobal())
or variable.isSpecies()
or variable.isCompartment()
or variable.isStoichiometry()):
listOfVariables.append(variable)
response_choose_project = c.post('/models/', {
'action': 'choose_project',
'project_id': 0
})
self.assertEqual(response_choose_project.status_code, 200)
self.assertEqual(response_choose_project.context['project_name'], "Project")
response_choose_model = c.post('/edit/events/', {
'action': 'choose_model',
'model_id': 0
})
self.assertEqual(response_choose_model.status_code, 200)
self.assertEqual(response_choose_model.context['model_name'], "SOS-Ras-MAPK with n17")
response_save_event = c.post('/edit/events/', {
'action': 'save',
'event_id': '',
'event_sbmlid': 'test_event',
'event_name': "Test event",
'event_trigger': "time==0",
'event_priority': "",
'event_delay': "",
'event_assignment_0_id': listOfVariables.index(sbml_model.listOfVariables.getBySbmlId('ras_gtp')),
'event_assignment_0_expression': 'ras_gtp*2'
})
self.assertEqual(response_save_event.status_code, 200)
self.assertEqual(response_save_event.context['form'].getErrors(), [])
model = SbmlModel.objects.filter(project=project)[0]
sbml_doc = SbmlDocument()
sbml_doc.readSbmlFromFile(join(settings.MEDIA_ROOT, str(model.sbml_file)))
sbml_model = sbml_doc.getModelInstance()
listOfVariables = []
for variable in sbml_model.listOfVariables:
if ((variable.isParameter() and variable.isGlobal())
or variable.isSpecies()
or variable.isCompartment()
or variable.isStoichiometry()):
listOfVariables.append(variable)
response_get_event = c.post('/json/get_event/', {
'event_ind': '0',
})
self.assertEqual(response_get_event.status_code, 200)
json_response = loads(response_get_event.content.decode('utf-8'))
self.assertEqual(json_response[u'event_ind'], 0)
self.assertEqual(json_response[u'event_name'], "Test event")
formula = MathFormula(sbml_model)
formula.setPrettyPrintMathFormula("time==0", rawFormula=True)
formula_response = MathFormula(sbml_model)
formula_response.setPrettyPrintMathFormula(json_response[u'event_trigger'], rawFormula=True)
self.assertEqual(simplify(formula.getDeveloppedInternalMathFormula()-formula_response.getDeveloppedInternalMathFormula()), 0)
self.assertEqual(json_response[u'event_delay'], "")
self.assertEqual(json_response[u'list_of_assignments'][0][0], listOfVariables.index(sbml_model.listOfVariables.getBySbmlId('ras_gtp')))
formula = MathFormula(sbml_model)
formula.setPrettyPrintMathFormula("ras_gtp*2", rawFormula=True)
formula_response = MathFormula(sbml_model)
formula_response.setPrettyPrintMathFormula(json_response[u'list_of_assignments'][0][2], rawFormula=True)
self.assertEqual(simplify(formula.getDeveloppedInternalMathFormula()-formula_response.getDeveloppedInternalMathFormula()), 0)
self.assertTrue(u'event_assignment_variable_1' not in json_response)
# Modifying an event
response_save_event = c.post('/edit/events/', {
'action': 'save',
'event_id': '0',
'event_sbmlid': 'test_event',
'event_name': "Test event",
'event_trigger': "time==100",
'event_priority': "",
'event_delay': "",
'event_assignment_0_id': listOfVariables.index(sbml_model.listOfVariables.getBySbmlId('ras_gtp')),
'event_assignment_0_expression': 'ras_gtp*2',
'event_assignment_1_id': listOfVariables.index(sbml_model.listOfVariables.getBySbmlId('ras_gdp')),
'event_assignment_1_expression': 'ras_gdp/2',
})
self.assertEqual(response_save_event.status_code, 200)
self.assertEqual(response_save_event.context['form'].getErrors(), [])
model = SbmlModel.objects.filter(project=project)[0]
sbml_doc = SbmlDocument()
sbml_doc.readSbmlFromFile(join(settings.MEDIA_ROOT, str(model.sbml_file)))
sbml_model = sbml_doc.getModelInstance()
event = sbml_model.listOfEvents[0]
listOfVariables = []
for variable in sbml_model.listOfVariables:
if ((variable.isParameter() and variable.isGlobal())
or variable.isSpecies()
or variable.isCompartment()
or variable.isStoichiometry()):
listOfVariables.append(variable)
self.assertEqual(event.getNameOrSbmlId(), "Test event")
formula = MathFormula(sbml_model)
formula.setPrettyPrintMathFormula("time==100", rawFormula=True)
self.assertEqual(
simplify(formula.getDeveloppedInternalMathFormula() - event.trigger.getDeveloppedInternalMathFormula()),
0)
self.assertEqual(event.priority, None)
self.assertEqual(event.delay, None)
self.assertEqual(event.listOfEventAssignments[0].getVariable(), sbml_model.listOfVariables.getBySbmlId('ras_gtp'))
formula = MathFormula(sbml_model)
formula.setPrettyPrintMathFormula("ras_gtp*2")
self.assertEqual(
simplify(formula.getDeveloppedInternalMathFormula() - event.listOfEventAssignments[0].getDefinition().getDeveloppedInternalMathFormula()),
0)
self.assertEqual(event.listOfEventAssignments[1].getVariable(), sbml_model.listOfVariables.getBySbmlId('ras_gdp'))
formula = MathFormula(sbml_model)
formula.setPrettyPrintMathFormula("ras_gdp/2")
self.assertEqual(
simplify(formula.getDeveloppedInternalMathFormula() - event.listOfEventAssignments[1].getDefinition().getDeveloppedInternalMathFormula()),
0)
response_delete_event = c.post('/edit/events/', {
'action': 'delete',
'event_id': 0
})
self.assertEqual(response_delete_event.status_code, 200)
self.assertEqual(response_delete_event.context['form'].getErrors(), [])
model = SbmlModel.objects.filter(project=project)[0]
sbml_doc = SbmlDocument()
sbml_doc.readSbmlFromFile(join(settings.MEDIA_ROOT, str(model.sbml_file)))
sbml_model = sbml_doc.getModelInstance()
self.assertEqual(len(sbml_model.listOfEvents), 0)
| msreis/SigNetSim | signetsim/tests/test_event.py | Python | agpl-3.0 | 8,329 |
import glob
import data_Augment as da
norm = glob.glob("/home/neo/work/cnn_down/data/baby_box/*nor*")
down = glob.glob("/home/neo/work/cnn_down/data/baby_box/*down*")
print len(norm)
print len(down)
# train_nor=nor[:int(len(nor)*0.6+1)]
# test_nor=nor[int(len(nor)*0.6+2):int(len(nor)*0.6+2)+int(len(nor)*0.2+1)]
# val_nor=nor[int(len(nor)*0.6+2)+int(len(nor)*0.2+2):]
# down=da.get_data(down)
# translated_d=da.translate(down)
# rotate_d=da.rotate(down)
#da.out_img(translated_d, rotate_d, "b_down")
# norm=da.get_data(norm)
# translated_n=da.translate(norm)
# rotate_n=da.rotate(norm)
#da.out_img(translated_n, rotate_n, "b_nor")
# print len(train_nor) ,train_nor[-1:]
# print len(test_nor), test_nor[0], test_nor[-1:]
# print len(val_nor), val_nor[0]
labels = open("/home/neo/work/cnn_down/data/224_align_col/labels.txt", "a")
for i in norm:
labels.write(i + " 0\n")
for i in down:
labels.write(i + " 1\n")
labels.close()
| ishank26/Kutils | img/preprocess/label.py | Python | mit | 947 |
"""Definitions for all core logical instructions."""
import operator as op
from typing import Tuple
from pyshgp.push.type_library import PushTypeLibrary
from pyshgp.push.instruction import SimpleInstruction
def _and(a: bool, b: bool) -> Tuple[bool]:
return a and b,
def _or(a: bool, b: bool) -> Tuple[bool]:
return a or b,
def _not(a: bool) -> Tuple[bool]:
return not a,
def _xor(a: bool, b: bool) -> Tuple[bool]:
return op.xor(a, b),
def _invert_first_then_and(a: bool, b: bool) -> Tuple[bool]:
return (not a) and b,
def _invert_second_then_and(a: bool, b: bool) -> Tuple[bool]:
return a and (not b),
def _bool_from_int(i: int) -> Tuple[bool]:
return bool(i),
def _bool_from_float(f: float) -> Tuple[bool]:
return bool(f),
def instructions(type_library: PushTypeLibrary):
    """Return all core logical instructions."""
i = []
i.append(SimpleInstruction(
"bool_and",
_and,
input_stacks=["bool", "bool"],
output_stacks=["bool"],
code_blocks=0,
docstring="Pushes the result of and-ing the top two booleans."
))
i.append(SimpleInstruction(
"bool_or",
_or,
input_stacks=["bool", "bool"],
output_stacks=["bool"],
code_blocks=0,
docstring="Pushes the result of or-ing the top two booleans."
))
i.append(SimpleInstruction(
"bool_not",
_not,
input_stacks=["bool"],
output_stacks=["bool"],
code_blocks=0,
docstring="Pushes the inverse of the boolean."
))
i.append(SimpleInstruction(
"bool_xor",
_xor,
input_stacks=["bool", "bool"],
output_stacks=["bool"],
code_blocks=0,
docstring="Pushes the result of xor-ing the top two booleans."
))
i.append(SimpleInstruction(
"bool_invert_first_then_and",
_invert_first_then_and,
input_stacks=["bool", "bool"],
output_stacks=["bool"],
code_blocks=0,
        docstring="""Pushes the result of and-ing the top two booleans after inverting the
        top boolean."""
))
    i.append(SimpleInstruction(
        "bool_invert_second_then_and",
_invert_second_then_and,
input_stacks=["bool", "bool"],
output_stacks=["bool"],
code_blocks=0,
        docstring="""Pushes the result of and-ing the top two booleans after inverting the
        second boolean."""
))
i.append(SimpleInstruction(
"bool_from_int",
_bool_from_int,
input_stacks=["int"],
output_stacks=["bool"],
code_blocks=0,
docstring="If the top int is 0, pushes False. Pushes True for any other int value."
))
i.append(SimpleInstruction(
"bool_from_float",
_bool_from_float,
input_stacks=["float"],
output_stacks=["bool"],
code_blocks=0,
docstring="If the top float is 0.0, pushes False. Pushes True for any other float value."
))
return i
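# Minimal usage sketch (assumption: the default PushTypeLibrary() registers
# the core "bool", "int" and "float" stacks that these instructions rely on).
if __name__ == "__main__":
    for instruction in instructions(PushTypeLibrary()):
        print(instruction.name)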
| erp12/pyshgp | pyshgp/push/instructions/logical.py | Python | mit | 3,014 |
import astropy.units as u
from astropy.coordinates import SkyOffsetFrame, SphericalRepresentation, UnitSphericalRepresentation
__all__ = ['NorthOffsetFrame']
class NorthOffsetFrame(object):
"""
A frame which is relative to some position and another frame. Based on
`astropy.coordinates.SkyOffsetFrame`
Coordinates in a NorthOffsetFrame are both centered on the position
specified by the ``north`` keyword *and* they are oriented in the same
manner as the ``north`` frame.
Unlike `~astropy.coordinates.SkyOffsetFrame` a `NorthOffsetFrame` allows
you to specify the position of the new north pole rather than the new
origin to centre the new frame.
Examples
--------
A common use for this is to create a frame derived from Heliographic, which
has the north pole at some point of interest. In this new frame, lines of
longitude form great circles radially away from the point, and lines of
latitude measure angular distance from the point.
In this example the new frame is shifted so the new north pole is at (20,
20) in the Heliographic Stonyhurst frame. The new grid is overplotted in
blue.
.. plot::
:include-source:
import matplotlib.pyplot as plt
import astropy.units as u
from astropy.coordinates import SkyCoord
from sunpy.coordinates import NorthOffsetFrame
import sunpy.map
from sunpy.data.sample import AIA_171_IMAGE
m = sunpy.map.Map(AIA_171_IMAGE)
north = SkyCoord(20*u.deg, 20*u.deg, frame="heliographic_stonyhurst")
new_frame = NorthOffsetFrame(north=north)
ax = plt.subplot(projection=m)
m.plot()
overlay = ax.get_coords_overlay('heliographic_stonyhurst')
overlay[0].set_ticks(spacing=30. * u.deg, color='white')
overlay.grid(ls='-', color='white')
overlay = ax.get_coords_overlay(new_frame)
overlay[0].set_ticks(spacing=30. * u.deg)
overlay.grid(ls='-', color='blue')
Notes
-----
``NorthOffsetFrame`` is a wrapper around the
`~astropy.coordinates.SkyOffsetFrame` factory class. This class will
    calculate the desired coordinates of the ``origin`` from the ``north``
keyword argument and then create a `~astropy.coordinates.SkyOffsetFrame`.
Using this frame is equivalent to using
`~astropy.coordinates.SkyOffsetFrame` with ``lat = lat - 90*u.deg`` for a
position of the north pole in the original northern hemisphere.
This class will only work for Heliographic-Stonyhurst and Heliographic
Carrington frames, and not helioprojective. If you want to rotate a
helioprojective frame, it would be more natural to use the
`~astropy.coordinates.SkyOffsetFrame`.
"""
def __new__(cls, *args, **kwargs):
origin_frame = kwargs.pop('north', None)
if origin_frame is None:
            raise TypeError("Can't initialize a NorthOffsetFrame without the north= keyword.")
if hasattr(origin_frame, 'frame'):
origin_frame = origin_frame.frame
if not isinstance(origin_frame.data, SphericalRepresentation):
rep = origin_frame.represent_as(SphericalRepresentation)
else:
rep = origin_frame.data
lon = rep.lon
lat = rep.lat
if lat > 0*u.deg:
lat = lat - 90*u.deg
rotation = None
else:
lon = lon - 180*u.deg
lat = -90*u.deg - lat
rotation = 180*u.deg
if isinstance(origin_frame.data, UnitSphericalRepresentation):
new_rep = origin_frame.representation_type(lon=lon,
lat=lat)
else:
new_rep = origin_frame.representation_type(lon=lon,
lat=lat,
distance=rep.distance)
new_origin = origin_frame.realize_frame(new_rep)
kwargs['origin'] = new_origin
kwargs['rotation'] = rotation
return SkyOffsetFrame(*args, **kwargs)
| dpshelio/sunpy | sunpy/coordinates/offset_frame.py | Python | bsd-2-clause | 4,113 |
# -*- coding: utf-8 -*-
from __future__ import absolute_import, unicode_literals
from django.conf import settings
from django.conf.urls import include, url
from django.conf.urls.static import static
from django.contrib import admin
from django.views import defaults as default_views
from django.views.generic import TemplateView
urlpatterns = [
url(r'^$', TemplateView.as_view(
template_name='landing.html'), name='landing'),
url(settings.ADMIN_URL, include(admin.site.urls)),
] + static(settings.MEDIA_URL, document_root=settings.MEDIA_ROOT)
# These error pages are normally not reachable when DEBUG = True (Django shows
# its own debug pages instead), so expose them here for previewing in development
if settings.DEBUG:
urlpatterns += [
url(r'^400/$', default_views.bad_request, {'exception':
Exception('Bad request')}),
url(r'^403/$', default_views.permission_denied, {'exception':
Exception('Permission denied')}),
url(r'^404/$', default_views.page_not_found, {'exception':
Exception('Page not found')}),
url(r'^500/$', default_views.server_error),
]
| rdmurphy/django-template | config/urls.py | Python | mit | 1,047 |
def entry_to_offset(n):
""" Convert an entry number to an actual bytes offset. """
return n * (4*7)
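# For example, entry_to_offset(3) == 84: each entry appears to occupy seven
# 4-byte fields (28 bytes), hence the n * (4*7) above.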
| isra17/nrs | nrs/__init__.py | Python | gpl-3.0 | 109 |
from util import *
update_addonxml('video game') | bbr25/plugin.program.iarlnes | resources/lib/update_addonxml_provides_video.py | Python | gpl-2.0 | 49 |
# -*- coding: utf-8 -*-
from django.shortcuts import render, redirect
from django.views.generic import View
from django.http import HttpResponse
from reportlab.pdfgen import canvas
from reportlab.lib.pagesizes import letter
from reportlab.lib.styles import getSampleStyleSheet
from reportlab.lib.units import inch
from reportlab.platypus import (Flowable, Paragraph,
SimpleDocTemplate, Spacer)
from .models import TablaSolicitud
from .models import Bitacora
from .models import TablaAlumno
# Create your views here.
class ConsultarDocumento(View):
template_name = "consultarDocumento.html"
def get(self, request):
return render(
request,
self.template_name,
)
class VerDocumento(View):
template_name = "verDocumento.html"
model = TablaAlumno
model2 = TablaSolicitud
def get(self, request, folio):
self.request.session['errorConsulta'] = None
print(folio)
context = dict()
try:
alumn=self.model.objects.get(codigo = folio)
except:
self.request.session['errorConsulta'] = "Es incorrecto el código insertado"
return redirect('consultar')
context['solicitudes'] = self.model2.objects.filter(alumno_id=alumn.id)
return render(
request,
self.template_name,
context
)
class VerPdf(View):
template_name = "verPdf.html"
model = TablaSolicitud
model2 = TablaAlumno
def get(self, request, id, solicitudId):
response = HttpResponse(content_type='application/pdf')
response['Content-Disposition'] = 'attachment; filename="Documento.pdf"'
p = canvas.Canvas(response)
alumno = self.model2.objects.get(codigo=id)
bitacora = self.model.objects.get(id = solicitudId)
        # ReportLab canvas coordinates are (x, y) in points, origin at the bottom-left of the page
p.setFont("Helvetica", 16)
p.drawCentredString(260,800,"INSTITUTO POLITECNICO NACIONAL")
p.drawCentredString(260,770,"ESCUELA SUPERIOR DE COMPUTO")
        p.drawCentredString(280,740,"SUBDIRECCION DE SERVICIOS EDUCATIVOS E INTEGRACION SOCIAL")
p.line(120,700,580,700)
p.setFont("Helvetica", 12)
p.drawCentredString(260,715,"DEPARTAMENTO DE GESTION ESCOLAR")
p.drawCentredString(260,700,str(bitacora.documento))
p.drawCentredString(100,695,"A QUIEN CORRESPONDA:")
p.drawCentredString(100,670,"HACE CONSTAR QUE EL ALUMNO")
p.drawCentredString(260,650,str(bitacora.alumno))
p.drawCentredString(100,630,"CON NUMERO DE BOLETA")
p.drawCentredString(230,630,str(bitacora.alumno.boleta))
p.drawCentredString(380,630,"ESTA INSCRITO EN ESTE PLANTEL");
p.drawCentredString(200, 600, str(bitacora.fecha))
p.drawCentredString(200, 610, str(bitacora.estado))
p.drawCentredString(200, 620, str(bitacora.folio))
p.showPage()
p.save()
return response
| CallmeTorre/Idalia | ESCOM/ConsultarDocumento/views.py | Python | apache-2.0 | 2,955 |
from numpy import *
from subprocess import Popen, PIPE
from meshUtils import *
def aftMesh(xy, d=None):
'''
xy should be an n by 2 array describing the boundary:
1. The boundary is a union of loops.
2. In each loop, the first node and the last node must be identical.
This fact is used to distinguish different loops.
3. When moving to the next node within one loop,
the fluid domain must be located on the right.
4. Loops can overlap in any way, in this case the shared node(s)
must be presented in each loop.
Returns v, t, b
v: nv by 2 float array, vertices
t: nt by 3 int array, triangles
    b: nb by 2 int array, boundary edges
'''
xy = array(xy, float)
assert xy.shape[1] == 2
    #assert (xy[-1] == xy[0]).all() # that's right, comparing floating points
center, diameter = centerDiameter(xy)
if d is not None: diameter = d
# far field
s = linspace(0, 2*pi, 17)
s[-1] = 0
far = transpose([sin(s), cos(s)]) * diameter * 10 + center
xy = vstack([xy, far])
#
proc = Popen('./aftMesh', stdin=PIPE, stdout=PIPE)
proc.stdin.write('{0}\n'.format(xy.shape[0]))
for x, y in xy:
proc.stdin.write('{0} {1}\n'.format(x, y))
return readOutput(proc.stdout)
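# Usage sketch (illustrative; assumes the ./aftMesh executable has been built
# in the working directory). The loop is a closed unit square -- note the
# repeated first node -- and its traversal direction must be chosen so the
# fluid domain lies to the right of each edge, as described in the docstring.
#
#   square = [[0., 0.], [0., 1.], [1., 1.], [1., 0.], [0., 0.]]
#   v, t, b = aftMesh(square)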
def mbaMesh(v, t, b, nE, metric):
'''
Both input and return are in the form of v, t, b
v: nv by 2 float array, vertices
t: nt by 3 int array, triangles
    b: nb by 2 int array, boundary edges
Additional input:
nE: desired number of elements
metric: nv by 3 float array controlling density
[metric[0] metric[2]]
[metric[2] metric[1]]
'''
t, b = t + 1, b + 1
#
proc = Popen('./mbaMesh', stdin=PIPE, stdout=PIPE)
proc.stdin.write('{0} {1} {2} {3}\n'.format(nE, \
v.shape[0], t.shape[0], b.shape[0]))
for x, y in v:
proc.stdin.write('{0} {1}\n'.format(x, y))
for i1, i2, i3 in t:
proc.stdin.write('{0} {1} {2}\n'.format(i1, i2, i3))
for i1, i2 in b:
proc.stdin.write('{0} {1}\n'.format(i1, i2))
for mxx, myy, mxy in metric:
proc.stdin.write('{0} {1} {2}\n'.format(mxx, myy, mxy))
PASSCODE = '=== Output Data Starts Here 09887654321 ==='
lines = [proc.stdout.readline()]
while lines[-1] != '' and PASSCODE not in lines[-1]:
lines.append(proc.stdout.readline())
if lines[-1] != '':
return readOutput(proc.stdout)
else:
print ''.join(lines)
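# Metric sketch (illustrative): a uniform isotropic metric -- the identity
# tensor (mxx, myy, mxy) = (1, 1, 0) at every vertex -- asks mbaMesh for
# roughly equal-sized triangles everywhere.
#
#   metric = ones((v.shape[0], 3)) * array([1., 1., 0.])
#   v2, t2, b2 = mbaMesh(v, t, b, 5000, metric)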
def readOutput(f):
'''
Used in aftMesh and mbaMesh, reads FORTRAN output of mesh
'''
nv, nt, nb = array(f.readline().strip().split(), int)
lines = f.readlines()
v = array([l.strip().split() for l in lines[:nv]], float)
t = array([l.strip().split() for l in lines[nv:nv+nt]], int)
b = array([l.strip().split() for l in lines[nv+nt:nv+nt+nb]], int)
assert v.shape[1] == 2 and t.shape[1] == 3 and b.shape[1] == 2
# shift for python indexing
return v, t-1, b-1
| gomezstevena/x-wind | src/meshAni2D.py | Python | gpl-3.0 | 2,998 |
#!/usr/bin/env python
import copy, errno, sys, stat, re
from bup import options, git, metadata, vfs
from bup.helpers import *
from bup._helpers import write_sparsely
optspec = """
bup restore [-C outdir] </branch/revision/path/to/dir ...>
--
C,outdir= change to given outdir before extracting files
numeric-ids restore numeric IDs (user, group, etc.) rather than names
exclude-rx= skip paths matching the unanchored regex (may be repeated)
exclude-rx-from= skip --exclude-rx patterns in file (may be repeated)
sparse create sparse files
v,verbose increase log output (can be used more than once)
map-user= given OLD=NEW, restore OLD user as NEW user
map-group= given OLD=NEW, restore OLD group as NEW group
map-uid= given OLD=NEW, restore OLD uid as NEW uid
map-gid= given OLD=NEW, restore OLD gid as NEW gid
q,quiet don't show progress meter
"""
total_restored = 0
# stdout should be flushed after each line, even when not connected to a tty
sys.stdout.flush()
sys.stdout = os.fdopen(sys.stdout.fileno(), 'w', 1)
def verbose1(s):
if opt.verbose >= 1:
print s
def verbose2(s):
if opt.verbose >= 2:
print s
def plog(s):
if opt.quiet:
return
qprogress(s)
def valid_restore_path(path):
path = os.path.normpath(path)
if path.startswith('/'):
path = path[1:]
if '/' in path:
return True
def print_info(n, fullname):
if stat.S_ISDIR(n.mode):
verbose1('%s/' % fullname)
elif stat.S_ISLNK(n.mode):
verbose2('%s@ -> %s' % (fullname, n.readlink()))
else:
verbose2(fullname)
def create_path(n, fullname, meta):
if meta:
meta.create_path(fullname)
else:
# These fallbacks are important -- meta could be null if, for
# example, save created a "fake" item, i.e. a new strip/graft
# path element, etc. You can find cases like that by
# searching for "Metadata()".
unlink(fullname)
if stat.S_ISDIR(n.mode):
mkdirp(fullname)
elif stat.S_ISLNK(n.mode):
os.symlink(n.readlink(), fullname)
def parse_owner_mappings(type, options, fatal):
"""Traverse the options and parse all --map-TYPEs, or call Option.fatal()."""
opt_name = '--map-' + type
value_rx = r'^([^=]+)=([^=]*)$'
if type in ('uid', 'gid'):
value_rx = r'^(-?[0-9]+)=(-?[0-9]+)$'
owner_map = {}
for flag in options:
(option, parameter) = flag
if option != opt_name:
continue
match = re.match(value_rx, parameter)
if not match:
raise fatal("couldn't parse %s as %s mapping" % (parameter, type))
old_id, new_id = match.groups()
if type in ('uid', 'gid'):
old_id = int(old_id)
new_id = int(new_id)
owner_map[old_id] = new_id
return owner_map
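# For example (illustrative values): "--map-user root=bup --map-user joe=joe2"
# parsed with type='user' yields {'root': 'bup', 'joe': 'joe2'}.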
def apply_metadata(meta, name, restore_numeric_ids, owner_map):
m = copy.deepcopy(meta)
m.user = owner_map['user'].get(m.user, m.user)
m.group = owner_map['group'].get(m.group, m.group)
m.uid = owner_map['uid'].get(m.uid, m.uid)
m.gid = owner_map['gid'].get(m.gid, m.gid)
m.apply_to_path(name, restore_numeric_ids = restore_numeric_ids)
# Track a list of (restore_path, vfs_path, meta) triples for each path
# we've written for a given hardlink_target. This allows us to handle
# the case where we restore a set of hardlinks out of order (with
# respect to the original save call(s)) -- i.e. when we don't restore
# the hardlink_target path first. This data also allows us to attempt
# to handle other situations like hardlink sets that change on disk
# during a save, or between index and save.
targets_written = {}
def hardlink_compatible(target_path, target_vfs_path, target_meta,
src_node, src_meta):
global top
if not os.path.exists(target_path):
return False
target_node = top.lresolve(target_vfs_path)
if src_node.mode != target_node.mode \
or src_node.mtime != target_node.mtime \
or src_node.ctime != target_node.ctime \
or src_node.hash != target_node.hash:
return False
if not src_meta.same_file(target_meta):
return False
return True
def hardlink_if_possible(fullname, node, meta):
"""Find a suitable hardlink target, link to it, and return true,
otherwise return false."""
# Expect the caller to handle restoring the metadata if
# hardlinking isn't possible.
global targets_written
target = meta.hardlink_target
target_versions = targets_written.get(target)
if target_versions:
# Check every path in the set that we've written so far for a match.
for (target_path, target_vfs_path, target_meta) in target_versions:
if hardlink_compatible(target_path, target_vfs_path, target_meta,
node, meta):
try:
os.link(target_path, fullname)
return True
except OSError, e:
if e.errno != errno.EXDEV:
raise
else:
target_versions = []
targets_written[target] = target_versions
full_vfs_path = node.fullname()
target_versions.append((fullname, full_vfs_path, meta))
return False
def write_file_content(fullname, n):
outf = open(fullname, 'wb')
try:
for b in chunkyreader(n.open()):
outf.write(b)
finally:
outf.close()
def write_file_content_sparsely(fullname, n):
outfd = os.open(fullname, os.O_WRONLY | os.O_CREAT | os.O_TRUNC, 0600)
try:
        trailing_zeros = 0
for b in chunkyreader(n.open()):
trailing_zeros = write_sparsely(outfd, b, 512, trailing_zeros)
pos = os.lseek(outfd, trailing_zeros, os.SEEK_END)
os.ftruncate(outfd, pos)
finally:
os.close(outfd)
def find_dir_item_metadata_by_name(dir, name):
"""Find metadata in dir (a node) for an item with the given name,
or for the directory itself if the name is ''."""
meta_stream = None
try:
mfile = dir.metadata_file() # VFS file -- cannot close().
if mfile:
meta_stream = mfile.open()
# First entry is for the dir itself.
meta = metadata.Metadata.read(meta_stream)
if name == '':
return meta
for sub in dir:
if stat.S_ISDIR(sub.mode):
meta = find_dir_item_metadata_by_name(sub, '')
else:
meta = metadata.Metadata.read(meta_stream)
if sub.name == name:
return meta
finally:
if meta_stream:
meta_stream.close()
def do_root(n, sparse, owner_map, restore_root_meta = True):
# Very similar to do_node(), except that this function doesn't
# create a path for n's destination directory (and so ignores
# n.fullname). It assumes the destination is '.', and restores
# n's metadata and content there.
global total_restored, opt
meta_stream = None
try:
# Directory metadata is the first entry in any .bupm file in
# the directory. Get it.
mfile = n.metadata_file() # VFS file -- cannot close().
root_meta = None
if mfile:
meta_stream = mfile.open()
root_meta = metadata.Metadata.read(meta_stream)
print_info(n, '.')
total_restored += 1
plog('Restoring: %d\r' % total_restored)
for sub in n:
m = None
# Don't get metadata if this is a dir -- handled in sub do_node().
if meta_stream and not stat.S_ISDIR(sub.mode):
m = metadata.Metadata.read(meta_stream)
do_node(n, sub, sparse, owner_map, meta = m)
if root_meta and restore_root_meta:
apply_metadata(root_meta, '.', opt.numeric_ids, owner_map)
finally:
if meta_stream:
meta_stream.close()
def do_node(top, n, sparse, owner_map, meta = None):
# Create n.fullname(), relative to the current directory, and
# restore all of its metadata, when available. The meta argument
# will be None for dirs, or when there is no .bupm (i.e. no
# metadata).
global total_restored, opt
meta_stream = None
write_content = sparse and write_file_content_sparsely or write_file_content
try:
fullname = n.fullname(stop_at=top)
# Match behavior of index --exclude-rx with respect to paths.
exclude_candidate = '/' + fullname
if(stat.S_ISDIR(n.mode)):
exclude_candidate += '/'
if should_rx_exclude_path(exclude_candidate, exclude_rxs):
return
# If this is a directory, its metadata is the first entry in
# any .bupm file inside the directory. Get it.
if(stat.S_ISDIR(n.mode)):
mfile = n.metadata_file() # VFS file -- cannot close().
if mfile:
meta_stream = mfile.open()
meta = metadata.Metadata.read(meta_stream)
print_info(n, fullname)
created_hardlink = False
if meta and meta.hardlink_target:
created_hardlink = hardlink_if_possible(fullname, n, meta)
if not created_hardlink:
create_path(n, fullname, meta)
if meta:
if stat.S_ISREG(meta.mode):
write_content(fullname, n)
elif stat.S_ISREG(n.mode):
write_content(fullname, n)
total_restored += 1
plog('Restoring: %d\r' % total_restored)
for sub in n:
m = None
# Don't get metadata if this is a dir -- handled in sub do_node().
if meta_stream and not stat.S_ISDIR(sub.mode):
m = metadata.Metadata.read(meta_stream)
do_node(top, sub, sparse, owner_map, meta = m)
if meta and not created_hardlink:
apply_metadata(meta, fullname, opt.numeric_ids, owner_map)
finally:
if meta_stream:
meta_stream.close()
n.release()
handle_ctrl_c()
o = options.Options(optspec)
(opt, flags, extra) = o.parse(sys.argv[1:])
git.check_repo_or_die()
top = vfs.RefList(None)
if not extra:
o.fatal('must specify at least one filename to restore')
exclude_rxs = parse_rx_excludes(flags, o.fatal)
owner_map = {}
for map_type in ('user', 'group', 'uid', 'gid'):
owner_map[map_type] = parse_owner_mappings(map_type, flags, o.fatal)
if opt.outdir:
mkdirp(opt.outdir)
os.chdir(opt.outdir)
ret = 0
for d in extra:
if not valid_restore_path(d):
add_error("ERROR: path %r doesn't include a branch and revision" % d)
continue
path,name = os.path.split(d)
try:
n = top.lresolve(d)
except vfs.NodeError, e:
add_error(e)
continue
isdir = stat.S_ISDIR(n.mode)
if not name or name == '.':
# Source is /foo/what/ever/ or /foo/what/ever/. -- extract
# what/ever/* to the current directory, and if name == '.'
# (i.e. /foo/what/ever/.), then also restore what/ever's
# metadata to the current directory.
if not isdir:
add_error('%r: not a directory' % d)
else:
do_root(n, opt.sparse, owner_map, restore_root_meta = (name == '.'))
else:
# Source is /foo/what/ever -- extract ./ever to cwd.
if isinstance(n, vfs.FakeSymlink):
# Source is actually /foo/what, i.e. a top-level commit
# like /foo/latest, which is a symlink to ../.commit/SHA.
# So dereference it, and restore ../.commit/SHA/. to
# "./what/.".
target = n.dereference()
mkdirp(n.name)
os.chdir(n.name)
do_root(target, opt.sparse, owner_map)
else: # Not a directory or fake symlink.
meta = find_dir_item_metadata_by_name(n.parent, n.name)
do_node(n.parent, n, opt.sparse, owner_map, meta = meta)
if not opt.quiet:
progress('Restoring: %d, done.\n' % total_restored)
if saved_errors:
log('WARNING: %d errors encountered while restoring.\n' % len(saved_errors))
sys.exit(1)
| jbaber/bup | cmd/restore-cmd.py | Python | lgpl-2.1 | 12,215 |
#!/usr/bin/env python3
import sys
import csv
import calcoohija
import calcplus
if __name__ == "__main__":
with open(sys.argv[1]) as fichero:
contenido = csv.reader(fichero)
for linea in contenido:
calcplus.operacion(linea)
| rpaunero/ptavi-p2 | calcplusplus.py | Python | gpl-2.0 | 260 |
# Copyright 2016 F5 Networks Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import pytest
from f5.bigip.mixins import UnsupportedMethod
@pytest.mark.skipif(pytest.config.getoption('--release') != '12.0.0',
reason='Needs v12 TMOS to pass')
class TestBigIPFailoverState(object):
def test_load(self, request, bigip):
a = bigip.shared.bigip_failover_state.load()
assert hasattr(a, 'generation')
def test_update(self, request, bigip):
with pytest.raises(UnsupportedMethod):
bigip.shared.bigip_failover_state.update()
| wojtek0806/f5-common-python | test/functional/shared/test_bigip_failover_state.py | Python | apache-2.0 | 1,084 |
from rx.scheduledobserver import ScheduledObserver
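# Observer used by the observe_on operator: every notification is queued by
# the ScheduledObserver base class, and the extra ensure_active() call makes
# sure the target scheduler has work pending to drain that queue.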
class ObserveOnObserver(ScheduledObserver):
def __init__(self, scheduler, observer):
super(ObserveOnObserver, self).__init__(scheduler, observer)
def next(self, value):
        super(ObserveOnObserver, self).next(value)
self.ensure_active()
def error(self, e):
super(ObserveOnObserver, self).error(e)
self.ensure_active()
def completed(self):
super(ObserveOnObserver, self).completed()
self.ensure_active()
| Reactive-Extensions/RxPy | rx/observeonobserver.py | Python | apache-2.0 | 539 |
# -*- coding: utf-8 -*-
# Generated by Django 1.9.5 on 2016-04-19 22:40
from __future__ import unicode_literals
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
initial = True
dependencies = []
operations = [
migrations.CreateModel(
name='FaqCategory',
fields=[
('id', models.AutoField(
auto_created=True,
primary_key=True,
serialize=False,
verbose_name='ID')),
('name', models.CharField(max_length=255,
verbose_name='Name')),
],
),
migrations.CreateModel(
name='Question',
fields=[
('id', models.AutoField(
auto_created=True,
primary_key=True,
serialize=False,
verbose_name='ID')),
('text', models.CharField(
max_length=255, verbose_name='Frage')),
('answer', models.TextField(verbose_name='Antwort')),
('category', models.ForeignKey(
on_delete=django.db.models.deletion.CASCADE,
to='faq.FaqCategory')),
],
),
]
| d120/pyofahrt | faq/migrations/0001_initial.py | Python | agpl-3.0 | 1,361 |
#!/usr/bin/env python2
# find-kam-node.py: python2 example of loading kam, resolving kam node, and
# printing out BEL terms
#
# usage: find-kam-node.py <kam name> <source_bel_term>
import sys  # used by load_kam() when reporting a failed load
from random import choice
from suds import *
from ws import *
import time
def load_kam(client, kam_name):
'''
Loads a KAM by name. This function will sleep until the KAM's
loadStatus is 'COMPLETE'.
'''
def call():
'''
Load the KAM and return result. Exit with error if 'loadStatus'
is FAILED.
'''
kam = client.create('Kam')
kam.name = kam_name
result = client.service.LoadKam(kam)
status = result['loadStatus']
if status == 'FAILED':
print 'FAILED!'
print sys.exc_info()[1]
exit_failure()
return result
# load kam and wait for completion
result = call()
while result['loadStatus'] != 'COMPLETE':
time.sleep(0.5)
result = call()
return result['handle']
if __name__ == '__main__':
from sys import argv, exit, stderr
if len(argv) != 3:
msg = 'usage: find-kam-node.py <kam name> <source_bel_term>\n'
stderr.write(msg)
exit(1)
# unpack command-line arguments; except the first script name argument
(kam_name, source_term) = argv[1:]
client = WS('http://localhost:8080/openbel-ws/belframework.wsdl')
handle = load_kam(client, kam_name)
print "loaded kam '%s', handle '%s'" % (kam_name, handle.handle)
# create nodes using BEL term labels from command-line
node = client.create("Node")
node.label = source_term
# resolve node
result = client.service.ResolveNodes(handle, [node], None)
if len(result) == 1 and result[0]:
the_node = result[0]
print "found node, id: %s" % (the_node.id)
terms = client.service.GetSupportingTerms(the_node, None)
for t in terms:
print t
else:
print "edge not found"
exit_success()
| OpenBEL/openbel-framework-examples | web-api/python/find-kam-node.py | Python | apache-2.0 | 2,020 |
# Copyright (c) 2010-2012 OpenStack Foundation
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import unittest
from swift.common.swob import Request, Response
from swift.common.middleware import gatekeeper
class FakeApp(object):
def __init__(self, headers=None):
if headers is None:
headers = {}
self.headers = headers
self.req = None
def __call__(self, env, start_response):
self.req = Request(env)
return Response(request=self.req, body='FAKE APP',
headers=self.headers)(env, start_response)
class FakeMiddleware(object):
def __init__(self, app, conf, header_list=None):
self.app = app
self.conf = conf
self.header_list = header_list
def __call__(self, env, start_response):
def fake_resp(status, response_headers, exc_info=None):
for i in self.header_list:
response_headers.append(i)
return start_response(status, response_headers, exc_info)
return self.app(env, fake_resp)
class TestGatekeeper(unittest.TestCase):
methods = ['PUT', 'POST', 'GET', 'DELETE', 'HEAD', 'COPY', 'OPTIONS']
allowed_headers = {'xx-account-sysmeta-foo': 'value',
'xx-container-sysmeta-foo': 'value',
'xx-object-sysmeta-foo': 'value',
'x-account-meta-foo': 'value',
'x-container-meta-foo': 'value',
'x-object-meta-foo': 'value',
'x-timestamp-foo': 'value'}
sysmeta_headers = {'x-account-sysmeta-': 'value',
'x-container-sysmeta-': 'value',
'x-object-sysmeta-': 'value',
'x-account-sysmeta-foo': 'value',
'x-container-sysmeta-foo': 'value',
'x-object-sysmeta-foo': 'value',
'X-Account-Sysmeta-BAR': 'value',
'X-Container-Sysmeta-BAR': 'value',
'X-Object-Sysmeta-BAR': 'value'}
x_backend_headers = {'X-Backend-Replication': 'true',
'X-Backend-Replication-Headers': 'stuff'}
object_transient_sysmeta_headers = {
'x-object-transient-sysmeta-': 'value',
'x-object-transient-sysmeta-foo': 'value'}
x_timestamp_headers = {'X-Timestamp': '1455952805.719739'}
forbidden_headers_out = dict(sysmeta_headers)
forbidden_headers_out.update(x_backend_headers)
forbidden_headers_out.update(object_transient_sysmeta_headers)
forbidden_headers_in = dict(forbidden_headers_out)
shunted_headers_in = dict(x_timestamp_headers)
def _assertHeadersEqual(self, expected, actual):
for key in expected:
self.assertIn(key.lower(), actual)
def _assertHeadersAbsent(self, unexpected, actual):
for key in unexpected:
self.assertNotIn(key.lower(), actual)
def get_app(self, app, global_conf, **local_conf):
factory = gatekeeper.filter_factory(global_conf, **local_conf)
return factory(app)
def test_ok_header(self):
req = Request.blank('/v/a/c', environ={'REQUEST_METHOD': 'PUT'},
headers=self.allowed_headers)
fake_app = FakeApp()
app = self.get_app(fake_app, {})
resp = req.get_response(app)
self.assertEqual('200 OK', resp.status)
self.assertEqual(resp.body, b'FAKE APP')
self._assertHeadersEqual(self.allowed_headers, fake_app.req.headers)
def _test_reserved_header_removed_inbound(self, method):
headers = dict(self.forbidden_headers_in)
headers.update(self.allowed_headers)
headers.update(self.shunted_headers_in)
req = Request.blank('/v/a/c', environ={'REQUEST_METHOD': method},
headers=headers)
fake_app = FakeApp()
app = self.get_app(fake_app, {})
resp = req.get_response(app)
self.assertEqual('200 OK', resp.status)
expected_headers = dict(self.allowed_headers)
# shunt_inbound_x_timestamp should be enabled by default
expected_headers.update({'X-Backend-Inbound-' + k: v
for k, v in self.shunted_headers_in.items()})
self._assertHeadersEqual(expected_headers, fake_app.req.headers)
unexpected_headers = dict(self.forbidden_headers_in)
unexpected_headers.update(self.shunted_headers_in)
self._assertHeadersAbsent(unexpected_headers, fake_app.req.headers)
def test_reserved_header_removed_inbound(self):
for method in self.methods:
self._test_reserved_header_removed_inbound(method)
def _test_reserved_header_shunted_inbound(self, method):
headers = dict(self.shunted_headers_in)
headers.update(self.allowed_headers)
req = Request.blank('/v/a/c', environ={'REQUEST_METHOD': method},
headers=headers)
fake_app = FakeApp()
app = self.get_app(fake_app, {}, shunt_inbound_x_timestamp='true')
resp = req.get_response(app)
self.assertEqual('200 OK', resp.status)
expected_headers = dict(self.allowed_headers)
expected_headers.update({'X-Backend-Inbound-' + k: v
for k, v in self.shunted_headers_in.items()})
self._assertHeadersEqual(expected_headers, fake_app.req.headers)
self._assertHeadersAbsent(self.shunted_headers_in,
fake_app.req.headers)
def test_reserved_header_shunted_inbound(self):
for method in self.methods:
self._test_reserved_header_shunted_inbound(method)
def _test_reserved_header_shunt_bypassed_inbound(self, method):
headers = dict(self.shunted_headers_in)
headers.update(self.allowed_headers)
req = Request.blank('/v/a/c', environ={'REQUEST_METHOD': method},
headers=headers)
fake_app = FakeApp()
app = self.get_app(fake_app, {}, shunt_inbound_x_timestamp='false')
resp = req.get_response(app)
self.assertEqual('200 OK', resp.status)
expected_headers = dict(self.allowed_headers)
expected_headers.update(self.shunted_headers_in)
self._assertHeadersEqual(expected_headers, fake_app.req.headers)
def test_reserved_header_shunt_bypassed_inbound(self):
for method in self.methods:
self._test_reserved_header_shunt_bypassed_inbound(method)
def _test_reserved_header_removed_outbound(self, method):
headers = dict(self.forbidden_headers_out)
headers.update(self.allowed_headers)
req = Request.blank('/v/a/c', environ={'REQUEST_METHOD': method})
fake_app = FakeApp(headers=headers)
app = self.get_app(fake_app, {})
resp = req.get_response(app)
self.assertEqual('200 OK', resp.status)
self._assertHeadersEqual(self.allowed_headers, resp.headers)
self._assertHeadersAbsent(self.forbidden_headers_out, resp.headers)
def test_reserved_header_removed_outbound(self):
for method in self.methods:
self._test_reserved_header_removed_outbound(method)
def _test_duplicate_headers_not_removed(self, method, app_hdrs):
def fake_factory(global_conf, **local_conf):
conf = global_conf.copy()
conf.update(local_conf)
headers = [('X-Header', 'xxx'),
('X-Header', 'yyy')]
def fake_filter(app):
return FakeMiddleware(app, conf, headers)
return fake_filter
def fake_start_response(status, response_headers, exc_info=None):
hdr_list = []
for k, v in response_headers:
if k == 'X-Header':
hdr_list.append(v)
self.assertTrue('xxx' in hdr_list)
self.assertTrue('yyy' in hdr_list)
self.assertEqual(len(hdr_list), 2)
req = Request.blank('/v/a/c', environ={'REQUEST_METHOD': method})
fake_app = FakeApp(headers=app_hdrs)
factory = gatekeeper.filter_factory({})
factory_wrap = fake_factory({})
app = factory(factory_wrap(fake_app))
app(req.environ, fake_start_response)
def test_duplicate_headers_not_removed(self):
for method in self.methods:
for app_hdrs in ({}, self.forbidden_headers_out):
self._test_duplicate_headers_not_removed(method, app_hdrs)
def _test_location_header(self, location_path):
headers = {'Location': location_path}
req = Request.blank(
'/v/a/c', environ={'REQUEST_METHOD': 'GET',
'swift.leave_relative_location': True})
class SelfishApp(FakeApp):
def __call__(self, env, start_response):
self.req = Request(env)
resp = Response(request=self.req, body='FAKE APP',
headers=self.headers)
# like webob, middlewares in the pipeline may rewrite
# location header from relative to absolute
resp.location = resp.absolute_location()
return resp(env, start_response)
selfish_app = SelfishApp(headers=headers)
app = self.get_app(selfish_app, {})
resp = req.get_response(app)
self.assertEqual('200 OK', resp.status)
self.assertIn('Location', resp.headers)
self.assertEqual(resp.headers['Location'], location_path)
def test_location_header_fixed(self):
self._test_location_header('/v/a/c/o2')
self._test_location_header('/v/a/c/o2?query=path&query2=doit')
self._test_location_header('/v/a/c/o2?query=path#test')
self._test_location_header('/v/a/c/o2;whatisparam?query=path#test')
if __name__ == '__main__':
unittest.main()
| matthewoliver/swift | test/unit/common/middleware/test_gatekeeper.py | Python | apache-2.0 | 10,406 |
"""Uses the same strategy as
``adjacency_list.py``, but associates each DOM row with its owning
document row, so that a full document of DOM nodes can be loaded
using O(1) queries - the construction of the "hierarchy" is performed
after the load in a non-recursive fashion and is more
efficient.
"""
##################### PART I - Imports/Configuration #########################
from sqlalchemy import (MetaData, Table, Column, Integer, String, ForeignKey,
Unicode, and_, create_engine)
from sqlalchemy.orm import mapper, relationship, Session, lazyload
import sys, os, io, re
from xml.etree import ElementTree
e = create_engine('sqlite://', echo=True)
meta = MetaData()
####################### PART II - Table Metadata #############################
# stores a top level record of an XML document.
documents = Table('documents', meta,
Column('document_id', Integer, primary_key=True),
Column('filename', String(30), unique=True),
)
# stores XML nodes in an adjacency list model. This corresponds to
# Element and SubElement objects.
elements = Table('elements', meta,
Column('element_id', Integer, primary_key=True),
Column('parent_id', Integer, ForeignKey('elements.element_id')),
Column('document_id', Integer, ForeignKey('documents.document_id')),
Column('tag', Unicode(30), nullable=False),
Column('text', Unicode),
Column('tail', Unicode)
)
# stores attributes. This corresponds to the dictionary of attributes
# stored by an Element or SubElement.
attributes = Table('attributes', meta,
Column('element_id', Integer, ForeignKey('elements.element_id'), primary_key=True),
Column('name', Unicode(100), nullable=False, primary_key=True),
Column('value', Unicode(255)))
meta.create_all(e)
########################### PART III - Model #################################
# our document class. contains a string name,
# and the ElementTree root element.
class Document(object):
def __init__(self, name, element):
self.filename = name
self.element = element
def __str__(self):
buf = io.StringIO()
self.element.write(buf)
return buf.getvalue()
########################## PART IV - Persistence Mapping #####################
# Node class. a non-public class which will represent
# the DB-persisted Element/SubElement object. We cannot create mappers for
# ElementTree elements directly because they are at the very least not new-style
# classes, and also may be backed by native implementations.
# so here we construct an adapter.
class _Node(object):
pass
# Attribute class. also internal, this will represent the key/value attributes stored for
# a particular Node.
class _Attribute(object):
def __init__(self, name, value):
self.name = name
self.value = value
# setup mappers. Document will eagerly load a list of _Node objects.
# they will be ordered in primary key/insert order, so that we can reconstruct
# an ElementTree structure from the list.
mapper(Document, documents, properties={
'_nodes':relationship(_Node, lazy='joined', cascade="all, delete-orphan")
})
# the _Node objects change the way they load so that a list of _Nodes will organize
# themselves hierarchically using the ElementTreeMarshal. this depends on the ordering of
# nodes being hierarchical as well; relationship() always applies at least ROWID/primary key
# ordering to rows which will suffice.
mapper(_Node, elements, properties={
    'children':relationship(_Node, lazy=None), # doesn't load; used only for the save relationship
'attributes':relationship(_Attribute, lazy='joined', cascade="all, delete-orphan"), # eagerly load attributes
})
mapper(_Attribute, attributes)
# define marshalling functions that convert from _Node/_Attribute to/from ElementTree objects.
# this will set the ElementTree element as "document._element", and append the root _Node
# object to the "_nodes" mapped collection.
class ElementTreeMarshal(object):
def __get__(self, document, owner):
if document is None:
return self
if hasattr(document, '_element'):
return document._element
nodes = {}
root = None
for node in document._nodes:
if node.parent_id is not None:
parent = nodes[node.parent_id]
elem = ElementTree.SubElement(parent, node.tag)
nodes[node.element_id] = elem
else:
parent = None
elem = root = ElementTree.Element(node.tag)
nodes[node.element_id] = root
for attr in node.attributes:
elem.attrib[attr.name] = attr.value
elem.text = node.text
elem.tail = node.tail
document._element = ElementTree.ElementTree(root)
return document._element
def __set__(self, document, element):
def traverse(node):
n = _Node()
n.tag = str(node.tag)
n.text = str(node.text)
n.tail = str(node.tail)
document._nodes.append(n)
n.children = [traverse(n2) for n2 in node]
n.attributes = [_Attribute(str(k), str(v)) for k, v in node.attrib.items()]
return n
traverse(element.getroot())
document._element = element
def __delete__(self, document):
del document._element
document._nodes = []
# override Document's "element" attribute with the marshaller.
Document.element = ElementTreeMarshal()
###################### PART V - Basic Persistence Example ####################
line = "\n--------------------------------------------------------"
# save to DB
session = Session(e)
# get ElementTree documents
for file in ('test.xml', 'test2.xml', 'test3.xml'):
filename = os.path.join(os.path.dirname(__file__), file)
doc = ElementTree.parse(filename)
session.add(Document(file, doc))
print("\nSaving three documents...", line)
session.commit()
print("Done.")
print("\nFull text of document 'text.xml':", line)
document = session.query(Document).filter_by(filename="test.xml").first()
print(document)
######################## PART VI - Searching for Paths #######################
# manually search for a document which contains "/somefile/header/field1:hi"
print("\nManual search for /somefile/header/field1=='hi':", line)
d = session.query(Document).join('_nodes', aliased=True).\
filter(and_(_Node.parent_id==None, _Node.tag=='somefile')).\
join('children', aliased=True, from_joinpoint=True).\
filter(_Node.tag=='header').\
join('children', aliased=True, from_joinpoint=True).\
filter(and_(_Node.tag=='field1', _Node.text=='hi')).\
one()
print(d)
# generalize the above approach into an extremely impoverished xpath function:
def find_document(path, compareto):
j = documents
prev_elements = None
query = session.query(Document)
first = True
for i, match in enumerate(re.finditer(r'/([\w_]+)(?:\[@([\w_]+)(?:=(.*))?\])?', path)):
(token, attrname, attrvalue) = match.group(1, 2, 3)
if first:
query = query.join('_nodes', aliased=True).filter(_Node.parent_id==None)
first = False
else:
query = query.join('children', aliased=True, from_joinpoint=True)
query = query.filter(_Node.tag==token)
if attrname:
query = query.join('attributes', aliased=True, from_joinpoint=True)
if attrvalue:
query = query.filter(and_(_Attribute.name==attrname, _Attribute.value==attrvalue))
else:
query = query.filter(_Attribute.name==attrname)
return query.options(lazyload('_nodes')).filter(_Node.text==compareto).all()
for path, compareto in (
('/somefile/header/field1', 'hi'),
('/somefile/field1', 'hi'),
('/somefile/header/field2', 'there'),
('/somefile/header/field2[@attr=foo]', 'there')
):
print("\nDocuments containing '%s=%s':" % (path, compareto), line)
print([d.filename for d in find_document(path, compareto)])
| wfxiang08/sqlalchemy | examples/elementtree/optimized_al.py | Python | mit | 8,157 |
# global imports
import numpy as np
import scipy.special as scsp
import scipy.optimize as scop
# local imports
import helper as bhlp
"""
ref Helias14:
Helias, Tetzlaff, Diesmann (2014) The Correlation Structure of
Local Neuronal Networks Intrinsically Results from Recurrent Dynamics
PLoS Comput Biol 10(1): e1003428
DOI: 10.1371/journal.pcbi.1003428
"""
class BinaryMeanfield(object):
"""
this module allows one to calculate the stationary firing rate and
average correlations in a network of binary neurons with one
excitatory and one inhibitory population from connectivity
statistics
"""
def __init__(self, epsilon, N, gamma, g, w, b, K=None):
"""
epsilon: connectivity
N: total number of neurons
gamma: relative size of the excitatory population
g: relative weight of inhibitory connections
w: weight of excitatory connections
b: biases (2d vector), corresponds to -1. * threshold
K: indegree, can be provided as alternative to connectivity
"""
if epsilon is not None:
assert(K is None), 'Please provide connectivity OR indegree.'
elif epsilon is None:
assert(K is not None), 'Please provide connectivity OR indegree.'
self.NE = int(gamma * N)
self.NI = int(N - self.NE)
if epsilon is not None:
KE = int(epsilon * self.NE)
KI = int(epsilon * self.NI)
else:
KE = int(gamma * K)
KI = int(K - KE)
self.K = np.array([[KE, KI],
[KE, KI]])
self.J = np.array([[w, -g * w],
[w, -g * w]])
self.b = np.array(b)
self.C = np.array([[0., 0.],
[0., 0.]])
self.mu = np.array([0., 0.])
def get_mu_meanfield(self, mu0, C=None):
"""
Self-consistent rate
Formula (7) in Helias14
mu0: average rates
C: average correlations
"""
if C is None:
C = np.array([[0., 0.],
[0., 0.]])
def f(mu):
h_mu = self.get_mu_input(mu)
h_sigma = self.get_sigma_input(mu, C)
return mu - 0.5 * scsp.erfc((-self.b - h_mu) / (np.sqrt(2.) * h_sigma))
return scop.fsolve(f, mu0)
def get_mu_input(self, mu):
"""
Mean input given presynaptic activity mu
Formula (4) in Helias14
mu: average rates
"""
mu = np.array(mu)
if np.shape(mu) != (2,):
raise ValueError(
'Mean activity needs to be given for both populations.')
return np.dot(self.K * self.J, mu)
def get_sigma_input(self, mu, C=None):
"""
Standard deviation of input given presynaptic activity mu
(and correlations C)
For C=None: formula (6) in Helias14
For C given: formula (13) in Helias14
mu: averages rates
C: average correlations
"""
mu = np.array(mu)
if np.shape(mu) != (2,):
raise ValueError(
'Mean activity needs to be given for both populations.')
if C is None:
C = np.array([[0., 0.],
[0., 0.]])
else:
C = np.array(C)
if np.shape(C) != (2, 2):
raise ValueError(
'Correlation needs to be given for all combinations of both populations.')
a = bhlp.get_sigma2(mu)
sigma_shared = np.dot(self.K * self.J * self.J, a)
sigma_corr = np.diag(
np.dot(np.dot(self.K * self.J, C), (self.K * self.J).T))
return np.sqrt(sigma_shared + sigma_corr)
def get_suszeptibility(self, mu, sigma):
"""
Suszeptibility (i.e., derivative of Gain function) for Gaussian
input distribution
Formula (8) in Helias14
mu: mean of input
sigma: std of input
"""
return 1. / (np.sqrt(2. * np.pi) * sigma) * np.exp(-1. * (mu + self.b) ** 2 / (2. * sigma ** 2))
def get_w_meanfield(self, mu, C=None):
"""
Linearized population averaged weights
Formula (10) in Helias14
mu: average rates
"""
h_mu = self.get_mu_input(mu)
h_sigma = self.get_sigma_input(mu, C)
return ((self.K * self.J).T * self.get_suszeptibility(h_mu, h_sigma)).T
def get_c_meanfield(self, mu, C=None):
"""
Self-consistent correlations
Formula (24) without external input in Helias14
mu: average rates
"""
a = bhlp.get_sigma2(mu)
A = np.zeros(2)
A[0] = a[0] * 1. / self.NE if self.NE > 0 else 0.
A[1] = a[1] * 1. / self.NI if self.NI > 0 else 0.
W = self.get_w_meanfield(mu, C)
M = np.array([[2. - 2. * W[0, 0], -2. * W[0, 1], 0.],
[-1. * W[1, 0], 2. - (W[0, 0] + W[1, 1]), -1. * W[0, 1]],
[0, -2. * W[1, 0], 2. - 2. * W[1, 1]]])
B = np.array([[2. * W[0, 0], 0],
[W[1, 0], W[0, 1]],
[0, 2. * W[1, 1]]])
rhs = np.dot(B, A)
c = np.linalg.solve(M, rhs)
C = np.array([[c[0], c[1]],
[c[1], c[2]]])
return C
def get_m_c_iter(self, mu0):
"""Calculate mean activity and mean correlations in a recurrent
network iteratively, using the improved meanfield approach from
Helias14
mu0: initial guess for average rates
"""
if np.shape(mu0) != (2,):
raise ValueError('Initial guess for mean activity needs to be given for both populations.')
Dmu = 1e10
Dc = 1e10
mu = mu0
C = self.C
while Dmu > 1e-15 and Dc > 1e-15:
mu_old = np.sum(mu)
c_old = np.sum(C)
mu = self.get_mu_meanfield(mu, C)
C = self.get_c_meanfield(mu, C)
Dmu = abs(np.sum(mu) - mu_old)
Dc = abs(np.sum(C) - c_old)
self.mu = mu
self.C = C
return mu, C
def get_m(self, mu0):
"""Calculate mean activity in a recurrent
network using meanfield approach
mu0: initial guess for average rates
"""
if np.shape(mu0) != (2,):
raise ValueError('Initial guess for mean activity needs to be given for both populations.')
mu = mu0
mu = self.get_mu_meanfield(mu)
return mu
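# Minimal usage sketch (illustrative only; the parameter values here are
# assumptions, not taken from any reference simulation):
#
#   bm = BinaryMeanfield(epsilon=0.1, N=1000, gamma=0.8, g=8., w=0.1,
#                        b=[-2.5, -2.5])
#   mu, C = bm.get_m_c_iter(np.array([0.05, 0.05]))
#
# get_m_c_iter() iterates the self-consistent rate (formula (7)) and
# correlation (formula (24)) equations of Helias14 until both converge.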
| jakobj/binary_network | meanfield.py | Python | bsd-2-clause | 6,477 |
__author__ = 'Nicholas C Pandolfi'
import os
import sys
from .lrbuilder import cachebuild
from .lrtools import ClosedError
# module-level store that copen.storecache() writes completed line caches into
cache = {}
class copen(object):
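    """Cached line reader: builds an in-memory cache of every line of
    ``file`` at construction time (via lrbuilder.cachebuild), so lines can
    afterwards be fetched by 1-based line number without re-reading the
    file."""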
def __init__(self, file, overwrite = False):
object.__init__(self)
self.current = 1
self.filename = file
self.lines, self.__cache = cachebuild(file, overwrite)
def __del__(self):
del self.__cache
def __getattr__(self, attr):
if attr == '_copen__cache':
raise ClosedError('Operations cannot be done on a closed file')
else:
raise AttributeError("'{}' object has no attribute '{}'".format('copen', attr))
def __len__(self):
return os.path.getsize(self.filename) + int(self.lines)
def __bool__(self):
return bool(len(self))
def __contains__(self, item):
return item in self.__cache
def __iter__(self):
lines = self.lines
cache = self.getcache()
for count in cache:
yield count
def __sizeof__(self):
memory = 0
for item in dir(self):
memory += sys.getsizeof(eval('self.{}'.format(item)))
return memory
def readline(self):
line = self.__cache[self.current - 1]
self.current += 1
return line
def seekline(self, line):
self.current = line
def readlines(self):
return self.__cache
def getline(self, number):
return self.__cache[number - 1]
def getlines(self, start, stop):
return self.__cache[start - 1 : stop]
def readnext(self, number):
selection = self.__cache[self.current - 1 : self.current + number - 1]
self.current += number
return selection
def wrapcache(self):
return lambda number: self.__cache[number - 1]
def getcache(self):
return self.__cache
def storecache(self):
global cache
cache[self.filename] = self.__cache
def close(self):
del self.__cache
| nickpandolfi/linereader | previous/c_reader.py | Python | mit | 2,050 |
import rabbitpy
with rabbitpy.Connection() as connection:
with connection.channel() as channel:
exchange = rabbitpy.Exchange(channel, 'chapter4-example')
exchange.declare()
channel.enable_publisher_confirms()
message = rabbitpy.Message(channel,
'This is an important message',
{'content_type': 'text/plain',
'message_type': 'very important'})
if message.publish('chapter4-example', 'important.message'):
print('The message was confirmed')
| gmr/RabbitMQ-in-Depth | Examples/4.1.3 Publisher Confirms.py | Python | bsd-3-clause | 603 |
import random
import pyjokes
from sopel import module
@module.commands('pyjoke')
@module.example('.pyjoke')
def pyjoke(bot, trigger):
"""Prints a random joke from the pyjoke repository"""
j1 = pyjokes.get_jokes(category='neutral')
j2 = pyjokes.get_jokes(category='adult')
bot.say(random.choice(j1 + j2))
| situx/tuxbot | pyjoke.py | Python | gpl-3.0 | 332 |
# setup.py
# This script will build the main subpackages
# See LICENSE for details
from distutils.util import get_platform
from numpy.distutils.misc_util import Configuration, get_info
from numpy.distutils.core import setup
from os.path import join
import sys
TTFORT_DIR = 'tt-fort'
TTFORT_SRC = [
'nan.f90',
'default.f90',
'timef.f90',
'say.f90',
'rnd.f90',
'ptype.f90',
'sort.f90',
'trans.f90',
'ort.f90',
'mat.f90',
'check.f90',
'lr.f90',
'maxvol.f90',
'svd.f90',
'matrix_util.f90',
'tt.f90',
'ttaux.f90',
'ttop.f90',
'ttio.f90',
'tts.f90',
'python_conv.f90',
'tt_linalg.f90',
'ttlocsolve.f90',
'ttnodeop.f90',
'ttamen.f90',
]
PRINT_DIR = 'tt-fort/print'
PRINT_SRC = [
'putstrmodule.F90',
'dispmodule.f90',
]
def configuration(parent_package='', top_path=None):
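    """numpy.distutils configuration for the ``tt`` package: builds the
    Fortran helper libraries (tt-fort and its print module) and registers
    the Python subpackages."""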
plat_specifier = ".%s-%s" % (get_platform(), sys.version[0:3])
inc_dir = ['build/temp%s' % plat_specifier]
config = Configuration('tt', parent_package, top_path)
config.add_include_dirs(inc_dir)
config.set_options(
ignore_setup_xxx_py=True,
assume_default_configuration=True,
delegate_options_to_subpackages=True,
quiet=False,
)
config.add_library('print_lib', sources=[join(PRINT_DIR, x) for x in PRINT_SRC])
config.add_library('mytt', sources=[join(TTFORT_DIR, x) for x in TTFORT_SRC])
config.add_subpackage('core')
config.add_subpackage('amen')
config.add_subpackage('ksl')
config.add_subpackage('eigb')
config.add_subpackage('maxvol')
config.add_subpackage('cross')
config.add_subpackage('optimize')
config.add_subpackage('utils')
config.add_subpackage('riemannian')
return config
if __name__ == '__main__':
print('This is the wrong setup.py to run')
| uranix/ttpy | tt/setup.py | Python | mit | 1,856 |
# Generated by Django 2.2.9 on 2020-04-03 05:01
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('siteconfig', '0001_initial'),
]
operations = [
migrations.RenameField(
model_name='siteconfig',
old_name='hs_active_semester',
new_name='active_semester',
),
migrations.AlterField(
model_name='siteconfig',
name='approve_oldest_first',
            field=models.BooleanField(default=False, help_text='Check this if you want the quests that have been waiting the longest to appear on top of the list.', verbose_name='Sort quests awaiting approval with oldest on top'),
),
migrations.AlterField(
model_name='siteconfig',
name='color_headers_by_mark',
field=models.BooleanField(default=False, help_text='Set up at least one Mark Range in admin for this to do anything.', verbose_name='Activate Header Colors by Mark'),
),
]
| timberline-secondary/hackerspace | src/siteconfig/migrations/0002_auto_20200402_2201.py | Python | gpl-3.0 | 1,045 |