code | repo_name | path | language | license | size
---|---|---|---|---|---
# -*- coding: utf-8 -*-
'''
Use the "reclass" database as a Pillar source
.. |reclass| replace:: **reclass**
This ``ext_pillar`` plugin provides access to the |reclass| database, such
that Pillar data for a specific minion are fetched using |reclass|.
You can find more information about |reclass| at
http://reclass.pantsfullofunix.net.
To use the plugin, add it to the ``ext_pillar`` list in the Salt master config
and tell |reclass| by way of a few options how and where to find the
inventory:
.. code-block:: yaml

    ext_pillar:
        - reclass:
            storage_type: yaml_fs
            inventory_base_uri: /srv/salt
This would cause |reclass| to read the inventory from YAML files in
``/srv/salt/nodes`` and ``/srv/salt/classes``.
If you are also using |reclass| as ``master_tops`` plugin, and you want to
avoid having to specify the same information for both, use YAML anchors (take
note of the differing data types for ``ext_pillar`` and ``master_tops``):
.. code-block:: yaml

    reclass: &reclass
        storage_type: yaml_fs
        inventory_base_uri: /srv/salt
        reclass_source_path: ~/code/reclass

    ext_pillar:
        - reclass: *reclass

    master_tops:
        reclass: *reclass
If you want to run reclass from source, rather than installing it, you can
either let the master know via the ``PYTHONPATH`` environment variable, or by
setting the configuration option, like in the example above.
'''
# This file cannot be called reclass.py, because then the module import would
# not work. Thanks to the __virtual__ function, however, the plugin still
# responds to the name 'reclass'.
from salt.exceptions import SaltInvocationError
from salt.utils.reclass import (
prepend_reclass_source_path,
filter_out_source_path_option,
set_inventory_base_uri_default
)
# Define the module's virtual name
__virtualname__ = 'reclass'
def __virtual__(retry=False):
try:
import reclass
return __virtualname__
except ImportError as e:
if retry:
return False
for pillar in __opts__.get('ext_pillar', []):
if 'reclass' not in pillar.keys():
continue
# each pillar entry is a single-key hash of name -> options
opts = pillar.values()[0]
prepend_reclass_source_path(opts)
break
return __virtual__(retry=True)
def ext_pillar(minion_id, pillar, **kwargs):
'''
Obtain the Pillar data from **reclass** for the given ``minion_id``.
'''
# If reclass is installed, __virtual__ put it onto the search path, so we
# don't need to protect against ImportError:
from reclass.adapters.salt import ext_pillar as reclass_ext_pillar
from reclass.errors import ReclassException
try:
# the source path we used above isn't something reclass needs to care
# about, so filter it:
filter_out_source_path_option(kwargs)
# if no inventory_base_uri was specified, initialize it to the first
# file_roots of class 'base' (if that exists):
set_inventory_base_uri_default(__opts__, kwargs)
# I purposely do not pass any of __opts__ or __salt__ or __grains__
# to reclass, as I consider those to be Salt-internal and reclass
# should not make any assumptions about it.
return reclass_ext_pillar(minion_id, pillar, **kwargs)
except TypeError as e:
if 'unexpected keyword argument' in e.message:
arg = e.message.split()[-1]
raise SaltInvocationError('ext_pillar.reclass: unexpected option: '
+ arg)
else:
raise
except KeyError as e:
if 'id' in e.message:
raise SaltInvocationError('ext_pillar.reclass: __opts__ does not '
'define minion ID')
else:
raise
except ReclassException as e:
raise SaltInvocationError('ext_pillar.reclass: {0}'.format(e.message))
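# A minimal sketch (illustrative only, not part of the original module): each
# ``ext_pillar`` entry in the master config is a single-key hash of
# name -> options, and those options become the ``**kwargs`` of
# ``ext_pillar()`` above, with ``reclass_source_path`` filtered out before the
# call into reclass. The values below are the docstring's example settings.
def _example_options():
    entry = {'reclass': {'storage_type': 'yaml_fs',
                         'inventory_base_uri': '/srv/salt',
                         'reclass_source_path': '~/code/reclass'}}
    # each pillar entry is a single-key hash of name -> options
    return list(entry.values())[0]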
| MadeiraCloud/salt | sources/salt/pillar/reclass_adapter.py | Python | apache-2.0 | 4,015 |
#!/usr/bin/python
# -*- Mode: Python; coding: utf-8; indent-tabs-mode: nil; tab-width: 4 -*-
### BEGIN LICENSE
# Copyright (C) 2013 <Kyle Francis> <[email protected]>
# This program is free software: you can redistribute it and/or modify it
# under the terms of the GNU General Public License version 3, as published
# by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranties of
# MERCHANTABILITY, SATISFACTORY QUALITY, or FITNESS FOR A PARTICULAR
# PURPOSE. See the GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along
# with this program. If not, see <http://www.gnu.org/licenses/>.
### END LICENSE
import sys
import os.path
import unittest
sys.path.insert(0, os.path.realpath(os.path.join(os.path.dirname(__file__), "..")))
from metau import AboutMetauDialog
class TestExample(unittest.TestCase):
def setUp(self):
self.AboutMetauDialog_members = [
'AboutDialog', 'AboutMetauDialog', 'gettext', 'logger', 'logging']
def test_AboutMetauDialog_members(self):
all_members = dir(AboutMetauDialog)
public_members = [x for x in all_members if not x.startswith('_')]
public_members.sort()
self.assertEqual(self.AboutMetauDialog_members, public_members)
if __name__ == '__main__':
unittest.main()
| guitarmanusa/metau | tests/test_example.py | Python | gpl-3.0 | 1,452 |
"""
Fetch Queensland vehicle registration details and email an expiry report
"""
from __future__ import division
from __future__ import print_function
# import datetime
# import re
# import textwrap
# import numpy as np
# import pandas as pd
# import webbrowser
# import bs4
# import requests
# import email
# import smtplib
# from email.mime.text import MIMEText
import argparse
import pickle
import time
from selenium import webdriver
from datetime import datetime
# from PollyReports import *
# from reportlab.pdfgen.canvas import Canvas
import home_lib as hlib
import home_sendmail as hmail
# --------------------------------------------------------------------
#
# Global / Constants
#
# --------------------------------------------------------------------
# --- process
# --------------------------------------------------------------------
#
# setup load details
#
# --------------------------------------------------------------------
def generate_report(p_all_info):
""" Generate html report """
""" hmmmm, will do the html stuff later. Bigger fish to fry now!"""
rep_format = "{:15s} {:15s} {:45s} {}\r\n"
output_data = rep_format.format('Rego', 'Date', 'Description', 'Days to go')
output_data += rep_format.format('-'*15, '-'*15, '-'*45, '-'*45)
for row in p_all_info:
curr_rego = ''
curr_date = ''
curr_desc = ''
for key, val in row.items():
print('key = {}, val ={}'.format(key, val))
if key == 'Description':
curr_desc = val
if key == 'Registration number':
curr_rego = val
if key == 'Expiry':
curr_date = val
actual_date = datetime.strptime(curr_date,'%d/%m/%Y')
today = datetime.now()
date_diff = actual_date - today
output_data += rep_format.format(curr_rego, curr_date, curr_desc, date_diff)
hlib.p_i(output_data)
return output_data
# rpt = Report
# --------------------------------------------------------------------
#
# process
#
# --------------------------------------------------------------------
def process(p_rego_plates):
"""
Fetch registration details for every configured vehicle plate.
"""
print('Start process')
all_vehicles = []
for vehicle in p_rego_plates:
dot_info = fetch_plate_info(vehicle)
all_vehicles.append(dot_info)
return all_vehicles
# Send the message via our own SMTP server.
# s = smtplib.SMTP('smtp.gmail.com')
# s.send_message(msg)
# s.quit()
# --------------------------------------------------------------------
#
# Fetch plate info
#
# --------------------------------------------------------------------
def fetch_plate_info(p_vehicle):
"""
Fetch registration details for a single vehicle from the QLD transport site.
"""
veh_plate = p_vehicle[0]
veh_email = p_vehicle[1]
veh_desc = p_vehicle[2]
browser = webdriver.Firefox()
browser.get('https://www.service.transport.qld.gov.au/checkrego/application/TermAndConditions.xhtml?windowId=3ab')
conf_button = browser.find_element_by_name('tAndCForm:confirmButton')
conf_button.click()
time.sleep(1)
reg_field = browser.find_element_by_id('vehicleSearchForm:plateNumber')
reg_field.send_keys(veh_plate)
time.sleep(0.5)
conf_button = browser.find_element_by_name('vehicleSearchForm:confirmButton')
conf_button.click()
time.sleep(1)
dot_info = {}
dot_info['orig_rego'] = veh_plate
dot_info['email'] = veh_email
dot_info['veh_desc'] = veh_desc
for row in browser.find_elements_by_css_selector("dl.data"):
cell_names = row.find_elements_by_tag_name('dt')
cell_data = row.find_elements_by_tag_name('dd')
cell_counter = 0
for c in cell_names:
dot_info[c.text] = cell_data[cell_counter].text
cell_counter += 1
browser.quit()
return dot_info
# --- save data
# --------------------------------------------------------------------
#
# save data
#
# --------------------------------------------------------------------
def save_data(p_all_info):
"""
save all data to a csv file, and pickle file
"""
pickle_file = '{}/{}'.format(hlib.SAVE_DIR, 'dept_of_transport.pickle')
with open(pickle_file, 'wb') as pfile:
pickle.dump(p_all_info, pfile)
# --- Program Init
# --------------------------------------------------------------------
#
# initialise
#
# --------------------------------------------------------------------
def initialise(p_filename=None):
"""
Necessary initialisations for command line arguments
"""
# Logfile for logging
log_filename = hlib.log_filename_init(p_filename)
if log_filename is None:
print("\nError: Failed to initialise Log File Name. aborting\n")
return hlib.FAIL_GENERIC
parser = argparse.ArgumentParser(description="""
Example command lines:
Run on local machine:
-d DEBUG -t table -f file.xlsx --target_conn localhost
Run on Terminal Server:
-d DEBUG -t table -f file.xlsx --target_db instance.user@host:db (this may change)
-d DEBUG -t table -f file.csv --target_db instance.user@host:db (this may change)
--target_db localhost
--short_code "unit agency restriction"
""", formatter_class=argparse.RawTextHelpFormatter)
# Add debug arguments
parser.add_argument('-d', '--debug',
help='Log messages verbosity: NONE (least), DEBUG (most)',
choices=('NONE', 'CRITICAL', 'ERROR', 'WARNING', 'INFO', 'DEBUG'),
default="INFO",
required=False)
# Sort though the arguments, ensure mandatory are populated
args = hlib.args_validate(parser, log_filename)
return (args, log_filename)
# --------------------------------------------------------------------
#
# main
#
# --------------------------------------------------------------------
def main():
"""
This program fetches rego details for each configured plate and emails the expiry report.
"""
args, dummy_l_log_filename_s = initialise()
# -- Initialise
if not hlib.init_app(args):
print('Failed to init app, aborting')
return hlib.FAIL_GENERIC
# send_email(email_config)
# -------------------------------------
# Fetch program arguments
# -------------------------------------
# Fetch config
rego_plates = hlib.fetch_rego_plates()
# p_debug_type = args['debug_type']
all_info = process(rego_plates)
# if not save_data(all_info):
# return(hlib.FAIL_GENERIC)
text_report = generate_report(all_info)
rep_file = '{}/{}'.format(hlib.SAVE_DIR, 'dept_of_transport_report.txt')
with open(rep_file, 'w') as rep_file_handle:
    rep_file_handle.write(text_report)
# if not save_report(all_info):
# return(hlib.FAIL_GENERIC)
hmail.send_email('[email protected]',
p_subject='Weekly Rego Report',
p_inline=rep_file)
# hmail.send_email('[email protected]',
# p_subject='Weekly Rego Report',
# p_inline=rep_file)
retval = hlib.SUCCESS
print('Done...')
return retval
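# --------------------------------------------------------------------
#
# usage sketch (illustrative only)
#
# --------------------------------------------------------------------
def _example_report():
    """
    A minimal sketch, assuming made-up plate details: process() takes
    (plate, email, description) tuples, and generate_report() consumes dicts
    keyed by the labels scraped from the rego page ('Registration number',
    'Expiry', 'Description'). Relies on this module's own imports.
    """
    example_info = [{
        'orig_rego': '123ABC',
        'email': '[email protected]',
        'veh_desc': 'Family car',
        'Registration number': '123ABC',
        'Description': 'Sedan',
        'Expiry': '31/12/2030',
    }]
    return generate_report(example_info)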
# ------------------------------------------------------------------------------
if __name__ == "__main__":
l_retval = main()
if l_retval == hlib.SUCCESS:
    exit(0)
else:
    exit(l_retval)
# --- eof ---
| proetman/checkit | dept_transport/validate_dept_transport.py | Python | gpl-2.0 | 7,554 |
import argparse
import os
import sys
from signal import signal, SIGPIPE, SIG_DFL
from Bio import SeqIO
import Bio.motifs as motifs
sys.path.append(os.path.join(os.path.dirname(__file__), ".."))
import lib
class SeqSearchResults(object):
def __init__(self, seq_name, sequence, tf_names, tf_lengths):
self.seq_name = seq_name
self.sequence = sequence
self.tfs = tf_names
self.tf_length_dict = {tf: length for tf, length in zip(tf_names, tf_lengths)}
self.tf_dict = {tf: [] for tf in tf_names}
def fill_matches(self, matches):
for i, tf in enumerate(self.tfs):
self.tf_dict[tf] = matches[i]
def best_match(self, tf_name):
tf_len = self.tf_length_dict[tf_name]
matches = self.tf_dict[tf_name]
half_seq_len = len(self.sequence) / 2
best_match = (0, sys.float_info.min)
for match in matches:
if match[1] > best_match[1]:
best_match = match
elif match[1] == best_match[1]:
match_pos = match[0] if match[0] >= 0 else match[0] + tf_len
best_match_pos = best_match[0] if best_match[0] >= 0 else best_match[0] + tf_len
match_dist_to_center = abs(half_seq_len - (match_pos + tf_len / 2))
best_match_dist_to_center = abs(half_seq_len - (best_match_pos + tf_len / 2))
if match_dist_to_center < best_match_dist_to_center:
best_match = match
return best_match
def match_subseq(self, pos, tf_name, delta):
tf_len = self.tf_length_dict[tf_name]
if pos >= 0:
left = max(pos - delta, 0)
right = min(pos + tf_len + delta, len(self.sequence))
subseq = self.sequence[left: right]
else:
pos += len(self.sequence)
rc_seq = lib.complement(self.sequence)
left = max(pos - delta, 0)
right = min(pos + tf_len + delta, len(rc_seq))
subseq = rc_seq[left: right]
subseq = subseq[::-1]
return subseq
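# A minimal sketch (assumed values, not from the original test data): how
# best_match() above breaks a score tie by picking the hit closest to the
# centre of the sequence. It uses only the class defined in this module.
def _example_best_match():
    results = SeqSearchResults('seq1', 'ACGTACGTACGT', ['TF1'], [4])
    results.fill_matches([[(0, 7.5), (4, 7.5)]])  # two hits with equal score
    return results.best_match('TF1')  # the hit at position 4 is nearer the centre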
class ReadFastaAction(argparse.Action):
def __call__(self, parser, args, fasta_handler, option_string=None):
seqs = list(SeqIO.parse(fasta_handler, "fasta"))
fasta_handler.close()
setattr(args, self.dest, seqs)
class ReadPWMAction(argparse.Action):
def __call__(self, parser, args, pwm_handler, option_string=None):
pwm_records = motifs.parse(pwm_handler, "TRANSFAC")
pwm_handler.close()
setattr(args, self.dest, pwm_records)
class UpperCaseAction(argparse.Action):
def __call__(self, parser, args, tf_names, option_string=None):
tf_names = [val.upper() for val in tf_names]
setattr(args, self.dest, tf_names)
def create_parser():
parser = argparse.ArgumentParser(description="Matching position frequency matrices (PFM) against DNA sequences")
parser.add_argument("fasta", type=argparse.FileType('r'),
action=ReadFastaAction,
help="fasta file with DNA sequences")
parser.add_argument("pwm", type=argparse.FileType('r'),
action=ReadPWMAction,
help="file with position weight matrices (PWM)")
parser.add_argument("-o", "--output", nargs='?', dest="output",
type=argparse.FileType('w'), default=sys.stdout,
help="output file with matching results. "
"Default stdout.")
parser.add_argument("-tf", "--factor", nargs='+', dest="tf", type=str,
action=UpperCaseAction,
help="transcription factor name in pwm file. "
"Default matching with all tf in pwm file.")
parser.add_argument("-th", "--threshold", dest="threshold", type=float, default=0.7,
help="The parameter threshold split for better control on what parts of the scoring are used. "
"Default 0.7.")
parser.add_argument("-rc", "--reverse-complement", dest="reverse_complement", action="store_true", default=False,
help="Scans against reverse complement sequence in addition to "
"the input sequence. Hits on reverse complement are reported "
"at position [position - sequence_length] "
"in complement of input sequence, which is always "
"negative. The actual hit site for any hit is always "
"seq[pos, pos + matrix_length]. "
"Default False.")
parser.add_argument("-e", "--excel", dest="excel", action="store_true", default=False,
help="For saving results in easy paste to excel format. "
"Default human readable format.")
return parser
def process(args):
if args.tf is None:
args.tf = lib.get_pwm_id_names(args.pwm)
pwms = lib.filter_pwms_in_tfs(args.pwm, args.tf)
matrices = lib.create_matrices_from_pwms(pwms, args.tf)
tf_lengths = [len(m[0]) for m in matrices]
results = []
for seq in args.fasta:
sequence = str(seq.seq)
matches = lib.search_motif(sequence, matrices, args.threshold, args.reverse_complement)
seq_result = SeqSearchResults(seq.description, sequence, args.tf, tf_lengths)
seq_result.fill_matches(matches)
results.append(seq_result)
return results
def save_excel(result, args):
for seq_result in result:
seq_length = len(seq_result.sequence)
args.output.write('[' + seq_result.seq_name + ']')
for tf in seq_result.tfs:
matches_tf = seq_result.tf_dict[tf]
if not matches_tf:
args.output.write('\t\t')
continue
positions = [pos for pos, _ in matches_tf]
positions_str = lib.get_join_position_str(positions, seq_length)
best_match = seq_result.best_match(tf)
best_subseq = seq_result.match_subseq(best_match[0], tf, 0)
args.output.write('\t' + positions_str + '\t' + best_subseq)
args.output.write('\n')
def save_human_readable(result, args):
for seq_result in result:
seq_length = len(seq_result.sequence)
args.output.write('>' + seq_result.seq_name + '\n')
for tf in seq_result.tfs:
args.output.write(tf + ' ')
matches_tf = seq_result.tf_dict[tf]
positions = [pos_tuple[0] for pos_tuple in matches_tf]
positions_str = lib.get_join_position_str(positions, seq_length)
args.output.write(positions_str + '\n')
def save(result, args):
if args.excel:
save_excel(result, args)
else:
save_human_readable(result, args)
def main():
parser = create_parser()
args = parser.parse_args()
result = process(args)
save(result, args)
args.output.close()
if __name__ == "__main__":
signal(SIGPIPE, SIG_DFL)
main()
| SvichkarevAnatoly/Bioinformatics-DNA-Motifs-Search | src/utils/pattern_matching.py | Python | gpl-2.0 | 7,102 |
from __future__ import absolute_import, unicode_literals
from .base.SetupLcioDictionary import setupLcioDictionary
setupLcioDictionary()
import inspect, sys, ROOT, pyLCIO
namespaces = ["EVENT", "IMPL", "IO", "IOIMPL", "UTIL"]
for namespace in namespaces:
module = getattr( ROOT, namespace )
setattr(pyLCIO, namespace, module)
sys.modules['pyLCIO.' + namespace] = module
from .base.DecorateLcioClasses import decorateLcioClasses
decorateLcioClasses()
from .base.HandleExceptions import setupExceptionHandling
setupExceptionHandling()
| petricm/LCIO | src/python/pyLCIO/__init__.py | Python | bsd-3-clause | 549 |
#!/usr/bin/python2.7
#coding:utf-8
import sys
import thread
import time
def timer(threadname):
saveout = sys.stdout
fsock = open(threadname,'w')
sys.stdout = fsock
for i in xrange(1,10):
print '-'
time.sleep(2)
sys.stdout = saveout
fsock.close()
def test(): #Use thread.start_new_thread() to create 2 new threads
thread.start_new_thread(timer, ('1.txt',))
thread.start_new_thread(timer, ('2.txt',))
if __name__=='__main__':
timer('1.log') | xujun10110/Hammer | temp/mutlistdouttest.py | Python | gpl-2.0 | 495 |
from django.core.management.base import BaseCommand, CommandError
from canvas.models import User, Count, Visibility, redis
class Command(BaseCommand):
args = ''
help = "Run our nightly cron tasks"
def handle(self, *args, **options):
self.annotate_accurate_flaggers()
def annotate_accurate_flaggers(self):
curators = []
ignored = []
# Figure out who are good and bad flaggers are.
for user in User.objects.annotate(fc=Count('flags')).filter(fc__gt=0).order_by('-fc'):
flagged = user.fc
unmoderated = user.flags.filter(comment__visibility=Visibility.PUBLIC).count()
accuracy = 1 - (1.0 * unmoderated / flagged)
if accuracy <= 0.2 and flagged >= 5:
ignored.append(user)
elif accuracy > 0.8 and flagged >= 20:
curators.append(user)
# Update the redis sets.
for userlist, key in zip([ignored, curators], ["user:flags_ignored", "user:flags_curate"]):
redis.delete(key)
for user in userlist:
redis.sadd(key, user.id)
print "Successfully annotated flaggers: %s curators and %s ignored." % (len(curators), len(ignored))
| drawquest/drawquest-web | website/canvas/management/commands/cron_nightly.py | Python | bsd-3-clause | 1,277 |
# coding: utf-8
#
# Copyright 2014 The Oppia Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS-IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Provides email services."""
import feconf
from google.appengine.api import mail
def get_incoming_email_address(reply_to_id):
"""Gets the incoming email address. The client is responsible for recording
any audit logs.
Args:
reply_to_id: str. The unique id of the sender.
Returns:
str. The email address of the sender.
"""
return 'reply+%s@%s' % (reply_to_id, feconf.INCOMING_EMAILS_DOMAIN_NAME)
def send_mail(
sender_email, recipient_email, subject, plaintext_body, html_body,
bcc_admin=False, reply_to_id=None):
"""Sends an email. The client is responsible for recording any audit logs.
In general this function should only be called from
email_manager._send_email().
Args:
sender_email: str. The email address of the sender. This should be in
the form 'SENDER_NAME <SENDER_EMAIL_ADDRESS>'.
recipient_email: str. The email address of the recipient.
subject: str. The subject line of the email.
plaintext_body: str. The plaintext body of the email.
html_body: str. The HTML body of the email. Must fit in a datastore
entity.
bcc_admin: bool. Whether to bcc feconf.ADMIN_EMAIL_ADDRESS on the email.
reply_to_id: str or None. The unique reply-to id used in reply-to email
sent to recipient.
Raises:
ValueError: If 'sender_email' or 'recipient_email' is invalid, according
to App Engine.
Exception: If the configuration in feconf.py forbids emails from being
sent.
"""
if not feconf.CAN_SEND_EMAILS:
raise Exception('This app cannot send emails.')
if not mail.is_email_valid(sender_email):
raise ValueError(
'Malformed sender email address: %s' % sender_email)
if not mail.is_email_valid(recipient_email):
raise ValueError(
'Malformed recipient email address: %s' % recipient_email)
msg = mail.EmailMessage(
sender=sender_email, to=recipient_email,
subject=subject, body=plaintext_body, html=html_body)
if bcc_admin:
msg.bcc = [feconf.ADMIN_EMAIL_ADDRESS]
if reply_to_id:
msg.reply_to = get_incoming_email_address(reply_to_id)
# Send message.
msg.send()
def send_bulk_mail(
sender_email, recipient_emails, subject, plaintext_body, html_body):
"""Sends an email. The client is responsible for recording any audit logs.
In general this function should only be called from
email_manager._send_email().
Args:
sender_email: str. The email address of the sender. This should be in
the form 'SENDER_NAME <SENDER_EMAIL_ADDRESS>'.
recipient_emails: list(str). The list of recipients' email addresses.
subject: str. The subject line of the email.
plaintext_body: str. The plaintext body of the email.
html_body: str. The HTML body of the email. Must fit in a datastore
entity.
Raises:
ValueError: If 'sender_email' or 'recipient_email' is invalid, according
to App Engine.
Exception: If the configuration in feconf.py forbids emails from being
sent.
"""
if not feconf.CAN_SEND_EMAILS:
raise Exception('This app cannot send emails.')
if not mail.is_email_valid(sender_email):
raise ValueError(
'Malformed sender email address: %s' % sender_email)
for recipient_email in recipient_emails:
if not mail.is_email_valid(recipient_email):
raise ValueError(
'Malformed recipient email address: %s' % recipient_email)
for recipient_email in recipient_emails:
mail.send_mail(
sender_email, recipient_email, subject, plaintext_body,
html=html_body)
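# A minimal usage sketch (illustrative only): the addresses and reply_to_id
# below are made up, and feconf.CAN_SEND_EMAILS must be True for the call to
# go through.
def _example_send():
    send_mail(
        sender_email='Site Admin <[email protected]>',
        recipient_email='[email protected]',
        subject='Welcome',
        plaintext_body='Hello!',
        html_body='<p>Hello!</p>',
        bcc_admin=False,
        reply_to_id='abc123')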
| AllanYangZhou/oppia | core/platform/email/gae_email_services.py | Python | apache-2.0 | 4,444 |
# coding: utf-8
"""
Swaggy Jenkins
Jenkins API clients generated from Swagger / Open API specification # noqa: E501
The version of the OpenAPI document: 1.1.2-pre.0
Contact: [email protected]
Generated by: https://openapi-generator.tech
"""
import re # noqa: F401
import sys # noqa: F401
import typing # noqa: F401
from frozendict import frozendict # noqa: F401
import decimal # noqa: F401
from datetime import date, datetime # noqa: F401
from frozendict import frozendict # noqa: F401
from openapi_client.schemas import ( # noqa: F401
AnyTypeSchema,
ComposedSchema,
DictSchema,
ListSchema,
StrSchema,
IntSchema,
Int32Schema,
Int64Schema,
Float32Schema,
Float64Schema,
NumberSchema,
DateSchema,
DateTimeSchema,
DecimalSchema,
BoolSchema,
BinarySchema,
NoneSchema,
none_type,
InstantiationMetadata,
Unset,
unset,
ComposedBase,
ListBase,
DictBase,
NoneBase,
StrBase,
IntBase,
NumberBase,
DateBase,
DateTimeBase,
BoolBase,
BinaryBase,
Schema,
_SchemaValidator,
_SchemaTypeChecker,
_SchemaEnumMaker
)
class PipelineImpl(
DictSchema
):
"""NOTE: This class is auto generated by OpenAPI Generator.
Ref: https://openapi-generator.tech
Do not edit the class manually.
"""
_class = StrSchema
displayName = StrSchema
estimatedDurationInMillis = IntSchema
fullName = StrSchema
latestRun = StrSchema
name = StrSchema
organization = StrSchema
weatherScore = IntSchema
@classmethod
@property
def _links(cls) -> typing.Type['PipelineImpllinks']:
return PipelineImpllinks
def __new__(
cls,
*args: typing.Union[dict, frozendict, ],
_class: typing.Union[_class, Unset] = unset,
displayName: typing.Union[displayName, Unset] = unset,
estimatedDurationInMillis: typing.Union[estimatedDurationInMillis, Unset] = unset,
fullName: typing.Union[fullName, Unset] = unset,
latestRun: typing.Union[latestRun, Unset] = unset,
name: typing.Union[name, Unset] = unset,
organization: typing.Union[organization, Unset] = unset,
weatherScore: typing.Union[weatherScore, Unset] = unset,
_links: typing.Union['PipelineImpllinks', Unset] = unset,
_instantiation_metadata: typing.Optional[InstantiationMetadata] = None,
**kwargs: typing.Type[Schema],
) -> 'PipelineImpl':
return super().__new__(
cls,
*args,
_class=_class,
displayName=displayName,
estimatedDurationInMillis=estimatedDurationInMillis,
fullName=fullName,
latestRun=latestRun,
name=name,
organization=organization,
weatherScore=weatherScore,
_links=_links,
_instantiation_metadata=_instantiation_metadata,
**kwargs,
)
from openapi_client.model.pipeline_impllinks import PipelineImpllinks
| cliffano/swaggy-jenkins | clients/python-experimental/generated/openapi_client/model/pipeline_impl.py | Python | mit | 3,057 |
# Copyright (c) 2017 Huawei, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
IMAGE_DIR = 'conveyordashboard/img/'
NOVA_SERVER = 'OS::Nova::Server'
NOVA_KEYPAIR = 'OS::Nova::KeyPair'
NOVA_FLAVOR = 'OS::Nova::Flavor'
NOVA_AZ = 'OS::Nova::AvailabilityZone'
CINDER_VOLUME = 'OS::Cinder::Volume'
CINDER_VOL_TYPE = 'OS::Cinder::VolumeType'
CINDER_QOS = 'OS::Cinder::Qos'
CINDER_CONSISGROUP = 'OS::Cinder::ConsisGroup'
NEUTRON_NET = 'OS::Neutron::Net'
NEUTRON_SUBNET = 'OS::Neutron::Subnet'
NEUTRON_PORT = 'OS::Neutron::Port'
NEUTRON_ROUTER = 'OS::Neutron::Router'
NEUTRON_SECGROUP = 'OS::Neutron::SecurityGroup'
NEUTRON_ROUTER_IF = 'OS::Neutron::RouterInterface'
NEUTRON_FLOATINGIP = 'OS::Neutron::FloatingIP'
NEUTRON_FIP_ASSO = 'OS::Neutron::FloatingIPAssociation'
NEUTRON_VIP = 'OS::Neutron::Vip'
NEUTRON_LISTENER = 'OS::Neutron::Listener'
NEUTRON_POOL = 'OS::Neutron::Pool'
NEUTRON_POOLMEMBER = 'OS::Neutron::PoolMember'
NEUTRON_HEALTHMONITOR = 'OS::Neutron::HealthMonitor'
GLANCE_IMAGE = 'OS::Glance::Image'
HEAT_STACK = 'OS::Heat::Stack'
TAG_RES_TYPE = 'resource_type'
TAG_RES_ID = 'resource_id'
TAG_FROM = 'from'
TAG_FROM_ID = 'res_id_from'
TAG_UPDATED = 'res_updated'
RES_ACTION_KEY = 'action'
RES_ACTIONS = (ACTION_EDIT, ACTION_DELETE, ACTION_ADD) \
= ('edit', 'delete', 'add')
PLAN_TYPE = (MIGRATE, CLONE) = ('migrate', 'clone')
DEPENDENCY_UPDATE_MAPPING = {
NEUTRON_NET: [NEUTRON_SUBNET],
NEUTRON_SUBNET: [NEUTRON_PORT],
CINDER_VOLUME: [CINDER_VOL_TYPE],
CINDER_VOL_TYPE: [CINDER_QOS],
}
RESOURCE_TYPE_IMAGE_MAPPINGS = {
# Nova
NOVA_SERVER: {
'green': 'server-green.svg',
'red': 'server-red.svg',
'gray': 'server-gray.svg'
},
NOVA_KEYPAIR: {
'green': 'keypair-green.svg',
'red': 'keypair-red.svg',
'gray': 'keypair-gray.svg'
},
NOVA_FLAVOR: {
'green': 'flavor-green.svg',
'red': 'flavor-red.svg',
'gray': 'flavor-gray.svg'
},
# Cinder
CINDER_VOLUME: {
'green': 'volume-green.svg',
'red': 'volume-red.svg',
'gray': 'volume-gray.svg'
},
CINDER_VOL_TYPE: {
'green': 'volumetype-green.svg',
'red': 'volumetype-red.svg',
'gray': 'volumetype-gray.svg'
},
CINDER_QOS: {
'green': 'qos-green.svg',
'red': 'qos-red.svg',
'gray': 'qos-gray.svg'
},
CINDER_CONSISGROUP: {
'green': 'consisgroup-green.svg',
'red': 'consisgroup-red.svg',
'gray': 'consisgroup-gray.svg'
},
# Neutron
NEUTRON_NET: {
'green': 'net-green.svg',
'red': 'net-red.svg',
'gray': 'net-gray.svg'
},
NEUTRON_SUBNET: {
'green': 'subnet-green.svg',
'red': 'subnet-red.svg',
'gray': 'subnet-gray.svg'
},
NEUTRON_ROUTER: {
'green': 'router-green.svg',
'red': 'router-red.svg',
'gray': 'router-gray.svg'
},
NEUTRON_SECGROUP: {
'green': 'securitygroup-green.svg',
'red': 'securitygroup-red.svg',
'gray': 'securitygroup-gray.svg'
},
NEUTRON_PORT: {
'green': 'port-green.svg',
'red': 'port-red.svg',
'gray': 'port-gray.svg'
},
NEUTRON_ROUTER_IF: {
'green': 'routerinterface-green.svg',
'red': 'routerinterface-red.svg',
'gray': 'routerinterface-gray.svg'
},
NEUTRON_FLOATINGIP: {
'green': 'floatingip-green.svg',
'red': 'floatingip-red.svg',
'gray': 'floatingip-gray.svg'
},
NEUTRON_FIP_ASSO: {
'green': 'floatingipassociation-green.svg',
'red': 'floatingipassociation-red.svg',
'gray': 'floatingipassociation-gray.svg'
},
NEUTRON_VIP: {
'green': 'vip-green.svg',
'red': 'vip-red.svg',
'gray': 'vip-gray.svg'
},
NEUTRON_LISTENER: {
'green': 'listener-green.svg',
'red': 'listener-red.svg',
'gray': 'listener-gray.svg'
},
NEUTRON_POOL: {
'green': 'pool-green.svg',
'red': 'pool-red.svg',
'gray': 'pool-gray.svg'
},
NEUTRON_POOLMEMBER: {
'green': 'poolmember-green.svg',
'red': 'poolmember-red.svg',
'gray': 'poolmember-gray.svg'
},
NEUTRON_HEALTHMONITOR: {
'green': 'healthmonitor-green.svg',
'red': 'healthmonitor-red.svg',
'gray': 'healthmonitor-gray.svg'
},
# Stack
HEAT_STACK: {
'green': 'stack-green.svg',
'red': 'stack-red.svg',
'gray': 'stack-gray.svg'
},
# Unknown type
'UNKNOWN': {
'green': 'unknown.svg',
'red': 'unknown-red.svg',
'gray': 'unknown-gray.svg'
},
}
| Hybrid-Cloud/conveyor-dashboard | conveyordashboard/common/constants.py | Python | apache-2.0 | 5,207 |
import logging
logger = logging.getLogger(__name__)
import os
import cPickle
import numpy as np
import theano
floatX = theano.config.floatX
from mozi.utils.utils import get_file, make_one_hot
from mozi.datasets.dataset import SingleBlock
class Cifar10(SingleBlock):
def __init__(self, flatten=False, **kwargs):
im_dir = os.environ['MOZI_DATA_PATH'] + '/cifar10/'
path = 'http://www.cs.toronto.edu/~kriz/cifar-10-python.tar.gz'
im_dir = get_file(fpath="{}/cifar-10-python.tar.gz".format(im_dir), origin=path, untar=True)
self.label_names = ['airplane', 'automobile', 'bird', 'cat', 'deer',
'dog', 'frog','horse','ship','truck']
self.img_shape = (3,32,32)
self.img_size = np.prod(self.img_shape)
self.n_classes = 10
fnames = ['data_batch_%i' % i for i in range(1,6)] + ['test_batch']
X = []
y = []
for fname in fnames:
data_path = "{}/{}".format(im_dir, fname)
with open(data_path) as fin:
data_batch = cPickle.load(fin)
if flatten:
X.extend(data_batch['data'].reshape((len(data_batch['data']), self.img_size)))
else:
X.extend(data_batch['data'].reshape((len(data_batch['data']),)+self.img_shape))
y.extend(data_batch['labels'])
X_npy = np.array(X, dtype=floatX)
X_npy /= 255.0
y_npy = make_one_hot(y, onehot_size=self.n_classes)
super(Cifar10, self).__init__(X=X_npy, y=y_npy, **kwargs)
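# A minimal usage sketch (illustrative only): MOZI_DATA_PATH must point at a
# writable data directory, and the CIFAR-10 archive is downloaded on first
# use. Any extra SingleBlock options can be passed through **kwargs.
def _example_load():
    return Cifar10(flatten=True)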
| dksahuji/Mozi | mozi/datasets/cifar10.py | Python | mit | 1,575 |
import tkinter
import tkinter.ttk
from listparse.ui.common import mk_treeview, PageView
class IndexatorView(PageView):
listboxes = {}
buttons = {}
treeviews = {}
textlabels = {}
def __init__(self, root=None, main_frame=None):
super().__init__(root, main_frame)
def params(self):
param = {
'x': 0,
'y': 0,
'w': 650,
'h': 500,
'title': 'Indexator',
'bd': 5,
}
return param
def make_widgets(self, main_frame):
self.__make_drive_frame(main_frame)
self.__make_table_frame(main_frame)
def __make_drive_frame(self, main_frame):
drive_frame = tkinter.Frame(main_frame, bg='green', bd=self.bd)
drive_frame.pack(side='left', fill='both', expand=False)
self.treeviews['drives'] = mk_treeview(drive_frame)
self.buttons['run_indexator'] = tkinter.ttk.Button(drive_frame,
text='Run indexator')
self.buttons['run_indexator'].pack(side='right', fill='x', expand=False)
def __make_table_frame(self, main_frame):
table_frame = tkinter.Frame(main_frame, bg='blue', bd=self.bd)
table_frame.pack(side='left', fill='both', expand=True)
notebook = tkinter.ttk.Notebook(table_frame)
notebook.pack(fill='both', expand=True)
titles_page = tkinter.ttk.Frame(notebook)
notebook.add(titles_page, text='Titles')
self.treeviews['titles'] = mk_treeview(titles_page, sbars='xy')
self.treeviews['titles']['show'] = 'headings'
self.treeviews['titles']['columns'] = ('one', 'two', 'three')
self.treeviews['titles'].heading('one', text='Title')
self.treeviews['titles'].heading('two', text='Year')
self.treeviews['titles'].column('two', width=40)
self.treeviews['titles'].heading('three', text='Eps')
self.treeviews['titles'].column('three', width=40)
location_page = tkinter.ttk.Frame(table_frame)
notebook.add(location_page, text='Location')
self.treeviews['location'] = mk_treeview(location_page)
self.treeviews['location']['show'] = 'headings'
self.treeviews['location']['columns'] = ('one', 'two')
self.treeviews['location'].column('one', width=100)
self.treeviews['location'].heading('one', text='First')
self.treeviews['location'].heading('two', text='Second')
self.treeviews['location'].insert('', 1, text='', values=('1A', '1B'))
media_page = tkinter.ttk.Frame(table_frame)
notebook.add(media_page, text='Media')
self.treeviews['media'] = mk_treeview(media_page)
self.treeviews['media']['show'] = 'headings'
self.treeviews['media']['columns'] = ('one', 'two')
self.treeviews['media'].column('one', width=100)
self.treeviews['media'].heading('one', text='First')
self.treeviews['media'].heading('two', text='Second')
self.treeviews['media'].insert('', 1, text='', values=('1A', '1B'))
lst = (
('Shakugan no Shana', '2005', '25'),
('Neon Genesis Evangelion', '1995', '26'),
)
self.display_titles(lst)
lst = (
('Toaru Majutsu no Index', '2008', '25'),
)
self.display_titles(lst)
def display_titles(self, lst):
for i in self.treeviews['titles'].get_children():
self.treeviews['titles'].delete(i)
for title in lst:
self.treeviews['titles'].insert('', 1, text='', values=title)
| sora7/listparse | src/listparse/ui/indexator/view.py | Python | gpl-2.0 | 3,612 |
#!/usr/bin/env python3
# Copyright (c) 2019-2020 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
"""Test the generation of UTXO snapshots using `dumptxoutset`.
"""
from test_framework.test_framework import BitcoinTestFramework
from test_framework.util import assert_equal, assert_raises_rpc_error
import hashlib
from pathlib import Path
class DumptxoutsetTest(BitcoinTestFramework):
def set_test_params(self):
self.setup_clean_chain = True
self.num_nodes = 1
def run_test(self):
"""Test a trivial usage of the dumptxoutset RPC command."""
node = self.nodes[0]
mocktime = node.getblockheader(node.getblockhash(0))['time'] + 1
node.setmocktime(mocktime)
node.generate(100)
FILENAME = 'txoutset.dat'
out = node.dumptxoutset(FILENAME)
expected_path = Path(node.datadir) / self.chain / FILENAME
assert expected_path.is_file()
assert_equal(out['coins_written'], 100)
assert_equal(out['base_height'], 100)
assert_equal(out['path'], str(expected_path))
# Blockhash should be deterministic based on mocked time.
assert_equal(
out['base_hash'],
'6fd417acba2a8738b06fee43330c50d58e6a725046c3d843c8dd7e51d46d1ed6')
with open(str(expected_path), 'rb') as f:
digest = hashlib.sha256(f.read()).hexdigest()
# UTXO snapshot hash should be deterministic based on mocked time.
assert_equal(
digest, 'be032e5f248264ba08e11099ac09dbd001f6f87ffc68bf0f87043d8146d50664')
# Specifying a path to an existing file will fail.
assert_raises_rpc_error(
-8, '{} already exists'.format(FILENAME), node.dumptxoutset, FILENAME)
if __name__ == '__main__':
DumptxoutsetTest().main()
| Sjors/bitcoin | test/functional/rpc_dumptxoutset.py | Python | mit | 1,928 |
from django.db import models
from core.models import TimeStampedModel
from django.contrib.auth.models import User
from django.db.models.signals import post_save
from django.dispatch import receiver
class Organization(TimeStampedModel):
name = models.CharField(max_length=150)
description = models.CharField(max_length=250, blank=True)
def __str__(self):
return self.name
class Specialty(TimeStampedModel):
name = models.CharField(max_length=150)
description = models.CharField(max_length=250, blank=True)
def __str__(self):
return self.name
class Meta:
verbose_name_plural = 'Specialties'
class Researcher(TimeStampedModel):
user = models.OneToOneField(User, on_delete=models.CASCADE, blank=True, null=True)
cell_phone = models.CharField(max_length=100, blank=True)
work_phone = models.CharField(max_length=100, blank=True)
home_phone = models.CharField(max_length=100, blank=True)
enabled = models.BooleanField(default=False)
specialties = models.ManyToManyField(Specialty)
affiliations = models.ManyToManyField(Organization)
orcid = models.CharField(max_length=100, blank=True)
def __str__(self):
return u'%s %s' % (self.user.first_name, self.user.last_name)
| luisen14/treatment-tracking-project | treatment_tracker/researchers/models.py | Python | apache-2.0 | 1,267 |
import numpy as np
from scipy import fftpack
class Fft:
def __init__(self, x, *, sample_rate=None, padded=False):
if sample_rate is None:
raise ValueError('You must determine the sample rate')
fs = sample_rate
if padded:
padding_to = int(2**np.ceil(np.log2(len(x))))
x = np.pad(x, (0, padding_to-len(x)), 'constant')
n, X = len(x), fftpack.fft(x)
self.hz = fftpack.fftshift(fftpack.fftfreq(n, 1/fs))
self.abs = np.abs(X)
self.phase = np.angle(X)
self.values = X
self.samples = n
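# A minimal usage sketch (illustrative values): a 5 Hz sine sampled at 100 Hz.
# After construction, fft.abs holds the magnitude spectrum and fft.hz the
# (shifted) frequency bins.
def _example_fft():
    fs = 100
    t = np.arange(0, 1, 1.0 / fs)
    x = np.sin(2 * np.pi * 5 * t)
    return Fft(x, sample_rate=fs, padded=True)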
| viniciusd/DCO1008---Digital-Signal-Processing | projeto2/common.py | Python | mit | 596 |
from __future__ import unicode_literals
import json
from mock import patch
from tests import TestCase, with_settings
from nose.tools import eq_
class TestIndex(TestCase):
@with_settings(aws={'bucket': 'cattysnap'})
@patch('catsnap.web.controllers.find.Tag')
def test_find_a_tag(self, Tag):
Tag.get_image_data.return_value = [ ('CA7', 1, 'pet cool'),
('D06', 2, 'pet')]
response = self.app.get('/find?tags=pet')
eq_(response.status_code, 200, response.data)
link = '<a href="/image/%s">%s</a>'
cat_link = link % (1, 'pet cool')
dog_link = link % (2, 'pet')
assert cat_link in response.data, response.data
assert dog_link in response.data, response.data
@with_settings(aws={'bucket': 'cattysnap'})
@patch('catsnap.web.controllers.find.Tag')
def test_search_strings_have_whitespace_trimmed(self, Tag):
Tag.get_image_data.return_value = []
response = self.app.get('/find?tags= pet ')
eq_(response.status_code, 200, response.data)
Tag.get_image_data.assert_called_with(['pet'])
@with_settings(aws={'bucket': 'cattysnap'})
@patch('catsnap.web.controllers.find.Tag')
def test_find_a_tag__json_format(self, Tag):
image_structs = [ ('CA7', 1, 'pet cool'),
('D06', 2, 'pet' )]
Tag.get_image_data.return_value = image_structs
response = self.app.get('/find.json?tags=pet')
eq_(response.status_code, 200, response.data)
eq_(json.loads(response.data), [
{'source_url': 'https://s3.amazonaws.com/cattysnap/CA7',
'url': '/image/1',
'caption': 'pet cool',},
{'source_url': 'https://s3.amazonaws.com/cattysnap/D06',
'url': '/image/2',
'caption': 'pet'}])
| ErinCall/catsnap | tests/web/test_find.py | Python | mit | 1,866 |
"""All methods needed to bootstrap a Home Assistant instance."""
import asyncio
import logging.handlers
from timeit import default_timer as timer
from types import ModuleType
from typing import Awaitable, Callable, Optional, Dict, List
from homeassistant import requirements, core, loader, config as conf_util
from homeassistant.config import async_notify_setup_error
from homeassistant.const import EVENT_COMPONENT_LOADED, PLATFORM_FORMAT
from homeassistant.exceptions import HomeAssistantError
from homeassistant.util.async_ import run_coroutine_threadsafe
_LOGGER = logging.getLogger(__name__)
ATTR_COMPONENT = 'component'
DATA_SETUP = 'setup_tasks'
DATA_DEPS_REQS = 'deps_reqs_processed'
SLOW_SETUP_WARNING = 10
def setup_component(hass: core.HomeAssistant, domain: str,
config: Dict) -> bool:
"""Set up a component and all its dependencies."""
return run_coroutine_threadsafe( # type: ignore
async_setup_component(hass, domain, config), loop=hass.loop).result()
async def async_setup_component(hass: core.HomeAssistant, domain: str,
config: Dict) -> bool:
"""Set up a component and all its dependencies.
This method is a coroutine.
"""
if domain in hass.config.components:
return True
setup_tasks = hass.data.setdefault(DATA_SETUP, {})
if domain in setup_tasks:
return await setup_tasks[domain] # type: ignore
task = setup_tasks[domain] = hass.async_create_task(
_async_setup_component(hass, domain, config))
return await task # type: ignore
async def _async_process_dependencies(
hass: core.HomeAssistant, config: Dict, name: str,
dependencies: List[str]) -> bool:
"""Ensure all dependencies are set up."""
blacklisted = [dep for dep in dependencies
if dep in loader.DEPENDENCY_BLACKLIST]
if blacklisted and name != 'default_config':
_LOGGER.error("Unable to set up dependencies of %s: "
"found blacklisted dependencies: %s",
name, ', '.join(blacklisted))
return False
tasks = [async_setup_component(hass, dep, config) for dep
in dependencies]
if not tasks:
return True
results = await asyncio.gather(*tasks, loop=hass.loop)
failed = [dependencies[idx] for idx, res
in enumerate(results) if not res]
if failed:
_LOGGER.error("Unable to set up dependencies of %s. "
"Setup failed for dependencies: %s",
name, ', '.join(failed))
return False
return True
async def _async_setup_component(hass: core.HomeAssistant,
domain: str, config: Dict) -> bool:
"""Set up a component for Home Assistant.
This method is a coroutine.
"""
def log_error(msg: str, link: bool = True) -> None:
"""Log helper."""
_LOGGER.error("Setup failed for %s: %s", domain, msg)
async_notify_setup_error(hass, domain, link)
try:
integration = await loader.async_get_integration(hass, domain)
except loader.IntegrationNotFound:
log_error("Integration not found.", False)
return False
# Validate all dependencies exist and there are no circular dependencies
try:
await loader.async_component_dependencies(hass, domain)
except loader.IntegrationNotFound as err:
_LOGGER.error(
"Not setting up %s because we are unable to resolve "
"(sub)dependency %s", domain, err.domain)
return False
except loader.CircularDependency as err:
_LOGGER.error(
"Not setting up %s because it contains a circular dependency: "
"%s -> %s", domain, err.from_domain, err.to_domain)
return False
# Process requirements as soon as possible, so we can import the component
# without requiring imports to be in functions.
try:
await async_process_deps_reqs(hass, config, integration)
except HomeAssistantError as err:
log_error(str(err))
return False
processed_config = await conf_util.async_process_component_config(
hass, config, integration)
if processed_config is None:
log_error("Invalid config.")
return False
start = timer()
_LOGGER.info("Setting up %s", domain)
try:
component = integration.get_component()
except ImportError:
log_error("Unable to import component", False)
return False
if hasattr(component, 'PLATFORM_SCHEMA'):
# Entity components have their own warning
warn_task = None
else:
warn_task = hass.loop.call_later(
SLOW_SETUP_WARNING, _LOGGER.warning,
"Setup of %s is taking over %s seconds.",
domain, SLOW_SETUP_WARNING)
try:
if hasattr(component, 'async_setup'):
result = await component.async_setup( # type: ignore
hass, processed_config)
else:
result = await hass.async_add_executor_job(
component.setup, hass, processed_config) # type: ignore
except Exception: # pylint: disable=broad-except
_LOGGER.exception("Error during setup of component %s", domain)
async_notify_setup_error(hass, domain, True)
return False
finally:
end = timer()
if warn_task:
warn_task.cancel()
_LOGGER.info("Setup of domain %s took %.1f seconds.", domain, end - start)
if result is False:
log_error("Component failed to initialize.")
return False
if result is not True:
log_error("Component {!r} did not return boolean if setup was "
"successful. Disabling component.".format(domain))
return False
if hass.config_entries:
for entry in hass.config_entries.async_entries(domain):
await entry.async_setup(hass, integration=integration)
hass.config.components.add(component.DOMAIN) # type: ignore
# Cleanup
if domain in hass.data[DATA_SETUP]:
hass.data[DATA_SETUP].pop(domain)
hass.bus.async_fire(
EVENT_COMPONENT_LOADED,
{ATTR_COMPONENT: component.DOMAIN} # type: ignore
)
return True
async def async_prepare_setup_platform(hass: core.HomeAssistant,
hass_config: Dict,
domain: str, platform_name: str) \
-> Optional[ModuleType]:
"""Load a platform and makes sure dependencies are setup.
This method is a coroutine.
"""
platform_path = PLATFORM_FORMAT.format(domain=domain,
platform=platform_name)
def log_error(msg: str) -> None:
"""Log helper."""
_LOGGER.error("Unable to prepare setup for platform %s: %s",
platform_name, msg)
async_notify_setup_error(hass, platform_path)
try:
integration = await loader.async_get_integration(hass, platform_name)
except loader.IntegrationNotFound:
log_error("Integration not found")
return None
# Process deps and reqs as soon as possible, so that requirements are
# available when we import the platform.
try:
await async_process_deps_reqs(hass, hass_config, integration)
except HomeAssistantError as err:
log_error(str(err))
return None
try:
platform = integration.get_platform(domain)
except ImportError:
log_error("Platform not found.")
return None
# Already loaded
if platform_path in hass.config.components:
return platform
# Platforms cannot exist on their own, they are part of their integration.
# If the integration is not set up yet, and can be set up, set it up.
if integration.domain not in hass.config.components:
try:
component = integration.get_component()
except ImportError:
log_error("Unable to import the component")
return None
if (hasattr(component, 'setup')
or hasattr(component, 'async_setup')):
if not await async_setup_component(
hass, integration.domain, hass_config
):
log_error("Unable to set up component.")
return None
return platform
async def async_process_deps_reqs(
hass: core.HomeAssistant, config: Dict,
integration: loader.Integration) -> None:
"""Process all dependencies and requirements for a module.
Module is a Python module of either a component or platform.
"""
processed = hass.data.get(DATA_DEPS_REQS)
if processed is None:
processed = hass.data[DATA_DEPS_REQS] = set()
elif integration.domain in processed:
return
if integration.dependencies and not await _async_process_dependencies(
hass,
config,
integration.domain,
integration.dependencies
):
raise HomeAssistantError("Could not set up all dependencies.")
if (not hass.config.skip_pip and integration.requirements and
not await requirements.async_process_requirements(
hass, integration.domain, integration.requirements)):
raise HomeAssistantError("Could not install all requirements.")
processed.add(integration.domain)
@core.callback
def async_when_setup(
hass: core.HomeAssistant, component: str,
when_setup_cb: Callable[
[core.HomeAssistant, str], Awaitable[None]]) -> None:
"""Call a method when a component is setup."""
async def when_setup() -> None:
"""Call the callback."""
try:
await when_setup_cb(hass, component)
except Exception: # pylint: disable=broad-except
_LOGGER.exception('Error handling when_setup callback for %s',
component)
# Running it in a new task so that it always runs after
if component in hass.config.components:
hass.async_create_task(when_setup())
return
unsub = None
async def loaded_event(event: core.Event) -> None:
"""Call the callback."""
if event.data[ATTR_COMPONENT] != component:
return
unsub() # type: ignore
await when_setup()
unsub = hass.bus.async_listen(EVENT_COMPONENT_LOADED, loaded_event)
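# A minimal usage sketch (illustrative only): registering a coroutine that
# runs once the 'http' component has finished setting up. The callback
# receives the hass object and the component name, as described above.
async def _example_when_setup(hass: core.HomeAssistant) -> None:
    async def _on_ready(hass: core.HomeAssistant, component: str) -> None:
        _LOGGER.info("%s finished setting up", component)
    async_when_setup(hass, 'http', _on_ready)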
| MartinHjelmare/home-assistant | homeassistant/setup.py | Python | apache-2.0 | 10,473 |
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2009 Tiny SPRL (<http://tiny.be>). All Rights Reserved
# d$
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
from osv import osv
from osv import fields
from tools.translate import _
from datetime import datetime, timedelta
from dateutil.relativedelta import relativedelta
import time
class sale_order_line(osv.osv):
_name = 'sale.order.line'
_inherit = 'sale.order.line'
# def product_id_change(self, cr, uid, ids, pricelist, product, qty=0,
# uom=False, qty_uos=0, uos=False, name='', partner_id=False,
# lang=False, update_tax=True, date_order=False, packaging=False, fiscal_position=False, flag=False, context={}):
## if not pricelist:
## raise osv.except_osv(_('No Pricelist !'), _('You have to select a pricelist or a supplier in the purchase form !\nPlease set one before choosing a product.'))
## if not partner_id:
## raise osv.except_osv(_('No Partner!'), _('You have to select a partner in the purchase form !\nPlease set one partner before choosing a product.'))
# if not product:
# return {'value': {'th_weight': 0, 'product_packaging': False,
# 'product_uos_qty': qty, 'tax_id':[]}, 'domain': {'product_uom': [],
# 'product_uos': []}, 'domain':{'product_uom':[]}}
#
# res = {}
# #prod= self.pool.get('product.product').browse(cr, uid, product)
#
# product_uom_pool = self.pool.get('product.uom')
# lang=False
# if partner_id:
# lang=self.pool.get('res.partner').read(cr, uid, partner_id, ['lang'])['lang']
# context={'lang':lang}
# context['partner_id'] = partner_id
#
# prod = self.pool.get('product.product').browse(cr, uid, product, context=context)
# prod_uom_po = prod.uom_po_id.id
# if uom <> prod_uom_po:
# uom = prod_uom_po
# if not date_order:
# date_order = time.strftime('%Y-%m-%d')
# qty = qty or 1.0
# seller_delay = 0
# prod_name = self.pool.get('product.product').name_get(cr, uid, [prod.id], context=context)[0][1]
# res = {}
# for s in prod.seller_ids:
# if s.name.id == partner_id:
# seller_delay = s.delay
# if s.product_uom:
# temp_qty = product_uom_pool._compute_qty(cr, uid, s.product_uom.id, s.min_qty, to_uom_id=prod.uom_id.id)
# uom = s.product_uom.id #prod_uom_po
# temp_qty = s.min_qty # supplier _qty assigned to temp
# if qty < temp_qty: # If the supplier quantity is greater than entered from user, set minimal.
# qty = temp_qty
# res.update({'warning': {'title': _('Warning'), 'message': _('The selected supplier has a minimal quantity set to %s, you cannot purchase less.') % qty}})
# qty_in_product_uom = product_uom_pool._compute_qty(cr, uid, uom, qty, to_uom_id=prod.uom_id.id)
# price = self.pool.get('product.pricelist').price_get(cr,uid,[pricelist],
# product, qty_in_product_uom or 1.0, partner_id, {
# 'uom': uom,
# 'date': date_order,
# })[pricelist]
# dt = (datetime.now() + relativedelta(days=int(seller_delay) or 0.0)).strftime('%Y-%m-%d %H:%M:%S')
#
#
# res.update({'value': {'price_unit': price, 'name': prod_name,
# 'tax_id':map(lambda x: x.id, prod.supplier_taxes_id),
# # 'date_planned': date_planned or dt,'notes': notes or prod.description_purchase,
# 'product_uom_qty': qty,
# 'product_uom': uom}})
#
#
# domain = {}
#
# taxes = self.pool.get('account.tax').browse(cr, uid,map(lambda x: x.id, prod.supplier_taxes_id))
# fpos = fiscal_position and self.pool.get('account.fiscal.position').browse(cr, uid, fiscal_position) or False
# res['value']['tax_id'] = self.pool.get('account.fiscal.position').map_tax(cr, uid, fpos, taxes)
#
# res2 = self.pool.get('product.uom').read(cr, uid, [uom], ['category_id'])
# res3 = prod.uom_id.category_id.id
# domain = {'product_uom':[('category_id','=',res2[0]['category_id'][0])]}
# if res2[0]['category_id'][0] != res3:
# raise osv.except_osv(_('Wrong Product UOM !'), _('You have to select a product UOM in the same category than the purchase UOM of the product'))
#
# res['domain'] = domain
#
# return res
def _get_virtual_stock(self, cr, uid, ids, field_name, arg, context):
res = {}
for obj in self.browse(cr, uid, ids):
res[obj.id] = obj.product_id.virtual_available
return res
def _get_real_stock(self, cr, uid, ids, field_name, arg, context):
res = {}
for obj in self.browse(cr, uid, ids):
res[obj.id] = obj.product_id.qty_available
return res
_columns = {'virtual_avl': fields.function(_get_virtual_stock, method=True, string='Virtual Stock'),
'qty_avl': fields.function(_get_real_stock, method=True, string='Real Stock'),
'pricelist_id': fields.related('order_id', 'pricelist_id', type='many2one', relation='product.pricelist', string='Pricelist'),
'partner_id': fields.related('order_id', 'partner_id', type='many2one', relation='res.partner', string='Customer'),
'date_order':fields.related('order_id', 'date_order', type="date", string="Date"),
'fiscal_position': fields.related('order_id', 'fiscal_position', type='many2one', relation='account.fiscal.position', string='Fiscal Position'),
'shop_id': fields.related('order_id', 'shop_id', type='many2one', relation='sale.shop', string='Shop'),
}
_defaults = {'pricelist_id': lambda self, cr, uid, c: c.get('pricelist_id', False),
'partner_id': lambda self, cr, uid, c: c.get('partner_id', False),
'date_order': lambda self, cr, uid, c: c.get('date_order', False),
'fiscal_position': lambda self, cr, uid, c: c.get('fiscal_position', False),
'shop_id': lambda self, cr, uid, c: c.get('shop_id', False),
}
sale_order_line() | avanzosc/avanzosc6.1 | tree_grid/sale.py | Python | agpl-3.0 | 7,200 |
from flask import abort, Blueprint, flash, redirect, render_template, url_for
from flask.ext.login import current_user, login_required
from j4oauth.app import app, db
from j4oauth.forms import ClientForm
from j4oauth.models import Client, Token
account = Blueprint('account', __name__, template_folder='templates/account')
@account.route('')
@login_required
def user_account():
"""
View for user settings
"""
tokens = Token.query.filter_by(user_id=current_user.id).all()
applications = Client.query.filter_by(user_id=current_user.id).all()
return render_template('index.html',
tokens=tokens, applications=applications)
@account.route('/applications/<client_id>', methods=['GET', 'POST'])
@login_required
def application(client_id):
"""
View for application settings and stats
"""
client = Client.query.get(client_id)
client_form = ClientForm(obj=client, redirect_uri=client._redirect_uris)
if client is None:
abort(404)
if client.user_id != current_user.id:
app.logger.warning('Security issue by {}'.format(current_user.id))
flash('You are not allowed to do that', 'danger')
return redirect(url_for('.user_account'))
if client_form.validate_on_submit():
client.name = client_form.name.data
client.description = client_form.description.data
client.homepage = client_form.homepage.data
client._redirect_uris = client_form.redirect_uri.data
db.session.add(client)
try:
db.session.commit()
except Exception as e:
app.logger.exception(e)
flash('There was an issue updating your application, '
'please try again or contact support', 'danger')
else:
flash('Application updated', 'success')
return redirect(url_for('.application', client_id=client.client_id))
if client_form.is_submitted() is True:
flash('There was an issue validating your informations', 'danger')
return render_template('application.html', client=client, form=client_form)
@account.route('/applications/<client_id>/revoke_tokens', methods=['POST'])
@login_required
def application_revoke_tokens(client_id):
client = Client.query.get(client_id)
if client is None:
abort(404)
if client.user_id != current_user.id:
app.logger.warning('Security issue by {}'.format(current_user.id))
flash('You are not allowed to do that', 'danger')
return redirect(url_for('.user_account'))
    if client.tokens:
        # SQLAlchemy's Session.delete() takes a single mapped instance,
        # so remove the client's tokens one at a time.
        for token in client.tokens:
            db.session.delete(token)
try:
db.session.commit()
except Exception as e:
app.logger.exception(e)
flash('There was an issue revoking this application\'s tokens, '
'please try again or contact support', 'danger')
else:
flash('Tokens revoked with success', 'success')
return redirect(url_for('.application', client_id=client.client_id))
@account.route('/applications/<client_id>/refresh_secret', methods=['POST'])
@login_required
def application_refresh_secret(client_id):
client = Client.query.get(client_id)
if client is None:
abort(404)
if client.user_id != current_user.id:
app.logger.warning('Security issue by {}'.format(current_user.id))
flash('You are not allowed to do that', 'danger')
return redirect(url_for('.user_account'))
client.generate_secret()
db.session.add(client)
try:
db.session.commit()
except Exception as e:
app.logger.exception(e)
flash('There was an issue refreshing this application\'s secret, '
'please try again or contact support', 'danger')
else:
flash('Client secret refreshed', 'success')
return redirect(url_for('.application', client_id=client.client_id))
@account.route('/revoke/<int:token_id>')
@login_required
def revoke_token(token_id):
"""
Method to remove a user's token
:param token_id: the token primary id
"""
token = Token.query.get(token_id)
if token is not None:
try:
db.session.delete(token)
db.session.commit()
except Exception as e:
app.logger.exception(e)
flash(
'There was an issue revoking this application, please try '
'again or contact support', 'danger')
else:
flash('Authorization not found, please try again or contact support',
'danger')
return redirect(url_for('.user_account'))
@account.route('/new_application', methods=['GET', 'POST'])
@login_required
def new_application():
"""
Method to create a new client associated to the current user account
"""
if 'inpatients' not in current_user.groups:
app.logger.warning('Security issue by {}'.format(current_user.id))
flash('You do not belong to the right group for this', 'danger')
return redirect(url_for('.user_account'))
client_form = ClientForm()
if client_form.validate_on_submit():
client = Client()
client.name = client_form.name.data
client.description = client_form.description.data
client.homepage = client_form.homepage.data
client._redirect_uris = client_form.redirect_uri.data
client.generate_keys()
client.user_id = current_user.id
client.is_confidential = True
client._default_scopes = 'auth_info'
db.session.add(client)
try:
db.session.commit()
except Exception as e:
app.logger.exception(e)
flash('There was an issue saving your new application, '
'please try again or contact support', 'danger')
else:
flash('Application created', 'success')
return redirect(url_for('.user_account'))
return redirect(url_for('.new_application'))
    if client_form.is_submitted():
        flash('There was an issue validating your request', 'danger')
return render_template('new_application.html', form=client_form)
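# Illustrative wiring (assumption, not shown in this module): the blueprint is
# expected to be registered on the Flask application elsewhere, e.g.
#   app.register_blueprint(account, url_prefix='/account')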
| J4LP/J4OAuth | j4oauth/account.py | Python | mit | 6,105 |
from PyQt4 import QtGui
from PyQt4.QtCore import pyqtSlot, pyqtSignal, Qt
import sys
import numpy
from random import random
mplPlotWindow = None
qwtPlotWindow = None
pqgPlotWindow = None
PlotWindow = None
def use_qwt_backend():
global PlotWindow, qwtPlotWindow
if qwtPlotWindow is None:
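        # sys.version_info.major-3 supplies the `level` argument to __import__:
        # -1 on Python 2 (allow implicit relative imports), 0 on Python 3
        # (absolute import only).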
qwtPlotWindow = __import__('qt_plotwindow_qwt',
globals(), locals(),
['PlotWindow'], sys.version_info.major-3).PlotWindow
PlotWindow = qwtPlotWindow
def use_qtgraph_backend():
global PlotWindow, pqgPlotWindow
if pqgPlotWindow is None:
pqgPlotWindow = __import__('qt_plotwindow_qtgraph',
globals(), locals(),
['PlotWindow'], sys.version_info.major-3).PlotWindow
PlotWindow = pqgPlotWindow
def use_matplotlib_backend():
global PlotWindow, mplPlotWindow
if mplPlotWindow is None:
mplPlotWindow = __import__('qt_plotwindow_mpl',
globals(), locals(),
['PlotWindow'], sys.version_info.major-3).PlotWindow
PlotWindow = mplPlotWindow
def use_some_backend():
global PlotWindow
if PlotWindow is not None:
return
try:
use_qtgraph_backend()
except ImportError:
try:
use_qwt_backend()
except ImportError:
try:
use_matplotlib_backend()
except ImportError:
raise ImportError("No suitable plot backend found")
if PlotWindow is None:
raise ImportError("No suitable plot backend found")
def create_predefined_plot_window(plots):
"""Create a window with plots from plot dictionary"""
try:
use_some_backend()
except ImportError as e:
print(str(e))
return None, None
w = PlotWindow()
es = []
for plot in plots:
p = w.add_plot()
for l,e,c in plot:
p.add_curve(l,e,c)
es.append(e)
return es, w
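# Illustrative usage sketch (not from the original source; names and the shape
# of the tuples are assumptions inferred from the unpacking `for l, e, c in plot`
# above, where each entry appears to be a (label, expression, colour) triple):
#
#     plots = [
#         [('altitude', lambda s: s.h, 'r'), ('speed', lambda s: s.v, 'b')],
#         [('throttle', lambda s: s.u, 'g')],
#     ]
#     expressions, window = create_predefined_plot_window(plots)
#     if window is not None:
#         window.show()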
| ZhuangER/robot_path_planning | gui/qt_plotwindow.py | Python | mit | 2,076 |
#!/usr/bin/python
from distutils.core import setup
from os import getcwd, listdir, name
from os.path import join
from glob import glob
import re
from struct import pack, unpack
from tempfile import gettempdir
import time
import numpy # pull in dependencies
import platform
win64 = (platform.architecture()[0]=='64bit')
cpu = win64 and 'amd64' or 'x86'
crt = 'Microsoft.VC90.CRT.'+cpu
import sys
sys.path.insert(0, getcwd())
from version import appname, appversion
# bogus crud to get WinXP "Visual Styles"
manifest='''<?xml version="1.0" encoding="UTF-8" standalone="yes"?>
<assembly xmlns="urn:schemas-microsoft-com:asm.v1" xmlns:asmv3="urn:schemas-microsoft-com:asm.v3" manifestVersion="1.0">
<assemblyIdentity
version="{APPVERSION:4.2f}.0.0"
processorArchitecture="{CPU}"
name="{APPNAME}"
type="win32"
/>
<description>DSF overlay editor.</description>
<asmv3:application>
    <asmv3:windowsSettings xmlns="http://schemas.microsoft.com/SMI/2005/WindowsSettings">
<dpiAware>true</dpiAware>
</asmv3:windowsSettings>
</asmv3:application>
<dependency>
<dependentAssembly>
<assemblyIdentity
type="win32"
name="Microsoft.Windows.Common-Controls"
version="6.0.0.0"
processorArchitecture="{CPU}"
publicKeyToken="6595b64144ccf1df"
language="*"
/>
</dependentAssembly>
</dependency>
<dependency>
<dependentAssembly>
<assemblyIdentity
type="win32"
name="Microsoft.VC90.CRT"
version="9.0.30729.4940"
processorArchitecture="{CPU}"
publicKeyToken="1fc8b3b9a1e18e3b"
language="*"
/>
</dependentAssembly>
</dependency>
</assembly>
'''.format(APPNAME=appname, APPVERSION=appversion, CPU=cpu)
if sys.platform=='win32':
# http://www.py2exe.org/ Invoke with: setup.py py2exe
import py2exe
platdata=[('win32',
['win32/DSFTool.exe',
]),
('Microsoft.VC90.CRT',
['win32/'+crt+'/Microsoft.VC90.CRT.manifest',
'win32/'+crt+'/msvcp90.dll',
'win32/'+crt+'/msvcr90.dll'
]),
]
# Substitute Macisms in documentation
hin = open('Resources/OverlayEditor.html', 'rU')
hout = open(join(gettempdir(),'OverlayEditor.html'), 'wt')
subs = { 'Cmd': 'Ctrl',
'↖': 'Home',
'↘': 'End',
' ⇞ ': 'PageUp',
' ⇟ ': 'PageDn' }
regex = re.compile("(%s)" % "|".join(map(re.escape, subs.keys())))
for line in hin:
hout.write(regex.sub(lambda mo: subs[mo.string[mo.start():mo.end()]], line) +'\n')
hin.close()
hout.close()
elif sys.platform.lower().startswith('darwin'):
# http://undefined.org/python/py2app.html Invoke with: setup.py py2app
import py2app
platdata=[('MacOS',
['MacOS/DSFTool',
#'MacOS/OverlayEditor.icns',
]),
# Include wxPython 2.4
#('../Frameworks',
# ['/usr/local/lib/libwx_mac-2.4.0.rsrc',
# ]),
]
res=[join(gettempdir(),'OverlayEditor.html')]
if sys.platform=='win32':
import requests
res.append(requests.certs.where())
for f in listdir('Resources'):
    if f[-3:] in ['png', '.vs', '.fs', 'obj', 'jpg']: res.append('Resources/%s' % f)
setup(name='OverlayEditor',
version=("%4.2f" % appversion),
description='DSF overlay editor',
author='Jonathan Harris',
author_email='[email protected]',
url='http://marginal.org.uk/xplanescenery',
data_files=[('Resources',
res),
('Resources/previews',
glob('Resources/previews/*.jpg')
),
] + platdata,
      options = {'py2exe': {'ascii':True, # suppresses encodings?
'dist_dir':'dist.'+cpu,
'dll_excludes': ['crypt32.dll', 'msvcp90.dll', 'w9xpopen.exe'],
                          #'bundle_files':win64 and 3 or 2, # don't bundle pythonX.dll - causes ctypes to fail. Bundling doesn't work on win64, or with Intel MKL lib
'compressed':True,
'includes':['OpenGL.platform.win32',
'OpenGL.arrays',
'OpenGL.arrays.ctypesarrays',
'OpenGL.arrays.ctypesparameters',
'OpenGL.arrays.ctypespointers',
'OpenGL.arrays.lists',
'OpenGL.arrays.nones',
'OpenGL.arrays.numbers',
'OpenGL.arrays.numpymodule',
'OpenGL.arrays.strings',
'OpenGL.arrays.vbo'],
# http://www.py2exe.org/index.cgi/OptimizingSize
'excludes':['Carbon', 'tcl', 'Tkinter', 'mx', 'webbrowser',
'curses', 'distutils', 'doctest', 'hotshot', 'inspect', 'pdb', 'setuptools', 'win32', # Python2.5
'Numeric', 'dotblas', 'numarray', 'scipy', 'nose', # Old Numeric stuff
'simplejson', # use built-in json
'PIL.ImageQt'], # From Pillow 3 - drags in Qt etc
'packages':['encodings.ascii', 'encodings.hex_codec', 'encodings.idna', 'encodings.latin_1', 'encodings.mbcs', 'encodings.utf_8', 'encodings.utf_16', 'encodings.cp437'],
'optimize':2,
},
'py2app': {'argv_emulation':False,
'iconfile':'MacOS/OverlayEditor.icns',
'includes':['wx'],
'packages':['wx'],
'frameworks':['wx'],
'compressed':True,
'optimize':2,
'semi_standalone':True,
},
},
# comment out for Mac
zipfile = None,
# win32
windows = [{'script':'OverlayEditor.py',
'icon_resources':[(0,'win32/OverlayEditor.ico'),
(1,'win32/fac.ico'),
(2,'win32/for.ico'),
(3,'win32/lin.ico'),
(4,'win32/obj.ico'),
(5,'win32/pol.ico'),
(6,'win32/str.ico'),
(7,'win32/agp.ico')],
'other_resources':[(24,1,manifest)],
}],
# mac
#app = ['OverlayEditor.py'],
)
# Patch the executable to add an export table containing "NvOptimusEnablement"
# http://developer.download.nvidia.com/devzone/devcenter/gamegraphics/files/OptimusRenderingPolicies.pdf
# winnt.h
IMAGE_DOS_SIGNATURE = 0x5a4d # MZ
IMAGE_DOS_HEADER_lfanew = 0x3c # location of PE header
# IMAGE_NT_HEADERS
IMAGE_NT_SIGNATURE = 0x00004550 # PE\0\0
# IMAGE_FILE_HEADER - http://msdn.microsoft.com/en-us/library/windows/desktop/ms680313%28v=vs.85%29.aspx
IMAGE_FILE_MACHINE_I386 = 0x014c
IMAGE_FILE_MACHINE_AMD64 = 0x8664
# IMAGE_OPTIONAL_HEADER
IMAGE_NT_OPTIONAL_HDR32_MAGIC = 0x10b
IMAGE_NT_OPTIONAL_HDR64_MAGIC = 0x20b
h = open('dist.%s/OverlayEditor.exe' % cpu, 'rb+')
assert h
# IMAGE_DOS_HEADER
(magic,) = unpack('<H', h.read(2))
assert magic == IMAGE_DOS_SIGNATURE
h.seek(IMAGE_DOS_HEADER_lfanew)
(nt_header,) = unpack('<I', h.read(4))
# IMAGE_NT_HEADERS
h.seek(nt_header)
(magic,) = unpack('<I', h.read(4))
assert magic == IMAGE_NT_SIGNATURE
# IMAGE_FILE_HEADER
(Machine, NumberOfSections, TimeDateStamp, PointerToSymbolTable, NumberOfSymbols, SizeOfOptionalHeader, Characteristics) = unpack('<HHIIIHH', h.read(20))
assert cpu=='x86' and Machine==IMAGE_FILE_MACHINE_I386 or Machine==IMAGE_FILE_MACHINE_AMD64
assert SizeOfOptionalHeader
optional_header = h.tell()
section_table = optional_header + SizeOfOptionalHeader
# IMAGE_OPTIONAL_HEADER
(Magic,MajorLinkerVersion,MinorLinkerVersion,SizeOfCode,SizeOfInitializedData,SizeOfUninitializedData,AddressOfEntryPoint,BaseOfCode,BaseOfData,ImageBase,SectionAlignment,FileAlignment) = unpack('<HBBIIIIIIIII', h.read(40))
assert cpu=='x86' and Magic==IMAGE_NT_OPTIONAL_HDR32_MAGIC or Magic==IMAGE_NT_OPTIONAL_HDR64_MAGIC
export_table_p = optional_header + (cpu=='x86' and 96 or 112) # location of Export Directory pointer
h.seek(export_table_p)
(va,sz) = unpack('<II', h.read(8))
assert va == sz == 0 # check there isn't already an export table
# IMAGE_SECTION_HEADER
h.seek(section_table)
for section in range(NumberOfSections):
(Name, VirtualSize, VirtualAddress, SizeOfRawData, PointerToRawData, PointerToRelocations, PointerToLinenumbers, NumberOfRelocations, NumberOfLinenumbers, Characteristics) = unpack('<8sIIIIIIHHI', h.read(40))
if Name.rstrip('\0')=='.rdata': # we'll put export table in .rdata, like MSVC's linker
break
else:
assert False
# IMAGE_EXPORT_DIRECTORY
export_table_rva = VirtualAddress + VirtualSize + 4 # leave space for DWORD const variable before export_table
export_table_raw = PointerToRawData + VirtualSize + 4
DLLName = export_table_rva + 0x32
AddressOfFunctions = export_table_rva + 0x28
AddressOfNames = export_table_rva + 0x2c
AddressOfNameOrdinals = export_table_rva + 0x30
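# IMAGE_EXPORT_DIRECTORY fields in pack order: Characteristics, TimeDateStamp,
# MajorVersion, MinorVersion, Name (RVA of DLL name), Base, NumberOfFunctions,
# NumberOfNames, AddressOfFunctions, AddressOfNames, AddressOfNameOrdinals.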
export_directory_table = pack('<IIHHIIIIIII', 0, int(time.time()), 0, 0, DLLName, 1, 1, 1, AddressOfFunctions, AddressOfNames, AddressOfNameOrdinals)
export_address_table = pack('<I', export_table_rva - 4) # pointer to exported variable
export_name_table = pack('<I', export_table_rva + 0x44)
export_ordinal_table = pack('<H', 0)
export_DLLname = 'OverlayEditor.exe\0'
export_names = 'NvOptimusEnablement\0'
export_directory = export_directory_table + export_address_table + export_name_table + export_ordinal_table + export_DLLname + export_names
# update .rdata section
assert VirtualSize/SectionAlignment == (VirtualSize+4+len(export_directory))/SectionAlignment # check we won't overflow the section
VirtualSize += (4 + len(export_directory))
h.seek(section_table + section*40)
if VirtualSize <= SizeOfRawData:
h.write(pack('<8sIIIIIIHHI', Name, VirtualSize, VirtualAddress, SizeOfRawData, PointerToRawData, PointerToRelocations, PointerToLinenumbers, NumberOfRelocations, NumberOfLinenumbers, Characteristics))
else:
# not enough space in file on disk
end_rdata_raw = PointerToRawData + SizeOfRawData
SizeOfRawData += FileAlignment # make space
h.write(pack('<8sIIIIIIHHI', Name, VirtualSize, VirtualAddress, SizeOfRawData, PointerToRawData, PointerToRelocations and PointerToRelocations+FileAlignment or 0, PointerToLinenumbers and PointerToLinenumbers+FileAlignment or 0, NumberOfRelocations, NumberOfLinenumbers, Characteristics))
# bump following sections up
section +=1
while section < NumberOfSections:
h.seek(section_table + section*40)
(Name, VirtualSize, VirtualAddress, SizeOfRawData, PointerToRawData, PointerToRelocations, PointerToLinenumbers, NumberOfRelocations, NumberOfLinenumbers, Characteristics) = unpack('<8sIIIIIIHHI', h.read(40))
h.seek(section_table + section*40)
h.write(pack('<8sIIIIIIHHI', Name, VirtualSize, VirtualAddress, SizeOfRawData, PointerToRawData+FileAlignment, PointerToRelocations and PointerToRelocations+FileAlignment or 0, PointerToLinenumbers and PointerToLinenumbers+FileAlignment or 0, NumberOfRelocations, NumberOfLinenumbers, Characteristics))
section += 1
# move the content of the following sections
h.seek(end_rdata_raw)
restoffile = h.read()
h.seek(end_rdata_raw)
h.write('\0' * FileAlignment)
h.write(restoffile)
# Update optional header with new total size
SizeOfInitializedData += FileAlignment
h.seek(optional_header)
h.write(pack('<HBBIII', Magic,MajorLinkerVersion,MinorLinkerVersion,SizeOfCode,SizeOfInitializedData,SizeOfUninitializedData))
# write export directory
h.seek(export_table_raw - 4)
h.write(pack('<I', 1)) # exported variable == 1
h.write(export_directory)
# update optional header to point to it
h.seek(export_table_p)
h.write(pack('<II', export_table_rva, len(export_directory)))
h.close()
| Marginal/OverlayEditor | win32/setup.py | Python | gpl-2.0 | 13,143 |
#
# Copyright (c) 2010--2013 Red Hat, Inc.
#
# This software is licensed to you under the GNU General Public License,
# version 2 (GPLv2). There is NO WARRANTY for this software, express or
# implied, including the implied warranties of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. You should have received a copy of GPLv2
# along with this software; if not, see
# http://www.gnu.org/licenses/old-licenses/gpl-2.0.txt.
#
# Red Hat trademarks are not licensed under GPLv2. No permission is
# granted to use or replicate Red Hat trademarks that are incorporated
# in this software or its documentation.
#
#
# Converts headers to the intermediate format
#
import headerSource
import time
import string
from importLib import Channel
from backendLib import gmtime, localtime
from types import IntType
from spacewalk.common.stringutils import to_string
class debBinaryPackage(headerSource.rpmBinaryPackage):
def __init__(self, header, size, checksum_type, checksum, path=None, org_id=None,
channels=[]):
headerSource.rpmBinaryPackage.__init__(self)
self.tagMap = headerSource.rpmBinaryPackage.tagMap.copy()
# Remove already-mapped tags
self._already_mapped = [
'rpm_version', 'payload_size', 'payload_format',
'package_group', 'build_time', 'build_host'
]
for t in self._already_mapped:
if self.tagMap.has_key(t):
del self.tagMap[t]
        # XXX it seems to me that this is the place where 'source_rpm' is getting
        # set
for f in self.keys():
field = f
if self.tagMap.has_key(f):
field = self.tagMap[f]
if not field:
# Unsupported
continue
# get the db field value from the header
val = header[field]
if f == 'build_time':
if val is not None and isinstance(val, IntType):
# A UNIX timestamp
val = gmtime(val)
elif val:
# Convert to strings
if isinstance(val, unicode):
val = to_string(val)
else:
val = str(val)
elif val == []:
val = None
self[f] = val
self['package_size'] = size
self['checksum_type'] = checksum_type
self['checksum'] = checksum
self['path'] = path
self['org_id'] = org_id
self['header_start'] = None
self['header_end'] = None
self['last_modified'] = localtime(time.time())
if self['sigmd5']:
self['sigchecksum_type'] = 'md5'
self['sigchecksum'] = self['sigmd5']
del(self['sigmd5'])
# Fix some of the information up
vendor = self['vendor']
if vendor is None:
self['vendor'] = 'Debian'
payloadFormat = self['payload_format']
if payloadFormat is None:
self['payload_format'] = 'ar'
if self['payload_size'] is None:
self['payload_size'] = 0
# Populate file information
self._populateFiles(header)
# Populate dependency information
self._populateDependencyInformation(header)
# Populate changelogs
self._populateChangeLog(header)
# Channels
self._populateChannels(channels)
self['source_rpm'] = None
group = self.get('package_group', '')
if group == '' or group is None:
self['package_group'] = 'NoGroup'
def _populateFiles(self, header):
files = []
#for f in header.get('files', []):
# fc = headerSource.rpmFile()
# fc.populate(f)
# files.append(fc)
self['files'] = files
def _populateDependencyInformation(self, header):
mapping = {
'provides' : headerSource.rpmProvides,
'requires' : headerSource.rpmRequires,
'conflicts' : headerSource.rpmConflicts,
'obsoletes' : headerSource.rpmObsoletes,
'suggests' : headerSource.rpmSuggests,
'recommends': headerSource.rpmRecommends,
'breaks' : headerSource.rpmBreaks,
'predepends': headerSource.rpmPredepends,
}
for k, dclass in mapping.items():
l = []
values = header[k]
if values != None:
val = string.join(values.split(), "") # remove whitespaces
val = val.split(',') # split packages
i = 0
for v in val:
version = ''
if '|' in v:
# TODO: store alternative-package-names semantically someday
name = v + '_' + str(i)
else:
nv = v.split('(')
name = nv[0] + '_' + str(i)
# TODO FIX VERSION AND FLAGS
if (len(nv) > 1):
version = nv[1].rstrip(')')
hash = {'name' : name, 'version' : version, 'flags' : 0}
finst = dclass()
finst.populate(hash)
l.append(finst)
i += 1
self[k] = l
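    # Illustrative example (not from the original source): a Depends value such as
    #   "libc6 (>= 2.4), debconf (>= 0.5) | debconf-2.0"
    # has its whitespace stripped, is split on ',' into
    #   ['libc6(>=2.4)', 'debconf(>=0.5)|debconf-2.0'],
    # and produces {'name': 'libc6_0', 'version': '>=2.4', 'flags': 0} for the
    # first entry; the alternative group keeps its raw text as the name
    # ('debconf(>=0.5)|debconf-2.0_1') with an empty version.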
def _populateChangeLog(self, header):
l = []
#for cinfo in header.get('changelog', []):
# cinst = headerSource.rpmChangeLog()
# cinst.populate(cinfo)
# l.append(cinst)
self['changelog'] = l
def _populateChannels(self, channels):
l = []
for channel in channels:
dict = {'label' : channel}
obj = Channel()
obj.populate(dict)
l.append(obj)
self['channels'] = l
| moio/spacewalk | backend/server/importlib/debPackage.py | Python | gpl-2.0 | 5,827 |
import json
import os
import unittest
from app import app, db
from app.model.document import Document
from app.model.tag import Tag
from app import lib
#-----------------------------------------------------------------------------#
class BaseTestCase(unittest.TestCase):
def setUp(self):
app.config['TESTING'] = True
app.config['SQLALCHEMY_DATABASE_URI'] = 'sqlite://'
app.config['INDEX_QUEUE'] = 'test_index'
self.app = app.test_client()
db.create_all()
def tearDown(self):
db.session.remove()
db.drop_all()
def _add_default_doc(self):
doc = Document(u"Test Title", u"Test Text")
db.session.add(doc)
db.session.commit()
def _add_default_tag(self):
tag = Tag(u"Test Title", u"Test Description")
db.session.add(tag)
db.session.commit()
#-----------------------------------------------------------------------------#
class LibTestCase(BaseTestCase):
def test_test_string_validator_max_length(self):
with self.assertRaises(ValueError) as cm:
lib.string_length(maximum=10)(u'a' * 11, u'Test Name')
self.assertEqual(cm.exception.message, "Test Name is longer than 10 characters (11)")
def test_test_string_validator_min_length(self):
with self.assertRaises(ValueError) as cm:
lib.string_length(minimum=2)(u'a', u'Test Name')
self.assertEqual(cm.exception.message, "Test Name is less than 2 characters (1)")
def test_test_string_validator_min_and_max_length_too_short(self):
with self.assertRaises(ValueError) as cm:
lib.string_length(minimum=2, maximum=10)(u'a', u'Test Name')
self.assertEqual(cm.exception.message, "Test Name is less than 2 characters (1)")
def test_test_string_validator_min_and_max_length_too_long(self):
with self.assertRaises(ValueError) as cm:
lib.string_length(minimum=2, maximum=10)(u'a' * 11, u'Test Name')
self.assertEqual(cm.exception.message, "Test Name is longer than 10 characters (11)")
def test_string_length_validator_type(self):
with self.assertRaises(ValueError) as cm:
lib.string_length(maximum=256)(1, u'Test Name')
self.assertEqual(cm.exception.message, "Test Name needs to be a string")
def test_tag_list_validator_with_valid(self):
self._add_default_tag()
res = lib.tag_list([1], u'Tag List')
self.assertEqual(type(res[0]), Tag)
self.assertEqual((res[0].id), 1)
def test_tag_list_validator_with_invalid(self):
with self.assertRaises(ValueError) as cm:
res = lib.tag_list([1], u'Tag List')
self.assertEqual(cm.exception.message, "1 is not a valid Tag id")
def test_tag_list_validator_with_mix(self):
self._add_default_tag()
with self.assertRaises(ValueError) as cm:
res = lib.tag_list([1,2], u'Tag List')
self.assertEqual(cm.exception.message, "2 is not a valid Tag id")
def test_tag_list_validator_no_duplicates(self):
self._add_default_tag()
res = lib.tag_list([1,1,1], u'Tag List')
self.assertEqual(len(res), 1)
self.assertEqual((res[0].id), 1)
self.assertEqual(type(res[0]), Tag)
def test_ensure_dir(self):
test_dir = "/tmp/searchr/test"
self.assertFalse(os.path.exists(test_dir))
lib.ensure_dir(test_dir)
self.assertTrue(os.path.exists(test_dir))
os.rmdir(test_dir)
| andytom/searchr-server | app/tests/lib.py | Python | bsd-3-clause | 3,500 |
import re
import os
import sys
import time
import datetime
import traceback
from decimal import Decimal
import threading
import asyncio
from electrum.bitcoin import TYPE_ADDRESS
from electrum.storage import WalletStorage
from electrum.wallet import Wallet, InternalAddressCorruption
from electrum.paymentrequest import InvoiceStore
from electrum.util import profiler, InvalidPassword, send_exception_to_crash_reporter
from electrum.plugin import run_hook
from electrum.util import format_satoshis, format_satoshis_plain, format_fee_satoshis
from electrum.paymentrequest import PR_UNPAID, PR_PAID, PR_UNKNOWN, PR_EXPIRED
from electrum import blockchain
from electrum.network import Network, TxBroadcastError, BestEffortRequestFailed
from .i18n import _
from kivy.app import App
from kivy.core.window import Window
from kivy.logger import Logger
from kivy.utils import platform
from kivy.properties import (OptionProperty, AliasProperty, ObjectProperty,
StringProperty, ListProperty, BooleanProperty, NumericProperty)
from kivy.cache import Cache
from kivy.clock import Clock
from kivy.factory import Factory
from kivy.metrics import inch
from kivy.lang import Builder
## lazy imports for factory so that widgets can be used in kv
#Factory.register('InstallWizard', module='electrum.gui.kivy.uix.dialogs.installwizard')
#Factory.register('InfoBubble', module='electrum.gui.kivy.uix.dialogs')
#Factory.register('OutputList', module='electrum.gui.kivy.uix.dialogs')
#Factory.register('OutputItem', module='electrum.gui.kivy.uix.dialogs')
from .uix.dialogs.installwizard import InstallWizard
from .uix.dialogs import InfoBubble, crash_reporter
from .uix.dialogs import OutputList, OutputItem
from .uix.dialogs import TopLabel, RefLabel
#from kivy.core.window import Window
#Window.softinput_mode = 'below_target'
# delayed imports: for startup speed on android
notification = app = ref = None
util = False
# Register the widget cache; timeout=0 means entries are cached forever,
# which keeps memory churn down.
Cache.register('electrum_widgets', timeout=0)
from kivy.uix.screenmanager import Screen
from kivy.uix.tabbedpanel import TabbedPanel
from kivy.uix.label import Label
from kivy.core.clipboard import Clipboard
Factory.register('TabbedCarousel', module='electrum.gui.kivy.uix.screens')
# Register fonts; without this you won't be able to use bold/italic
# inside markup.
from kivy.core.text import Label
Label.register('Roboto',
'electrum/gui/kivy/data/fonts/Roboto.ttf',
'electrum/gui/kivy/data/fonts/Roboto.ttf',
'electrum/gui/kivy/data/fonts/Roboto-Bold.ttf',
'electrum/gui/kivy/data/fonts/Roboto-Bold.ttf')
from electrum.util import (base_units, NoDynamicFeeEstimates, decimal_point_to_base_unit_name,
base_unit_name_to_decimal_point, NotEnoughFunds, UnknownBaseUnit,
DECIMAL_POINT_DEFAULT)
class ElectrumWindow(App):
electrum_config = ObjectProperty(None)
language = StringProperty('en')
# properties might be updated by the network
num_blocks = NumericProperty(0)
num_nodes = NumericProperty(0)
server_host = StringProperty('')
server_port = StringProperty('')
num_chains = NumericProperty(0)
blockchain_name = StringProperty('')
fee_status = StringProperty('Fee')
balance = StringProperty('')
fiat_balance = StringProperty('')
is_fiat = BooleanProperty(False)
blockchain_forkpoint = NumericProperty(0)
auto_connect = BooleanProperty(False)
def on_auto_connect(self, instance, x):
net_params = self.network.get_parameters()
net_params = net_params._replace(auto_connect=self.auto_connect)
self.network.run_from_another_thread(self.network.set_parameters(net_params))
def toggle_auto_connect(self, x):
self.auto_connect = not self.auto_connect
oneserver = BooleanProperty(False)
def on_oneserver(self, instance, x):
net_params = self.network.get_parameters()
net_params = net_params._replace(oneserver=self.oneserver)
self.network.run_from_another_thread(self.network.set_parameters(net_params))
def toggle_oneserver(self, x):
self.oneserver = not self.oneserver
proxy_str = StringProperty('')
def update_proxy_str(self, proxy: dict):
mode = proxy.get('mode')
host = proxy.get('host')
port = proxy.get('port')
self.proxy_str = (host + ':' + port) if mode else _('None')
def choose_server_dialog(self, popup):
from .uix.dialogs.choice_dialog import ChoiceDialog
protocol = 's'
def cb2(host):
from electrum import constants
pp = servers.get(host, constants.net.DEFAULT_PORTS)
port = pp.get(protocol, '')
popup.ids.host.text = host
popup.ids.port.text = port
servers = self.network.get_servers()
ChoiceDialog(_('Choose a server'), sorted(servers), popup.ids.host.text, cb2).open()
def choose_blockchain_dialog(self, dt):
from .uix.dialogs.choice_dialog import ChoiceDialog
chains = self.network.get_blockchains()
def cb(name):
with blockchain.blockchains_lock: blockchain_items = list(blockchain.blockchains.items())
for chain_id, b in blockchain_items:
if name == b.get_name():
self.network.run_from_another_thread(self.network.follow_chain_given_id(chain_id))
chain_objects = [blockchain.blockchains.get(chain_id) for chain_id in chains]
chain_objects = filter(lambda b: b is not None, chain_objects)
names = [b.get_name() for b in chain_objects]
if len(names) > 1:
cur_chain = self.network.blockchain().get_name()
ChoiceDialog(_('Choose your chain'), names, cur_chain, cb).open()
use_rbf = BooleanProperty(False)
def on_use_rbf(self, instance, x):
self.electrum_config.set_key('use_rbf', self.use_rbf, True)
use_change = BooleanProperty(False)
def on_use_change(self, instance, x):
self.electrum_config.set_key('use_change', self.use_change, True)
use_unconfirmed = BooleanProperty(False)
def on_use_unconfirmed(self, instance, x):
self.electrum_config.set_key('confirmed_only', not self.use_unconfirmed, True)
def set_URI(self, uri):
self.switch_to('send')
self.send_screen.set_URI(uri)
def on_new_intent(self, intent):
if intent.getScheme() != 'fujicoin':
return
uri = intent.getDataString()
self.set_URI(uri)
def on_language(self, instance, language):
Logger.info('language: {}'.format(language))
_.switch_lang(language)
def update_history(self, *dt):
if self.history_screen:
self.history_screen.update()
def on_quotes(self, d):
Logger.info("on_quotes")
self._trigger_update_status()
self._trigger_update_history()
def on_history(self, d):
Logger.info("on_history")
if self.wallet:
self.wallet.clear_coin_price_cache()
self._trigger_update_history()
def on_fee_histogram(self, *args):
self._trigger_update_history()
def _get_bu(self):
decimal_point = self.electrum_config.get('decimal_point', DECIMAL_POINT_DEFAULT)
try:
return decimal_point_to_base_unit_name(decimal_point)
except UnknownBaseUnit:
return decimal_point_to_base_unit_name(DECIMAL_POINT_DEFAULT)
def _set_bu(self, value):
assert value in base_units.keys()
decimal_point = base_unit_name_to_decimal_point(value)
self.electrum_config.set_key('decimal_point', decimal_point, True)
self._trigger_update_status()
self._trigger_update_history()
wallet_name = StringProperty(_('No Wallet'))
base_unit = AliasProperty(_get_bu, _set_bu)
fiat_unit = StringProperty('')
def on_fiat_unit(self, a, b):
self._trigger_update_history()
def decimal_point(self):
return base_units[self.base_unit]
def btc_to_fiat(self, amount_str):
if not amount_str:
return ''
if not self.fx.is_enabled():
return ''
rate = self.fx.exchange_rate()
if rate.is_nan():
return ''
fiat_amount = self.get_amount(amount_str + ' ' + self.base_unit) * rate / pow(10, 8)
return "{:.2f}".format(fiat_amount).rstrip('0').rstrip('.')
def fiat_to_btc(self, fiat_amount):
if not fiat_amount:
return ''
rate = self.fx.exchange_rate()
if rate.is_nan():
return ''
satoshis = int(pow(10,8) * Decimal(fiat_amount) / Decimal(rate))
return format_satoshis_plain(satoshis, self.decimal_point())
def get_amount(self, amount_str):
a, u = amount_str.split()
assert u == self.base_unit
try:
x = Decimal(a)
except:
return None
p = pow(10, self.decimal_point())
return int(p * x)
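    # Illustrative arithmetic (not from the original source): for a base unit
    # whose decimal_point() is 5, get_amount('1.5 <unit>') returns
    # int(10**5 * Decimal('1.5')) == 150000 satoshis; btc_to_fiat and
    # fiat_to_btc then scale against the exchange rate using pow(10, 8).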
_orientation = OptionProperty('landscape',
options=('landscape', 'portrait'))
def _get_orientation(self):
return self._orientation
orientation = AliasProperty(_get_orientation,
None,
bind=('_orientation',))
    '''Tries to ascertain the current orientation of the device.
    Can be one of `landscape` or `portrait`.
    :data:`orientation` is a read only `AliasProperty`. Defaults to 'landscape'
    '''
_ui_mode = OptionProperty('phone', options=('tablet', 'phone'))
def _get_ui_mode(self):
return self._ui_mode
ui_mode = AliasProperty(_get_ui_mode,
None,
bind=('_ui_mode',))
    '''Tries to ascertain the kind of device the app is running on.
    Can be one of `tablet` or `phone`.
    :data:`ui_mode` is a read only `AliasProperty`. Defaults to 'phone'
    '''
def __init__(self, **kwargs):
# initialize variables
self._clipboard = Clipboard
self.info_bubble = None
self.nfcscanner = None
self.tabs = None
self.is_exit = False
self.wallet = None
self.pause_time = 0
self.asyncio_loop = asyncio.get_event_loop()
App.__init__(self)#, **kwargs)
title = _('Electrum App')
self.electrum_config = config = kwargs.get('config', None)
self.language = config.get('language', 'en')
self.network = network = kwargs.get('network', None) # type: Network
if self.network:
self.num_blocks = self.network.get_local_height()
self.num_nodes = len(self.network.get_interfaces())
net_params = self.network.get_parameters()
self.server_host = net_params.host
self.server_port = net_params.port
self.auto_connect = net_params.auto_connect
self.oneserver = net_params.oneserver
self.proxy_config = net_params.proxy if net_params.proxy else {}
self.update_proxy_str(self.proxy_config)
self.plugins = kwargs.get('plugins', [])
self.gui_object = kwargs.get('gui_object', None)
self.daemon = self.gui_object.daemon
self.fx = self.daemon.fx
self.use_rbf = config.get('use_rbf', True)
self.use_change = config.get('use_change', True)
self.use_unconfirmed = not config.get('confirmed_only', False)
# create triggers so as to minimize updating a max of 2 times a sec
self._trigger_update_wallet = Clock.create_trigger(self.update_wallet, .5)
self._trigger_update_status = Clock.create_trigger(self.update_status, .5)
self._trigger_update_history = Clock.create_trigger(self.update_history, .5)
self._trigger_update_interfaces = Clock.create_trigger(self.update_interfaces, .5)
self._periodic_update_status_during_sync = Clock.schedule_interval(self.update_wallet_synchronizing_progress, .5)
# cached dialogs
self._settings_dialog = None
self._password_dialog = None
self.fee_status = self.electrum_config.get_fee_status()
def on_pr(self, pr):
if not self.wallet:
self.show_error(_('No wallet loaded.'))
return
if pr.verify(self.wallet.contacts):
key = self.wallet.invoices.add(pr)
if self.invoices_screen:
self.invoices_screen.update()
status = self.wallet.invoices.get_status(key)
if status == PR_PAID:
self.show_error("invoice already paid")
self.send_screen.do_clear()
else:
if pr.has_expired():
self.show_error(_('Payment request has expired'))
else:
self.switch_to('send')
self.send_screen.set_request(pr)
else:
self.show_error("invoice error:" + pr.error)
self.send_screen.do_clear()
def on_qr(self, data):
from electrum.bitcoin import base_decode, is_address
data = data.strip()
if is_address(data):
self.set_URI(data)
return
if data.startswith('fujicoin:'):
self.set_URI(data)
return
# try to decode transaction
from electrum.transaction import Transaction
from electrum.util import bh2u
try:
text = bh2u(base_decode(data, None, base=43))
tx = Transaction(text)
tx.deserialize()
except:
tx = None
if tx:
self.tx_dialog(tx)
return
# show error
self.show_error("Unable to decode QR data")
def update_tab(self, name):
s = getattr(self, name + '_screen', None)
if s:
s.update()
@profiler
def update_tabs(self):
for tab in ['invoices', 'send', 'history', 'receive', 'address']:
self.update_tab(tab)
def switch_to(self, name):
s = getattr(self, name + '_screen', None)
if s is None:
s = self.tabs.ids[name + '_screen']
s.load_screen()
panel = self.tabs.ids.panel
tab = self.tabs.ids[name + '_tab']
panel.switch_to(tab)
def show_request(self, addr):
self.switch_to('receive')
self.receive_screen.screen.address = addr
def show_pr_details(self, req, status, is_invoice):
from electrum.util import format_time
requestor = req.get('requestor')
exp = req.get('exp')
memo = req.get('memo')
amount = req.get('amount')
fund = req.get('fund')
popup = Builder.load_file('electrum/gui/kivy/uix/ui_screens/invoice.kv')
popup.is_invoice = is_invoice
popup.amount = amount
popup.requestor = requestor if is_invoice else req.get('address')
popup.exp = format_time(exp) if exp else ''
popup.description = memo if memo else ''
popup.signature = req.get('signature', '')
popup.status = status
popup.fund = fund if fund else 0
txid = req.get('txid')
popup.tx_hash = txid or ''
popup.on_open = lambda: popup.ids.output_list.update(req.get('outputs', []))
popup.export = self.export_private_keys
popup.open()
def show_addr_details(self, req, status):
from electrum.util import format_time
fund = req.get('fund')
isaddr = 'y'
popup = Builder.load_file('electrum/gui/kivy/uix/ui_screens/invoice.kv')
popup.isaddr = isaddr
popup.is_invoice = False
popup.status = status
popup.requestor = req.get('address')
popup.fund = fund if fund else 0
popup.export = self.export_private_keys
popup.open()
def qr_dialog(self, title, data, show_text=False, text_for_clipboard=None):
from .uix.dialogs.qr_dialog import QRDialog
def on_qr_failure():
popup.dismiss()
msg = _('Failed to display QR code.')
if text_for_clipboard:
msg += '\n' + _('Text copied to clipboard.')
self._clipboard.copy(text_for_clipboard)
Clock.schedule_once(lambda dt: self.show_info(msg))
popup = QRDialog(title, data, show_text, failure_cb=on_qr_failure,
text_for_clipboard=text_for_clipboard)
popup.open()
def scan_qr(self, on_complete):
if platform != 'android':
return
from jnius import autoclass, cast
from android import activity
PythonActivity = autoclass('org.kivy.android.PythonActivity')
SimpleScannerActivity = autoclass("org.electrum.qr.SimpleScannerActivity")
Intent = autoclass('android.content.Intent')
intent = Intent(PythonActivity.mActivity, SimpleScannerActivity)
def on_qr_result(requestCode, resultCode, intent):
try:
if resultCode == -1: # RESULT_OK:
# this doesn't work due to some bug in jnius:
# contents = intent.getStringExtra("text")
String = autoclass("java.lang.String")
contents = intent.getStringExtra(String("text"))
on_complete(contents)
except Exception as e: # exc would otherwise get lost
send_exception_to_crash_reporter(e)
finally:
activity.unbind(on_activity_result=on_qr_result)
activity.bind(on_activity_result=on_qr_result)
PythonActivity.mActivity.startActivityForResult(intent, 0)
def do_share(self, data, title):
if platform != 'android':
return
from jnius import autoclass, cast
JS = autoclass('java.lang.String')
Intent = autoclass('android.content.Intent')
sendIntent = Intent()
sendIntent.setAction(Intent.ACTION_SEND)
sendIntent.setType("text/plain")
sendIntent.putExtra(Intent.EXTRA_TEXT, JS(data))
PythonActivity = autoclass('org.kivy.android.PythonActivity')
currentActivity = cast('android.app.Activity', PythonActivity.mActivity)
it = Intent.createChooser(sendIntent, cast('java.lang.CharSequence', JS(title)))
currentActivity.startActivity(it)
def build(self):
return Builder.load_file('electrum/gui/kivy/main.kv')
def _pause(self):
if platform == 'android':
# move activity to back
from jnius import autoclass
python_act = autoclass('org.kivy.android.PythonActivity')
mActivity = python_act.mActivity
mActivity.moveTaskToBack(True)
def on_start(self):
''' This is the start point of the kivy ui
'''
import time
Logger.info('Time to on_start: {} <<<<<<<<'.format(time.clock()))
win = Window
win.bind(size=self.on_size, on_keyboard=self.on_keyboard)
win.bind(on_key_down=self.on_key_down)
#win.softinput_mode = 'below_target'
self.on_size(win, win.size)
self.init_ui()
crash_reporter.ExceptionHook(self)
# init plugins
run_hook('init_kivy', self)
# fiat currency
self.fiat_unit = self.fx.ccy if self.fx.is_enabled() else ''
# default tab
self.switch_to('history')
# bind intent for fujicoin: URI scheme
if platform == 'android':
from android import activity
from jnius import autoclass
PythonActivity = autoclass('org.kivy.android.PythonActivity')
mactivity = PythonActivity.mActivity
self.on_new_intent(mactivity.getIntent())
activity.bind(on_new_intent=self.on_new_intent)
# connect callbacks
if self.network:
interests = ['wallet_updated', 'network_updated', 'blockchain_updated',
'status', 'new_transaction', 'verified']
self.network.register_callback(self.on_network_event, interests)
self.network.register_callback(self.on_fee, ['fee'])
self.network.register_callback(self.on_fee_histogram, ['fee_histogram'])
self.network.register_callback(self.on_quotes, ['on_quotes'])
self.network.register_callback(self.on_history, ['on_history'])
# load wallet
self.load_wallet_by_name(self.electrum_config.get_wallet_path())
# URI passed in config
uri = self.electrum_config.get('url')
if uri:
self.set_URI(uri)
def get_wallet_path(self):
if self.wallet:
return self.wallet.storage.path
else:
return ''
def on_wizard_complete(self, wizard, storage):
if storage:
wallet = Wallet(storage)
wallet.start_network(self.daemon.network)
self.daemon.add_wallet(wallet)
self.load_wallet(wallet)
elif not self.wallet:
# wizard did not return a wallet; and there is no wallet open atm
# try to open last saved wallet (potentially start wizard again)
self.load_wallet_by_name(self.electrum_config.get_wallet_path(), ask_if_wizard=True)
def load_wallet_by_name(self, path, ask_if_wizard=False):
if not path:
return
if self.wallet and self.wallet.storage.path == path:
return
wallet = self.daemon.load_wallet(path, None)
if wallet:
if wallet.has_password():
self.password_dialog(wallet, _('Enter PIN code'), lambda x: self.load_wallet(wallet), self.stop)
else:
self.load_wallet(wallet)
else:
def launch_wizard():
wizard = Factory.InstallWizard(self.electrum_config, self.plugins)
wizard.path = path
wizard.bind(on_wizard_complete=self.on_wizard_complete)
storage = WalletStorage(path, manual_upgrades=True)
if not storage.file_exists():
wizard.run('new')
elif storage.is_encrypted():
raise Exception("Kivy GUI does not support encrypted wallet files.")
elif storage.requires_upgrade():
wizard.upgrade_storage(storage)
else:
raise Exception("unexpected storage file situation")
if not ask_if_wizard:
launch_wizard()
else:
from .uix.dialogs.question import Question
def handle_answer(b: bool):
if b:
launch_wizard()
else:
try: os.unlink(path)
except FileNotFoundError: pass
self.stop()
d = Question(_('Do you want to launch the wizard again?'), handle_answer)
d.open()
def on_stop(self):
Logger.info('on_stop')
if self.wallet:
self.electrum_config.save_last_wallet(self.wallet)
self.stop_wallet()
def stop_wallet(self):
if self.wallet:
self.daemon.stop_wallet(self.wallet.storage.path)
self.wallet = None
def on_key_down(self, instance, key, keycode, codepoint, modifiers):
if 'ctrl' in modifiers:
# q=24 w=25
if keycode in (24, 25):
self.stop()
elif keycode == 27:
# r=27
# force update wallet
self.update_wallet()
elif keycode == 112:
# pageup
#TODO move to next tab
pass
elif keycode == 117:
# pagedown
#TODO move to prev tab
pass
#TODO: alt+tab_number to activate the particular tab
def on_keyboard(self, instance, key, keycode, codepoint, modifiers):
if key == 27 and self.is_exit is False:
self.is_exit = True
self.show_info(_('Press again to exit'))
return True
# override settings button
if key in (319, 282): #f1/settings button on android
#self.gui.main_gui.toggle_settings(self)
return True
def settings_dialog(self):
from .uix.dialogs.settings import SettingsDialog
if self._settings_dialog is None:
self._settings_dialog = SettingsDialog(self)
self._settings_dialog.update()
self._settings_dialog.open()
def popup_dialog(self, name):
if name == 'settings':
self.settings_dialog()
elif name == 'wallets':
from .uix.dialogs.wallets import WalletDialog
d = WalletDialog()
d.open()
elif name == 'status':
popup = Builder.load_file('electrum/gui/kivy/uix/ui_screens/'+name+'.kv')
master_public_keys_layout = popup.ids.master_public_keys
for xpub in self.wallet.get_master_public_keys()[1:]:
master_public_keys_layout.add_widget(TopLabel(text=_('Master Public Key')))
ref = RefLabel()
ref.name = _('Master Public Key')
ref.data = xpub
master_public_keys_layout.add_widget(ref)
popup.open()
else:
popup = Builder.load_file('electrum/gui/kivy/uix/ui_screens/'+name+'.kv')
popup.open()
@profiler
def init_ui(self):
''' Initialize The Ux part of electrum. This function performs the basic
tasks of setting up the ui.
'''
#from weakref import ref
self.funds_error = False
# setup UX
self.screens = {}
#setup lazy imports for mainscreen
Factory.register('AnimatedPopup',
module='electrum.gui.kivy.uix.dialogs')
Factory.register('QRCodeWidget',
module='electrum.gui.kivy.uix.qrcodewidget')
# preload widgets. Remove this if you want to load the widgets on demand
#Cache.append('electrum_widgets', 'AnimatedPopup', Factory.AnimatedPopup())
#Cache.append('electrum_widgets', 'QRCodeWidget', Factory.QRCodeWidget())
# load and focus the ui
self.root.manager = self.root.ids['manager']
self.history_screen = None
self.contacts_screen = None
self.send_screen = None
self.invoices_screen = None
self.receive_screen = None
self.requests_screen = None
self.address_screen = None
self.icon = "electrum/gui/icons/electrum.png"
self.tabs = self.root.ids['tabs']
def update_interfaces(self, dt):
net_params = self.network.get_parameters()
self.num_nodes = len(self.network.get_interfaces())
self.num_chains = len(self.network.get_blockchains())
chain = self.network.blockchain()
self.blockchain_forkpoint = chain.get_max_forkpoint()
self.blockchain_name = chain.get_name()
interface = self.network.interface
if interface:
self.server_host = interface.host
else:
self.server_host = str(net_params.host) + ' (connecting...)'
self.proxy_config = net_params.proxy or {}
self.update_proxy_str(self.proxy_config)
def on_network_event(self, event, *args):
Logger.info('network event: '+ event)
if event == 'network_updated':
self._trigger_update_interfaces()
self._trigger_update_status()
elif event == 'wallet_updated':
self._trigger_update_wallet()
self._trigger_update_status()
elif event == 'blockchain_updated':
# to update number of confirmations in history
self._trigger_update_wallet()
elif event == 'status':
self._trigger_update_status()
elif event == 'new_transaction':
self._trigger_update_wallet()
elif event == 'verified':
self._trigger_update_wallet()
@profiler
def load_wallet(self, wallet):
if self.wallet:
self.stop_wallet()
self.wallet = wallet
self.wallet_name = wallet.basename()
self.update_wallet()
# Once GUI has been initialized check if we want to announce something
# since the callback has been called before the GUI was initialized
if self.receive_screen:
self.receive_screen.clear()
self.update_tabs()
run_hook('load_wallet', wallet, self)
try:
wallet.try_detecting_internal_addresses_corruption()
except InternalAddressCorruption as e:
self.show_error(str(e))
send_exception_to_crash_reporter(e)
def update_status(self, *dt):
if not self.wallet:
return
if self.network is None or not self.network.is_connected():
status = _("Offline")
elif self.network.is_connected():
self.num_blocks = self.network.get_local_height()
server_height = self.network.get_server_height()
server_lag = self.num_blocks - server_height
if not self.wallet.up_to_date or server_height == 0:
num_sent, num_answered = self.wallet.get_history_sync_state_details()
status = ("{} [size=18dp]({}/{})[/size]"
.format(_("Synchronizing..."), num_answered, num_sent))
elif server_lag > 1:
status = _("Server is lagging ({} blocks)").format(server_lag)
else:
status = ''
else:
status = _("Disconnected")
if status:
self.balance = status
self.fiat_balance = status
else:
c, u, x = self.wallet.get_balance()
text = self.format_amount(c+x+u)
self.balance = str(text.strip()) + ' [size=22dp]%s[/size]'% self.base_unit
self.fiat_balance = self.fx.format_amount(c+u+x) + ' [size=22dp]%s[/size]'% self.fx.ccy
def update_wallet_synchronizing_progress(self, *dt):
if not self.wallet:
return
if not self.wallet.up_to_date:
self._trigger_update_status()
def get_max_amount(self):
from electrum.transaction import TxOutput
if run_hook('abort_send', self):
return ''
inputs = self.wallet.get_spendable_coins(None, self.electrum_config)
if not inputs:
return ''
addr = str(self.send_screen.screen.address) or self.wallet.dummy_address()
outputs = [TxOutput(TYPE_ADDRESS, addr, '!')]
try:
tx = self.wallet.make_unsigned_transaction(inputs, outputs, self.electrum_config)
except NoDynamicFeeEstimates as e:
Clock.schedule_once(lambda dt, bound_e=e: self.show_error(str(bound_e)))
return ''
except NotEnoughFunds:
return ''
except InternalAddressCorruption as e:
self.show_error(str(e))
send_exception_to_crash_reporter(e)
return ''
amount = tx.output_value()
__, x_fee_amount = run_hook('get_tx_extra_fee', self.wallet, tx) or (None, 0)
amount_after_all_fees = amount - x_fee_amount
return format_satoshis_plain(amount_after_all_fees, self.decimal_point())
def format_amount(self, x, is_diff=False, whitespaces=False):
return format_satoshis(x, 0, self.decimal_point(), is_diff=is_diff, whitespaces=whitespaces)
def format_amount_and_units(self, x):
return format_satoshis_plain(x, self.decimal_point()) + ' ' + self.base_unit
def format_fee_rate(self, fee_rate):
# fee_rate is in sat/kB
return format_fee_satoshis(fee_rate/1000) + ' sat/byte'
#@profiler
def update_wallet(self, *dt):
self._trigger_update_status()
if self.wallet and (self.wallet.up_to_date or not self.network or not self.network.is_connected()):
self.update_tabs()
def notify(self, message):
try:
global notification, os
if not notification:
from plyer import notification
icon = (os.path.dirname(os.path.realpath(__file__))
+ '/../../' + self.icon)
notification.notify('Electrum', message,
app_icon=icon, app_name='Electrum')
except ImportError:
            Logger.error('Notification: needs plyer; `sudo python3 -m pip install plyer`')
def on_pause(self):
self.pause_time = time.time()
# pause nfc
if self.nfcscanner:
self.nfcscanner.nfc_disable()
return True
def on_resume(self):
now = time.time()
if self.wallet and self.wallet.has_password() and now - self.pause_time > 60:
self.password_dialog(self.wallet, _('Enter PIN'), None, self.stop)
if self.nfcscanner:
self.nfcscanner.nfc_enable()
def on_size(self, instance, value):
width, height = value
self._orientation = 'landscape' if width > height else 'portrait'
self._ui_mode = 'tablet' if min(width, height) > inch(3.51) else 'phone'
def on_ref_label(self, label, touch):
if label.touched:
label.touched = False
self.qr_dialog(label.name, label.data, True)
else:
label.touched = True
self._clipboard.copy(label.data)
Clock.schedule_once(lambda dt: self.show_info(_('Text copied to clipboard.\nTap again to display it as QR code.')))
def show_error(self, error, width='200dp', pos=None, arrow_pos=None,
exit=False, icon='atlas://electrum/gui/kivy/theming/light/error', duration=0,
modal=False):
''' Show an error Message Bubble.
'''
self.show_info_bubble( text=error, icon=icon, width=width,
pos=pos or Window.center, arrow_pos=arrow_pos, exit=exit,
duration=duration, modal=modal)
def show_info(self, error, width='200dp', pos=None, arrow_pos=None,
exit=False, duration=0, modal=False):
''' Show an Info Message Bubble.
'''
self.show_error(error, icon='atlas://electrum/gui/kivy/theming/light/important',
duration=duration, modal=modal, exit=exit, pos=pos,
arrow_pos=arrow_pos)
def show_info_bubble(self, text=_('Hello World'), pos=None, duration=0,
arrow_pos='bottom_mid', width=None, icon='', modal=False, exit=False):
'''Method to show an Information Bubble
.. parameters::
text: Message to be displayed
pos: position for the bubble
duration: duration the bubble remains on screen. 0 = click to hide
width: width of the Bubble
arrow_pos: arrow position for the bubble
'''
info_bubble = self.info_bubble
if not info_bubble:
info_bubble = self.info_bubble = Factory.InfoBubble()
win = Window
if info_bubble.parent:
win.remove_widget(info_bubble
if not info_bubble.modal else
info_bubble._modal_view)
if not arrow_pos:
info_bubble.show_arrow = False
else:
info_bubble.show_arrow = True
info_bubble.arrow_pos = arrow_pos
img = info_bubble.ids.img
if text == 'texture':
# icon holds a texture not a source image
# display the texture in full screen
text = ''
img.texture = icon
info_bubble.fs = True
info_bubble.show_arrow = False
img.allow_stretch = True
info_bubble.dim_background = True
info_bubble.background_image = 'atlas://electrum/gui/kivy/theming/light/card'
else:
info_bubble.fs = False
info_bubble.icon = icon
#if img.texture and img._coreimage:
# img.reload()
img.allow_stretch = False
info_bubble.dim_background = False
info_bubble.background_image = 'atlas://data/images/defaulttheme/bubble'
info_bubble.message = text
if not pos:
pos = (win.center[0], win.center[1] - (info_bubble.height/2))
info_bubble.show(pos, duration, width, modal=modal, exit=exit)
def tx_dialog(self, tx):
from .uix.dialogs.tx_dialog import TxDialog
d = TxDialog(self, tx)
d.open()
def sign_tx(self, *args):
threading.Thread(target=self._sign_tx, args=args).start()
def _sign_tx(self, tx, password, on_success, on_failure):
try:
self.wallet.sign_transaction(tx, password)
except InvalidPassword:
Clock.schedule_once(lambda dt: on_failure(_("Invalid PIN")))
return
on_success = run_hook('tc_sign_wrapper', self.wallet, tx, on_success, on_failure) or on_success
Clock.schedule_once(lambda dt: on_success(tx))
def _broadcast_thread(self, tx, on_complete):
status = False
try:
self.network.run_from_another_thread(self.network.broadcast_transaction(tx))
except TxBroadcastError as e:
msg = e.get_message_for_gui()
except BestEffortRequestFailed as e:
msg = repr(e)
else:
status, msg = True, tx.txid()
Clock.schedule_once(lambda dt: on_complete(status, msg))
def broadcast(self, tx, pr=None):
def on_complete(ok, msg):
if ok:
self.show_info(_('Payment sent.'))
if self.send_screen:
self.send_screen.do_clear()
if pr:
self.wallet.invoices.set_paid(pr, tx.txid())
self.wallet.invoices.save()
self.update_tab('invoices')
else:
msg = msg or ''
self.show_error(msg)
if self.network and self.network.is_connected():
self.show_info(_('Sending'))
threading.Thread(target=self._broadcast_thread, args=(tx, on_complete)).start()
else:
self.show_info(_('Cannot broadcast transaction') + ':\n' + _('Not connected'))
def description_dialog(self, screen):
from .uix.dialogs.label_dialog import LabelDialog
text = screen.message
def callback(text):
screen.message = text
d = LabelDialog(_('Enter description'), text, callback)
d.open()
def amount_dialog(self, screen, show_max):
from .uix.dialogs.amount_dialog import AmountDialog
amount = screen.amount
if amount:
amount, u = str(amount).split()
assert u == self.base_unit
def cb(amount):
screen.amount = amount
popup = AmountDialog(show_max, amount, cb)
popup.open()
def invoices_dialog(self, screen):
from .uix.dialogs.invoices import InvoicesDialog
if len(self.wallet.invoices.sorted_list()) == 0:
self.show_info(' '.join([
_('No saved invoices.'),
_('Signed invoices are saved automatically when you scan them.'),
_('You may also save unsigned requests or contact addresses using the save button.')
]))
return
popup = InvoicesDialog(self, screen, None)
popup.update()
popup.open()
def requests_dialog(self, screen):
from .uix.dialogs.requests import RequestsDialog
if len(self.wallet.get_sorted_requests(self.electrum_config)) == 0:
self.show_info(_('No saved requests.'))
return
popup = RequestsDialog(self, screen, None)
popup.update()
popup.open()
def addresses_dialog(self, screen):
from .uix.dialogs.addresses import AddressesDialog
popup = AddressesDialog(self, screen, None)
popup.update()
popup.open()
def fee_dialog(self, label, dt):
from .uix.dialogs.fee_dialog import FeeDialog
def cb():
self.fee_status = self.electrum_config.get_fee_status()
fee_dialog = FeeDialog(self, self.electrum_config, cb)
fee_dialog.open()
def on_fee(self, event, *arg):
self.fee_status = self.electrum_config.get_fee_status()
def protected(self, msg, f, args):
if self.wallet.has_password():
on_success = lambda pw: f(*(args + (pw,)))
self.password_dialog(self.wallet, msg, on_success, lambda: None)
else:
f(*(args + (None,)))
def delete_wallet(self):
from .uix.dialogs.question import Question
basename = os.path.basename(self.wallet.storage.path)
d = Question(_('Delete wallet?') + '\n' + basename, self._delete_wallet)
d.open()
def _delete_wallet(self, b):
if b:
basename = self.wallet.basename()
self.protected(_("Enter your PIN code to confirm deletion of {}").format(basename), self.__delete_wallet, ())
def __delete_wallet(self, pw):
wallet_path = self.get_wallet_path()
dirname = os.path.dirname(wallet_path)
basename = os.path.basename(wallet_path)
if self.wallet.has_password():
try:
self.wallet.check_password(pw)
except:
self.show_error("Invalid PIN")
return
self.stop_wallet()
os.unlink(wallet_path)
self.show_error(_("Wallet removed: {}").format(basename))
new_path = self.electrum_config.get_wallet_path()
self.load_wallet_by_name(new_path)
def show_seed(self, label):
self.protected(_("Enter your PIN code in order to decrypt your seed"), self._show_seed, (label,))
def _show_seed(self, label, password):
if self.wallet.has_password() and password is None:
return
keystore = self.wallet.keystore
try:
seed = keystore.get_seed(password)
passphrase = keystore.get_passphrase(password)
except:
self.show_error("Invalid PIN")
return
label.text = _('Seed') + ':\n' + seed
if passphrase:
label.text += '\n\n' + _('Passphrase') + ': ' + passphrase
def password_dialog(self, wallet, msg, on_success, on_failure):
from .uix.dialogs.password_dialog import PasswordDialog
if self._password_dialog is None:
self._password_dialog = PasswordDialog()
self._password_dialog.init(self, wallet, msg, on_success, on_failure)
self._password_dialog.open()
def change_password(self, cb):
from .uix.dialogs.password_dialog import PasswordDialog
if self._password_dialog is None:
self._password_dialog = PasswordDialog()
message = _("Changing PIN code.") + '\n' + _("Enter your current PIN:")
def on_success(old_password, new_password):
self.wallet.update_password(old_password, new_password)
self.show_info(_("Your PIN code was updated"))
on_failure = lambda: self.show_error(_("PIN codes do not match"))
self._password_dialog.init(self, self.wallet, message, on_success, on_failure, is_change=1)
self._password_dialog.open()
def export_private_keys(self, pk_label, addr):
if self.wallet.is_watching_only():
self.show_info(_('This is a watching-only wallet. It does not contain private keys.'))
return
def show_private_key(addr, pk_label, password):
if self.wallet.has_password() and password is None:
return
if not self.wallet.can_export():
return
try:
key = str(self.wallet.export_private_key(addr, password)[0])
pk_label.data = key
except InvalidPassword:
self.show_error("Invalid PIN")
return
self.protected(_("Enter your PIN code in order to decrypt your private key"), show_private_key, (addr, pk_label))
| fujicoin/electrum-fjc | electrum/gui/kivy/main_window.py | Python | mit | 44,226 |
import warnings
import numpy as np
from numpy.testing import (
run_module_suite, TestCase, assert_, assert_equal,
assert_array_equal, assert_almost_equal, assert_array_almost_equal,
assert_raises, assert_allclose, assert_array_max_ulp
)
from numpy.random import rand
from numpy.lib import *
class TestAny(TestCase):
def test_basic(self):
y1 = [0, 0, 1, 0]
y2 = [0, 0, 0, 0]
y3 = [1, 0, 1, 0]
assert_(np.any(y1))
assert_(np.any(y3))
assert_(not np.any(y2))
def test_nd(self):
y1 = [[0, 0, 0], [0, 1, 0], [1, 1, 0]]
assert_(np.any(y1))
assert_array_equal(np.sometrue(y1, axis=0), [1, 1, 0])
assert_array_equal(np.sometrue(y1, axis=1), [0, 1, 1])
class TestAll(TestCase):
def test_basic(self):
y1 = [0, 1, 1, 0]
y2 = [0, 0, 0, 0]
y3 = [1, 1, 1, 1]
assert_(not np.all(y1))
assert_(np.all(y3))
assert_(not np.all(y2))
assert_(np.all(~np.array(y2)))
def test_nd(self):
y1 = [[0, 0, 1], [0, 1, 1], [1, 1, 1]]
assert_(not np.all(y1))
assert_array_equal(np.alltrue(y1, axis=0), [0, 0, 1])
assert_array_equal(np.alltrue(y1, axis=1), [0, 0, 1])
class TestCopy(TestCase):
def test_basic(self):
a = np.array([[1, 2], [3, 4]])
a_copy = np.copy(a)
assert_array_equal(a, a_copy)
a_copy[0, 0] = 10
assert_equal(a[0, 0], 1)
assert_equal(a_copy[0, 0], 10)
def test_order(self):
# It turns out that people rely on np.copy() preserving order by
# default; changing this broke scikit-learn:
# https://github.com/scikit-learn/scikit-learn/commit/7842748cf777412c506a8c0ed28090711d3a3783
a = np.array([[1, 2], [3, 4]])
assert_(a.flags.c_contiguous)
assert_(not a.flags.f_contiguous)
a_fort = np.array([[1, 2], [3, 4]], order="F")
assert_(not a_fort.flags.c_contiguous)
assert_(a_fort.flags.f_contiguous)
a_copy = np.copy(a)
assert_(a_copy.flags.c_contiguous)
assert_(not a_copy.flags.f_contiguous)
a_fort_copy = np.copy(a_fort)
assert_(not a_fort_copy.flags.c_contiguous)
assert_(a_fort_copy.flags.f_contiguous)
class TestAverage(TestCase):
def test_basic(self):
y1 = np.array([1, 2, 3])
assert_(average(y1, axis=0) == 2.)
y2 = np.array([1., 2., 3.])
assert_(average(y2, axis=0) == 2.)
y3 = [0., 0., 0.]
assert_(average(y3, axis=0) == 0.)
y4 = np.ones((4, 4))
y4[0, 1] = 0
y4[1, 0] = 2
assert_almost_equal(y4.mean(0), average(y4, 0))
assert_almost_equal(y4.mean(1), average(y4, 1))
y5 = rand(5, 5)
assert_almost_equal(y5.mean(0), average(y5, 0))
assert_almost_equal(y5.mean(1), average(y5, 1))
y6 = np.matrix(rand(5, 5))
assert_array_equal(y6.mean(0), average(y6, 0))
def test_weights(self):
y = np.arange(10)
w = np.arange(10)
actual = average(y, weights=w)
desired = (np.arange(10) ** 2).sum()*1. / np.arange(10).sum()
assert_almost_equal(actual, desired)
y1 = np.array([[1, 2, 3], [4, 5, 6]])
w0 = [1, 2]
actual = average(y1, weights=w0, axis=0)
desired = np.array([3., 4., 5.])
assert_almost_equal(actual, desired)
w1 = [0, 0, 1]
actual = average(y1, weights=w1, axis=1)
desired = np.array([3., 6.])
assert_almost_equal(actual, desired)
        # This should raise an error. Can we test for that?
# assert_equal(average(y1, weights=w1), 9./2.)
# 2D Case
w2 = [[0, 0, 1], [0, 0, 2]]
desired = np.array([3., 6.])
assert_array_equal(average(y1, weights=w2, axis=1), desired)
assert_equal(average(y1, weights=w2), 5.)
def test_returned(self):
y = np.array([[1, 2, 3], [4, 5, 6]])
# No weights
avg, scl = average(y, returned=True)
assert_equal(scl, 6.)
avg, scl = average(y, 0, returned=True)
assert_array_equal(scl, np.array([2., 2., 2.]))
avg, scl = average(y, 1, returned=True)
assert_array_equal(scl, np.array([3., 3.]))
# With weights
w0 = [1, 2]
avg, scl = average(y, weights=w0, axis=0, returned=True)
assert_array_equal(scl, np.array([3., 3., 3.]))
w1 = [1, 2, 3]
avg, scl = average(y, weights=w1, axis=1, returned=True)
assert_array_equal(scl, np.array([6., 6.]))
w2 = [[0, 0, 1], [1, 2, 3]]
avg, scl = average(y, weights=w2, axis=1, returned=True)
assert_array_equal(scl, np.array([1., 6.]))
class TestSelect(TestCase):
def _select(self, cond, values, default=0):
output = []
for m in range(len(cond)):
output += [V[m] for V, C in zip(values, cond) if C[m]] or [default]
return output
def test_basic(self):
choices = [np.array([1, 2, 3]),
np.array([4, 5, 6]),
np.array([7, 8, 9])]
conditions = [np.array([0, 0, 0]),
np.array([0, 1, 0]),
np.array([0, 0, 1])]
assert_array_equal(select(conditions, choices, default=15),
self._select(conditions, choices, default=15))
assert_equal(len(choices), 3)
assert_equal(len(conditions), 3)
class TestInsert(TestCase):
def test_basic(self):
a = [1, 2, 3]
assert_equal(insert(a, 0, 1), [1, 1, 2, 3])
assert_equal(insert(a, 3, 1), [1, 2, 3, 1])
assert_equal(insert(a, [1, 1, 1], [1, 2, 3]), [1, 1, 2, 3, 2, 3])
        assert_equal(insert(a, 1, [1, 2, 3]), [1, 1, 2, 3, 2, 3])
        assert_equal(insert(a, [1, 2, 3], 9), [1, 9, 2, 9, 3, 9])
b = np.array([0, 1], dtype=np.float64)
assert_equal(insert(b, 0, b[0]), [0., 0., 1.])
def test_multidim(self):
a = [[1, 1, 1]]
r = [[2, 2, 2],
[1, 1, 1]]
assert_equal(insert(a, 0, [2, 2, 2], axis=0), r)
assert_equal(insert(a, 0, 2, axis=0), r)
assert_equal(insert(a, 2, 2, axis=1), [[1, 1, 2, 1]])
class TestAmax(TestCase):
def test_basic(self):
a = [3, 4, 5, 10, -3, -5, 6.0]
assert_equal(np.amax(a), 10.0)
b = [[3, 6.0, 9.0],
[4, 10.0, 5.0],
[8, 3.0, 2.0]]
assert_equal(np.amax(b, axis=0), [8.0, 10.0, 9.0])
assert_equal(np.amax(b, axis=1), [9.0, 10.0, 8.0])
class TestAmin(TestCase):
def test_basic(self):
a = [3, 4, 5, 10, -3, -5, 6.0]
assert_equal(np.amin(a), -5.0)
b = [[3, 6.0, 9.0],
[4, 10.0, 5.0],
[8, 3.0, 2.0]]
assert_equal(np.amin(b, axis=0), [3.0, 3.0, 2.0])
assert_equal(np.amin(b, axis=1), [3.0, 4.0, 2.0])
class TestPtp(TestCase):
def test_basic(self):
a = [3, 4, 5, 10, -3, -5, 6.0]
assert_equal(np.ptp(a, axis=0), 15.0)
b = [[3, 6.0, 9.0],
[4, 10.0, 5.0],
[8, 3.0, 2.0]]
assert_equal(np.ptp(b, axis=0), [5.0, 7.0, 7.0])
assert_equal(np.ptp(b, axis= -1), [6.0, 6.0, 6.0])
class TestCumsum(TestCase):
def test_basic(self):
ba = [1, 2, 10, 11, 6, 5, 4]
ba2 = [[1, 2, 3, 4], [5, 6, 7, 9], [10, 3, 4, 5]]
for ctype in [np.int8, np.uint8, np.int16, np.uint16, np.int32,
np.uint32, np.float32, np.float64, np.complex64, np.complex128]:
a = np.array(ba, ctype)
a2 = np.array(ba2, ctype)
tgt = np.array([1, 3, 13, 24, 30, 35, 39], ctype)
assert_array_equal(np.cumsum(a, axis=0), tgt)
tgt = np.array([[1, 2, 3, 4], [6, 8, 10, 13], [16, 11, 14, 18]], ctype)
assert_array_equal(np.cumsum(a2, axis=0), tgt)
tgt = np.array([[1, 3, 6, 10], [5, 11, 18, 27], [10, 13, 17, 22]], ctype)
assert_array_equal(np.cumsum(a2, axis=1), tgt)
class TestProd(TestCase):
def test_basic(self):
ba = [1, 2, 10, 11, 6, 5, 4]
ba2 = [[1, 2, 3, 4], [5, 6, 7, 9], [10, 3, 4, 5]]
for ctype in [np.int16, np.uint16, np.int32, np.uint32,
np.float32, np.float64, np.complex64, np.complex128]:
a = np.array(ba, ctype)
a2 = np.array(ba2, ctype)
if ctype in ['1', 'b']:
self.assertRaises(ArithmeticError, prod, a)
self.assertRaises(ArithmeticError, prod, a2, 1)
self.assertRaises(ArithmeticError, prod, a)
else:
assert_equal(np.prod(a, axis=0), 26400)
assert_array_equal(np.prod(a2, axis=0),
np.array([50, 36, 84, 180], ctype))
assert_array_equal(np.prod(a2, axis= -1),
np.array([24, 1890, 600], ctype))
class TestCumprod(TestCase):
def test_basic(self):
ba = [1, 2, 10, 11, 6, 5, 4]
ba2 = [[1, 2, 3, 4], [5, 6, 7, 9], [10, 3, 4, 5]]
for ctype in [np.int16, np.uint16, np.int32, np.uint32,
np.float32, np.float64, np.complex64, np.complex128]:
a = np.array(ba, ctype)
a2 = np.array(ba2, ctype)
if ctype in ['1', 'b']:
self.assertRaises(ArithmeticError, cumprod, a)
self.assertRaises(ArithmeticError, cumprod, a2, 1)
self.assertRaises(ArithmeticError, cumprod, a)
else:
assert_array_equal(np.cumprod(a, axis= -1),
np.array([1, 2, 20, 220, 1320, 6600, 26400], ctype))
assert_array_equal(np.cumprod(a2, axis=0),
np.array([[ 1, 2, 3, 4], [ 5, 12, 21, 36],
[50, 36, 84, 180]], ctype))
assert_array_equal(np.cumprod(a2, axis= -1),
np.array([[ 1, 2, 6, 24], [ 5, 30, 210, 1890],
[10, 30, 120, 600]], ctype))
class TestDiff(TestCase):
def test_basic(self):
x = [1, 4, 6, 7, 12]
out = np.array([3, 2, 1, 5])
out2 = np.array([-1, -1, 4])
out3 = np.array([0, 5])
assert_array_equal(diff(x), out)
assert_array_equal(diff(x, n=2), out2)
assert_array_equal(diff(x, n=3), out3)
def test_nd(self):
x = 20 * rand(10, 20, 30)
out1 = x[:, :, 1:] - x[:, :, :-1]
out2 = out1[:, :, 1:] - out1[:, :, :-1]
out3 = x[1:, :, :] - x[:-1, :, :]
out4 = out3[1:, :, :] - out3[:-1, :, :]
assert_array_equal(diff(x), out1)
assert_array_equal(diff(x, n=2), out2)
assert_array_equal(diff(x, axis=0), out3)
assert_array_equal(diff(x, n=2, axis=0), out4)
class TestGradient(TestCase):
def test_basic(self):
v = [[1, 1], [3, 4]]
x = np.array(v)
dx = [np.array([[2., 3.], [2., 3.]]),
np.array([[0., 0.], [1., 1.]])]
assert_array_equal(gradient(x), dx)
assert_array_equal(gradient(v), dx)
def test_badargs(self):
# for 2D array, gradient can take 0, 1, or 2 extra args
x = np.array([[1, 1], [3, 4]])
assert_raises(SyntaxError, gradient, x, np.array([1., 1.]),
np.array([1., 1.]), np.array([1., 1.]))
def test_masked(self):
# Make sure that gradient supports subclasses like masked arrays
x = np.ma.array([[1, 1], [3, 4]])
assert_equal(type(gradient(x)[0]), type(x))
def test_datetime64(self):
# Make sure gradient() can handle special types like datetime64
x = np.array(['1910-08-16', '1910-08-11', '1910-08-10', '1910-08-12',
'1910-10-12', '1910-12-12', '1912-12-12'],
dtype='datetime64[D]')
dx = np.array([ -5, -3, 0, 31, 61, 396, 731], dtype='timedelta64[D]')
assert_array_equal(gradient(x), dx)
assert_(dx.dtype == np.dtype('timedelta64[D]'))
def test_timedelta64(self):
# Make sure gradient() can handle special types like timedelta64
x = np.array([-5, -3, 10, 12, 61, 321, 300], dtype='timedelta64[D]')
dx = np.array([ 2, 7, 7, 25, 154, 119, -21], dtype='timedelta64[D]')
assert_array_equal(gradient(x), dx)
assert_(dx.dtype == np.dtype('timedelta64[D]'))
class TestAngle(TestCase):
def test_basic(self):
x = [1 + 3j, np.sqrt(2) / 2.0 + 1j * np.sqrt(2) / 2,
1, 1j, -1, -1j, 1 - 3j, -1 + 3j]
y = angle(x)
yo = [np.arctan(3.0 / 1.0), np.arctan(1.0), 0, np.pi / 2, np.pi, -np.pi / 2.0,
- np.arctan(3.0 / 1.0), np.pi - np.arctan(3.0 / 1.0)]
z = angle(x, deg=1)
zo = np.array(yo) * 180 / np.pi
assert_array_almost_equal(y, yo, 11)
assert_array_almost_equal(z, zo, 11)
class TestTrimZeros(TestCase):
""" only testing for integer splits.
"""
def test_basic(self):
a = np.array([0, 0, 1, 2, 3, 4, 0])
res = trim_zeros(a)
assert_array_equal(res, np.array([1, 2, 3, 4]))
def test_leading_skip(self):
a = np.array([0, 0, 1, 0, 2, 3, 4, 0])
res = trim_zeros(a)
assert_array_equal(res, np.array([1, 0, 2, 3, 4]))
def test_trailing_skip(self):
a = np.array([0, 0, 1, 0, 2, 3, 0, 4, 0])
res = trim_zeros(a)
assert_array_equal(res, np.array([1, 0, 2, 3, 0, 4]))
class TestExtins(TestCase):
def test_basic(self):
a = np.array([1, 3, 2, 1, 2, 3, 3])
b = extract(a > 1, a)
assert_array_equal(b, [3, 2, 2, 3, 3])
def test_place(self):
a = np.array([1, 4, 3, 2, 5, 8, 7])
place(a, [0, 1, 0, 1, 0, 1, 0], [2, 4, 6])
assert_array_equal(a, [1, 2, 3, 4, 5, 6, 7])
def test_both(self):
a = rand(10)
mask = a > 0.5
ac = a.copy()
c = extract(mask, a)
place(a, mask, 0)
place(a, mask, c)
assert_array_equal(a, ac)
class TestVectorize(TestCase):
def test_simple(self):
def addsubtract(a, b):
if a > b:
return a - b
else:
return a + b
f = vectorize(addsubtract)
r = f([0, 3, 6, 9], [1, 3, 5, 7])
assert_array_equal(r, [1, 6, 1, 2])
def test_scalar(self):
def addsubtract(a, b):
if a > b:
return a - b
else:
return a + b
f = vectorize(addsubtract)
r = f([0, 3, 6, 9], 5)
assert_array_equal(r, [5, 8, 1, 4])
def test_large(self):
x = np.linspace(-3, 2, 10000)
f = vectorize(lambda x: x)
y = f(x)
assert_array_equal(y, x)
def test_ufunc(self):
import math
f = vectorize(math.cos)
args = np.array([0, 0.5*np.pi, np.pi, 1.5*np.pi, 2*np.pi])
r1 = f(args)
r2 = np.cos(args)
assert_array_equal(r1, r2)
def test_keywords(self):
import math
def foo(a, b=1):
return a + b
f = vectorize(foo)
args = np.array([1, 2, 3])
r1 = f(args)
r2 = np.array([2, 3, 4])
assert_array_equal(r1, r2)
r1 = f(args, 2)
r2 = np.array([3, 4, 5])
assert_array_equal(r1, r2)
def test_keywords_no_func_code(self):
# This needs to test a function that has keywords but
# no func_code attribute, since otherwise vectorize will
# inspect the func_code.
import random
try:
f = vectorize(random.randrange)
except:
raise AssertionError()
def test_keywords2_ticket_2100(self):
r"""Test kwarg support: enhancement ticket 2100"""
import math
def foo(a, b=1):
return a + b
f = vectorize(foo)
args = np.array([1, 2, 3])
r1 = f(a=args)
r2 = np.array([2, 3, 4])
assert_array_equal(r1, r2)
r1 = f(b=1, a=args)
assert_array_equal(r1, r2)
r1 = f(args, b=2)
r2 = np.array([3, 4, 5])
assert_array_equal(r1, r2)
def test_keywords3_ticket_2100(self):
"""Test excluded with mixed positional and kwargs: ticket 2100"""
def mypolyval(x, p):
_p = list(p)
res = _p.pop(0)
while _p:
res = res*x + _p.pop(0)
return res
vpolyval = np.vectorize(mypolyval, excluded=['p',1])
ans = [3, 6]
assert_array_equal(ans, vpolyval(x=[0, 1], p=[1, 2, 3]))
assert_array_equal(ans, vpolyval([0, 1], p=[1, 2, 3]))
assert_array_equal(ans, vpolyval([0, 1], [1, 2, 3]))
def test_keywords4_ticket_2100(self):
"""Test vectorizing function with no positional args."""
@vectorize
def f(**kw):
res = 1.0
for _k in kw:
res *= kw[_k]
return res
assert_array_equal(f(a=[1, 2], b=[3, 4]), [3, 8])
def test_keywords5_ticket_2100(self):
"""Test vectorizing function with no kwargs args."""
@vectorize
def f(*v):
return np.prod(v)
assert_array_equal(f([1, 2], [3, 4]), [3, 8])
def test_coverage1_ticket_2100(self):
def foo():
return 1
f = vectorize(foo)
assert_array_equal(f(), 1)
def test_assigning_docstring(self):
def foo(x):
return x
doc = "Provided documentation"
f = vectorize(foo, doc=doc)
assert_equal(f.__doc__, doc)
def test_UnboundMethod_ticket_1156(self):
"""Regression test for issue 1156"""
class Foo:
b = 2
def bar(self, a):
return a**self.b
assert_array_equal(vectorize(Foo().bar)(np.arange(9)),
np.arange(9)**2)
assert_array_equal(vectorize(Foo.bar)(Foo(), np.arange(9)),
np.arange(9)**2)
def test_execution_order_ticket_1487(self):
"""Regression test for dependence on execution order: issue 1487"""
f1 = vectorize(lambda x: x)
res1a = f1(np.arange(3))
res1b = f1(np.arange(0.1, 3))
f2 = vectorize(lambda x: x)
res2b = f2(np.arange(0.1, 3))
res2a = f2(np.arange(3))
assert_equal(res1a, res2a)
assert_equal(res1b, res2b)
def test_string_ticket_1892(self):
"""Test vectorization over strings: issue 1892."""
f = np.vectorize(lambda x:x)
s = '0123456789'*10
assert_equal(s, f(s))
#z = f(np.array([s,s]))
#assert_array_equal([s,s], f(s))
def test_cache(self):
"""Ensure that vectorized func called exactly once per argument."""
_calls = [0]
@vectorize
def f(x):
_calls[0] += 1
return x**2
f.cache = True
x = np.arange(5)
assert_array_equal(f(x), x*x)
assert_equal(_calls[0], len(x))
class TestDigitize(TestCase):
def test_forward(self):
x = np.arange(-6, 5)
bins = np.arange(-5, 5)
assert_array_equal(digitize(x, bins), np.arange(11))
def test_reverse(self):
x = np.arange(5, -6, -1)
bins = np.arange(5, -5, -1)
assert_array_equal(digitize(x, bins), np.arange(11))
def test_random(self):
x = rand(10)
bin = np.linspace(x.min(), x.max(), 10)
assert_(np.all(digitize(x, bin) != 0))
def test_right_basic(self):
x = [1, 5, 4, 10, 8, 11, 0]
bins = [1, 5, 10]
default_answer = [1, 2, 1, 3, 2, 3, 0]
assert_array_equal(digitize(x, bins), default_answer)
right_answer = [0, 1, 1, 2, 2, 3, 0]
assert_array_equal(digitize(x, bins, True), right_answer)
def test_right_open(self):
x = np.arange(-6, 5)
bins = np.arange(-6, 4)
assert_array_equal(digitize(x, bins, True), np.arange(11))
def test_right_open_reverse(self):
x = np.arange(5, -6, -1)
bins = np.arange(4, -6, -1)
assert_array_equal(digitize(x, bins, True), np.arange(11))
def test_right_open_random(self):
x = rand(10)
bins = np.linspace(x.min(), x.max(), 10)
assert_(np.all(digitize(x, bins, True) != 10))
class TestUnwrap(TestCase):
def test_simple(self):
        #check that unwrap removes jumps greater than 2*pi
        assert_array_equal(unwrap([1, 1 + 2 * np.pi]), [1, 1])
        #check that unwrap maintains continuity
assert_(np.all(diff(unwrap(rand(10) * 100)) < np.pi))
class TestFilterwindows(TestCase):
def test_hanning(self):
#check symmetry
w = hanning(10)
assert_array_almost_equal(w, flipud(w), 7)
#check known value
assert_almost_equal(np.sum(w, axis=0), 4.500, 4)
def test_hamming(self):
#check symmetry
w = hamming(10)
assert_array_almost_equal(w, flipud(w), 7)
#check known value
assert_almost_equal(np.sum(w, axis=0), 4.9400, 4)
def test_bartlett(self):
#check symmetry
w = bartlett(10)
assert_array_almost_equal(w, flipud(w), 7)
#check known value
assert_almost_equal(np.sum(w, axis=0), 4.4444, 4)
def test_blackman(self):
#check symmetry
w = blackman(10)
assert_array_almost_equal(w, flipud(w), 7)
#check known value
assert_almost_equal(np.sum(w, axis=0), 3.7800, 4)
class TestTrapz(TestCase):
def test_simple(self):
x = np.arange(-10, 10, .1)
r = trapz(np.exp(-.5*x**2) / np.sqrt(2*np.pi), dx=0.1)
#check integral of normal equals 1
assert_almost_equal(r, 1, 7)
def test_ndim(self):
x = np.linspace(0, 1, 3)
y = np.linspace(0, 2, 8)
z = np.linspace(0, 3, 13)
wx = np.ones_like(x) * (x[1] - x[0])
wx[0] /= 2
wx[-1] /= 2
wy = np.ones_like(y) * (y[1] - y[0])
wy[0] /= 2
wy[-1] /= 2
wz = np.ones_like(z) * (z[1] - z[0])
wz[0] /= 2
wz[-1] /= 2
q = x[:, None, None] + y[None, :, None] + z[None, None, :]
qx = (q * wx[:, None, None]).sum(axis=0)
qy = (q * wy[None, :, None]).sum(axis=1)
qz = (q * wz[None, None, :]).sum(axis=2)
# n-d `x`
r = trapz(q, x=x[:, None, None], axis=0)
assert_almost_equal(r, qx)
r = trapz(q, x=y[None, :, None], axis=1)
assert_almost_equal(r, qy)
r = trapz(q, x=z[None, None, :], axis=2)
assert_almost_equal(r, qz)
# 1-d `x`
r = trapz(q, x=x, axis=0)
assert_almost_equal(r, qx)
r = trapz(q, x=y, axis=1)
assert_almost_equal(r, qy)
r = trapz(q, x=z, axis=2)
assert_almost_equal(r, qz)
def test_masked(self):
#Testing that masked arrays behave as if the function is 0 where
#masked
x = np.arange(5)
y = x * x
mask = x == 2
ym = np.ma.array(y, mask=mask)
r = 13.0 # sum(0.5 * (0 + 1) * 1.0 + 0.5 * (9 + 16))
assert_almost_equal(trapz(ym, x), r)
xm = np.ma.array(x, mask=mask)
assert_almost_equal(trapz(ym, xm), r)
xm = np.ma.array(x, mask=mask)
assert_almost_equal(trapz(y, xm), r)
def test_matrix(self):
#Test to make sure matrices give the same answer as ndarrays
x = np.linspace(0, 5)
y = x * x
r = trapz(y, x)
mx = np.matrix(x)
my = np.matrix(y)
mr = trapz(my, mx)
assert_almost_equal(mr, r)
class TestSinc(TestCase):
def test_simple(self):
assert_(sinc(0) == 1)
w = sinc(np.linspace(-1, 1, 100))
#check symmetry
assert_array_almost_equal(w, flipud(w), 7)
def test_array_like(self):
x = [0, 0.5]
y1 = sinc(np.array(x))
y2 = sinc(list(x))
y3 = sinc(tuple(x))
assert_array_equal(y1, y2)
assert_array_equal(y1, y3)
class TestHistogram(TestCase):
def setUp(self):
pass
def tearDown(self):
pass
def test_simple(self):
n = 100
v = rand(n)
(a, b) = histogram(v)
#check if the sum of the bins equals the number of samples
assert_equal(np.sum(a, axis=0), n)
#check that the bin counts are evenly spaced when the data is from a
# linear function
(a, b) = histogram(np.linspace(0, 10, 100))
assert_array_equal(a, 10)
def test_one_bin(self):
# Ticket 632
hist, edges = histogram([1, 2, 3, 4], [1, 2])
assert_array_equal(hist, [2,])
assert_array_equal(edges, [1, 2])
assert_raises(ValueError, histogram, [1, 2], bins=0)
h, e = histogram([1, 2], bins=1)
assert_equal(h, np.array([2]))
assert_allclose(e, np.array([1., 2.]))
def test_normed(self):
# Check that the integral of the density equals 1.
n = 100
v = rand(n)
a, b = histogram(v, normed=True)
area = np.sum(a * diff(b))
assert_almost_equal(area, 1)
# Check with non-constant bin widths (buggy but backwards compatible)
v = np.arange(10)
bins = [0, 1, 5, 9, 10]
a, b = histogram(v, bins, normed=True)
area = np.sum(a * diff(b))
assert_almost_equal(area, 1)
def test_density(self):
# Check that the integral of the density equals 1.
n = 100
v = rand(n)
a, b = histogram(v, density=True)
area = np.sum(a * diff(b))
assert_almost_equal(area, 1)
# Check with non-constant bin widths
v = np.arange(10)
bins = [0, 1, 3, 6, 10]
a, b = histogram(v, bins, density=True)
assert_array_equal(a, .1)
assert_equal(np.sum(a*diff(b)), 1)
        # Variable bin widths are especially useful to deal with
# infinities.
v = np.arange(10)
bins = [0, 1, 3, 6, np.inf]
a, b = histogram(v, bins, density=True)
assert_array_equal(a, [.1, .1, .1, 0.])
# Taken from a bug report from N. Becker on the numpy-discussion
# mailing list Aug. 6, 2010.
counts, dmy = np.histogram([1, 2, 3, 4], [0.5, 1.5, np.inf], density=True)
assert_equal(counts, [.25, 0])
def test_outliers(self):
# Check that outliers are not tallied
a = np.arange(10) + .5
# Lower outliers
h, b = histogram(a, range=[0, 9])
assert_equal(h.sum(), 9)
# Upper outliers
h, b = histogram(a, range=[1, 10])
assert_equal(h.sum(), 9)
# Normalization
h, b = histogram(a, range=[1, 9], normed=True)
assert_equal((h * diff(b)).sum(), 1)
# Weights
w = np.arange(10) + .5
h, b = histogram(a, range=[1, 9], weights=w, normed=True)
assert_equal((h * diff(b)).sum(), 1)
h, b = histogram(a, bins=8, range=[1, 9], weights=w)
assert_equal(h, w[1:-1])
def test_type(self):
# Check the type of the returned histogram
a = np.arange(10) + .5
h, b = histogram(a)
assert_(issubdtype(h.dtype, int))
h, b = histogram(a, normed=True)
assert_(issubdtype(h.dtype, float))
h, b = histogram(a, weights=np.ones(10, int))
assert_(issubdtype(h.dtype, int))
h, b = histogram(a, weights=np.ones(10, float))
assert_(issubdtype(h.dtype, float))
def test_weights(self):
v = rand(100)
w = np.ones(100) * 5
a, b = histogram(v)
na, nb = histogram(v, normed=True)
wa, wb = histogram(v, weights=w)
nwa, nwb = histogram(v, weights=w, normed=True)
assert_array_almost_equal(a * 5, wa)
assert_array_almost_equal(na, nwa)
# Check weights are properly applied.
v = np.linspace(0, 10, 10)
w = np.concatenate((np.zeros(5), np.ones(5)))
wa, wb = histogram(v, bins=np.arange(11), weights=w)
assert_array_almost_equal(wa, w)
# Check with integer weights
wa, wb = histogram([1, 2, 2, 4], bins=4, weights=[4, 3, 2, 1])
assert_array_equal(wa, [4, 5, 0, 1])
wa, wb = histogram([1, 2, 2, 4], bins=4, weights=[4, 3, 2, 1], normed=True)
assert_array_almost_equal(wa, np.array([4, 5, 0, 1]) / 10. / 3. * 4)
# Check weights with non-uniform bin widths
a, b = histogram(np.arange(9), [0, 1, 3, 6, 10], \
weights=[2, 1, 1, 1, 1, 1, 1, 1, 1], density=True)
assert_almost_equal(a, [.2, .1, .1, .075])
def test_empty(self):
a, b = histogram([], bins=([0, 1]))
assert_array_equal(a, np.array([0]))
assert_array_equal(b, np.array([0, 1]))
class TestHistogramdd(TestCase):
def test_simple(self):
x = np.array([[-.5, .5, 1.5], [-.5, 1.5, 2.5], [-.5, 2.5, .5], \
[.5, .5, 1.5], [.5, 1.5, 2.5], [.5, 2.5, 2.5]])
H, edges = histogramdd(x, (2, 3, 3), range=[[-1, 1], [0, 3], [0, 3]])
answer = np.array([[[0, 1, 0], [0, 0, 1], [1, 0, 0]], [[0, 1, 0], [0, 0, 1],
[0, 0, 1]]])
assert_array_equal(H, answer)
# Check normalization
ed = [[-2, 0, 2], [0, 1, 2, 3], [0, 1, 2, 3]]
H, edges = histogramdd(x, bins=ed, normed=True)
assert_(np.all(H == answer / 12.))
# Check that H has the correct shape.
H, edges = histogramdd(x, (2, 3, 4), range=[[-1, 1], [0, 3], [0, 4]],
normed=True)
answer = np.array([[[0, 1, 0, 0], [0, 0, 1, 0], [1, 0, 0, 0]], [[0, 1, 0, 0],
[0, 0, 1, 0], [0, 0, 1, 0]]])
assert_array_almost_equal(H, answer / 6., 4)
# Check that a sequence of arrays is accepted and H has the correct
# shape.
z = [np.squeeze(y) for y in split(x, 3, axis=1)]
H, edges = histogramdd(z, bins=(4, 3, 2), range=[[-2, 2], [0, 3], [0, 2]])
answer = np.array([[[0, 0], [0, 0], [0, 0]],
[[0, 1], [0, 0], [1, 0]],
[[0, 1], [0, 0], [0, 0]],
[[0, 0], [0, 0], [0, 0]]])
assert_array_equal(H, answer)
Z = np.zeros((5, 5, 5))
Z[range(5), range(5), range(5)] = 1.
H, edges = histogramdd([np.arange(5), np.arange(5), np.arange(5)], 5)
assert_array_equal(H, Z)
def test_shape_3d(self):
# All possible permutations for bins of different lengths in 3D.
bins = ((5, 4, 6), (6, 4, 5), (5, 6, 4), (4, 6, 5), (6, 5, 4),
(4, 5, 6))
r = rand(10, 3)
for b in bins:
H, edges = histogramdd(r, b)
assert_(H.shape == b)
def test_shape_4d(self):
# All possible permutations for bins of different lengths in 4D.
bins = ((7, 4, 5, 6), (4, 5, 7, 6), (5, 6, 4, 7), (7, 6, 5, 4),
(5, 7, 6, 4), (4, 6, 7, 5), (6, 5, 7, 4), (7, 5, 4, 6),
(7, 4, 6, 5), (6, 4, 7, 5), (6, 7, 5, 4), (4, 6, 5, 7),
(4, 7, 5, 6), (5, 4, 6, 7), (5, 7, 4, 6), (6, 7, 4, 5),
(6, 5, 4, 7), (4, 7, 6, 5), (4, 5, 6, 7), (7, 6, 4, 5),
(5, 4, 7, 6), (5, 6, 7, 4), (6, 4, 5, 7), (7, 5, 6, 4))
r = rand(10, 4)
for b in bins:
H, edges = histogramdd(r, b)
assert_(H.shape == b)
def test_weights(self):
v = rand(100, 2)
hist, edges = histogramdd(v)
n_hist, edges = histogramdd(v, normed=True)
w_hist, edges = histogramdd(v, weights=np.ones(100))
assert_array_equal(w_hist, hist)
w_hist, edges = histogramdd(v, weights=np.ones(100) * 2, normed=True)
assert_array_equal(w_hist, n_hist)
w_hist, edges = histogramdd(v, weights=np.ones(100, int) * 2)
assert_array_equal(w_hist, 2 * hist)
def test_identical_samples(self):
x = np.zeros((10, 2), int)
hist, edges = histogramdd(x, bins=2)
assert_array_equal(edges[0], np.array([-0.5, 0. , 0.5]))
def test_empty(self):
a, b = histogramdd([[], []], bins=([0, 1], [0, 1]))
assert_array_max_ulp(a, np.array([[ 0.]]))
a, b = np.histogramdd([[], [], []], bins=2)
assert_array_max_ulp(a, np.zeros((2, 2, 2)))
def test_bins_errors(self):
"""There are two ways to specify bins. Check for the right errors when
mixing those."""
x = np.arange(8).reshape(2, 4)
assert_raises(ValueError, np.histogramdd, x, bins=[-1, 2, 4, 5])
assert_raises(ValueError, np.histogramdd, x, bins=[1, 0.99, 1, 1])
assert_raises(ValueError, np.histogramdd, x, bins=[1, 1, 1, [1, 2, 2, 3]])
assert_raises(ValueError, np.histogramdd, x, bins=[1, 1, 1, [1, 2, 3, -3]])
assert_(np.histogramdd(x, bins=[1, 1, 1, [1, 2, 3, 4]]))
def test_inf_edges(self):
"""Test using +/-inf bin edges works. See #1788."""
olderr = np.seterr(invalid='ignore')
try:
x = np.arange(6).reshape(3, 2)
expected = np.array([[1, 0], [0, 1], [0, 1]])
h, e = np.histogramdd(x, bins=[3, [-np.inf, 2, 10]])
assert_allclose(h, expected)
h, e = np.histogramdd(x, bins=[3, np.array([-1, 2, np.inf])])
assert_allclose(h, expected)
h, e = np.histogramdd(x, bins=[3, [-np.inf, 3, np.inf]])
assert_allclose(h, expected)
finally:
np.seterr(**olderr)
class TestUnique(TestCase):
def test_simple(self):
x = np.array([4, 3, 2, 1, 1, 2, 3, 4, 0])
assert_(np.all(unique(x) == [0, 1, 2, 3, 4]))
assert_(unique(np.array([1, 1, 1, 1, 1])) == np.array([1]))
x = ['widget', 'ham', 'foo', 'bar', 'foo', 'ham']
assert_(np.all(unique(x) == ['bar', 'foo', 'ham', 'widget']))
x = np.array([5 + 6j, 1 + 1j, 1 + 10j, 10, 5 + 6j])
assert_(np.all(unique(x) == [1 + 1j, 1 + 10j, 5 + 6j, 10]))
class TestCheckFinite(TestCase):
def test_simple(self):
a = [1, 2, 3]
b = [1, 2, np.inf]
c = [1, 2, np.nan]
np.lib.asarray_chkfinite(a)
assert_raises(ValueError, np.lib.asarray_chkfinite, b)
assert_raises(ValueError, np.lib.asarray_chkfinite, c)
def test_dtype_order(self):
"""Regression test for missing dtype and order arguments"""
a = [1, 2, 3]
a = np.lib.asarray_chkfinite(a, order='F', dtype=np.float64)
assert_(a.dtype == np.float64)
class TestNaNFuncts(TestCase):
def setUp(self):
self.A = np.array([[[ np.nan, 0.01319214, 0.01620964],
[ 0.11704017, np.nan, 0.75157887],
[ 0.28333658, 0.1630199 , np.nan ]],
[[ 0.59541557, np.nan, 0.37910852],
[ np.nan, 0.87964135, np.nan ],
[ 0.70543747, np.nan, 0.34306596]],
[[ 0.72687499, 0.91084584, np.nan ],
[ 0.84386844, 0.38944762, 0.23913896],
[ np.nan, 0.37068164, 0.33850425]]])
def test_nansum(self):
assert_almost_equal(nansum(self.A), 8.0664079100000006)
assert_almost_equal(nansum(self.A, 0),
np.array([[ 1.32229056, 0.92403798, 0.39531816],
[ 0.96090861, 1.26908897, 0.99071783],
[ 0.98877405, 0.53370154, 0.68157021]]))
assert_almost_equal(nansum(self.A, 1),
np.array([[ 0.40037675, 0.17621204, 0.76778851],
[ 1.30085304, 0.87964135, 0.72217448],
[ 1.57074343, 1.6709751 , 0.57764321]]))
assert_almost_equal(nansum(self.A, 2),
np.array([[ 0.02940178, 0.86861904, 0.44635648],
[ 0.97452409, 0.87964135, 1.04850343],
[ 1.63772083, 1.47245502, 0.70918589]]))
def test_nanmin(self):
assert_almost_equal(nanmin(self.A), 0.01319214)
assert_almost_equal(nanmin(self.A, 0),
np.array([[ 0.59541557, 0.01319214, 0.01620964],
[ 0.11704017, 0.38944762, 0.23913896],
[ 0.28333658, 0.1630199 , 0.33850425]]))
assert_almost_equal(nanmin(self.A, 1),
np.array([[ 0.11704017, 0.01319214, 0.01620964],
[ 0.59541557, 0.87964135, 0.34306596],
[ 0.72687499, 0.37068164, 0.23913896]]))
assert_almost_equal(nanmin(self.A, 2),
np.array([[ 0.01319214, 0.11704017, 0.1630199 ],
[ 0.37910852, 0.87964135, 0.34306596],
[ 0.72687499, 0.23913896, 0.33850425]]))
assert_(np.isnan(nanmin([np.nan, np.nan])))
def test_nanargmin(self):
assert_almost_equal(nanargmin(self.A), 1)
assert_almost_equal(nanargmin(self.A, 0),
np.array([[1, 0, 0],
[0, 2, 2],
[0, 0, 2]]))
assert_almost_equal(nanargmin(self.A, 1),
np.array([[1, 0, 0],
[0, 1, 2],
[0, 2, 1]]))
assert_almost_equal(nanargmin(self.A, 2),
np.array([[1, 0, 1],
[2, 1, 2],
[0, 2, 2]]))
def test_nanmax(self):
assert_almost_equal(nanmax(self.A), 0.91084584000000002)
assert_almost_equal(nanmax(self.A, 0),
np.array([[ 0.72687499, 0.91084584, 0.37910852],
[ 0.84386844, 0.87964135, 0.75157887],
[ 0.70543747, 0.37068164, 0.34306596]]))
assert_almost_equal(nanmax(self.A, 1),
np.array([[ 0.28333658, 0.1630199 , 0.75157887],
[ 0.70543747, 0.87964135, 0.37910852],
[ 0.84386844, 0.91084584, 0.33850425]]))
assert_almost_equal(nanmax(self.A, 2),
np.array([[ 0.01620964, 0.75157887, 0.28333658],
[ 0.59541557, 0.87964135, 0.70543747],
[ 0.91084584, 0.84386844, 0.37068164]]))
assert_(np.isnan(nanmax([np.nan, np.nan])))
def test_nanmin_allnan_on_axis(self):
assert_array_equal(np.isnan(nanmin([[np.nan] * 2] * 3, axis=1)),
[True, True, True])
def test_nanmin_masked(self):
a = np.ma.fix_invalid([[2, 1, 3, np.nan], [5, 2, 3, np.nan]])
ctrl_mask = a._mask.copy()
test = np.nanmin(a, axis=1)
assert_equal(test, [1, 2])
assert_equal(a._mask, ctrl_mask)
assert_equal(np.isinf(a), np.zeros((2, 4), dtype=bool))
class TestNanFunctsIntTypes(TestCase):
int_types = (
np.int8, np.int16, np.int32, np.int64, np.uint8,
np.uint16, np.uint32, np.uint64)
def setUp(self, *args, **kwargs):
self.A = np.array([127, 39, 93, 87, 46])
def integer_arrays(self):
for dtype in self.int_types:
yield self.A.astype(dtype)
def test_nanmin(self):
min_value = min(self.A)
for A in self.integer_arrays():
assert_equal(nanmin(A), min_value)
def test_nanmax(self):
max_value = max(self.A)
for A in self.integer_arrays():
assert_equal(nanmax(A), max_value)
def test_nanargmin(self):
min_arg = np.argmin(self.A)
for A in self.integer_arrays():
assert_equal(nanargmin(A), min_arg)
def test_nanargmax(self):
max_arg = np.argmax(self.A)
for A in self.integer_arrays():
assert_equal(nanargmax(A), max_arg)
class TestCorrCoef(TestCase):
A = np.array([[ 0.15391142, 0.18045767, 0.14197213],
[ 0.70461506, 0.96474128, 0.27906989],
[ 0.9297531 , 0.32296769, 0.19267156]])
B = np.array([[ 0.10377691, 0.5417086 , 0.49807457],
[ 0.82872117, 0.77801674, 0.39226705],
[ 0.9314666 , 0.66800209, 0.03538394]])
res1 = np.array([[ 1. , 0.9379533 , -0.04931983],
[ 0.9379533 , 1. , 0.30007991],
[-0.04931983, 0.30007991, 1. ]])
res2 = np.array([[ 1. , 0.9379533 , -0.04931983,
0.30151751, 0.66318558, 0.51532523],
[ 0.9379533 , 1. , 0.30007991,
- 0.04781421, 0.88157256, 0.78052386],
[-0.04931983, 0.30007991, 1. ,
- 0.96717111, 0.71483595, 0.83053601],
[ 0.30151751, -0.04781421, -0.96717111,
1. , -0.51366032, -0.66173113],
[ 0.66318558, 0.88157256, 0.71483595,
- 0.51366032, 1. , 0.98317823],
[ 0.51532523, 0.78052386, 0.83053601,
- 0.66173113, 0.98317823, 1. ]])
def test_simple(self):
assert_almost_equal(corrcoef(self.A), self.res1)
assert_almost_equal(corrcoef(self.A, self.B), self.res2)
def test_ddof(self):
assert_almost_equal(corrcoef(self.A, ddof=-1), self.res1)
assert_almost_equal(corrcoef(self.A, self.B, ddof=-1), self.res2)
def test_empty(self):
assert_equal(corrcoef(np.array([])).size, 0)
assert_equal(corrcoef(np.array([]).reshape(0, 2)).shape, (0, 2))
class TestCov(TestCase):
def test_basic(self):
x = np.array([[0, 2], [1, 1], [2, 0]]).T
assert_allclose(np.cov(x), np.array([[ 1., -1.], [-1., 1.]]))
def test_empty(self):
assert_equal(cov(np.array([])).size, 0)
assert_equal(cov(np.array([]).reshape(0, 2)).shape, (0, 2))
class Test_i0(TestCase):
def test_simple(self):
assert_almost_equal(i0(0.5), np.array(1.0634833707413234))
A = np.array([ 0.49842636, 0.6969809 , 0.22011976, 0.0155549])
assert_almost_equal(i0(A),
np.array([ 1.06307822, 1.12518299, 1.01214991, 1.00006049]))
B = np.array([[ 0.827002 , 0.99959078],
[ 0.89694769, 0.39298162],
[ 0.37954418, 0.05206293],
[ 0.36465447, 0.72446427],
[ 0.48164949, 0.50324519]])
assert_almost_equal(i0(B),
np.array([[ 1.17843223, 1.26583466],
[ 1.21147086, 1.0389829 ],
[ 1.03633899, 1.00067775],
[ 1.03352052, 1.13557954],
[ 1.0588429 , 1.06432317]]))
class TestKaiser(TestCase):
def test_simple(self):
assert_almost_equal(kaiser(0, 1.0), np.array([]))
assert_(np.isfinite(kaiser(1, 1.0)))
assert_almost_equal(kaiser(2, 1.0), np.array([ 0.78984831, 0.78984831]))
assert_almost_equal(kaiser(5, 1.0),
np.array([ 0.78984831, 0.94503323, 1. ,
0.94503323, 0.78984831]))
assert_almost_equal(kaiser(5, 1.56789),
np.array([ 0.58285404, 0.88409679, 1. ,
0.88409679, 0.58285404]))
def test_int_beta(self):
kaiser(3, 4)
class TestMsort(TestCase):
def test_simple(self):
A = np.array([[ 0.44567325, 0.79115165, 0.5490053 ],
[ 0.36844147, 0.37325583, 0.96098397],
[ 0.64864341, 0.52929049, 0.39172155]])
assert_almost_equal(msort(A),
np.array([[ 0.36844147, 0.37325583, 0.39172155],
[ 0.44567325, 0.52929049, 0.5490053 ],
[ 0.64864341, 0.79115165, 0.96098397]]))
class TestMeshgrid(TestCase):
def test_simple(self):
[X, Y] = meshgrid([1, 2, 3], [4, 5, 6, 7])
assert_(np.all(X == np.array([[1, 2, 3],
[1, 2, 3],
[1, 2, 3],
[1, 2, 3]])))
assert_(np.all(Y == np.array([[4, 4, 4],
[5, 5, 5],
[6, 6, 6],
[7, 7, 7]])))
def test_single_input(self):
assert_raises(ValueError, meshgrid, np.arange(5))
def test_indexing(self):
x = [1, 2, 3]
y = [4, 5, 6, 7]
[X, Y] = meshgrid(x, y, indexing='ij')
assert_(np.all(X == np.array([[1, 1, 1, 1],
[2, 2, 2, 2],
[3, 3, 3, 3]])))
assert_(np.all(Y == np.array([[4, 5, 6, 7],
[4, 5, 6, 7],
[4, 5, 6, 7]])))
# Test expected shapes:
z = [8, 9]
assert_(meshgrid(x, y)[0].shape == (4, 3))
assert_(meshgrid(x, y, indexing='ij')[0].shape == (3, 4))
assert_(meshgrid(x, y, z)[0].shape == (4, 3, 2))
assert_(meshgrid(x, y, z, indexing='ij')[0].shape == (3, 4, 2))
assert_raises(ValueError, meshgrid, x, y, indexing='notvalid')
def test_sparse(self):
[X, Y] = meshgrid([1, 2, 3], [4, 5, 6, 7], sparse=True)
assert_(np.all(X == np.array([[1, 2, 3]])))
assert_(np.all(Y == np.array([[4], [5], [6], [7]])))
class TestPiecewise(TestCase):
def test_simple(self):
# Condition is single bool list
x = piecewise([0, 0], [True, False], [1])
assert_array_equal(x, [1, 0])
# List of conditions: single bool list
x = piecewise([0, 0], [[True, False]], [1])
assert_array_equal(x, [1, 0])
# Conditions is single bool array
x = piecewise([0, 0], np.array([True, False]), [1])
assert_array_equal(x, [1, 0])
# Condition is single int array
x = piecewise([0, 0], np.array([1, 0]), [1])
assert_array_equal(x, [1, 0])
# List of conditions: int array
x = piecewise([0, 0], [np.array([1, 0])], [1])
assert_array_equal(x, [1, 0])
x = piecewise([0, 0], [[False, True]], [lambda x:-1])
assert_array_equal(x, [0, -1])
x = piecewise([1, 2], [[True, False], [False, True]], [3, 4])
assert_array_equal(x, [3, 4])
def test_default(self):
# No value specified for x[1], should be 0
x = piecewise([1, 2], [True, False], [2])
assert_array_equal(x, [2, 0])
# Should set x[1] to 3
x = piecewise([1, 2], [True, False], [2, 3])
assert_array_equal(x, [2, 3])
def test_0d(self):
x = np.array(3)
y = piecewise(x, x > 3, [4, 0])
assert_(y.ndim == 0)
assert_(y == 0)
class TestBincount(TestCase):
def test_simple(self):
y = np.bincount(np.arange(4))
assert_array_equal(y, np.ones(4))
def test_simple2(self):
y = np.bincount(np.array([1, 5, 2, 4, 1]))
assert_array_equal(y, np.array([0, 2, 1, 0, 1, 1]))
def test_simple_weight(self):
x = np.arange(4)
w = np.array([0.2, 0.3, 0.5, 0.1])
y = np.bincount(x, w)
assert_array_equal(y, w)
def test_simple_weight2(self):
x = np.array([1, 2, 4, 5, 2])
w = np.array([0.2, 0.3, 0.5, 0.1, 0.2])
y = np.bincount(x, w)
assert_array_equal(y, np.array([0, 0.2, 0.5, 0, 0.5, 0.1]))
def test_with_minlength(self):
x = np.array([0, 1, 0, 1, 1])
y = np.bincount(x, minlength=3)
assert_array_equal(y, np.array([2, 3, 0]))
def test_with_minlength_smaller_than_maxvalue(self):
x = np.array([0, 1, 1, 2, 2, 3, 3])
y = np.bincount(x, minlength=2)
assert_array_equal(y, np.array([1, 2, 2, 2]))
def test_with_minlength_and_weights(self):
x = np.array([1, 2, 4, 5, 2])
w = np.array([0.2, 0.3, 0.5, 0.1, 0.2])
y = np.bincount(x, w, 8)
assert_array_equal(y, np.array([0, 0.2, 0.5, 0, 0.5, 0.1, 0, 0]))
def test_empty(self):
x = np.array([], dtype=int)
y = np.bincount(x)
assert_array_equal(x, y)
def test_empty_with_minlength(self):
x = np.array([], dtype=int)
y = np.bincount(x, minlength=5)
assert_array_equal(y, np.zeros(5, dtype=int))
class TestInterp(TestCase):
def test_exceptions(self):
assert_raises(ValueError, interp, 0, [], [])
assert_raises(ValueError, interp, 0, [0], [1, 2])
def test_basic(self):
x = np.linspace(0, 1, 5)
y = np.linspace(0, 1, 5)
x0 = np.linspace(0, 1, 50)
assert_almost_equal(np.interp(x0, x, y), x0)
def test_right_left_behavior(self):
assert_equal(interp([-1, 0, 1], [0], [1]), [1, 1, 1])
assert_equal(interp([-1, 0, 1], [0], [1], left=0), [0, 1, 1])
assert_equal(interp([-1, 0, 1], [0], [1], right=0), [1, 1, 0])
assert_equal(interp([-1, 0, 1], [0], [1], left=0, right=0), [0, 1, 0])
def test_scalar_interpolation_point(self):
x = np.linspace(0, 1, 5)
y = np.linspace(0, 1, 5)
x0 = 0
assert_almost_equal(np.interp(x0, x, y), x0)
x0 = .3
assert_almost_equal(np.interp(x0, x, y), x0)
x0 = np.float32(.3)
assert_almost_equal(np.interp(x0, x, y), x0)
x0 = np.float64(.3)
assert_almost_equal(np.interp(x0, x, y), x0)
def test_zero_dimensional_interpolation_point(self):
x = np.linspace(0, 1, 5)
y = np.linspace(0, 1, 5)
x0 = np.array(.3)
assert_almost_equal(np.interp(x0, x, y), x0)
x0 = np.array(.3, dtype=object)
assert_almost_equal(np.interp(x0, x, y), .3)
def test_if_len_x_is_small(self):
xp = np.arange(0, 1000, 0.0001)
fp = np.sin(xp)
assert_almost_equal(np.interp(np.pi, xp, fp), 0.0)
def compare_results(res, desired):
for i in range(len(desired)):
assert_array_equal(res[i], desired[i])
def test_percentile_list():
assert_equal(np.percentile([1, 2, 3], 0), 1)
def test_percentile_out():
x = np.array([1, 2, 3])
y = np.zeros((3,))
p = (1, 2, 3)
np.percentile(x, p, out=y)
assert_equal(y, np.percentile(x, p))
x = np.array([[1, 2, 3],
[4, 5, 6]])
y = np.zeros((3, 3))
np.percentile(x, p, axis=0, out=y)
assert_equal(y, np.percentile(x, p, axis=0))
y = np.zeros((3, 2))
np.percentile(x, p, axis=1, out=y)
assert_equal(y, np.percentile(x, p, axis=1))
def test_median():
a0 = np.array(1)
a1 = np.arange(2)
a2 = np.arange(6).reshape(2, 3)
assert_allclose(np.median(a0), 1)
assert_allclose(np.median(a1), 0.5)
assert_allclose(np.median(a2), 2.5)
assert_allclose(np.median(a2, axis=0), [1.5, 2.5, 3.5])
assert_allclose(np.median(a2, axis=1), [1, 4])
class TestAdd_newdoc_ufunc(TestCase):
def test_ufunc_arg(self):
assert_raises(TypeError, add_newdoc_ufunc, 2, "blah")
assert_raises(ValueError, add_newdoc_ufunc, np.add, "blah")
def test_string_arg(self):
assert_raises(TypeError, add_newdoc_ufunc, np.add, 3)
if __name__ == "__main__":
run_module_suite()
| dwf/numpy | numpy/lib/tests/test_function_base.py | Python | bsd-3-clause | 51,379 |
import demistomock as demisto
import pytest
import ShowCampaignLastIncidentOccurred
INCIDENT_IDS = [{"id": '1'}, {"id": '2'}, {"id": '3'}]
MULTIPLE_INCIDENT_CREATED = [
{
'Contents': '[{"created": "2021-07-27T15:09:35.269187268Z"}, {"created": "2021-07-28T15:06:33.100736309Z"}, \
{"created": "2021-07-29T14:42:38.945010982Z"}, {"created": "2021-07-29T14:09:22.708160443Z"}]',
'Type': 3
}
]
ONE_INCIDENT_CREATED = [
{
'Contents': '[{"created": "2021-07-28T15:06:33.100736309Z"}]',
'Type': 3
}
]
@pytest.mark.parametrize('incident_created, expected_result, pixels', [
(MULTIPLE_INCIDENT_CREATED, 'July 29, 2021', '24'),
(ONE_INCIDENT_CREATED, 'July 28, 2021', '24'),
([{'Contents': '[]', 'Type': 3}], 'No last incident occurred found.', '20')
])
def test_show_last_incident_occurred(mocker, incident_created, expected_result, pixels):
"""
Given:
- Campaign incidents.
When:
- Running the show last incident occurred script main function.
Then:
    - Ensure the correct last incident occurred date appears in the HTML format.
"""
mocker.patch.object(demisto, 'get', return_value=INCIDENT_IDS)
mocker.patch.object(demisto, 'executeCommand', return_value=incident_created)
mocker.patch.object(demisto, 'results')
ShowCampaignLastIncidentOccurred.main()
res = demisto.results.call_args[0][0]['Contents']
expected_result = f"<div style='text-align:center; font-size:17px; padding: 15px;'>Last Incident Occurred</br> " \
f"<div style='font-size:{pixels}px;'> {expected_result} </div></div>"
assert expected_result == res
| VirusTotal/content | Packs/Campaign/Scripts/ShowCampaignLastIncidentOccurred/ShowCampaignLastIncidentOccurred_test.py | Python | mit | 1,691 |
# Copyright 2014 IBM Corp.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from oslo_log import log as logging
import taskflow.engines
from taskflow.patterns import linear_flow
from cinder import exception
from cinder import flow_utils
from cinder.i18n import _, _LE
from cinder.volume.flows.api import create_volume as create_api
from cinder.volume.flows import common as flow_common
from cinder.volume.flows.manager import create_volume as create_mgr
LOG = logging.getLogger(__name__)
ACTION = 'volume:manage_existing'
class PrepareForQuotaReservationTask(flow_utils.CinderTask):
"""Gets the volume size from the driver."""
default_provides = set(['size', 'volume_type_id', 'volume_properties',
'volume_spec'])
def __init__(self, db, driver):
super(PrepareForQuotaReservationTask, self).__init__(addons=[ACTION])
self.db = db
self.driver = driver
def execute(self, context, volume_ref, manage_existing_ref):
volume_id = volume_ref.id
if not self.driver.initialized:
driver_name = self.driver.__class__.__name__
LOG.error(_LE("Unable to manage existing volume. "
"Volume driver %s not initialized.") % driver_name)
flow_common.error_out_volume(context, self.db, volume_id,
reason=_("Volume driver %s "
"not initialized.") %
driver_name)
raise exception.DriverNotInitialized()
size = self.driver.manage_existing_get_size(volume_ref,
manage_existing_ref)
return {'size': size,
'volume_type_id': volume_ref.volume_type_id,
'volume_properties': volume_ref,
'volume_spec': {'status': volume_ref.status,
'volume_name': volume_ref.name,
'volume_id': volume_id}}
def revert(self, context, result, flow_failures, volume_ref, **kwargs):
volume_id = volume_ref.id
reason = _('Volume manage failed.')
flow_common.error_out_volume(context, self.db,
volume_id, reason=reason)
LOG.error(_LE("Volume %s: manage failed."), volume_id)
class ManageExistingTask(flow_utils.CinderTask):
"""Brings an existing volume under Cinder management."""
default_provides = set(['volume'])
def __init__(self, db, driver):
super(ManageExistingTask, self).__init__(addons=[ACTION])
self.db = db
self.driver = driver
def execute(self, context, volume_ref, manage_existing_ref, size):
model_update = self.driver.manage_existing(volume_ref,
manage_existing_ref)
if not model_update:
model_update = {}
model_update.update({'size': size})
try:
volume_ref.update(model_update)
volume_ref.save()
except exception.CinderException:
LOG.exception(_LE("Failed updating model of volume %(volume_id)s"
" with creation provided model %(model)s") %
{'volume_id': volume_ref['id'],
'model': model_update})
raise
return {'volume': volume_ref}
def get_flow(context, db, driver, host, volume, ref):
"""Constructs and returns the manager entrypoint flow."""
flow_name = ACTION.replace(":", "_") + "_manager"
volume_flow = linear_flow.Flow(flow_name)
# This injects the initial starting flow values into the workflow so that
# the dependency order of the tasks provides/requires can be correctly
# determined.
create_what = {
'context': context,
'volume_ref': volume,
'manage_existing_ref': ref,
'optional_args': {'is_quota_committed': False}
}
volume_flow.add(create_mgr.NotifyVolumeActionTask(db,
"manage_existing.start"),
PrepareForQuotaReservationTask(db, driver),
create_api.QuotaReserveTask(),
ManageExistingTask(db, driver),
create_api.QuotaCommitTask(),
create_mgr.CreateVolumeOnFinishTask(db,
"manage_existing.end"))
# Now load (but do not run) the flow using the provided initial data.
return taskflow.engines.load(volume_flow, store=create_what)
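# Illustrative sketch (not part of the original module): callers are expected
# to load the flow and run it through the returned taskflow engine; the
# listener wrapping shown here is an assumption about the calling manager.
#
#     flow_engine = get_flow(context, db, driver, host, volume, ref)
#     with flow_utils.DynamicLogListener(flow_engine, logger=LOG):
#         flow_engine.run()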
| bswartz/cinder | cinder/volume/flows/manager/manage_existing.py | Python | apache-2.0 | 5,157 |
# coding: utf-8
from .db import db
class Noticia(db.Document):
titulo = db.StringField(required=True)
texto = db.StringField(required=True)
imagem = db.StringField()
| fabiomac/apps | wtf/wtf/models.py | Python | mit | 180 |
# -*- coding: utf-8 -*-
"""Functions for manipulating external links or querying third-party sites."""
#
# (C) Pywikibot team, 2013
#
# Distributed under the terms of the MIT license.
#
from __future__ import unicode_literals
__version__ = '$Id: 50ff23e73dd76adfd2f37025f676849df67243a0 $'
import sys
if sys.version_info[0] > 2:
from urllib.parse import urlencode
else:
from urllib import urlencode
from pywikibot.comms import http
def getInternetArchiveURL(url, timestamp=None):
"""Return archived URL by Internet Archive.
See [[:mw:Archived Pages]] and https://archive.org/help/wayback_api.php
for more details.
@param url: url to search an archived version for
@param timestamp: requested archive date. The version closest to that
moment is returned. Format: YYYYMMDDhhmmss or part thereof.
"""
import json
uri = u'https://archive.org/wayback/available?'
query = {'url': url}
if timestamp is not None:
query['timestamp'] = timestamp
uri = uri + urlencode(query)
jsontext = http.fetch(uri).content
if "closest" in jsontext:
data = json.loads(jsontext)
return data['archived_snapshots']['closest']['url']
else:
return None
def getWebCitationURL(url, timestamp=None):
"""Return archived URL by Web Citation.
See http://www.webcitation.org/doc/WebCiteBestPracticesGuide.pdf
for more details
@param url: url to search an archived version for
@param timestamp: requested archive date. The version closest to that
moment is returned. Format: YYYYMMDDhhmmss or part thereof.
"""
import xml.etree.ElementTree as ET
uri = u'http://www.webcitation.org/query?'
query = {'returnxml': 'true',
'url': url}
if timestamp is not None:
query['date'] = timestamp
uri = uri + urlencode(query)
xmltext = http.fetch(uri).content
if "success" in xmltext:
data = ET.fromstring(xmltext)
return data.find('.//webcite_url').text
else:
return None
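# Illustrative sketch (not part of the original module): both helpers take a
# URL plus an optional timestamp and return an archived URL, or None when no
# snapshot is available.
#
#     archive_url = getInternetArchiveURL('https://www.python.org/', timestamp='20130101')
#     webcite_url = getWebCitationURL('https://www.python.org/')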
| hperala/kontuwikibot | pywikibot/weblib.py | Python | mit | 2,055 |
import copy
import datetime
from tornado.escape import xhtml_escape
class EscapedDict:
"""A wrapper for a dict that HTML-escapes values as you ask for them"""
def __init__(self, doc):
self.doc = doc
def __getitem__(self, key):
item = self.doc[key]
if isinstance(item, str):
return xhtml_escape(self.doc[key])
else:
return self.doc[key]
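# Illustrative sketch (not part of the original module): values are escaped
# lazily on lookup, and only string values are touched.
#
#     doc = EscapedDict({'title': '<b>Hi & bye</b>', 'count': 3})
#     doc['title']   # '&lt;b&gt;Hi &amp; bye&lt;/b&gt;'
#     doc['count']   # 3 (non-string values pass through unchanged)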
def get_int_arg(request, field, default=None):
"""Try to get an integer value from a query arg."""
try:
val = int(request.arguments.get(field, [default])[0])
except (ValueError, TypeError):
val = default
return val
def get_str_arg(request, field, default=None):
"""Try to get a string value from a query arg."""
return request.arguments.get(field, [default])[0]
def sqlalchemy_to_dict(result, table):
row_item = {}
for col in table.columns.keys():
row_item[col] = getattr(result, col)
return row_item
def pretty_date(time=False):
"""
Get a datetime object or a int() Epoch timestamp and return a
pretty string like 'an hour ago', 'Yesterday', '3 months ago',
'just now', etc
"""
now = datetime.datetime.now()
if type(time) is int:
diff = now - datetime.datetime.fromtimestamp(time)
elif isinstance(time,datetime.datetime):
diff = now - time
elif not time:
diff = now - now
else:
raise ValueError('need to provide either int, datetime, or None for "time" arg')
second_diff = diff.seconds
day_diff = diff.days
if day_diff < 0:
return ''
if day_diff == 0:
if second_diff < 10:
return "just now"
if second_diff < 60:
return str(second_diff) + " seconds ago"
if second_diff < 120:
return "a minute ago"
if second_diff < 3600:
return str( second_diff / 60 ) + " minutes ago"
if second_diff < 7200:
return "an hour ago"
if second_diff < 86400:
return str( second_diff / 3600 ) + " hours ago"
if day_diff == 1:
return "yesterday"
if day_diff < 7:
return str(day_diff) + " days ago"
if day_diff < 14:
return "1 week ago"
if day_diff < 31:
return str(day_diff/7) + " weeks ago"
if day_diff < 60:
return "1 month ago"
if day_diff < 365:
return str(day_diff/30) + " months ago"
if day_diff < 365 * 2:
remainder = datetime.datetime.now() - (diff - datetime.timedelta(days=365))
diff = now - remainder
if diff.days > 31:
return "1 year, " + pretty_date(remainder)
else:
return "1 year ago"
return str(day_diff/365) + " years ago"
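# Illustrative sketch (not part of the original module): approximate results,
# assuming the calls are made immediately after computing the reference times.
#
#     pretty_date(datetime.datetime.now())                                 # 'just now'
#     pretty_date(datetime.datetime.now() - datetime.timedelta(hours=3))   # '3 hours ago'
#     pretty_date(datetime.datetime.now() - datetime.timedelta(days=8))    # '1 week ago'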
def get_servlet_urlspec(servlet):
try:
return (servlet.regexp, servlet)
except AttributeError:
name = servlet.__name__
regexp = r"/%s" % name[:-len("Servlet")].lower()
return (regexp, servlet)
def tags_str_as_set(tags_str):
"""Return comma separated tags list string as a set, stripping out
surrounding white space if necessary.
"""
return set(filter(lambda t: t != '', (t.strip() for t in tags_str.split(','))))
def tags_contain(tags_str, contains_list):
"""Predicate to check if a tags string list contains any of the
tags in contains_list.
Args:
tags_str - comma-separated string of request tags.
contains_list - list of request tag strings.
"""
return len(tags_str_as_set(tags_str) & set(contains_list)) > 0
def add_to_tags_str(current_tags, tags):
"""Args:
current_tags - A comma-separated string comprising a list of tags
from the current request.
tags - A comma-separated string comprising a list of tags which
should be added to current_tags.
Returns: a comma-separated string which is the union of the sets
represented by current_tags and tags, sorted alphabetically.
"""
return ','.join(
sorted(tags_str_as_set(current_tags) | tags_str_as_set(tags))
)
def del_from_tags_str(current_tags, tags):
"""Args:
current_tags - A comma-separated string comprising a list of tags
from the current request.
tags - A comma-separated string comprising a list of tags which
should be removed from current_tags.
    Returns: a comma-separated string which is the difference of
current_tags from tags, sorted alphabetically.
"""
return ','.join(
sorted(tags_str_as_set(current_tags).difference(tags_str_as_set(tags)))
)
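# Illustrative sketch (not part of the original module): the tag helpers work
# on comma-separated strings, normalizing whitespace and ordering.
#
#     tags_str_as_set('urgent, hotfix ,')           # {'urgent', 'hotfix'}
#     tags_contain('urgent,hotfix', ['hotfix'])     # True
#     add_to_tags_str('urgent', 'hotfix')           # 'hotfix,urgent'
#     del_from_tags_str('hotfix,urgent', 'urgent')  # 'hotfix'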
def request_to_jsonable(request):
"""Get a request object and return a dict with desired key, value
pairs that are to be encoded to json format
"""
return dict(
(k, request[k]) for k in (
'id',
'user',
'watchers',
'state',
'repo',
'branch',
'revision',
'tags',
'created',
'modified',
'title',
'comments',
'reviewid',
'description'
)
)
def push_to_jsonable(push):
"""Get a push object and return a dict with desired key, value
pairs that are to be encoded to json format
"""
return dict(
(k, push[k]) for k in (
'id',
'title',
'user',
'branch',
'stageenv',
'state',
'created',
'modified',
'pushtype',
'extra_pings'
)
)
def dict_copy_keys(to_dict, from_dict):
"""Copy the values from from_dict to to_dict but only the keys
that are present in to_dict
"""
for key, value in to_dict.items():
if type(value) is dict:
dict_copy_keys(value, from_dict[key])
else:
to_dict[key] = copy.deepcopy(from_dict[key])
| asottile/pushmanager | pushmanager/core/util.py | Python | apache-2.0 | 5,859 |
from struct import unpack, calcsize, Struct
from StataTypes import MissingValue, Variable
class Reader(object):
""".dta file reader"""
_header = {}
_data_location = 0
_col_sizes = ()
_has_string_data = False
_missing_values = False
    TYPE_MAP = range(251) + list('bhlfd')
    MISSING_VALUES = {
        'b': (-127, 100),
        'h': (-32767, 32740),
        'l': (-2147483647, 2147483620),
        'f': (-1.701e+38, +1.701e+38),
        'd': (-1.798e+308, +8.988e+307),
    }
def __init__(self, file_object, missing_values=False):
"""Creates a new parser from a file object.
If missing_values, parse missing values and return as a MissingValue
object (instead of None)."""
self._missing_values = missing_values
self._parse_header(file_object)
def file_headers(self):
"""Returns all .dta file headers."""
return self._header
def file_format(self):
"""Returns the file format.
Format 113: Stata 9
Format 114: Stata 10"""
return self._header['ds_format']
def file_label(self):
"""Returns the dataset's label."""
return self._header['data_label']
def file_timestamp(self):
"""Returns the date and time Stata recorded on last file save."""
return self._header['time_stamp']
def value_labels(self):
return self._header["vallabs"]
def variables(self):
"""Returns a list of the dataset's PyDTA.Variables."""
return map(Variable, zip(range(self._header['nvar']),
self._header['typlist'], self._header['varlist'], self._header['srtlist'],
self._header['fmtlist'], self._header['lbllist'], self._header['vlblist']))
def dataset(self, as_dict=False):
"""Returns a Python generator object for iterating over the dataset.
Each observation is returned as a list unless as_dict is set.
Observations with a MissingValue(s) are not filtered and should be
        handled by your application."""
try:
self._file.seek(self._data_location)
except Exception:
pass
if as_dict:
vars = map(str, self.variables())
for i in range(len(self)):
yield dict(zip(vars, self._next()))
else:
for i in range(self._header['nobs']):
yield self._next()
### Python special methods
def __len__(self):
"""Return the number of observations in the dataset.
This value is taken directly from the header and includes observations
with missing values."""
return self._header['nobs']
def __getitem__(self, k):
"""Seek to an observation indexed k in the file and return it, ordered
by Stata's output to the .dta file.
k is zero-indexed. Prefer using R.data() for performance."""
if not (type(k) is int or type(k) is long) or k < 0 or k > len(self)-1:
raise IndexError(k)
loc = self._data_location + sum(self._col_size()) * k
if self._file.tell() != loc:
self._file.seek(loc)
return self._next()
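    # Illustrative sketch (not part of the original class): a typical read
    # loop, assuming 'example.dta' is a Stata 9/10 file on disk.
    #
    #     with open('example.dta', 'rb') as f:
    #         reader = Reader(f)
    #         names = map(str, reader.variables())
    #         for row in reader.dataset(as_dict=True):
    #             pass  # row maps variable names to cell values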
### PyDTA private methods
def _null_terminate(self, s):
try:
return s.lstrip('\x00')[:s.index('\x00')]
except Exception:
return s
def _parse_header(self, file_object):
self._file = file_object
# parse headers
self._header['ds_format'] = unpack('b', self._file.read(1))[0]
byteorder = self._header['byteorder'] = unpack('b', self._file.read(1))[0]==0x1 and '>' or '<'
self._header['filetype'] = unpack('b', self._file.read(1))[0]
self._file.read(1)
nvar = self._header['nvar'] = unpack(byteorder+'h', self._file.read(2))[0]
if self._header['ds_format'] < 114:
self._header['nobs'] = unpack(byteorder+'i', self._file.read(4))[0]
else:
self._header['nobs'] = unpack(byteorder+'i', self._file.read(4))[0]
self._header['data_label'] = self._null_terminate(self._file.read(81))
self._header['time_stamp'] = self._null_terminate(self._file.read(18))
# parse descriptors
self._header['typlist'] = [self.TYPE_MAP[ord(self._file.read(1))] for i in range(nvar)]
self._header['varlist'] = [self._null_terminate(self._file.read(33)) for i in range(nvar)]
self._header['srtlist'] = unpack(byteorder+('h'*(nvar+1)), self._file.read(2*(nvar+1)))[:-1]
if self._header['ds_format'] <= 113:
self._header['fmtlist'] = [self._null_terminate(self._file.read(12)) for i in range(nvar)]
else:
self._header['fmtlist'] = [self._null_terminate(self._file.read(49)) for i in range(nvar)]
self._header['lbllist'] = [self._null_terminate(self._file.read(33)) for i in range(nvar)]
self._header['vlblist'] = [self._null_terminate(self._file.read(81)) for i in range(nvar)]
# ignore expansion fields
while True:
data_type = unpack(byteorder+'b', self._file.read(1))[0]
data_len = unpack(byteorder+'i', self._file.read(4))[0]
if data_type == 0:
break
self._file.read(data_len)
# other state vars
self._data_location = self._file.tell()
self._has_string_data = len(filter(lambda x: type(x) is int, self._header['typlist'])) > 0
self._col_size()
# create rowunpacker
typlist = self._header['typlist']
frmtlist = [t if type(t)!=int else bytes(t)+"s" for t in typlist]
frmt = "".join(frmtlist)
frmt = self._header['byteorder'] + frmt
self._rowstruct = Struct(frmt)
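        # e.g. a typlist of ['b', 5, 'd'] (byte, 5-char string, double) on a
        # little-endian file yields the struct format '<b5sd'.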
# offset to value labels
byteoffset = self._rowstruct.size * self._header["nobs"]
self._file.seek(byteoffset, 1)
###############################
# value labels
# taken straight from stata_dta...
class MissingValue():
"""A class to mimic some of the properties of Stata's missing values.
The class is intended for mimicking only the 27 regular missing
values ., .a, .b, .c, etc.
Users wanting MissingValue instances should access members of
MISSING_VALS rather than create new instances.
"""
def __init__(self, index):
"""Users wanting MissingValue instances should access members of
MISSING_VALS rather than create new instances.
"""
self.value = float.fromhex(
"".join(('0x1.0', hex(index)[2:].zfill(2), 'p+1023'))
)
self.name = "." if index == 0 else "." + chr(index + 96)
self.index = index
def __abs__(self):
return self
def __add__(self, other):
return MISSING
def __bool__(self):
return True
def __divmod__(self, other):
return MISSING, MISSING
def __eq__(self, other):
other_val = other.value if isinstance(other, MissingValue) else other
return self.value == other_val
def __floordiv__(self, other):
return MISSING
def __ge__(self, other):
other_val = other.value if isinstance(other, MissingValue) else other
return self.value >= other_val
def __gt__(self, other):
other_val = other.value if isinstance(other, MissingValue) else other
return self.value > other_val
def __hash__(self):
return self.value.__hash__()
def __le__(self, other):
other_val = other.value if isinstance(other, MissingValue) else other
return self.value <= other_val
def __lt__(self, other):
other_val = other.value if isinstance(other, MissingValue) else other
return self.value < other_val
def __mod__(self, other):
return MISSING
def __mul__(self, other):
return MISSING
def __ne__(self, other):
other_val = other.value if isinstance(other, MissingValue) else other
return self.value != other_val
def __neg__(self):
return MISSING
def __pos__(self):
return MISSING
def __pow__(self, other):
return MISSING
def __radd__(self, other):
return MISSING
def __rdivmod__(self, other):
return MISSING, MISSING
def __repr__(self):
return self.name
def __rfloordiv__(self, other):
return MISSING
def __rmod__(self, other):
return MISSING
def __rmul__(self, other):
return MISSING
def __round__(self, ndigits=None):
return self
def __rpow__(self, other):
return MISSING
def __rsub__(self, other):
return MISSING
def __rtruediv__(self, other):
return MISSING
def __sub__(self, other):
return MISSING
def __str__(self):
return self.name
def __truediv__(self, other):
return MISSING
MISSING_VALS = tuple(MissingValue(i) for i in range(27))
missing_above = {251: 100, 252: 32740, 253: 2147483620,
254: float.fromhex('0x1.fffffep+126'),
255: float.fromhex('0x1.fffffffffffffp+1022')}
# decimal numbers given in -help dta- for float and double
# are approximations: 'f': 1.701e38, 'd': 8.988e307
type_dict = {251: ['b',1], 252: ['h',2], 253: ['l',4],
254: ['f',4], 255: ['d',8]}
def get_byte_str(str_len):
s = unpack(str(str_len) + 's', self._file.read(str_len))[0]
return s.partition(b'\0')[0].decode('iso-8859-1')
def missing_object(miss_val, st_type):
if st_type == 251: # byte
value = MISSING_VALS[miss_val - 101]
elif st_type == 252: # int
value = MISSING_VALS[miss_val - 32741]
elif st_type == 253: # long
value = MISSING_VALS[miss_val - 2147483621]
elif st_type == 254: # float
value = MISSING_VALS[int(miss_val.hex()[5:7], 16)]
elif st_type == 255: # double
value = MISSING_VALS[int(miss_val.hex()[5:7], 16)]
return value
def get_var_val(st_type):
if st_type <= 244:
return get_byte_str(st_type)
else:
fmt, nbytes = type_dict[st_type]
val = unpack(byteorder+fmt, self._file.read(nbytes))[0]
return (val if val <= missing_above[st_type]
else missing_object(val, st_type))
def parse_value_label_table():
"""helper function for reading dta files"""
nentries = unpack(byteorder + 'l', self._file.read(4))[0]
txtlen = unpack(byteorder + 'l', self._file.read(4))[0]
off = []
val = []
txt = []
for i in range(nentries):
off.append(unpack(byteorder+'l',self._file.read(4))[0])
for i in range(nentries):
val.append(unpack(byteorder+'l',self._file.read(4))[0])
txt_block = unpack(str(txtlen) + "s", self._file.read(txtlen))
txt = [t.decode('iso-8859-1')
for b in txt_block for t in b.split(b'\0')]
# put (off, val) pairs in same order as txt
sorter = list(zip(off, val))
sorter.sort()
# dict of val[i]:txt[i]
table = {sorter[i][1]: txt[i] for i in range(len(sorter))}
return table
value_labels = {}
while True:
try:
self._file.seek(4,1) # table length
labname = get_byte_str(33)
self._file.seek(3,1) # padding
vl_table = parse_value_label_table()
value_labels[labname] = vl_table
except:
break
self._header['vallabs'] = value_labels
def _calcsize(self, fmt):
return type(fmt) is int and fmt or calcsize(self._header['byteorder']+fmt)
def _col_size(self, k = None):
"""Calculate size of a data record."""
if len(self._col_sizes) == 0:
self._col_sizes = map(lambda x: self._calcsize(x), self._header['typlist'])
if k == None:
return self._col_sizes
else:
return self._col_sizes[k]
def _unpack(self, fmt, byt):
d = unpack(self._header['byteorder']+fmt, byt)[0]
if fmt[-1] in self.MISSING_VALUES:
nmin, nmax = self.MISSING_VALUES[fmt[-1]]
if d < nmin or d > nmax:
if self._missing_values:
return MissingValue(nmax, d)
else:
return None
return d
def _next(self):
# what about nullterminate on strings?
row = self._rowstruct.unpack(self._file.read(self._rowstruct.size))
# turn missing values into MissingValue or None
        # TODO: this step alone can blow the read time up from 1 sec to 26 sec,
        # so there is room for optimization here...
typlist = self._header["typlist"]
valtyps = zip(row,typlist)
def missingfilter():
if self._missing_values:
for val,typ in valtyps:
if typ in self.MISSING_VALUES:
nmin, nmax = self.MISSING_VALUES[typ]
if not nmin <= val <= nmax:
yield MissingValue(nmax, val) # only difference
else:
yield val
else:
yield val
else:
for val,typ in valtyps:
if typ in self.MISSING_VALUES:
nmin, nmax = self.MISSING_VALUES[typ]
if not nmin <= val <= nmax:
yield None # only difference
else:
yield val
else:
yield val
row = list(missingfilter())
return row
## if self._has_string_data:
## data = [None]*self._header['nvar']
## for i in range(len(data)):
## if type(typlist[i]) is int:
## data[i] = self._null_terminate(self._file.read(typlist[i]))
## else:
## data[i] = self._unpack(typlist[i], self._file.read(self._col_size(i)))
## return data
## else:
## return map(lambda i: self._unpack(typlist[i], self._file.read(self._col_size(i))), range(self._header['nvar']))
| karimbahgat/PythonGis | pythongis/vector/fileformats/thirdparty/PyDTA/StataTools.py | Python | mit | 15,846 |
#!/usr/bin/env python3
"""
Some automation for Astrolords.
"""
import sys
import time
import win32api
import win32con
from PIL import ImageGrab
from pywinauto import application, mouse
cords = (1050, 585)
x2_cords = (1050, 585)
x5_cords = (1050, 640)
def run():
path_to_astro = r'C:\Program Files (x86)\Astro Lords\Astrolords.exe'
app = application.Application()
try:
app.connect(path=path_to_astro, title="Astro Lords")
sep = '-' * 30
print(sep)
print('Connected to Astrolords.')
print(sep)
return app
except application.ProcessNotFoundError:
print('Can\'t connect to Astrolords :(')
return False
def core(app):
app.AstroLords.set_focus()
app.AstroLords.draw_outline()
app.AstroLords.move_window(x=200, y=200)
mouse.move(coords=cords)
get_box = (906, 641, 910, 644)
# color_1 = [x for x in range(4, 5)]
# color_2 = [x for x in range(150, 180)]
# color_3 = [x for x in range(20, 40)]
    # Sample the target pixel a few times and stop early once the reading
    # stabilizes (two consecutive samples with the same red channel).
    tmp = []
    for i in range(10):
        image = ImageGrab.grab(get_box)
        pre_color = image.getpixel((3, 1))
        tmp.append(pre_color[0])
        if len(tmp) > 2:
            if pre_color[0] == tmp[-2]:
                break
print('TARGET RGB =', pre_color[0], pre_color[1], pre_color[2])
while True:
image = ImageGrab.grab(get_box)
color = image.getpixel((3, 1))
if color[0] != pre_color[0]:
if color[1] != pre_color[1]:
if color[2] != pre_color[2]:
click(cords[0], cords[1])
print('NOT RGB =', color[0], color[1], color[2])
return True
break
def click(x, y):
win32api.SetCursorPos((x, y))
win32api.mouse_event(win32con.MOUSEEVENTF_LEFTDOWN, x, y, 0, 0)
win32api.mouse_event(win32con.MOUSEEVENTF_LEFTUP, x, y, 0, 0)
if __name__ == "__main__":
core(run())
| brokeyourbike/astrolords_dayleak | astro.py | Python | mit | 1,700 |
"""
This is an example settings/test.py file.
Use this settings file when running tests.
These settings override what's in settings/base.py
"""
from .base import *
DATABASES = {
"default": {
"ENGINE": "django.db.backends.sqlite3",
"NAME": ":memory:",
"USER": "",
"PASSWORD": "",
"HOST": "",
"PORT": "",
},
}
SECRET_KEY = '*k3tkxu5a*08f9ann#5sn!3qc&o2nkr-+z)0=kmm7md9!z7=^k'
| oskarm91/sis | sis/settings/test.py | Python | bsd-3-clause | 435 |
from views import app
from settings import FRONT_WEATHER_PORT
if __name__ == "__main__":
app.run(host='0.0.0.0', port=FRONT_WEATHER_PORT)
| microstack/front | weather/wsgi.py | Python | mit | 143 |
#Milton Orlando Sarria
# basic filtering of a sinusoidal noise component
from scipy import signal
import matplotlib.pyplot as plt
import numpy as np
# design the filter using a Hamming window
b = signal.firwin(9, 0.8, window='hamming', pass_zero=True)
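# Optional check (sketch, not in the original script): the filter's frequency
# response could be inspected with scipy.signal.freqz, e.g.
#   w, h = signal.freqz(b)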
# define the sampling frequency and build a time vector up to 5 seconds
fs=1e3
longitud = 5
t=np.linspace(1./fs,longitud,fs*longitud);
F=10 # fundamental frequency, 10 Hz
w=2*np.pi*F # angular frequency
Vm=4 # wave amplitude
# generate a clean sinusoid
x=Vm*np.cos(w*t)
# generate a sinusoidal noise wave: high frequency, low amplitude
# use a frequency 30 times the fundamental
ruido=2*np.cos(30*w*t)
# noisy wave: sum of the two sinusoids
x_n=x+ruido
# filter the noisy wave with the FIR filter
yf=signal.lfilter(b, [1.0],x_n)
# plot the three waves: clean, noisy, and filtered
plt.subplot(311)
plt.plot(t,x)
plt.title('onda sin ruido')
plt.subplot(312)
plt.plot(t,x_n)
plt.title('onda con ruido')
plt.subplot(313)
plt.plot(t,yf)
plt.title('onda filtrada')
plt.xlabel('tiempo')
plt.show()
| miltonsarria/dsp-python | filters/FIR/filter_sine1.py | Python | mit | 1,128 |
# -*- coding: utf-8 -*-
'''
Tensorflow Implementation of the Scaled ELU function and Dropout
'''
from __future__ import absolute_import, division, print_function
import numbers
from tensorflow.contrib import layers
from tensorflow.python.framework import ops
from tensorflow.python.framework import tensor_shape
from tensorflow.python.framework import tensor_util
from tensorflow.python.ops import math_ops
from tensorflow.python.ops import random_ops
from tensorflow.python.ops import array_ops
from tensorflow.python.layers import utils
import tensorflow as tf
# (1) scale inputs to zero mean and unit variance
# (2) use SELUs
def selu(x):
with ops.name_scope('elu') as scope:
alpha = 1.6732632423543772848170429916717
scale = 1.0507009873554804934193349852946
return scale*tf.where(x>=0.0, x, alpha*tf.nn.elu(x))
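# Usage sketch (hypothetical tensor names): apply SELU after a dense layer,
# e.g. hidden = selu(tf.matmul(x, weights) + biases)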
# (3) initialize weights with stddev sqrt(1/n)
# (4) use this dropout
def dropout_selu(x, rate, alpha= -1.7580993408473766, fixedPointMean=0.0, fixedPointVar=1.0,
noise_shape=None, seed=None, name=None, training=False):
"""Dropout to a value with rescaling."""
def dropout_selu_impl(x, rate, alpha, noise_shape, seed, name):
keep_prob = 1.0 - rate
x = ops.convert_to_tensor(x, name="x")
if isinstance(keep_prob, numbers.Real) and not 0 < keep_prob <= 1:
raise ValueError("keep_prob must be a scalar tensor or a float in the "
"range (0, 1], got %g" % keep_prob)
keep_prob = ops.convert_to_tensor(keep_prob, dtype=x.dtype, name="keep_prob")
keep_prob.get_shape().assert_is_compatible_with(tensor_shape.scalar())
alpha = ops.convert_to_tensor(alpha, dtype=x.dtype, name="alpha")
        alpha.get_shape().assert_is_compatible_with(tensor_shape.scalar())
if tensor_util.constant_value(keep_prob) == 1:
return x
noise_shape = noise_shape if noise_shape is not None else array_ops.shape(x)
random_tensor = keep_prob
random_tensor += random_ops.random_uniform(noise_shape, seed=seed, dtype=x.dtype)
binary_tensor = math_ops.floor(random_tensor)
ret = x * binary_tensor + alpha * (1-binary_tensor)
a = tf.sqrt(fixedPointVar / (keep_prob *((1-keep_prob) * tf.pow(alpha-fixedPointMean,2) + fixedPointVar)))
b = fixedPointMean - a * (keep_prob * fixedPointMean + (1 - keep_prob) * alpha)
ret = a * ret + b
ret.set_shape(x.get_shape())
return ret
with ops.name_scope(name, "dropout", [x]) as name:
return utils.smart_cond(training,
lambda: dropout_selu_impl(x, rate, alpha, noise_shape, seed, name),
lambda: array_ops.identity(x))
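# Usage sketch (assumed variable names): apply the variance-preserving dropout
# only while training, e.g.
#   dropped = dropout_selu(hidden, rate=0.05, training=is_training)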
| sethuiyer/mlhub | Deep MNIST/selu.py | Python | mit | 2,749 |
# Copyright 2016 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Integration test program A for Subpar.
Test a variety of difficult or erroneous import scenarios.
"""
import pkgutil
import sys
# Import some things in various ways
import subpar
from subpar import tests as tests1
import subpar.tests as tests2 # noqa
assert tests1 is tests2, (tests1, tests2)
# Test importing __main__ under its package qualified name.
#
# Check that we handle it the same way Python does (i.e. poorly)
if __name__ == '__main__':
imported_qualified_a = False
# pylint: disable=reimported,import-self
import subpar.tests.package_a.a
assert imported_qualified_a
assert subpar.tests.package_a.a is not sys.modules['__main__']
else:
    # We may be inside a recursive import
assert __name__ == 'subpar.tests.package_a.a', __name__
assert sys.modules.get(__name__) is not None
# Tell __main__ that we got here
if hasattr(sys.modules['__main__'], 'imported_qualified_a'):
sys.modules['__main__'].imported_qualified_a = True
# Import parent package
import subpar.tests.package_a
from .. import package_a
assert subpar.tests.package_a is package_a
# Containing package doesn't have a reference to this module yet
assert (not hasattr(package_a, 'a')), package_a
# Test that neither of these work, because we're in the middle of
# importing 'subpar.tests.package_a.a', so the module object for
# 'subpar.tests.package_a' doesn't have a variable called 'a' yet.
try:
# pylint: disable=import-self
from . import a as a1
# This was fixed in Python 3.5
if (sys.version_info.major, sys.version_info.minor) < (3, 5):
raise AssertionError('This shouldn\'t have worked: %r' % a1)
except ImportError as e:
assert 'cannot import name' in str(e), e
try:
# pylint: disable=import-self
import subpar.tests.package_a.a as a2
raise AssertionError('This shouldn\'t have worked: %r' % a2)
except AttributeError as e:
assert "has no attribute 'a'" in str(e), e
def main():
print('In a.py main()')
# Test resource extraction
a_dat = pkgutil.get_data('subpar.tests.package_a', 'a_dat.txt')
assert (a_dat == b'Dummy data file for a.py\n'), a_dat
if __name__ == '__main__':
main()
| google/subpar | tests/package_a/a.py | Python | apache-2.0 | 2,881 |
# ##### BEGIN GPL LICENSE BLOCK #####
#
# This program is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License
# as published by the Free Software Foundation; either version 2
# of the License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software Foundation,
# Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
#
# ##### END GPL LICENSE BLOCK #####
# <pep8-80 compliant>
__all__ = (
"ExportHelper",
"ImportHelper",
"orientation_helper_factory",
"axis_conversion",
"axis_conversion_ensure",
"create_derived_objects",
"free_derived_objects",
"unpack_list",
"unpack_face_list",
"path_reference",
"path_reference_copy",
"path_reference_mode",
"unique_name"
)
import bpy
from bpy.props import (
StringProperty,
BoolProperty,
EnumProperty,
)
def _check_axis_conversion(op):
if hasattr(op, "axis_forward") and hasattr(op, "axis_up"):
return axis_conversion_ensure(op,
"axis_forward",
"axis_up",
)
return False
class ExportHelper:
filepath = StringProperty(
name="File Path",
description="Filepath used for exporting the file",
maxlen=1024,
subtype='FILE_PATH',
)
check_existing = BoolProperty(
name="Check Existing",
description="Check and warn on overwriting existing files",
default=True,
options={'HIDDEN'},
)
# needed for mix-ins
order = [
"filepath",
"check_existing",
]
# subclasses can override with decorator
# True == use ext, False == no ext, None == do nothing.
check_extension = True
def invoke(self, context, event):
import os
if not self.filepath:
blend_filepath = context.blend_data.filepath
if not blend_filepath:
blend_filepath = "untitled"
else:
blend_filepath = os.path.splitext(blend_filepath)[0]
self.filepath = blend_filepath + self.filename_ext
context.window_manager.fileselect_add(self)
return {'RUNNING_MODAL'}
def check(self, context):
import os
change_ext = False
change_axis = _check_axis_conversion(self)
check_extension = self.check_extension
if check_extension is not None:
filepath = self.filepath
if os.path.basename(filepath):
filepath = bpy.path.ensure_ext(filepath,
self.filename_ext
if check_extension
else "")
if filepath != self.filepath:
self.filepath = filepath
change_ext = True
return (change_ext or change_axis)
class ImportHelper:
filepath = StringProperty(
name="File Path",
description="Filepath used for importing the file",
maxlen=1024,
subtype='FILE_PATH',
)
# needed for mix-ins
order = [
"filepath",
]
def invoke(self, context, event):
context.window_manager.fileselect_add(self)
return {'RUNNING_MODAL'}
def check(self, context):
return _check_axis_conversion(self)
def orientation_helper_factory(name, axis_forward='Y', axis_up='Z'):
members = {}
def _update_axis_forward(self, context):
if self.axis_forward[-1] == self.axis_up[-1]:
self.axis_up = self.axis_up[0:-1] + 'XYZ'[('XYZ'.index(self.axis_up[-1]) + 1) % 3]
members['axis_forward'] = EnumProperty(
name="Forward",
items=(('X', "X Forward", ""),
('Y', "Y Forward", ""),
('Z', "Z Forward", ""),
('-X', "-X Forward", ""),
('-Y', "-Y Forward", ""),
('-Z', "-Z Forward", ""),
),
default=axis_forward,
update=_update_axis_forward,
)
def _update_axis_up(self, context):
if self.axis_up[-1] == self.axis_forward[-1]:
self.axis_forward = self.axis_forward[0:-1] + 'XYZ'[('XYZ'.index(self.axis_forward[-1]) + 1) % 3]
members['axis_up'] = EnumProperty(
name="Up",
items=(('X', "X Up", ""),
('Y', "Y Up", ""),
('Z', "Z Up", ""),
('-X', "-X Up", ""),
('-Y', "-Y Up", ""),
('-Z', "-Z Up", ""),
),
default=axis_up,
update=_update_axis_up,
)
members["order"] = [
"axis_forward",
"axis_up",
]
return type(name, (object,), members)
# Axis conversion function, not pretty LUT
# use lookup table to convert between any axis
_axis_convert_matrix = (
((-1.0, 0.0, 0.0), (0.0, -1.0, 0.0), (0.0, 0.0, 1.0)),
((-1.0, 0.0, 0.0), (0.0, 0.0, -1.0), (0.0, -1.0, 0.0)),
((-1.0, 0.0, 0.0), (0.0, 0.0, 1.0), (0.0, 1.0, 0.0)),
((-1.0, 0.0, 0.0), (0.0, 1.0, 0.0), (0.0, 0.0, -1.0)),
((0.0, -1.0, 0.0), (-1.0, 0.0, 0.0), (0.0, 0.0, -1.0)),
((0.0, 0.0, 1.0), (-1.0, 0.0, 0.0), (0.0, -1.0, 0.0)),
((0.0, 0.0, -1.0), (-1.0, 0.0, 0.0), (0.0, 1.0, 0.0)),
((0.0, 1.0, 0.0), (-1.0, 0.0, 0.0), (0.0, 0.0, 1.0)),
((0.0, -1.0, 0.0), (0.0, 0.0, 1.0), (-1.0, 0.0, 0.0)),
((0.0, 0.0, -1.0), (0.0, -1.0, 0.0), (-1.0, 0.0, 0.0)),
((0.0, 0.0, 1.0), (0.0, 1.0, 0.0), (-1.0, 0.0, 0.0)),
((0.0, 1.0, 0.0), (0.0, 0.0, -1.0), (-1.0, 0.0, 0.0)),
((0.0, -1.0, 0.0), (0.0, 0.0, -1.0), (1.0, 0.0, 0.0)),
((0.0, 0.0, 1.0), (0.0, -1.0, 0.0), (1.0, 0.0, 0.0)),
((0.0, 0.0, -1.0), (0.0, 1.0, 0.0), (1.0, 0.0, 0.0)),
((0.0, 1.0, 0.0), (0.0, 0.0, 1.0), (1.0, 0.0, 0.0)),
((0.0, -1.0, 0.0), (1.0, 0.0, 0.0), (0.0, 0.0, 1.0)),
((0.0, 0.0, -1.0), (1.0, 0.0, 0.0), (0.0, -1.0, 0.0)),
((0.0, 0.0, 1.0), (1.0, 0.0, 0.0), (0.0, 1.0, 0.0)),
((0.0, 1.0, 0.0), (1.0, 0.0, 0.0), (0.0, 0.0, -1.0)),
((1.0, 0.0, 0.0), (0.0, -1.0, 0.0), (0.0, 0.0, -1.0)),
((1.0, 0.0, 0.0), (0.0, 0.0, 1.0), (0.0, -1.0, 0.0)),
((1.0, 0.0, 0.0), (0.0, 0.0, -1.0), (0.0, 1.0, 0.0)),
)
# store args as a single int
# (X Y Z -X -Y -Z) --> (0, 1, 2, 3, 4, 5)
# each value is ((src_forward, src_up), (dst_forward, dst_up))
# where all 4 values are or'd into a single value...
# (i1<<0 | i1<<3 | i1<<6 | i1<<9)
_axis_convert_lut = (
{0x8C8, 0x4D0, 0x2E0, 0xAE8, 0x701, 0x511, 0x119, 0xB29, 0x682, 0x88A,
0x09A, 0x2A2, 0x80B, 0x413, 0x223, 0xA2B, 0x644, 0x454, 0x05C, 0xA6C,
0x745, 0x94D, 0x15D, 0x365},
{0xAC8, 0x8D0, 0x4E0, 0x2E8, 0x741, 0x951, 0x159, 0x369, 0x702, 0xB0A,
0x11A, 0x522, 0xA0B, 0x813, 0x423, 0x22B, 0x684, 0x894, 0x09C, 0x2AC,
0x645, 0xA4D, 0x05D, 0x465},
{0x4C8, 0x2D0, 0xAE0, 0x8E8, 0x681, 0x291, 0x099, 0x8A9, 0x642, 0x44A,
0x05A, 0xA62, 0x40B, 0x213, 0xA23, 0x82B, 0x744, 0x354, 0x15C, 0x96C,
0x705, 0x50D, 0x11D, 0xB25},
{0x2C8, 0xAD0, 0x8E0, 0x4E8, 0x641, 0xA51, 0x059, 0x469, 0x742, 0x34A,
0x15A, 0x962, 0x20B, 0xA13, 0x823, 0x42B, 0x704, 0xB14, 0x11C, 0x52C,
0x685, 0x28D, 0x09D, 0x8A5},
{0x708, 0xB10, 0x120, 0x528, 0x8C1, 0xAD1, 0x2D9, 0x4E9, 0x942, 0x74A,
0x35A, 0x162, 0x64B, 0xA53, 0x063, 0x46B, 0x804, 0xA14, 0x21C, 0x42C,
0x885, 0x68D, 0x29D, 0x0A5},
{0xB08, 0x110, 0x520, 0x728, 0x941, 0x151, 0x359, 0x769, 0x802, 0xA0A,
0x21A, 0x422, 0xA4B, 0x053, 0x463, 0x66B, 0x884, 0x094, 0x29C, 0x6AC,
0x8C5, 0xACD, 0x2DD, 0x4E5},
{0x508, 0x710, 0xB20, 0x128, 0x881, 0x691, 0x299, 0x0A9, 0x8C2, 0x4CA,
0x2DA, 0xAE2, 0x44B, 0x653, 0xA63, 0x06B, 0x944, 0x754, 0x35C, 0x16C,
0x805, 0x40D, 0x21D, 0xA25},
{0x108, 0x510, 0x720, 0xB28, 0x801, 0x411, 0x219, 0xA29, 0x882, 0x08A,
0x29A, 0x6A2, 0x04B, 0x453, 0x663, 0xA6B, 0x8C4, 0x4D4, 0x2DC, 0xAEC,
0x945, 0x14D, 0x35D, 0x765},
{0x748, 0x350, 0x160, 0x968, 0xAC1, 0x2D1, 0x4D9, 0x8E9, 0xA42, 0x64A,
0x45A, 0x062, 0x68B, 0x293, 0x0A3, 0x8AB, 0xA04, 0x214, 0x41C, 0x82C,
0xB05, 0x70D, 0x51D, 0x125},
{0x948, 0x750, 0x360, 0x168, 0xB01, 0x711, 0x519, 0x129, 0xAC2, 0x8CA,
0x4DA, 0x2E2, 0x88B, 0x693, 0x2A3, 0x0AB, 0xA44, 0x654, 0x45C, 0x06C,
0xA05, 0x80D, 0x41D, 0x225},
{0x348, 0x150, 0x960, 0x768, 0xA41, 0x051, 0x459, 0x669, 0xA02, 0x20A,
0x41A, 0x822, 0x28B, 0x093, 0x8A3, 0x6AB, 0xB04, 0x114, 0x51C, 0x72C,
0xAC5, 0x2CD, 0x4DD, 0x8E5},
{0x148, 0x950, 0x760, 0x368, 0xA01, 0x811, 0x419, 0x229, 0xB02, 0x10A,
0x51A, 0x722, 0x08B, 0x893, 0x6A3, 0x2AB, 0xAC4, 0x8D4, 0x4DC, 0x2EC,
0xA45, 0x04D, 0x45D, 0x665},
{0x688, 0x890, 0x0A0, 0x2A8, 0x4C1, 0x8D1, 0xAD9, 0x2E9, 0x502, 0x70A,
0xB1A, 0x122, 0x74B, 0x953, 0x163, 0x36B, 0x404, 0x814, 0xA1C, 0x22C,
0x445, 0x64D, 0xA5D, 0x065},
{0x888, 0x090, 0x2A0, 0x6A8, 0x501, 0x111, 0xB19, 0x729, 0x402, 0x80A,
0xA1A, 0x222, 0x94B, 0x153, 0x363, 0x76B, 0x444, 0x054, 0xA5C, 0x66C,
0x4C5, 0x8CD, 0xADD, 0x2E5},
{0x288, 0x690, 0x8A0, 0x0A8, 0x441, 0x651, 0xA59, 0x069, 0x4C2, 0x2CA,
0xADA, 0x8E2, 0x34B, 0x753, 0x963, 0x16B, 0x504, 0x714, 0xB1C, 0x12C,
0x405, 0x20D, 0xA1D, 0x825},
{0x088, 0x290, 0x6A0, 0x8A8, 0x401, 0x211, 0xA19, 0x829, 0x442, 0x04A,
0xA5A, 0x662, 0x14B, 0x353, 0x763, 0x96B, 0x4C4, 0x2D4, 0xADC, 0x8EC,
0x505, 0x10D, 0xB1D, 0x725},
{0x648, 0x450, 0x060, 0xA68, 0x2C1, 0x4D1, 0x8D9, 0xAE9, 0x282, 0x68A,
0x89A, 0x0A2, 0x70B, 0x513, 0x123, 0xB2B, 0x204, 0x414, 0x81C, 0xA2C,
0x345, 0x74D, 0x95D, 0x165},
{0xA48, 0x650, 0x460, 0x068, 0x341, 0x751, 0x959, 0x169, 0x2C2, 0xACA,
0x8DA, 0x4E2, 0xB0B, 0x713, 0x523, 0x12B, 0x284, 0x694, 0x89C, 0x0AC,
0x205, 0xA0D, 0x81D, 0x425},
{0x448, 0x050, 0xA60, 0x668, 0x281, 0x091, 0x899, 0x6A9, 0x202, 0x40A,
0x81A, 0xA22, 0x50B, 0x113, 0xB23, 0x72B, 0x344, 0x154, 0x95C, 0x76C,
0x2C5, 0x4CD, 0x8DD, 0xAE5},
{0x048, 0xA50, 0x660, 0x468, 0x201, 0xA11, 0x819, 0x429, 0x342, 0x14A,
0x95A, 0x762, 0x10B, 0xB13, 0x723, 0x52B, 0x2C4, 0xAD4, 0x8DC, 0x4EC,
0x285, 0x08D, 0x89D, 0x6A5},
{0x808, 0xA10, 0x220, 0x428, 0x101, 0xB11, 0x719, 0x529, 0x142, 0x94A,
0x75A, 0x362, 0x8CB, 0xAD3, 0x2E3, 0x4EB, 0x044, 0xA54, 0x65C, 0x46C,
0x085, 0x88D, 0x69D, 0x2A5},
{0xA08, 0x210, 0x420, 0x828, 0x141, 0x351, 0x759, 0x969, 0x042, 0xA4A,
0x65A, 0x462, 0xACB, 0x2D3, 0x4E3, 0x8EB, 0x084, 0x294, 0x69C, 0x8AC,
0x105, 0xB0D, 0x71D, 0x525},
{0x408, 0x810, 0xA20, 0x228, 0x081, 0x891, 0x699, 0x2A9, 0x102, 0x50A,
0x71A, 0xB22, 0x4CB, 0x8D3, 0xAE3, 0x2EB, 0x144, 0x954, 0x75C, 0x36C,
0x045, 0x44D, 0x65D, 0xA65},
)
_axis_convert_num = {'X': 0, 'Y': 1, 'Z': 2, '-X': 3, '-Y': 4, '-Z': 5}
def axis_conversion(from_forward='Y', from_up='Z', to_forward='Y', to_up='Z'):
"""
    Each argument is an axis in ['X', 'Y', 'Z', '-X', '-Y', '-Z']
where the first 2 are a source and the second 2 are the target.
"""
from mathutils import Matrix
from functools import reduce
if from_forward == to_forward and from_up == to_up:
return Matrix().to_3x3()
if from_forward[-1] == from_up[-1] or to_forward[-1] == to_up[-1]:
raise Exception("Invalid axis arguments passed, "
"can't use up/forward on the same axis")
value = reduce(int.__or__, (_axis_convert_num[a] << (i * 3)
for i, a in enumerate((from_forward,
from_up,
to_forward,
to_up,
))))
for i, axis_lut in enumerate(_axis_convert_lut):
if value in axis_lut:
return Matrix(_axis_convert_matrix[i])
assert(0)
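# Usage sketch (a common importer convention, shown only as an example):
# build a matrix converting a -Z forward / Y up file into Blender's
# Y forward / Z up space:
#   global_matrix = axis_conversion(from_forward='-Z', from_up='Y').to_4x4()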
def axis_conversion_ensure(operator, forward_attr, up_attr):
"""
Function to ensure an operator has valid axis conversion settings, intended
to be used from :class:`bpy.types.Operator.check`.
:arg operator: the operator to access axis attributes from.
:type operator: :class:`bpy.types.Operator`
:arg forward_attr: attribute storing the forward axis
:type forward_attr: string
:arg up_attr: attribute storing the up axis
:type up_attr: string
:return: True if the value was modified.
:rtype: boolean
"""
def validate(axis_forward, axis_up):
if axis_forward[-1] == axis_up[-1]:
axis_up = axis_up[0:-1] + 'XYZ'[('XYZ'.index(axis_up[-1]) + 1) % 3]
return axis_forward, axis_up
axis = getattr(operator, forward_attr), getattr(operator, up_attr)
axis_new = validate(*axis)
if axis != axis_new:
setattr(operator, forward_attr, axis_new[0])
setattr(operator, up_attr, axis_new[1])
return True
else:
return False
# return a tuple (free, object list), free is True if memory should be freed
# later with free_derived_objects()
def create_derived_objects(scene, ob):
if ob.parent and ob.parent.dupli_type in {'VERTS', 'FACES'}:
return False, None
if ob.dupli_type != 'NONE':
ob.dupli_list_create(scene)
return True, [(dob.object, dob.matrix) for dob in ob.dupli_list]
else:
return False, [(ob, ob.matrix_world)]
def free_derived_objects(ob):
ob.dupli_list_clear()
def unpack_list(list_of_tuples):
flat_list = []
flat_list_extend = flat_list.extend # a tiny bit faster
for t in list_of_tuples:
flat_list_extend(t)
return flat_list
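# Example sketch: unpack_list([(0, 1, 2), (2, 3, 0)]) -> [0, 1, 2, 2, 3, 0],
# the flat form typically fed to foreach_set() on mesh data.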
# same as above except that it adds 0 for triangle faces
def unpack_face_list(list_of_tuples):
# allocate the entire list
flat_ls = [0] * (len(list_of_tuples) * 4)
i = 0
for t in list_of_tuples:
if len(t) == 3:
if t[2] == 0:
t = t[1], t[2], t[0]
else: # assume quad
if t[3] == 0 or t[2] == 0:
t = t[2], t[3], t[0], t[1]
flat_ls[i:i + len(t)] = t
i += 4
return flat_ls
path_reference_mode = EnumProperty(
name="Path Mode",
description="Method used to reference paths",
items=(('AUTO', "Auto", "Use Relative paths with subdirectories only"),
('ABSOLUTE', "Absolute", "Always write absolute paths"),
('RELATIVE', "Relative", "Always write relative paths "
"(where possible)"),
('MATCH', "Match", "Match Absolute/Relative "
"setting with input path"),
('STRIP', "Strip Path", "Filename only"),
('COPY', "Copy", "Copy the file to the destination path "
"(or subdirectory)"),
),
default='AUTO',
)
def path_reference(filepath,
base_src,
base_dst,
mode='AUTO',
copy_subdir="",
copy_set=None,
library=None,
):
"""
Return a filepath relative to a destination directory, for use with
exporters.
:arg filepath: the file path to return,
supporting blenders relative '//' prefix.
:type filepath: string
    :arg base_src: the directory the *filepath* is relative to
(normally the blend file).
:type base_src: string
:arg base_dst: the directory the *filepath* will be referenced from
(normally the export path).
:type base_dst: string
    :arg mode: the method used to get the path in
['AUTO', 'ABSOLUTE', 'RELATIVE', 'MATCH', 'STRIP', 'COPY']
:type mode: string
:arg copy_subdir: the subdirectory of *base_dst* to use when mode='COPY'.
:type copy_subdir: string
:arg copy_set: collect from/to pairs when mode='COPY',
pass to *path_reference_copy* when exporting is done.
:type copy_set: set
:arg library: The library this path is relative to.
:type library: :class:`bpy.types.Library` or None
:return: the new filepath.
:rtype: string
"""
import os
is_relative = filepath.startswith("//")
filepath_abs = bpy.path.abspath(filepath, base_src, library)
filepath_abs = os.path.normpath(filepath_abs)
if mode in {'ABSOLUTE', 'RELATIVE', 'STRIP'}:
pass
elif mode == 'MATCH':
mode = 'RELATIVE' if is_relative else 'ABSOLUTE'
elif mode == 'AUTO':
mode = ('RELATIVE'
if bpy.path.is_subdir(filepath_abs, base_dst)
else 'ABSOLUTE')
elif mode == 'COPY':
subdir_abs = os.path.normpath(base_dst)
if copy_subdir:
subdir_abs = os.path.join(subdir_abs, copy_subdir)
filepath_cpy = os.path.join(subdir_abs, os.path.basename(filepath))
copy_set.add((filepath_abs, filepath_cpy))
filepath_abs = filepath_cpy
mode = 'RELATIVE'
else:
raise Exception("invalid mode given %r" % mode)
if mode == 'ABSOLUTE':
return filepath_abs
elif mode == 'RELATIVE':
# can't always find the relative path
# (between drive letters on windows)
try:
return os.path.relpath(filepath_abs, base_dst)
except ValueError:
return filepath_abs
elif mode == 'STRIP':
return os.path.basename(filepath_abs)
def path_reference_copy(copy_set, report=print):
"""
Execute copying files of path_reference
:arg copy_set: set of (from, to) pairs to copy.
:type copy_set: set
:arg report: function used for reporting warnings, takes a string argument.
:type report: function
"""
if not copy_set:
return
import os
import shutil
for file_src, file_dst in copy_set:
if not os.path.exists(file_src):
report("missing %r, not copying" % file_src)
elif os.path.exists(file_dst) and os.path.samefile(file_src, file_dst):
pass
else:
dir_to = os.path.dirname(file_dst)
try:
os.makedirs(dir_to, exist_ok=True)
except:
import traceback
traceback.print_exc()
try:
shutil.copy(file_src, file_dst)
except:
import traceback
traceback.print_exc()
def unique_name(key, name, name_dict, name_max=-1, clean_func=None, sep="."):
"""
Helper function for storing unique names which may have special characters
stripped and restricted to a maximum length.
:arg key: unique item this name belongs to, name_dict[key] will be reused
when available.
This can be the object, mesh, material, etc instance its self.
:type key: any hashable object associated with the *name*.
:arg name: The name used to create a unique value in *name_dict*.
:type name: string
:arg name_dict: This is used to cache namespace to ensure no collisions
occur, this should be an empty dict initially and only modified by this
function.
:type name_dict: dict
:arg clean_func: Function to call on *name* before creating a unique value.
:type clean_func: function
    :arg sep: Separator to use between the name and a number when a
duplicate name is found.
:type sep: string
"""
name_new = name_dict.get(key)
if name_new is None:
count = 1
name_dict_values = name_dict.values()
name_new = name_new_orig = (name if clean_func is None
else clean_func(name))
if name_max == -1:
while name_new in name_dict_values:
name_new = "%s%s%03d" % (name_new_orig, sep, count)
count += 1
else:
name_new = name_new[:name_max]
while name_new in name_dict_values:
count_str = "%03d" % count
name_new = "%.*s%s%s" % (name_max - (len(count_str) + 1),
name_new_orig,
sep,
count_str,
)
count += 1
name_dict[key] = name_new
return name_new
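# Usage sketch (hypothetical data): keep exporter names unique, cleaned and
# capped in length, e.g.
#   name_map = {}
#   for ob in objects:
#       safe = unique_name(ob, ob.name, name_map, name_max=20,
#                          clean_func=lambda n: n.replace(" ", "_"))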
| pawkoz/dyplom | blender/release/scripts/modules/bpy_extras/io_utils.py | Python | gpl-2.0 | 20,833 |
from flask import g, render_template, request, session, redirect, url_for, send_file
from scrimmage import app, db
from scrimmage.decorators import sponsor_or_admin_required
from scrimmage.models import Team
from scrimmage.statistics import generate_team_stats
@app.route('/sponsor/')
@sponsor_or_admin_required
def sponsor_index():
teams = Team.query.filter(Team.is_disabled == False).all()
return render_template('sponsor/index.html', teams=teams)
@app.route('/sponsor/team/<int:team_id>')
@sponsor_or_admin_required
def sponsor_team(team_id):
team = Team.query.get_or_404(team_id)
elo_over_time, histogram_data = generate_team_stats(team)
return render_template('sponsor/team_info.html', team=team, elo_over_time=elo_over_time, histogram_data=histogram_data)
| mitpokerbots/scrimmage | scrimmage/sponsor/index.py | Python | mit | 778 |
# Copyright 2014 Hewlett-Packard Development Company, L.P.
#
# Author: Endre Karlson <[email protected]>
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import random
from rally import consts
from rally.plugins.openstack import scenario
from rally.plugins.openstack.scenarios.designate import utils
from rally.task import atomic
from rally.task import validation
class DesignateBasic(utils.DesignateScenario):
"""Basic benchmark scenarios for Designate."""
@validation.required_services(consts.Service.DESIGNATE)
@validation.required_openstack(users=True)
@scenario.configure(context={"cleanup": ["designate"]})
def create_and_list_domains(self):
"""Create a domain and list all domains.
Measure the "designate domain-list" command performance.
If you have only 1 user in your context, you will
add 1 domain on every iteration. So you will have more
and more domain and will be able to measure the
performance of the "designate domain-list" command depending on
the number of domains owned by users.
"""
self._create_domain()
self._list_domains()
@validation.required_services(consts.Service.DESIGNATE)
@validation.required_openstack(users=True)
@scenario.configure(context={"cleanup": ["designate"]})
def list_domains(self):
"""List Designate domains.
This simple scenario tests the designate domain-list command by listing
all the domains.
Suppose if we have 2 users in context and each has 2 domains
uploaded for them we will be able to test the performance of
designate domain-list command in this case.
"""
self._list_domains()
@validation.required_services(consts.Service.DESIGNATE)
@validation.required_openstack(users=True)
@scenario.configure(context={"cleanup": ["designate"]})
def create_and_delete_domain(self):
"""Create and then delete a domain.
Measure the performance of creating and deleting domains
        with different levels of load.
"""
domain = self._create_domain()
self._delete_domain(domain["id"])
@validation.required_services(consts.Service.DESIGNATE)
@validation.required_openstack(users=True)
@scenario.configure(context={"cleanup": ["designate"]})
def create_and_update_domain(self):
"""Create and then update a domain.
Measure the performance of creating and updating domains
        with different levels of load.
"""
domain = self._create_domain()
self._update_domain(domain)
@validation.required_services(consts.Service.DESIGNATE)
@validation.required_openstack(users=True)
@scenario.configure(context={"cleanup": ["designate"]})
def create_and_delete_records(self, records_per_domain=5):
"""Create and then delete records.
Measure the performance of creating and deleting records
        with different levels of load.
        :param records_per_domain: Records to create per domain.
"""
domain = self._create_domain()
records = []
key = "designate.create_%s_records" % records_per_domain
with atomic.ActionTimer(self, key):
for i in range(records_per_domain):
record = self._create_record(domain, atomic_action=False)
records.append(record)
key = "designate.delete_%s_records" % records_per_domain
with atomic.ActionTimer(self, key):
for record in records:
self._delete_record(
domain["id"], record["id"], atomic_action=False)
@validation.required_services(consts.Service.DESIGNATE)
@validation.required_openstack(users=True)
@scenario.configure(context={"cleanup": ["designate"]})
def list_records(self, domain_id):
"""List Designate records.
This simple scenario tests the designate record-list command by listing
all the records in a domain.
Suppose if we have 2 users in context and each has 2 domains
uploaded for them we will be able to test the performance of
designate record-list command in this case.
:param domain_id: Domain ID
"""
self._list_records(domain_id)
@validation.required_services(consts.Service.DESIGNATE)
@validation.required_openstack(users=True)
@scenario.configure(context={"cleanup": ["designate"]})
def create_and_list_records(self, records_per_domain=5):
"""Create and then list records.
If you have only 1 user in your context, you will
add 1 record on every iteration. So you will have more
and more records and will be able to measure the
performance of the "designate record-list" command depending on
the number of domains/records owned by users.
        :param records_per_domain: Records to create per domain.
"""
domain = self._create_domain()
key = "designate.create_%s_records" % records_per_domain
with atomic.ActionTimer(self, key):
for i in range(records_per_domain):
self._create_record(domain, atomic_action=False)
self._list_records(domain["id"])
@validation.required_services(consts.Service.DESIGNATE)
@validation.required_openstack(admin=True)
@scenario.configure(context={"cleanup": ["designate"]})
def create_and_list_servers(self):
"""Create a Designate server and list all servers.
If you have only 1 user in your context, you will
add 1 server on every iteration. So you will have more
        and more servers and will be able to measure the
performance of the "designate server-list" command depending on
the number of servers owned by users.
"""
self._create_server()
self._list_servers()
@validation.required_services(consts.Service.DESIGNATE)
@validation.required_openstack(admin=True)
@scenario.configure(context={"cleanup": ["designate"]})
def create_and_delete_server(self):
"""Create and then delete a server.
Measure the performance of creating and deleting servers
        with different levels of load.
"""
server = self._create_server()
self._delete_server(server["id"])
@validation.required_services(consts.Service.DESIGNATE)
@validation.required_openstack(admin=True)
@scenario.configure(context={"cleanup": ["designate"]})
def list_servers(self):
"""List Designate servers.
This simple scenario tests the designate server-list command by listing
all the servers.
"""
self._list_servers()
# NOTE: API V2
@validation.required_services(consts.Service.DESIGNATE)
@validation.required_openstack(users=True)
@scenario.configure(context={"cleanup": ["designate"]})
def create_and_list_zones(self):
"""Create a zone and list all zones.
Measure the "openstack zone list" command performance.
If you have only 1 user in your context, you will
add 1 zone on every iteration. So you will have more
        and more zones and will be able to measure the
performance of the "openstack zone list" command depending on
the number of zones owned by users.
"""
self._create_zone()
self._list_zones()
@validation.required_services(consts.Service.DESIGNATE)
@validation.required_openstack(users=True)
@scenario.configure(context={"cleanup": ["designate"]})
def list_zones(self):
"""List Designate zones.
This simple scenario tests the openstack zone list command by listing
all the zones.
"""
self._list_zones()
@validation.required_services(consts.Service.DESIGNATE)
@validation.required_openstack(users=True)
@scenario.configure(context={"cleanup": ["designate"]})
def create_and_delete_zone(self):
"""Create and then delete a zone.
Measure the performance of creating and deleting zones
        with different levels of load.
"""
zone = self._create_zone()
self._delete_zone(zone["id"])
@validation.required_services(consts.Service.DESIGNATE)
@validation.required_openstack(users=True)
@scenario.configure(context={"cleanup": ["designate"]})
def list_recordsets(self, zone_id):
"""List Designate recordsets.
This simple scenario tests the openstack recordset list command by
listing all the recordsets in a zone.
:param zone_id: Zone ID
"""
self._list_recordsets(zone_id)
@validation.required_services(consts.Service.DESIGNATE)
@validation.required_openstack(users=True)
@validation.required_contexts("zones")
@scenario.configure(context={"cleanup": ["designate"]})
def create_and_delete_recordsets(self, recordsets_per_zone=5):
"""Create and then delete recordsets.
Measure the performance of creating and deleting recordsets
        with different levels of load.
        :param recordsets_per_zone: recordsets to create per zone.
"""
zone = random.choice(self.context["tenant"]["zones"])
recordsets = []
key = "designate.create_%s_recordsets" % recordsets_per_zone
with atomic.ActionTimer(self, key):
for i in range(recordsets_per_zone):
recordset = self._create_recordset(zone, atomic_action=False)
recordsets.append(recordset)
key = "designate.delete_%s_recordsets" % recordsets_per_zone
with atomic.ActionTimer(self, key):
for recordset in recordsets:
self._delete_recordset(
zone["id"], recordset["id"], atomic_action=False)
@validation.required_services(consts.Service.DESIGNATE)
@validation.required_openstack(users=True)
@validation.required_contexts("zones")
@scenario.configure(context={"cleanup": ["designate"]})
def create_and_list_recordsets(self, recordsets_per_zone=5):
"""Create and then list recordsets.
If you have only 1 user in your context, you will
add 1 recordset on every iteration. So you will have more
and more recordsets and will be able to measure the
performance of the "openstack recordset list" command depending on
the number of zones/recordsets owned by users.
        :param recordsets_per_zone: recordsets to create per zone.
"""
zone = random.choice(self.context["tenant"]["zones"])
key = "designate.create_%s_recordsets" % recordsets_per_zone
with atomic.ActionTimer(self, key):
for i in range(recordsets_per_zone):
self._create_recordset(zone, atomic_action=False)
self._list_recordsets(zone["id"])
| amit0701/rally | rally/plugins/openstack/scenarios/designate/basic.py | Python | apache-2.0 | 11,351 |
# Licensed under a 3-clause BSD style license - see LICENSE.rst
# -*- coding: utf-8 -*-
'''
desimodel.inputs.fiberpos
=========================
Utilities for updating positioner to fiber mapping.
'''
import os
import shutil
import numpy as np
from astropy.table import Table, vstack
from . import docdb
from ..io import datadir
def update(testdir=None, seed=2):
'''
Update positioner to fiber number mapping from DocDB
Options:
testdir: if not None, write files here instead of
            $DESIMODEL/data/focalplane/fiberpos*
seed:
            integer random number seed for randomization within a cassette
Writes testdir/fiberpos* or $DESIMODEL/data/focalplane/fiberpos*
'''
from desiutil.log import get_logger
log = get_logger()
#- Download input files from DocDB
cassette_file = docdb.download(2721, 2, 'cassette_order.txt')
xls_fp_layout = docdb.download(530, 14, 'DESI-0530-v14 (Focal Plane Layout).xlsx')
platescale_file = docdb.download(329, 15, 'Echo22Platescale.txt')
#- Pick filenames in output directory
if testdir is None:
outdir = os.path.join(datadir(), 'focalplane')
else:
outdir = testdir
if not os.path.isdir(outdir):
raise ValueError("Missing directory {}".format(testdir))
#- copy platescale file
outpsfile = os.path.join(outdir, 'platescale.txt')
shutil.copy(platescale_file, outpsfile)
log.info('Wrote {}'.format(outpsfile))
#- Random but reproducible
np.random.seed(seed)
#- DESI-0530 file name (fn) and sheet name (sn) shortcuts
fn = xls_fp_layout
sn = 'PositionerAndFiducialLocations'
#- Sanity check that columns are still in the same place
rowmin, rowmax = 49, 591
headers = docdb.xls_read_row(fn, sn, rowmin-1, 'B', 'S')
assert headers[0] == 'device_location_id'
assert headers[1] == 'device_type'
assert headers[2] == 'X'
assert headers[3] == 'Y'
assert headers[4] == 'Z'
assert headers[8] == 'cassetteID'
assert headers[15] == 'Q'
assert headers[17] == 'S'
#- Read Excel table with device locations
posloc = Table()
posloc['DEVICE'] = docdb.xls_read_col(fn, sn, 'B', rowmin, rowmax, dtype=int)
posloc['DEVICE_TYPE'] = docdb.xls_read_col(fn, sn, 'C', rowmin, rowmax, dtype=str)
posloc['X'] = docdb.xls_read_col(fn, sn, 'D', rowmin, rowmax, dtype=float)
posloc['Y'] = docdb.xls_read_col(fn, sn, 'E', rowmin, rowmax, dtype=float)
posloc['Z'] = docdb.xls_read_col(fn, sn, 'F', rowmin, rowmax, dtype=float)
posloc['Q'] = docdb.xls_read_col(fn, sn, 'Q', rowmin, rowmax, dtype=float)
posloc['S'] = docdb.xls_read_col(fn, sn, 'S', rowmin, rowmax, dtype=float)
#- Cassette N/A -> -1, and parse string -> float -> int
c = docdb.xls_read_col(fn, sn, 'J', rowmin, rowmax)
not_spectro_fiber = (c == 'N/A')
c[not_spectro_fiber] = '-1'
posloc['CASSETTE'] = np.array(c, dtype=float).astype(int)
#- Sanity check on values
ndevice = len(posloc)
assert ndevice == 543 #- 543 holes have been drilled
assert len(np.unique(posloc['DEVICE'])) == len(posloc['DEVICE'])
assert set(posloc['DEVICE_TYPE']) == set(['POS', 'FIF', 'GIF', 'NON', 'OPT', 'ETC'])
assert 0 < np.min(posloc['X']) and np.max(posloc['X']) < 410
assert 0 <= np.min(posloc['Q']) and np.max(posloc['Q']) < 36.0
assert 0 <= np.min(posloc['S']) and np.max(posloc['S']) < 412.3
assert np.all(posloc['S']**2 > posloc['X']**2 + posloc['Y']**2 + posloc['Z']**2)
assert np.min(posloc['CASSETTE']) == -1
assert np.max(posloc['CASSETTE']) == 11
assert set(posloc['DEVICE_TYPE'][posloc['CASSETTE']==11]) == set(['ETC', 'OPT'])
assert set(posloc['DEVICE_TYPE'][posloc['CASSETTE']==-1]) == set(['FIF', 'GIF', 'NON'])
assert 0 not in posloc['CASSETTE']
#- Read mapping of cassettes on focal plane to fibers on slithead
colnames = ['fibermin', 'fibermax', 'sp0', 'sp1', 'sp2', 'sp3', 'sp4', 'sp5', 'sp6', 'sp7', 'sp8', 'sp9']
cassettes = Table.read(cassette_file, format='ascii', names=colnames)
#- Randomize fibers within a cassette
petals = list()
for p in range(10):
fiberpos = posloc.copy(copy_data=True)
fiberpos['FIBER'] = -1
fiberpos['PETAL'] = p
fiberpos['SLIT'] = p
fiberpos['SPECTRO'] = p
iipos = (fiberpos['DEVICE_TYPE'] == 'POS')
### fiberpos['device'] += p*len(fiberpos)
for c in range(1,11):
ii = (cassettes['sp'+str(p)] == c)
assert np.count_nonzero(ii) == 1
fibermin = p*500 + cassettes['fibermin'][ii][0]
fibermax = p*500 + cassettes['fibermax'][ii][0]
jj = iipos & (fiberpos['CASSETTE'] == c)
assert np.count_nonzero(jj) == 50
fiber = list(range(fibermin, fibermax+1))
np.random.shuffle(fiber)
fiberpos['FIBER'][jj] = fiber
#- Additional columns
fiberpos['SLITBLOCK'] = (fiberpos['FIBER'] % 500) // 25
fiberpos['BLOCKFIBER'] = (fiberpos['FIBER'] % 500) % 25
fiberpos['LOCATION'] = p*1000 + fiberpos['DEVICE']
#- Petal 0 is at the "bottom"; See DESI-0530
phi = np.radians((7*36 + 36*p)%360)
x = np.cos(phi)*fiberpos['X'] - np.sin(phi)*fiberpos['Y']
y = np.sin(phi)*fiberpos['X'] + np.cos(phi)*fiberpos['Y']
fiberpos['X'] = x
fiberpos['Y'] = y
petals.append(fiberpos)
fiberpos = vstack(petals)
fiberpos.sort('FIBER')
POS = (fiberpos['DEVICE_TYPE'] == 'POS')
#- devices that don't go to spectrographs don't have slitblock, blockfiber
fiberpos['SLITBLOCK'][~POS] = -1
fiberpos['BLOCKFIBER'][~POS] = -1
#- More sanity checks before writing output
fp = fiberpos[POS]
assert len(fp) == 5000
assert len(np.unique(fp['FIBER'])) == 5000
assert min(fp['FIBER']) == 0
assert max(fp['FIBER']) == 4999
assert len(set(fp['SPECTRO'])) == 10
assert min(fp['SPECTRO']) == 0
assert max(fp['SPECTRO']) == 9
assert len(np.unique(fiberpos['DEVICE'])) == ndevice
assert len(np.unique(fiberpos['LOCATION'])) == len(fiberpos)
#- Drop some columns we don't need
fiberpos.remove_column('CASSETTE')
#- Update i8 -> i4 for integer columns
for colname in ['FIBER', 'DEVICE', 'SPECTRO', 'PETAL', 'SLIT']:
fiberpos.replace_column(colname, fiberpos[colname].astype('i4'))
#- Reorder columns
assert set(fiberpos.colnames) == set('DEVICE DEVICE_TYPE X Y Z Q S FIBER PETAL SLIT SPECTRO SLITBLOCK BLOCKFIBER LOCATION'.split())
colnames = 'PETAL DEVICE DEVICE_TYPE LOCATION FIBER X Y Z Q S SPECTRO SLIT SLITBLOCK BLOCKFIBER'.split()
fiberpos = fiberpos[colnames]
assert fiberpos.colnames == colnames
#- Set units and descriptions; see DESI-2724
fiberpos['X'].unit = 'mm'
fiberpos['Y'].unit = 'mm'
fiberpos['Z'].unit = 'mm'
fiberpos['Q'].unit = 'deg'
fiberpos['S'].unit = 'mm'
fiberpos['X'].description = 'focal surface location [mm]'
fiberpos['Y'].description = 'focal surface location [mm]'
fiberpos['Z'].description = 'focal surface location [mm]'
fiberpos['Q'].description = 'azimuthal angle on focal surface [deg]'
fiberpos['S'].description = 'radial distance along focal surface [mm]'
fiberpos['FIBER'].description = 'fiber number [0-4999]'
fiberpos['DEVICE'].description = 'focal plane device_loc number [0-542]'
fiberpos['SPECTRO'].description = 'spectrograph number [0-9]'
fiberpos['PETAL'].description = 'focal plane petal_loc number [0-9]'
fiberpos['SLIT'].description = 'spectrograph slit number [0-9]'
fiberpos['SLITBLOCK'].description = 'id of the slitblock on the slit [0-19]'
fiberpos['BLOCKFIBER'].description = 'id of the fiber on the slitblock [0-24]'
fiberpos['LOCATION'].description = 'global location id across entire focal plane [0-9543]; has gaps in sequence'
fiberpos.meta['comments'] = [
"Coordinates at zenith: +x = East = +RA; +y = South = -dec",
"PETAL and DEVICE refer to locations, not hardware serial numbers",
"Differences from DESI-2724 naming:",
' - Drops "_ID" from column names',
' - Drops "_LOC" from "DEVICE_LOC" and "PETAL_LOC"',
" - SLITBLOCK as int [0-19] instead of string [B0-B19]",
" - BLOCKFIBER as int [0-24] instead of string [F0-F24]",
"Convenience columns:",
" - FIBER = PETAL*500 + SLITBLOCK*25 + BLOCKFIBER",
" - LOCATION = PETAL*1000 + DEVICE",
]
ecsvout = os.path.join(outdir, 'fiberpos.ecsv')
textout = os.path.join(outdir, 'fiberpos.txt')
fitsout = os.path.join(outdir, 'fiberpos.fits')
pngout = os.path.join(outdir, 'fiberpos.png')
#- Write old text format with just fiber, device, spectro, x, y, z
write_text_fiberpos(textout, fiberpos[POS])
log.info('Wrote {}'.format(textout))
#- Write all columns but only for positioners with fibers
fiberpos[POS].write(ecsvout, format='ascii.ecsv')
log.info('Wrote {}'.format(ecsvout))
fiberpos[POS].write(fitsout, format='fits', overwrite=True)
log.info('Wrote {}'.format(fitsout))
#- Write all columns and all rows, including
#- fiducials (device_type='FIF') and sky monitor (device_type='ETC')
fiberpos.sort('LOCATION')
fitsallout = fitsout.replace('.fits', '-all.fits')
ecsvallout = textout.replace('.txt', '-all.ecsv')
fiberpos.write(fitsallout, format='fits', overwrite=True)
fiberpos.write(ecsvallout, format='ascii.ecsv')
log.info('Wrote {}'.format(fitsallout))
log.info('Wrote {}'.format(ecsvallout))
#- Visualize mapping
POS = (fiberpos['DEVICE_TYPE'] == 'POS')
FIF = (fiberpos['DEVICE_TYPE'] == 'FIF')
ETC = (fiberpos['DEVICE_TYPE'] == 'ETC')
import pylab as P
P.jet() #- With apologies to viridis
P.figure(figsize=(7,7))
P.scatter(fiberpos['X'][POS], fiberpos['Y'][POS], c=fiberpos['FIBER'][POS]%500, edgecolor='none', s=20)
# P.scatter(fiberpos['x'][FIF], fiberpos['y'][FIF], s=5, color='k')
# P.plot(fiberpos['x'][ETC], fiberpos['y'][ETC], 'kx', ms=3)
P.grid(alpha=0.2, color='k')
P.xlim(-420,420)
P.ylim(-420,420)
P.xlabel('x [mm]')
P.ylabel('y [mm]')
P.title('Focal plane color coded by fiber location on slithead')
P.savefig(pngout, dpi=80)
log.info('Wrote {}'.format(pngout))
def write_text_fiberpos(filename, fiberpos):
'''
Writes a fiberpos table to filename, maintaining backwards compatibility
with the original fiberpos.txt format
Args:
filename: output file name string
fiberpos: astropy Table of fiber positions
'''
#- Write the old text file format for backwards compatibility
fxlines = [
"#- THIS FILE IS PROVIDED FOR BACKWARDS COMPATIBILITY",
"#- Please use fiberpos-all.[ecsv,fits] for additional columns",
'#- and non-spectrofiber device hole locations.',
        '#-',
"#- Fiber to focal plane device hole mapping; x,y,z in mm on focal plane",
"#- See doc/fiberpos.rst and DESI-0530 for more details.",
"#- Coordinates at zenith: +x = East = +RA; +y = South = -dec",
"",
"#- fiber=at spectrograph; fpdevice=numbering on focal plane",
"",
'#- fiber location spectro x y z']
for row in fiberpos:
fxlines.append("{:4d} {:4d} {:2d} {:12.6f} {:12.6f} {:12.6f}".format(
row['FIBER'], row['LOCATION'], row['SPECTRO'],
row['X'], row['Y'], row['Z'],
))
with open(filename, 'w') as fx:
fx.write('\n'.join(fxlines)+'\n')
| desihub/desimodel | py/desimodel/inputs/fiberpos.py | Python | bsd-3-clause | 11,623 |
__author__ = 'Nataly'
from model.project import Project
import string
import random
def random_string(prefix, maxlen):
symbols = string.ascii_letters
return prefix + "".join([random.choice(symbols) for i in range(random.randrange(maxlen))])
def test_add_project(app):
project = Project(random_string("name_", 10), random_string("description_", 10))
old_list = app.soap.get_project_list()
if project in old_list:
app.project.delete_project(project)
old_list = app.soap.get_project_list()
app.project.add_project(project)
new_list = app.soap.get_project_list()
old_list.append(project)
assert sorted(old_list, key=Project.id_or_max) == sorted(new_list, key=Project.id_or_max)
| simonenkong/python_training_mantis | test/test_add_project.py | Python | gpl-2.0 | 726 |
# Import the library
try:
# This is the statement you normally use.
graph = ximport("graph")
except ImportError:
# But since these examples are "inside" the library
# we may need to try something different when
# the library is not located in /Application Support
graph = ximport("__init__")
reload(graph)
size(500, 500)
g = graph.create()
# Create some relations.
g.add_edge("roof" , "house")
g.add_edge("garden" , "house")
g.add_edge("room" , "house")
g.add_edge("kitchen" , "room")
g.add_edge("bedroom" , "room")
g.add_edge("bathroom" , "room")
g.add_edge("living room" , "room")
g.add_edge("sofa" , "living room")
g.add_edge("table" , "living room")
# Calculate a good layout.
g.solve()
# Apply default rules for node colors and size,
# for example, important nodes become blue.
g.styles.apply()
# Draw the graph, indicating the direction of each relation
# and the two nodes that get the most traffic.
g.draw(
directed=True,
traffic=1
)
# You'll see that "house" is the key node,
# and that "room" gets the most traffic.
| zaqwes8811/coordinator-tasks | extern/GPL_libs/graph/graph_example1.py | Python | apache-2.0 | 1,154 |
import dateutil.parser
import yaml
import os
import shutil
import datetime
from setting import SettingHandler
class BlogpostHandler:
inst = None
def __init__(self):
BlogpostHandler.inst = self
self.pullList()
    def pullList(self):
        # Read the post index from the YAML database into memory.
        with open("../database/blog.yaml", encoding="utf-8") as f:
            self.blogpost_list = yaml.safe_load(f)
    def pushList(self):
        # Write the in-memory post index back to the YAML database.
        with open("../database/blog.yaml", "w", encoding="utf-8") as f:
            yaml.dump(self.blogpost_list, f,
                      default_flow_style=False, allow_unicode=True)
def pushPage(self):
import markdown
import tornado.template
from feedgen.feed import FeedGenerator
from datetime import timezone
fg = FeedGenerator()
fg.id('https://www.mukyu.win/')
fg.link(href='https://www.mukyu.win/')
fg.title('帕秋莉的奇妙歷險')
fg.subtitle('Imaginary City')
fg.author(name='魔法偽娘帕秋莉', email='[email protected]')
fg.rss_str(pretty=True)
fg.rss_file('../frontend/rss.xml')
local = timezone(datetime.timedelta(hours=8))
paging = int(SettingHandler.inst.get("paging"))
prc_list = sorted(self.blogpost_list,
key=lambda x: x["datetime"], reverse=True)
length = len(self.blogpost_list)
page_count = (length+paging-1)//paging
        # Write the total page count so the frontend pager knows how many pages exist
with open("../frontend/page/page.html", "w", encoding="utf-8") as f:
f.write(str(page_count))
for index in range(page_count):
page = open("../frontend/page/" + str(index+1) +
".html", "w", encoding="utf-8")
# data to put in template
fullblogpost_list = []
for blogpost in prc_list[index*paging: min(length, (index+1)*paging)]:
feed_item = fg.add_entry()
bp = blogpost.copy()
dt = blogpost["datetime"]
# folder path of file
bp['folder'] = "%04d/%02d/%02d/%s" % (
dt.year, dt.month, dt.day, blogpost["filename"])
feed_item.id('https://www.mukyu.win/#/blog/' + bp['folder'])
feed_item.link(
href='https://www.mukyu.win/#/blog/' + bp['folder'])
feed_item.title(bp["title"])
feed_item.updated(dt.replace(tzinfo=local))
feed_item.published(dt.replace(tzinfo=local))
with open("../frontend/blog/%s/README.md" % (bp['folder'],), encoding="utf-8") as md:
preview_content = ""
get_line = 0
new_line = False
for line in md:
if line[0] == '#':
continue
if line[0] == '!':
continue # Disable Image
if line == '\n':
if not new_line and preview_content:
preview_content += "\n"
new_line = True
continue
new_line = False
get_line += 1
if get_line > 3:
break
preview_content += line
bp["content"] = markdown.markdown(preview_content)
feed_item.description(bp["content"])
fullblogpost_list.append(bp)
loader = tornado.template.Loader("./front_templ")
cont = loader.load("page.html").generate(
blogpost_list=fullblogpost_list
)
page.write(cont.decode('utf-8'))
page.close()
def list(self):
return self.blogpost_list
    def _getBlogpost(self, filepath):
        # filepath has the form "YYYY/MM/DD/filename"; return the matching
        # entry from the post index, or None if there is no such post.
        part = filepath.split("/")
        for blogpost in self.blogpost_list:
            dt = blogpost["datetime"]
            if blogpost["filename"] == part[3] and dt.year == int(part[0]) and dt.month == int(part[1]) and dt.day == int(part[2]):
                return blogpost
        return None
def get(self, filepath):
blogpost = self._getBlogpost(filepath).copy()
with open("../frontend/blog/" + filepath + "/README.md") as f:
blogpost["md"] = f.read()
return blogpost
def updatePostMD(self, filepath, md):
with open("../frontend/blog/" + filepath + "/README.md", "w") as f:
f.write(md)
def updatePostTitle(self, filepath, title):
blogpost = self._getBlogpost(filepath)
blogpost["title"] = title
self.pushList()
def createPost(self, filepath):
os.makedirs("../frontend/blog/" + filepath + "/", exist_ok=True)
with open("../frontend/blog/" + filepath + "/README.md", "w") as f:
f.write("# NoTitle")
part = filepath.split("/")
blogpost = {
"filename": part[3],
"title": part[3],
"datetime": datetime.datetime(int(part[0]), int(part[1]), int(part[2]), 8, 0)
}
self.blogpost_list.append(blogpost)
self.pushList()
return None, blogpost
def deletePost(self, filepath):
blogpost = self._getBlogpost(filepath)
self.blogpost_list.remove(blogpost)
shutil.rmtree("../frontend/blog/" + filepath)
self.pushList()
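# --- Hedged usage sketch (editor addition, not part of the original app) ----
# Assumes the ../database/blog.yaml index and ../frontend/blog/ tree that the
# handler reads and writes already exist; the date and post name are made up.
if __name__ == '__main__':
    handler = BlogpostHandler()
    _, post = handler.createPost('2017/01/01/hello-world')
    handler.updatePostMD('2017/01/01/hello-world', '# Hello\n\nFirst post.\n')
    handler.updatePostTitle('2017/01/01/hello-world', 'Hello World')
    print(handler.get('2017/01/01/hello-world')['md'])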
| mudream4869/imaginary-city | admin/blogpost.py | Python | apache-2.0 | 5,467 |
# -*- coding: utf-8 -*-
''' Utilities for using Delphes.
This module requires libDelphes.so to be in LD_LIBRARY_PATH.
Examples:
        Code that loads the photons from the first 1000 events of the specified ROOT file::
from heptools.collider import delphes
@delphes.activate
def photon(i, photons):
if i > 1000:
delphes.deactivate(photon)
for p1, p2 in itertools.combinations(photons, 2):
do_something()
delphes.load_root_file(SOME_ROOT_FILE)
'''
from ROOT import gSystem
gSystem.Load('libDelphes')
from ROOT import TChain, ExRootTreeReader
import inspect
DELPHES_BRANCH = 'Delphes'
delphes_name = ['Event', 'Particle', 'Track', 'Tower', 'EFlowTrack', 'EFlowPhoton',
'EFlowNeutralHadron', 'GenJet', 'Jet', 'Electron', 'Photon', 'Muon',
'MissingET', 'ScalarHT']
callback_functions = {}
def standardize(snake):
'''This function returns the "standardized" name.
Args:
snake (str): a name to standardize
Returns:
str: The standardized name
Examples:
>>> standardize('gen_particles')
'GenParticle'
>>> standardize('muons')
'Muon'
'''
return ''.join(x.title() for x in snake.split('_'))[:-1]
def load_root_file(*args):
    '''This function starts loading the specified ROOT files.
    Every registered callback is invoked for each event that is read.
    If the number of callbacks drops to zero while loading,
    this function stops reading the remaining events.
    Args:
        *args: Paths of the ROOT files to load
    '''
chain = TChain(DELPHES_BRANCH)
for root_file in args:
chain.Add(root_file)
tree_reader = ExRootTreeReader(chain)
branches = {n: tree_reader.UseBranch(n) for n in delphes_name}
for i in xrange(tree_reader.GetEntries()):
if not callback_functions:
return
tree_reader.ReadEntry(i)
callback_copy = {f: callback_functions[f] for f in callback_functions}
for callback in callback_copy:
branch_name = callback_functions[callback]
kwargs = {b: branches[standardize(b)] for b in branch_name}
callback(i, **kwargs)
def activate(f):
'''A decorator to register a callback function.
    The first argument of each callback is expected to be an integer
    corresponding to the index of the event.
    The remaining arguments receive the particle arrays; heptools determines the
    correspondence from the argument names. See the example above.
Args:
f (callable): a function to wrap
'''
args = inspect.getargspec(f).args
args = [a for a in args if standardize(a) in delphes_name]
callback_functions[f] = args
return f
def deactivate(f):
'''Remove the function from the callback list.
Args:
f (callable): a function to remove
'''
del callback_functions[f]
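# --- Hedged usage sketch (editor addition, mirroring the docstring example) --
# 'SOME_ROOT_FILE.root' is a placeholder path; the argument names 'jets' and
# 'muons' are mapped to the Delphes 'Jet' and 'Muon' branches by standardize().
if __name__ == '__main__':
    @activate
    def count_objects(i, jets, muons):
        print('event %d: %d jets, %d muons'
              % (i, jets.GetEntries(), muons.GetEntries()))
        if i >= 10:
            deactivate(count_objects)
    load_root_file('SOME_ROOT_FILE.root')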
| hajifkd/heptools | heptools/collider/delphes.py | Python | gpl-3.0 | 2,948 |
# Authors:
# Jason Gerard DeRose <[email protected]>
#
# Copyright (C) 2008-2016 Red Hat
# see file 'COPYING' for use and warranty information
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
"""
RPC server.
Also see the `ipalib.rpc` module.
"""
from xml.sax.saxutils import escape
import os
import datetime
import json
import traceback
import gssapi
import time
import ldap.controls
from pyasn1.type import univ, namedtype
from pyasn1.codec.ber import encoder
import six
# pylint: disable=import-error
from six.moves.urllib.parse import parse_qs
from six.moves.xmlrpc_client import Fault
# pylint: enable=import-error
from ipalib import plugable, errors
from ipalib.capabilities import VERSION_WITHOUT_CAPABILITIES
from ipalib.frontend import Local
from ipalib.backend import Executioner
from ipalib.errors import (PublicError, InternalError, JSONError,
CCacheError, RefererError, InvalidSessionPassword, NotFound, ACIError,
ExecutionError, PasswordExpired, KrbPrincipalExpired, UserLocked)
from ipalib.request import context, destroy_context
from ipalib.rpc import (xml_dumps, xml_loads,
json_encode_binary, json_decode_binary)
from ipalib.util import parse_time_duration, normalize_name
from ipapython.dn import DN
from ipaserver.plugins.ldap2 import ldap2
from ipaserver.session import (
get_session_mgr, AuthManager, get_ipa_ccache_name,
load_ccache_data, bind_ipa_ccache, release_ipa_ccache, fmt_time,
default_max_session_duration, krbccache_dir, krbccache_prefix)
from ipalib.backend import Backend
from ipalib.krb_utils import (
krb_ticket_expiration_threshold, krb5_format_principal_name,
krb5_format_service_principal_name, get_credentials, get_credentials_if_valid)
from ipapython import ipautil
from ipaplatform.paths import paths
from ipapython.version import VERSION
from ipalib.text import _
if six.PY3:
unicode = str
HTTP_STATUS_SUCCESS = '200 Success'
HTTP_STATUS_SERVER_ERROR = '500 Internal Server Error'
_not_found_template = """<html>
<head>
<title>404 Not Found</title>
</head>
<body>
<h1>Not Found</h1>
<p>
The requested URL <strong>%(url)s</strong> was not found on this server.
</p>
</body>
</html>"""
_bad_request_template = """<html>
<head>
<title>400 Bad Request</title>
</head>
<body>
<h1>Bad Request</h1>
<p>
<strong>%(message)s</strong>
</p>
</body>
</html>"""
_internal_error_template = """<html>
<head>
<title>500 Internal Server Error</title>
</head>
<body>
<h1>Internal Server Error</h1>
<p>
<strong>%(message)s</strong>
</p>
</body>
</html>"""
_unauthorized_template = """<html>
<head>
<title>401 Unauthorized</title>
</head>
<body>
<h1>Invalid Authentication</h1>
<p>
<strong>%(message)s</strong>
</p>
</body>
</html>"""
_success_template = """<html>
<head>
<title>200 Success</title>
</head>
<body>
<h1>%(title)s</h1>
<p>
<strong>%(message)s</strong>
</p>
</body>
</html>"""
class HTTP_Status(plugable.Plugin):
def not_found(self, environ, start_response, url, message):
"""
Return a 404 Not Found error.
"""
status = '404 Not Found'
response_headers = [('Content-Type', 'text/html; charset=utf-8')]
self.info('%s: URL="%s", %s', status, url, message)
start_response(status, response_headers)
output = _not_found_template % dict(url=escape(url))
return [output]
def bad_request(self, environ, start_response, message):
"""
Return a 400 Bad Request error.
"""
status = '400 Bad Request'
response_headers = [('Content-Type', 'text/html; charset=utf-8')]
self.info('%s: %s', status, message)
start_response(status, response_headers)
output = _bad_request_template % dict(message=escape(message))
return [output]
def internal_error(self, environ, start_response, message):
"""
Return a 500 Internal Server Error.
"""
status = HTTP_STATUS_SERVER_ERROR
response_headers = [('Content-Type', 'text/html; charset=utf-8')]
self.error('%s: %s', status, message)
start_response(status, response_headers)
output = _internal_error_template % dict(message=escape(message))
return [output]
def unauthorized(self, environ, start_response, message, reason):
"""
Return a 401 Unauthorized error.
"""
status = '401 Unauthorized'
response_headers = [('Content-Type', 'text/html; charset=utf-8')]
if reason:
response_headers.append(('X-IPA-Rejection-Reason', reason))
self.info('%s: %s', status, message)
start_response(status, response_headers)
output = _unauthorized_template % dict(message=escape(message))
return [output]
def read_input(environ):
"""
Read the request body from environ['wsgi.input'].
"""
try:
length = int(environ.get('CONTENT_LENGTH'))
except (ValueError, TypeError):
return
return environ['wsgi.input'].read(length)
def params_2_args_options(params):
if len(params) == 0:
return (tuple(), dict())
if len(params) == 1:
return (params[0], dict())
return (params[0], params[1])
def nicify_query(query, encoding='utf-8'):
if not query:
return
for (key, value) in query.items():
if len(value) == 0:
yield (key, None)
elif len(value) == 1:
yield (key, value[0].decode(encoding))
else:
yield (key, tuple(v.decode(encoding) for v in value))
def extract_query(environ):
"""
    Return the query as a ``dict``, or ``None`` if no query is present.
"""
qstr = None
if environ['REQUEST_METHOD'] == 'POST':
if environ['CONTENT_TYPE'] == 'application/x-www-form-urlencoded':
qstr = read_input(environ)
elif environ['REQUEST_METHOD'] == 'GET':
qstr = environ['QUERY_STRING']
if qstr:
query = dict(nicify_query(parse_qs(qstr))) # keep_blank_values=True)
else:
query = {}
environ['wsgi.query'] = query
return query
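# Worked illustration (editor addition, not in the original source): for a GET
# request such as  ?user=admin&group=a&group=b  the helpers above first let
# parse_qs build {'user': ['admin'], 'group': ['a', 'b']}, and nicify_query
# then flattens that to {'user': 'admin', 'group': ('a', 'b')} -- single
# values become scalars, repeated keys become tuples, empty values become None.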
class wsgi_dispatch(Executioner, HTTP_Status):
"""
WSGI routing middleware and entry point into IPA server.
The `wsgi_dispatch` plugin is the entry point into the IPA server.
    It dispatches the request to the appropriate wsgi application
handler which is specific to the authentication and RPC mechanism.
"""
def __init__(self, api):
super(wsgi_dispatch, self).__init__(api)
self.__apps = {}
def __iter__(self):
for key in sorted(self.__apps):
yield key
def __getitem__(self, key):
return self.__apps[key]
def __contains__(self, key):
return key in self.__apps
def __call__(self, environ, start_response):
self.debug('WSGI wsgi_dispatch.__call__:')
try:
return self.route(environ, start_response)
finally:
destroy_context()
def _on_finalize(self):
self.url = self.env['mount_ipa']
super(wsgi_dispatch, self)._on_finalize()
def route(self, environ, start_response):
key = environ.get('PATH_INFO')
if key in self.__apps:
app = self.__apps[key]
return app(environ, start_response)
url = environ['SCRIPT_NAME'] + environ['PATH_INFO']
return self.not_found(environ, start_response, url,
'URL fragment "%s" does not have a handler' % (key))
def mount(self, app, key):
"""
Mount the WSGI application *app* at *key*.
"""
# if self.__islocked__():
# raise Exception('%s.mount(): locked, cannot mount %r at %r' % (
# self.name, app, key)
# )
if key in self.__apps:
raise Exception('%s.mount(): cannot replace %r with %r at %r' % (
self.name, self.__apps[key], app, key)
)
self.debug('Mounting %r at %r', app, key)
self.__apps[key] = app
class WSGIExecutioner(Executioner):
"""
Base class for execution backends with a WSGI application interface.
"""
content_type = None
key = ''
_system_commands = {}
def _on_finalize(self):
self.url = self.env.mount_ipa + self.key
super(WSGIExecutioner, self)._on_finalize()
if 'wsgi_dispatch' in self.api.Backend:
self.api.Backend.wsgi_dispatch.mount(self, self.key)
def _get_command(self, name):
try:
# assume version 1 for unversioned command calls
command = self.api.Command[name, '1']
except KeyError:
try:
command = self.api.Command[name]
except KeyError:
command = None
if command is None or isinstance(command, Local):
raise errors.CommandError(name=name)
return command
def wsgi_execute(self, environ):
result = None
error = None
_id = None
lang = os.environ['LANG']
name = None
args = ()
options = {}
command = None
e = None
if not 'HTTP_REFERER' in environ:
return self.marshal(result, RefererError(referer='missing'), _id)
if not environ['HTTP_REFERER'].startswith('https://%s/ipa' % self.api.env.host) and not self.env.in_tree:
return self.marshal(result, RefererError(referer=environ['HTTP_REFERER']), _id)
try:
if ('HTTP_ACCEPT_LANGUAGE' in environ):
lang_reg_w_q = environ['HTTP_ACCEPT_LANGUAGE'].split(',')[0]
lang_reg = lang_reg_w_q.split(';')[0]
lang_ = lang_reg.split('-')[0]
if '-' in lang_reg:
reg = lang_reg.split('-')[1].upper()
else:
reg = lang_.upper()
os.environ['LANG'] = '%s_%s' % (lang_, reg)
if (
environ.get('CONTENT_TYPE', '').startswith(self.content_type)
and environ['REQUEST_METHOD'] == 'POST'
):
data = read_input(environ)
(name, args, options, _id) = self.unmarshal(data)
else:
(name, args, options, _id) = self.simple_unmarshal(environ)
if name in self._system_commands:
result = self._system_commands[name](self, *args, **options)
else:
command = self._get_command(name)
result = command(*args, **options)
except PublicError as e:
if self.api.env.debug:
self.debug('WSGI wsgi_execute PublicError: %s', traceback.format_exc())
error = e
except Exception as e:
self.exception(
'non-public: %s: %s', e.__class__.__name__, str(e)
)
error = InternalError()
finally:
os.environ['LANG'] = lang
principal = getattr(context, 'principal', 'UNKNOWN')
if command is not None:
try:
params = command.args_options_2_params(*args, **options)
except Exception as e:
self.info(
'exception %s caught when converting options: %s', e.__class__.__name__, str(e)
)
# get at least some context of what is going on
params = options
if error:
result_string = type(e).__name__
else:
result_string = 'SUCCESS'
self.info('[%s] %s: %s(%s): %s',
type(self).__name__,
principal,
name,
', '.join(command._repr_iter(**params)),
result_string)
else:
self.info('[%s] %s: %s: %s',
type(self).__name__,
principal,
name,
type(e).__name__)
version = options.get('version', VERSION_WITHOUT_CAPABILITIES)
return self.marshal(result, error, _id, version)
def simple_unmarshal(self, environ):
name = environ['PATH_INFO'].strip('/')
options = extract_query(environ)
return (name, tuple(), options, None)
def __call__(self, environ, start_response):
"""
WSGI application for execution.
"""
self.debug('WSGI WSGIExecutioner.__call__:')
try:
status = HTTP_STATUS_SUCCESS
response = self.wsgi_execute(environ)
headers = [('Content-Type', self.content_type + '; charset=utf-8')]
except Exception:
self.exception('WSGI %s.__call__():', self.name)
status = HTTP_STATUS_SERVER_ERROR
response = status
headers = [('Content-Type', 'text/plain; charset=utf-8')]
session_data = getattr(context, 'session_data', None)
if session_data is not None:
# Send session cookie back and store session data
            # FIXME: the URL path should be retrieved from somewhere (but where?), not hardcoded
session_mgr = get_session_mgr()
session_cookie = session_mgr.generate_cookie('/ipa', session_data['session_id'],
session_data['session_expiration_timestamp'])
headers.append(('Set-Cookie', session_cookie))
start_response(status, headers)
return [response]
def unmarshal(self, data):
raise NotImplementedError('%s.unmarshal()' % type(self).__name__)
def marshal(self, result, error, _id=None,
version=VERSION_WITHOUT_CAPABILITIES):
raise NotImplementedError('%s.marshal()' % type(self).__name__)
class jsonserver(WSGIExecutioner, HTTP_Status):
"""
JSON RPC server.
For information on the JSON-RPC spec, see:
http://json-rpc.org/wiki/specification
"""
content_type = 'application/json'
def __call__(self, environ, start_response):
'''
'''
self.debug('WSGI jsonserver.__call__:')
response = super(jsonserver, self).__call__(environ, start_response)
return response
def marshal(self, result, error, _id=None,
version=VERSION_WITHOUT_CAPABILITIES):
if error:
assert isinstance(error, PublicError)
error = dict(
code=error.errno,
message=error.strerror,
data=error.kw,
name=unicode(error.__class__.__name__),
)
principal = getattr(context, 'principal', 'UNKNOWN')
response = dict(
result=result,
error=error,
id=_id,
principal=unicode(principal),
version=unicode(VERSION),
)
response = json_encode_binary(response, version)
return json.dumps(response, sort_keys=True, indent=4)
def unmarshal(self, data):
try:
d = json.loads(data)
except ValueError as e:
raise JSONError(error=e)
if not isinstance(d, dict):
raise JSONError(error=_('Request must be a dict'))
if 'method' not in d:
raise JSONError(error=_('Request is missing "method"'))
if 'params' not in d:
raise JSONError(error=_('Request is missing "params"'))
d = json_decode_binary(d)
method = d['method']
params = d['params']
_id = d.get('id')
if not isinstance(params, (list, tuple)):
raise JSONError(error=_('params must be a list'))
if len(params) != 2:
raise JSONError(error=_('params must contain [args, options]'))
args = params[0]
if not isinstance(args, (list, tuple)):
raise JSONError(error=_('params[0] (aka args) must be a list'))
options = params[1]
if not isinstance(options, dict):
raise JSONError(error=_('params[1] (aka options) must be a dict'))
options = dict((str(k), v) for (k, v) in options.items())
return (method, args, options, _id)
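# Illustrative only (editor addition, not part of FreeIPA): the request shape
# that jsonserver.unmarshal() above accepts, shown as the decoded structure.
# The command name and option values are placeholders, not a documented call.
_EXAMPLE_JSON_REQUEST = {
    "method": "user_show",   # looked up via self.api.Command[name, '1']
    "params": [
        ["jdoe"],            # params[0]: positional args
        {"all": True},       # params[1]: options (keyword arguments)
    ],
    "id": 0,                 # optional request id, echoed back by marshal()
}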
class AuthManagerKerb(AuthManager):
'''
Instances of the AuthManger class are used to handle
authentication events delivered by the SessionManager. This class
specifcally handles the management of Kerbeos credentials which
may be stored in the session.
'''
def __init__(self, name):
super(AuthManagerKerb, self).__init__(name)
def logout(self, session_data):
'''
The current user has requested to be logged out. To accomplish
this we remove the user's kerberos credentials from their
session. This does not destroy the session, it just prevents
it from being used for fast authentication. Because the
credentials are no longer in the session cache any future
attempt will require the acquisition of credentials using one
of the login mechanisms.
'''
if 'ccache_data' in session_data:
self.debug('AuthManager.logout.%s: deleting ccache_data', self.name)
del session_data['ccache_data']
else:
self.error('AuthManager.logout.%s: session_data does not contain ccache_data', self.name)
class KerberosSession(object):
'''
Functionally shared by all RPC handlers using both sessions and
Kerberos. This class must be implemented as a mixin class rather
than the more obvious technique of subclassing because the classes
needing this do not share a common base class.
'''
def kerb_session_on_finalize(self):
'''
Initialize values from the Env configuration.
Why do it this way and not simply reference
api.env.session_auth_duration? Because that config item cannot
be used directly, it must be parsed and converted to an
integer. It would be inefficient to reparse it on every
request. So we parse it once and store the result in the class
instance.
'''
# Set the session expiration time
try:
seconds = parse_time_duration(self.api.env.session_auth_duration)
self.session_auth_duration = int(seconds)
self.debug("session_auth_duration: %s", datetime.timedelta(seconds=self.session_auth_duration))
except Exception as e:
self.session_auth_duration = default_max_session_duration
self.error('unable to parse session_auth_duration, defaulting to %d: %s',
self.session_auth_duration, e)
def update_session_expiration(self, session_data, krb_endtime):
'''
Each time a session is created or accessed we need to update
        its expiration time. The expiration time is set inside the
session_data.
:parameters:
session_data
            The session data whose expiration is being updated.
krb_endtime
The UNIX timestamp for when the Kerberos credentials expire.
:returns:
None
'''
# Account for clock skew and/or give us some time leeway
krb_expiration = krb_endtime - krb_ticket_expiration_threshold
# Set the session expiration time
session_mgr = get_session_mgr()
session_mgr.set_session_expiration_time(session_data,
duration=self.session_auth_duration,
max_age=krb_expiration,
duration_type=self.api.env.session_duration_type)
def finalize_kerberos_acquisition(self, who, ccache_name, environ, start_response, headers=None):
if headers is None:
headers = []
        # Retrieve the session data (or create a new session)
session_mgr = get_session_mgr()
session_data = session_mgr.load_session_data(environ.get('HTTP_COOKIE'))
session_id = session_data['session_id']
self.debug('finalize_kerberos_acquisition: %s ccache_name="%s" session_id="%s"',
who, ccache_name, session_id)
# Copy the ccache file contents into the session data
session_data['ccache_data'] = load_ccache_data(ccache_name)
# Set when the session will expire
creds = get_credentials(ccache_name=ccache_name)
endtime = creds.lifetime + time.time()
self.update_session_expiration(session_data, endtime)
# Store the session data now that it's been updated with the ccache
session_mgr.store_session_data(session_data)
# The request is finished with the ccache, destroy it.
release_ipa_ccache(ccache_name)
# Return success and set session cookie
session_cookie = session_mgr.generate_cookie('/ipa', session_id,
session_data['session_expiration_timestamp'])
headers.append(('Set-Cookie', session_cookie))
start_response(HTTP_STATUS_SUCCESS, headers)
return ['']
class KerberosWSGIExecutioner(WSGIExecutioner, HTTP_Status, KerberosSession):
"""Base class for xmlserver and jsonserver_kerb
"""
def _on_finalize(self):
super(KerberosWSGIExecutioner, self)._on_finalize()
self.kerb_session_on_finalize()
def __call__(self, environ, start_response):
self.debug('KerberosWSGIExecutioner.__call__:')
user_ccache=environ.get('KRB5CCNAME')
headers = [('Content-Type', '%s; charset=utf-8' % self.content_type)]
if user_ccache is None:
status = HTTP_STATUS_SERVER_ERROR
self.log.error(
'%s: %s', status,
'KerberosWSGIExecutioner.__call__: '
'KRB5CCNAME not defined in HTTP request environment')
return self.marshal(None, CCacheError())
try:
self.create_context(ccache=user_ccache)
response = super(KerberosWSGIExecutioner, self).__call__(
environ, start_response)
session_data = getattr(context, 'session_data', None)
if (session_data is None and self.env.context != 'lite'):
self.finalize_kerberos_acquisition(
'xmlserver', user_ccache, environ, start_response, headers)
except PublicError as e:
status = HTTP_STATUS_SUCCESS
response = status
start_response(status, headers)
return self.marshal(None, e)
finally:
destroy_context()
return response
class xmlserver(KerberosWSGIExecutioner):
"""
Execution backend plugin for XML-RPC server.
Also see the `ipalib.rpc.xmlclient` plugin.
"""
content_type = 'text/xml'
key = '/xml'
def listMethods(self, *params):
"""list methods for XML-RPC introspection"""
if params:
raise errors.ZeroArgumentError(name='system.listMethods')
return (tuple(unicode(cmd.name) for cmd in self.Command()
if cmd is self.Command[cmd.name]) +
tuple(unicode(name) for name in self._system_commands))
def _get_method_name(self, name, *params):
"""Get a method name for XML-RPC introspection commands"""
if not params:
raise errors.RequirementError(name='method name')
elif len(params) > 1:
raise errors.MaxArgumentError(name=name, count=1)
[method_name] = params
return method_name
def methodSignature(self, *params):
"""get method signature for XML-RPC introspection"""
method_name = self._get_method_name('system.methodSignature', *params)
if method_name in self._system_commands:
# TODO
# for now let's not go out of our way to document standard XML-RPC
return u'undef'
else:
self._get_command(method_name)
# All IPA commands return a dict (struct),
# and take a params, options - list and dict (array, struct)
return [[u'struct', u'array', u'struct']]
def methodHelp(self, *params):
"""get method docstring for XML-RPC introspection"""
method_name = self._get_method_name('system.methodHelp', *params)
if method_name in self._system_commands:
return u''
else:
command = self._get_command(method_name)
return unicode(command.doc or '')
_system_commands = {
'system.listMethods': listMethods,
'system.methodSignature': methodSignature,
'system.methodHelp': methodHelp,
}
def unmarshal(self, data):
(params, name) = xml_loads(data)
if name in self._system_commands:
# For XML-RPC introspection, return params directly
return (name, params, {}, None)
(args, options) = params_2_args_options(params)
if 'version' not in options:
# Keep backwards compatibility with client containing
# bug https://fedorahosted.org/freeipa/ticket/3294:
# If `version` is not given in XML-RPC, assume an old version
options['version'] = VERSION_WITHOUT_CAPABILITIES
return (name, args, options, None)
def marshal(self, result, error, _id=None,
version=VERSION_WITHOUT_CAPABILITIES):
if error:
self.debug('response: %s: %s', error.__class__.__name__, str(error))
response = Fault(error.errno, error.strerror)
else:
if isinstance(result, dict):
self.debug('response: entries returned %d', result.get('count', 1))
response = (result,)
return xml_dumps(response, version, methodresponse=True)
class jsonserver_session(jsonserver, KerberosSession):
"""
JSON RPC server protected with session auth.
"""
key = '/session/json'
def __init__(self, api):
super(jsonserver_session, self).__init__(api)
name = '{0}_{1}'.format(self.__class__.__name__, id(self))
auth_mgr = AuthManagerKerb(name)
session_mgr = get_session_mgr()
session_mgr.auth_mgr.register(auth_mgr.name, auth_mgr)
def _on_finalize(self):
super(jsonserver_session, self)._on_finalize()
self.kerb_session_on_finalize()
def need_login(self, start_response):
status = '401 Unauthorized'
headers = []
response = ''
self.debug('jsonserver_session: %s need login', status)
start_response(status, headers)
return [response]
def __call__(self, environ, start_response):
'''
'''
self.debug('WSGI jsonserver_session.__call__:')
# Load the session data
session_mgr = get_session_mgr()
session_data = session_mgr.load_session_data(environ.get('HTTP_COOKIE'))
session_id = session_data['session_id']
self.debug('jsonserver_session.__call__: session_id=%s start_timestamp=%s access_timestamp=%s expiration_timestamp=%s',
session_id,
fmt_time(session_data['session_start_timestamp']),
fmt_time(session_data['session_access_timestamp']),
fmt_time(session_data['session_expiration_timestamp']))
ccache_data = session_data.get('ccache_data')
# Redirect to login if no Kerberos credentials
if ccache_data is None:
self.debug('no ccache, need login')
return self.need_login(start_response)
ipa_ccache_name = bind_ipa_ccache(ccache_data)
# Redirect to login if Kerberos credentials are expired
creds = get_credentials_if_valid(ccache_name=ipa_ccache_name)
if not creds:
self.debug('ccache expired, deleting session, need login')
# The request is finished with the ccache, destroy it.
release_ipa_ccache(ipa_ccache_name)
return self.need_login(start_response)
# Update the session expiration based on the Kerberos expiration
endtime = creds.lifetime + time.time()
self.update_session_expiration(session_data, endtime)
# Store the session data in the per-thread context
setattr(context, 'session_data', session_data)
# This may fail if a ticket from wrong realm was handled via browser
try:
self.create_context(ccache=ipa_ccache_name)
except ACIError as e:
return self.unauthorized(environ, start_response, str(e), 'denied')
try:
response = super(jsonserver_session, self).__call__(environ, start_response)
finally:
# Kerberos may have updated the ccache data during the
            # execution of the command, therefore we need to refresh our
# copy of it in the session data so the next command sees
# the same state of the ccache.
#
# However we must be careful not to restore the ccache
# data in the session data if it was explicitly deleted
# during the execution of the command. For example the
# logout command removes the ccache data from the session
# data to invalidate the session credentials.
if 'ccache_data' in session_data:
session_data['ccache_data'] = load_ccache_data(ipa_ccache_name)
# The request is finished with the ccache, destroy it.
release_ipa_ccache(ipa_ccache_name)
# Store the session data.
session_mgr.store_session_data(session_data)
destroy_context()
return response
class jsonserver_kerb(jsonserver, KerberosWSGIExecutioner):
"""
JSON RPC server protected with kerberos auth.
"""
key = '/json'
class KerberosLogin(Backend, KerberosSession, HTTP_Status):
key = None
def _on_finalize(self):
super(KerberosLogin, self)._on_finalize()
self.api.Backend.wsgi_dispatch.mount(self, self.key)
self.kerb_session_on_finalize()
def __call__(self, environ, start_response):
self.debug('WSGI KerberosLogin.__call__:')
# Get the ccache created by mod_auth_gssapi
user_ccache_name=environ.get('KRB5CCNAME')
if user_ccache_name is None:
return self.internal_error(environ, start_response,
'login_kerberos: KRB5CCNAME not defined in HTTP request environment')
return self.finalize_kerberos_acquisition('login_kerberos', user_ccache_name, environ, start_response)
class login_kerberos(KerberosLogin):
key = '/session/login_kerberos'
class login_x509(KerberosLogin):
key = '/session/login_x509'
class login_password(Backend, KerberosSession, HTTP_Status):
content_type = 'text/plain'
key = '/session/login_password'
def _on_finalize(self):
super(login_password, self)._on_finalize()
self.api.Backend.wsgi_dispatch.mount(self, self.key)
self.kerb_session_on_finalize()
def __call__(self, environ, start_response):
self.debug('WSGI login_password.__call__:')
# Get the user and password parameters from the request
content_type = environ.get('CONTENT_TYPE', '').lower()
if not content_type.startswith('application/x-www-form-urlencoded'):
return self.bad_request(environ, start_response, "Content-Type must be application/x-www-form-urlencoded")
method = environ.get('REQUEST_METHOD', '').upper()
if method == 'POST':
query_string = read_input(environ)
else:
return self.bad_request(environ, start_response, "HTTP request method must be POST")
try:
query_dict = parse_qs(query_string)
except Exception as e:
return self.bad_request(environ, start_response, "cannot parse query data")
user = query_dict.get('user', None)
if user is not None:
if len(user) == 1:
user = user[0]
else:
return self.bad_request(environ, start_response, "more than one user parameter")
else:
return self.bad_request(environ, start_response, "no user specified")
# allows login in the form user@SERVER_REALM or user@server_realm
# FIXME: uppercasing may be removed when better handling of UPN
# is introduced
parts = normalize_name(user)
if "domain" in parts:
# username is of the form user@SERVER_REALM or user@server_realm
# check whether the realm is server's realm
# Users from other realms are not supported
# (they do not have necessary LDAP entry, LDAP connect will fail)
if parts["domain"].upper()==self.api.env.realm:
user=parts["name"]
else:
return self.unauthorized(environ, start_response, '', 'denied')
elif "flatname" in parts:
# username is of the form NetBIOS\user
return self.unauthorized(environ, start_response, '', 'denied')
else:
# username is of the form user or of some wild form, e.g.
# user@REALM1@REALM2 or NetBIOS1\NetBIOS2\user (see normalize_name)
# wild form username will fail at kinit, so nothing needs to be done
pass
password = query_dict.get('password', None)
if password is not None:
if len(password) == 1:
password = password[0]
else:
return self.bad_request(environ, start_response, "more than one password parameter")
else:
return self.bad_request(environ, start_response, "no password specified")
# Get the ccache we'll use and attempt to get credentials in it with user,password
ipa_ccache_name = get_ipa_ccache_name()
try:
self.kinit(user, self.api.env.realm, password, ipa_ccache_name)
except PasswordExpired as e:
return self.unauthorized(environ, start_response, str(e), 'password-expired')
except InvalidSessionPassword as e:
return self.unauthorized(environ, start_response, str(e), 'invalid-password')
except KrbPrincipalExpired as e:
return self.unauthorized(environ,
start_response,
str(e),
'krbprincipal-expired')
except UserLocked as e:
return self.unauthorized(environ,
start_response,
str(e),
'user-locked')
return self.finalize_kerberos_acquisition('login_password', ipa_ccache_name, environ, start_response)
def kinit(self, user, realm, password, ccache_name):
# get http service ccache as an armor for FAST to enable OTP authentication
armor_principal = str(krb5_format_service_principal_name(
'HTTP', self.api.env.host, realm))
keytab = paths.IPA_KEYTAB
armor_name = "%sA_%s" % (krbccache_prefix, user)
armor_path = os.path.join(krbccache_dir, armor_name)
self.debug('Obtaining armor ccache: principal=%s keytab=%s ccache=%s',
armor_principal, keytab, armor_path)
try:
ipautil.kinit_keytab(armor_principal, paths.IPA_KEYTAB, armor_path)
except gssapi.exceptions.GSSError as e:
raise CCacheError(message=unicode(e))
# Format the user as a kerberos principal
principal = krb5_format_principal_name(user, realm)
try:
ipautil.kinit_password(principal, password, ccache_name,
armor_ccache_name=armor_path)
self.debug('Cleanup the armor ccache')
ipautil.run(
[paths.KDESTROY, '-A', '-c', armor_path],
env={'KRB5CCNAME': armor_path},
raiseonerr=False)
except RuntimeError as e:
if ('kinit: Cannot read password while '
'getting initial credentials') in str(e):
raise PasswordExpired(principal=principal, message=unicode(e))
elif ('kinit: Client\'s entry in database'
' has expired while getting initial credentials') in str(e):
raise KrbPrincipalExpired(principal=principal,
message=unicode(e))
elif ('kinit: Clients credentials have been revoked '
'while getting initial credentials') in str(e):
raise UserLocked(principal=principal,
message=unicode(e))
raise InvalidSessionPassword(principal=principal,
message=unicode(e))
class change_password(Backend, HTTP_Status):
content_type = 'text/plain'
key = '/session/change_password'
def _on_finalize(self):
super(change_password, self)._on_finalize()
self.api.Backend.wsgi_dispatch.mount(self, self.key)
def __call__(self, environ, start_response):
self.info('WSGI change_password.__call__:')
# Get the user and password parameters from the request
content_type = environ.get('CONTENT_TYPE', '').lower()
if not content_type.startswith('application/x-www-form-urlencoded'):
return self.bad_request(environ, start_response, "Content-Type must be application/x-www-form-urlencoded")
method = environ.get('REQUEST_METHOD', '').upper()
if method == 'POST':
query_string = read_input(environ)
else:
return self.bad_request(environ, start_response, "HTTP request method must be POST")
try:
query_dict = parse_qs(query_string)
except Exception as e:
return self.bad_request(environ, start_response, "cannot parse query data")
data = {}
for field in ('user', 'old_password', 'new_password', 'otp'):
value = query_dict.get(field, None)
if value is not None:
if len(value) == 1:
data[field] = value[0]
else:
return self.bad_request(environ, start_response, "more than one %s parameter"
% field)
elif field != 'otp': # otp is optional
return self.bad_request(environ, start_response, "no %s specified" % field)
# start building the response
self.info("WSGI change_password: start password change of user '%s'", data['user'])
status = HTTP_STATUS_SUCCESS
response_headers = [('Content-Type', 'text/html; charset=utf-8')]
title = 'Password change rejected'
result = 'error'
policy_error = None
bind_dn = DN((self.api.Object.user.primary_key.name, data['user']),
self.api.env.container_user, self.api.env.basedn)
try:
pw = data['old_password']
if data.get('otp'):
pw = data['old_password'] + data['otp']
conn = ldap2(self.api)
conn.connect(bind_dn=bind_dn, bind_pw=pw)
except (NotFound, ACIError):
result = 'invalid-password'
message = 'The old password or username is not correct.'
except Exception as e:
message = "Could not connect to LDAP server."
self.error("change_password: cannot authenticate '%s' to LDAP server: %s",
data['user'], str(e))
else:
try:
conn.modify_password(bind_dn, data['new_password'], data['old_password'], skip_bind=True)
except ExecutionError as e:
result = 'policy-error'
policy_error = escape(str(e))
message = "Password change was rejected: %s" % escape(str(e))
except Exception as e:
message = "Could not change the password"
self.error("change_password: cannot change password of '%s': %s",
data['user'], str(e))
else:
result = 'ok'
title = "Password change successful"
message = "Password was changed."
finally:
if conn.isconnected():
conn.disconnect()
self.info('%s: %s', status, message)
response_headers.append(('X-IPA-Pwchange-Result', result))
if policy_error:
response_headers.append(('X-IPA-Pwchange-Policy-Error', policy_error))
start_response(status, response_headers)
output = _success_template % dict(title=str(title),
message=str(message))
return [output]
class sync_token(Backend, HTTP_Status):
content_type = 'text/plain'
key = '/session/sync_token'
class OTPSyncRequest(univ.Sequence):
OID = "2.16.840.1.113730.3.8.10.6"
componentType = namedtype.NamedTypes(
namedtype.NamedType('firstCode', univ.OctetString()),
namedtype.NamedType('secondCode', univ.OctetString()),
namedtype.OptionalNamedType('tokenDN', univ.OctetString())
)
def _on_finalize(self):
super(sync_token, self)._on_finalize()
self.api.Backend.wsgi_dispatch.mount(self, self.key)
def __call__(self, environ, start_response):
# Make sure this is a form request.
content_type = environ.get('CONTENT_TYPE', '').lower()
if not content_type.startswith('application/x-www-form-urlencoded'):
return self.bad_request(environ, start_response, "Content-Type must be application/x-www-form-urlencoded")
# Make sure this is a POST request.
method = environ.get('REQUEST_METHOD', '').upper()
if method == 'POST':
query_string = read_input(environ)
else:
return self.bad_request(environ, start_response, "HTTP request method must be POST")
# Parse the query string to a dictionary.
try:
query_dict = parse_qs(query_string)
except Exception as e:
return self.bad_request(environ, start_response, "cannot parse query data")
data = {}
for field in ('user', 'password', 'first_code', 'second_code', 'token'):
value = query_dict.get(field, None)
if value is not None:
if len(value) == 1:
data[field] = value[0]
else:
return self.bad_request(environ, start_response, "more than one %s parameter"
% field)
elif field != 'token':
return self.bad_request(environ, start_response, "no %s specified" % field)
# Create the request control.
sr = self.OTPSyncRequest()
sr.setComponentByName('firstCode', data['first_code'])
sr.setComponentByName('secondCode', data['second_code'])
if 'token' in data:
try:
token_dn = DN(data['token'])
except ValueError:
token_dn = DN((self.api.Object.otptoken.primary_key.name, data['token']),
self.api.env.container_otp, self.api.env.basedn)
sr.setComponentByName('tokenDN', str(token_dn))
rc = ldap.controls.RequestControl(sr.OID, True, encoder.encode(sr))
# Resolve the user DN
bind_dn = DN((self.api.Object.user.primary_key.name, data['user']),
self.api.env.container_user, self.api.env.basedn)
# Start building the response.
status = HTTP_STATUS_SUCCESS
response_headers = [('Content-Type', 'text/html; charset=utf-8')]
title = 'Token sync rejected'
# Perform the synchronization.
conn = ldap2(self.api)
try:
conn.connect(bind_dn=bind_dn,
bind_pw=data['password'],
serverctrls=[rc,])
result = 'ok'
title = "Token sync successful"
message = "Token was synchronized."
except (NotFound, ACIError):
result = 'invalid-credentials'
message = 'The username, password or token codes are not correct.'
except Exception as e:
result = 'error'
message = "Could not connect to LDAP server."
self.error("token_sync: cannot authenticate '%s' to LDAP server: %s",
data['user'], str(e))
finally:
if conn.isconnected():
conn.disconnect()
# Report status and return.
response_headers.append(('X-IPA-TokenSync-Result', result))
start_response(status, response_headers)
output = _success_template % dict(title=str(title),
message=str(message))
return [output]
class xmlserver_session(xmlserver, KerberosSession):
"""
XML RPC server protected with session auth.
"""
key = '/session/xml'
def __init__(self, api):
super(xmlserver_session, self).__init__(api)
name = '{0}_{1}'.format(self.__class__.__name__, id(self))
auth_mgr = AuthManagerKerb(name)
session_mgr = get_session_mgr()
session_mgr.auth_mgr.register(auth_mgr.name, auth_mgr)
def _on_finalize(self):
super(xmlserver_session, self)._on_finalize()
self.kerb_session_on_finalize()
def need_login(self, start_response):
status = '401 Unauthorized'
headers = []
response = ''
self.debug('xmlserver_session: %s need login', status)
start_response(status, headers)
return [response]
def __call__(self, environ, start_response):
'''
'''
self.debug('WSGI xmlserver_session.__call__:')
# Load the session data
session_mgr = get_session_mgr()
session_data = session_mgr.load_session_data(environ.get('HTTP_COOKIE'))
session_id = session_data['session_id']
self.debug('xmlserver_session.__call__: session_id=%s start_timestamp=%s access_timestamp=%s expiration_timestamp=%s',
session_id,
fmt_time(session_data['session_start_timestamp']),
fmt_time(session_data['session_access_timestamp']),
fmt_time(session_data['session_expiration_timestamp']))
ccache_data = session_data.get('ccache_data')
# Redirect to /ipa/xml if no Kerberos credentials
if ccache_data is None:
            self.debug('xmlserver_session.__call__: no ccache, need TGT')
return self.need_login(start_response)
ipa_ccache_name = bind_ipa_ccache(ccache_data)
# Redirect to /ipa/xml if Kerberos credentials are expired
creds = get_credentials_if_valid(ccache_name=ipa_ccache_name)
if not creds:
            self.debug('xmlserver_session.__call__: ccache expired, deleting session, need login')
# The request is finished with the ccache, destroy it.
release_ipa_ccache(ipa_ccache_name)
return self.need_login(start_response)
# Update the session expiration based on the Kerberos expiration
endtime = creds.lifetime + time.time()
self.update_session_expiration(session_data, endtime)
# Store the session data in the per-thread context
setattr(context, 'session_data', session_data)
environ['KRB5CCNAME'] = ipa_ccache_name
try:
response = super(xmlserver_session, self).__call__(environ, start_response)
finally:
# Kerberos may have updated the ccache data during the
            # execution of the command, therefore we need to refresh our
# copy of it in the session data so the next command sees
# the same state of the ccache.
#
# However we must be careful not to restore the ccache
# data in the session data if it was explicitly deleted
# during the execution of the command. For example the
# logout command removes the ccache data from the session
# data to invalidate the session credentials.
if 'ccache_data' in session_data:
session_data['ccache_data'] = load_ccache_data(ipa_ccache_name)
# The request is finished with the ccache, destroy it.
release_ipa_ccache(ipa_ccache_name)
# Store the session data.
session_mgr.store_session_data(session_data)
destroy_context()
return response
| ofayans/freeipa | ipaserver/rpcserver.py | Python | gpl-3.0 | 49,299 |
corner_tuples = (
(1, 37, 150),
(6, 145, 114),
(31, 73, 42),
(36, 109, 78),
(181, 72, 103),
(186, 108, 139),
(211, 180, 67),
(216, 144, 175),
)
edge_orbit_id = {
# orbit 0
2: 0,
5: 0,
7: 0,
25: 0,
12: 0,
30: 0,
32: 0,
35: 0, # Upper
38: 0,
41: 0,
43: 0,
61: 0,
48: 0,
66: 0,
68: 0,
71: 0, # Left
74: 0,
77: 0,
79: 0,
97: 0,
84: 0,
102: 0,
104: 0,
107: 0, # Front
110: 0,
113: 0,
115: 0,
133: 0,
120: 0,
138: 0,
140: 0,
143: 0, # Right
146: 0,
149: 0,
151: 0,
169: 0,
156: 0,
174: 0,
176: 0,
179: 0, # Back
182: 0,
185: 0,
187: 0,
205: 0,
192: 0,
210: 0,
212: 0,
215: 0, # Down
# orbit 1
3: 1,
4: 1,
13: 1,
19: 1,
18: 1,
24: 1,
33: 1,
34: 1, # Upper
39: 1,
40: 1,
49: 1,
55: 1,
54: 1,
60: 1,
69: 1,
70: 1, # Left
75: 1,
76: 1,
85: 1,
91: 1,
90: 1,
96: 1,
105: 1,
106: 1, # Front
111: 1,
112: 1,
121: 1,
127: 1,
126: 1,
132: 1,
141: 1,
142: 1, # Right
147: 1,
148: 1,
157: 1,
163: 1,
162: 1,
168: 1,
177: 1,
178: 1, # Back
183: 1,
184: 1,
193: 1,
199: 1,
198: 1,
204: 1,
213: 1,
214: 1, # Down
}
center_groups = (
(
"inner x-centers",
(
15,
16,
21,
22, # Upper
51,
52,
57,
58, # Left
87,
88,
93,
94, # Front
123,
124,
129,
130, # Right
159,
160,
165,
166, # Back
195,
196,
201,
202, # Down
),
),
(
"outer x-centers",
(
8,
11,
26,
29, # Upper
44,
47,
62,
65, # Left
80,
83,
98,
101, # Front
116,
119,
134,
137, # Right
152,
155,
170,
173, # Back
188,
191,
206,
209, # Down
),
),
(
"left centers (oblique edge)",
(
9,
17,
28,
20, # Upper
45,
53,
64,
56, # Left
81,
89,
100,
92, # Front
117,
125,
136,
128, # Right
153,
161,
172,
164, # Back
189,
197,
208,
200, # Down
),
),
(
"right centers (oblique edges)",
(
10,
23,
27,
14, # Upper
46,
59,
63,
50, # Left
82,
95,
99,
86, # Front
118,
131,
135,
122, # Right
154,
167,
171,
158, # Back
190,
203,
207,
194, # Down
),
),
)
edge_orbit_wing_pairs = (
# orbit 0
(
(2, 149),
(5, 146),
(7, 38),
(25, 41),
(12, 113),
(30, 110),
(32, 74),
(35, 77),
(43, 156),
(61, 174),
(48, 79),
(66, 97),
(115, 84),
(133, 102),
(120, 151),
(138, 169),
(182, 104),
(185, 107),
(187, 71),
(205, 68),
(192, 140),
(210, 143),
(212, 179),
(215, 176),
),
# orbit 1
(
(3, 148),
(4, 147),
(13, 39),
(19, 40),
(18, 112),
(24, 111),
(33, 75),
(34, 76),
(49, 162),
(55, 168),
(54, 85),
(60, 91),
(90, 121),
(96, 127),
(126, 157),
(132, 163),
(183, 105),
(184, 106),
(193, 70),
(199, 69),
(198, 141),
(204, 142),
(213, 178),
(214, 177),
),
)
highlow_edge_values = {
(2, 149, "B", "D"): "D",
(2, 149, "B", "L"): "D",
(2, 149, "B", "R"): "D",
(2, 149, "B", "U"): "D",
(2, 149, "D", "B"): "U",
(2, 149, "D", "F"): "U",
(2, 149, "D", "L"): "U",
(2, 149, "D", "R"): "U",
(2, 149, "F", "D"): "D",
(2, 149, "F", "L"): "D",
(2, 149, "F", "R"): "D",
(2, 149, "F", "U"): "D",
(2, 149, "L", "B"): "U",
(2, 149, "L", "D"): "D",
(2, 149, "L", "F"): "U",
(2, 149, "L", "U"): "D",
(2, 149, "R", "B"): "U",
(2, 149, "R", "D"): "D",
(2, 149, "R", "F"): "U",
(2, 149, "R", "U"): "D",
(2, 149, "U", "B"): "U",
(2, 149, "U", "F"): "U",
(2, 149, "U", "L"): "U",
(2, 149, "U", "R"): "U",
(3, 148, "B", "D"): "D",
(3, 148, "B", "L"): "D",
(3, 148, "B", "R"): "D",
(3, 148, "B", "U"): "D",
(3, 148, "D", "B"): "U",
(3, 148, "D", "F"): "U",
(3, 148, "D", "L"): "U",
(3, 148, "D", "R"): "U",
(3, 148, "F", "D"): "D",
(3, 148, "F", "L"): "D",
(3, 148, "F", "R"): "D",
(3, 148, "F", "U"): "D",
(3, 148, "L", "B"): "U",
(3, 148, "L", "D"): "D",
(3, 148, "L", "F"): "U",
(3, 148, "L", "U"): "D",
(3, 148, "R", "B"): "U",
(3, 148, "R", "D"): "D",
(3, 148, "R", "F"): "U",
(3, 148, "R", "U"): "D",
(3, 148, "U", "B"): "U",
(3, 148, "U", "F"): "U",
(3, 148, "U", "L"): "U",
(3, 148, "U", "R"): "U",
(4, 147, "B", "D"): "U",
(4, 147, "B", "L"): "U",
(4, 147, "B", "R"): "U",
(4, 147, "B", "U"): "U",
(4, 147, "D", "B"): "D",
(4, 147, "D", "F"): "D",
(4, 147, "D", "L"): "D",
(4, 147, "D", "R"): "D",
(4, 147, "F", "D"): "U",
(4, 147, "F", "L"): "U",
(4, 147, "F", "R"): "U",
(4, 147, "F", "U"): "U",
(4, 147, "L", "B"): "D",
(4, 147, "L", "D"): "U",
(4, 147, "L", "F"): "D",
(4, 147, "L", "U"): "U",
(4, 147, "R", "B"): "D",
(4, 147, "R", "D"): "U",
(4, 147, "R", "F"): "D",
(4, 147, "R", "U"): "U",
(4, 147, "U", "B"): "D",
(4, 147, "U", "F"): "D",
(4, 147, "U", "L"): "D",
(4, 147, "U", "R"): "D",
(5, 146, "B", "D"): "U",
(5, 146, "B", "L"): "U",
(5, 146, "B", "R"): "U",
(5, 146, "B", "U"): "U",
(5, 146, "D", "B"): "D",
(5, 146, "D", "F"): "D",
(5, 146, "D", "L"): "D",
(5, 146, "D", "R"): "D",
(5, 146, "F", "D"): "U",
(5, 146, "F", "L"): "U",
(5, 146, "F", "R"): "U",
(5, 146, "F", "U"): "U",
(5, 146, "L", "B"): "D",
(5, 146, "L", "D"): "U",
(5, 146, "L", "F"): "D",
(5, 146, "L", "U"): "U",
(5, 146, "R", "B"): "D",
(5, 146, "R", "D"): "U",
(5, 146, "R", "F"): "D",
(5, 146, "R", "U"): "U",
(5, 146, "U", "B"): "D",
(5, 146, "U", "F"): "D",
(5, 146, "U", "L"): "D",
(5, 146, "U", "R"): "D",
(7, 38, "B", "D"): "U",
(7, 38, "B", "L"): "U",
(7, 38, "B", "R"): "U",
(7, 38, "B", "U"): "U",
(7, 38, "D", "B"): "D",
(7, 38, "D", "F"): "D",
(7, 38, "D", "L"): "D",
(7, 38, "D", "R"): "D",
(7, 38, "F", "D"): "U",
(7, 38, "F", "L"): "U",
(7, 38, "F", "R"): "U",
(7, 38, "F", "U"): "U",
(7, 38, "L", "B"): "D",
(7, 38, "L", "D"): "U",
(7, 38, "L", "F"): "D",
(7, 38, "L", "U"): "U",
(7, 38, "R", "B"): "D",
(7, 38, "R", "D"): "U",
(7, 38, "R", "F"): "D",
(7, 38, "R", "U"): "U",
(7, 38, "U", "B"): "D",
(7, 38, "U", "F"): "D",
(7, 38, "U", "L"): "D",
(7, 38, "U", "R"): "D",
(12, 113, "B", "D"): "D",
(12, 113, "B", "L"): "D",
(12, 113, "B", "R"): "D",
(12, 113, "B", "U"): "D",
(12, 113, "D", "B"): "U",
(12, 113, "D", "F"): "U",
(12, 113, "D", "L"): "U",
(12, 113, "D", "R"): "U",
(12, 113, "F", "D"): "D",
(12, 113, "F", "L"): "D",
(12, 113, "F", "R"): "D",
(12, 113, "F", "U"): "D",
(12, 113, "L", "B"): "U",
(12, 113, "L", "D"): "D",
(12, 113, "L", "F"): "U",
(12, 113, "L", "U"): "D",
(12, 113, "R", "B"): "U",
(12, 113, "R", "D"): "D",
(12, 113, "R", "F"): "U",
(12, 113, "R", "U"): "D",
(12, 113, "U", "B"): "U",
(12, 113, "U", "F"): "U",
(12, 113, "U", "L"): "U",
(12, 113, "U", "R"): "U",
(13, 39, "B", "D"): "U",
(13, 39, "B", "L"): "U",
(13, 39, "B", "R"): "U",
(13, 39, "B", "U"): "U",
(13, 39, "D", "B"): "D",
(13, 39, "D", "F"): "D",
(13, 39, "D", "L"): "D",
(13, 39, "D", "R"): "D",
(13, 39, "F", "D"): "U",
(13, 39, "F", "L"): "U",
(13, 39, "F", "R"): "U",
(13, 39, "F", "U"): "U",
(13, 39, "L", "B"): "D",
(13, 39, "L", "D"): "U",
(13, 39, "L", "F"): "D",
(13, 39, "L", "U"): "U",
(13, 39, "R", "B"): "D",
(13, 39, "R", "D"): "U",
(13, 39, "R", "F"): "D",
(13, 39, "R", "U"): "U",
(13, 39, "U", "B"): "D",
(13, 39, "U", "F"): "D",
(13, 39, "U", "L"): "D",
(13, 39, "U", "R"): "D",
(18, 112, "B", "D"): "D",
(18, 112, "B", "L"): "D",
(18, 112, "B", "R"): "D",
(18, 112, "B", "U"): "D",
(18, 112, "D", "B"): "U",
(18, 112, "D", "F"): "U",
(18, 112, "D", "L"): "U",
(18, 112, "D", "R"): "U",
(18, 112, "F", "D"): "D",
(18, 112, "F", "L"): "D",
(18, 112, "F", "R"): "D",
(18, 112, "F", "U"): "D",
(18, 112, "L", "B"): "U",
(18, 112, "L", "D"): "D",
(18, 112, "L", "F"): "U",
(18, 112, "L", "U"): "D",
(18, 112, "R", "B"): "U",
(18, 112, "R", "D"): "D",
(18, 112, "R", "F"): "U",
(18, 112, "R", "U"): "D",
(18, 112, "U", "B"): "U",
(18, 112, "U", "F"): "U",
(18, 112, "U", "L"): "U",
(18, 112, "U", "R"): "U",
(19, 40, "B", "D"): "D",
(19, 40, "B", "L"): "D",
(19, 40, "B", "R"): "D",
(19, 40, "B", "U"): "D",
(19, 40, "D", "B"): "U",
(19, 40, "D", "F"): "U",
(19, 40, "D", "L"): "U",
(19, 40, "D", "R"): "U",
(19, 40, "F", "D"): "D",
(19, 40, "F", "L"): "D",
(19, 40, "F", "R"): "D",
(19, 40, "F", "U"): "D",
(19, 40, "L", "B"): "U",
(19, 40, "L", "D"): "D",
(19, 40, "L", "F"): "U",
(19, 40, "L", "U"): "D",
(19, 40, "R", "B"): "U",
(19, 40, "R", "D"): "D",
(19, 40, "R", "F"): "U",
(19, 40, "R", "U"): "D",
(19, 40, "U", "B"): "U",
(19, 40, "U", "F"): "U",
(19, 40, "U", "L"): "U",
(19, 40, "U", "R"): "U",
(24, 111, "B", "D"): "U",
(24, 111, "B", "L"): "U",
(24, 111, "B", "R"): "U",
(24, 111, "B", "U"): "U",
(24, 111, "D", "B"): "D",
(24, 111, "D", "F"): "D",
(24, 111, "D", "L"): "D",
(24, 111, "D", "R"): "D",
(24, 111, "F", "D"): "U",
(24, 111, "F", "L"): "U",
(24, 111, "F", "R"): "U",
(24, 111, "F", "U"): "U",
(24, 111, "L", "B"): "D",
(24, 111, "L", "D"): "U",
(24, 111, "L", "F"): "D",
(24, 111, "L", "U"): "U",
(24, 111, "R", "B"): "D",
(24, 111, "R", "D"): "U",
(24, 111, "R", "F"): "D",
(24, 111, "R", "U"): "U",
(24, 111, "U", "B"): "D",
(24, 111, "U", "F"): "D",
(24, 111, "U", "L"): "D",
(24, 111, "U", "R"): "D",
(25, 41, "B", "D"): "D",
(25, 41, "B", "L"): "D",
(25, 41, "B", "R"): "D",
(25, 41, "B", "U"): "D",
(25, 41, "D", "B"): "U",
(25, 41, "D", "F"): "U",
(25, 41, "D", "L"): "U",
(25, 41, "D", "R"): "U",
(25, 41, "F", "D"): "D",
(25, 41, "F", "L"): "D",
(25, 41, "F", "R"): "D",
(25, 41, "F", "U"): "D",
(25, 41, "L", "B"): "U",
(25, 41, "L", "D"): "D",
(25, 41, "L", "F"): "U",
(25, 41, "L", "U"): "D",
(25, 41, "R", "B"): "U",
(25, 41, "R", "D"): "D",
(25, 41, "R", "F"): "U",
(25, 41, "R", "U"): "D",
(25, 41, "U", "B"): "U",
(25, 41, "U", "F"): "U",
(25, 41, "U", "L"): "U",
(25, 41, "U", "R"): "U",
(30, 110, "B", "D"): "U",
(30, 110, "B", "L"): "U",
(30, 110, "B", "R"): "U",
(30, 110, "B", "U"): "U",
(30, 110, "D", "B"): "D",
(30, 110, "D", "F"): "D",
(30, 110, "D", "L"): "D",
(30, 110, "D", "R"): "D",
(30, 110, "F", "D"): "U",
(30, 110, "F", "L"): "U",
(30, 110, "F", "R"): "U",
(30, 110, "F", "U"): "U",
(30, 110, "L", "B"): "D",
(30, 110, "L", "D"): "U",
(30, 110, "L", "F"): "D",
(30, 110, "L", "U"): "U",
(30, 110, "R", "B"): "D",
(30, 110, "R", "D"): "U",
(30, 110, "R", "F"): "D",
(30, 110, "R", "U"): "U",
(30, 110, "U", "B"): "D",
(30, 110, "U", "F"): "D",
(30, 110, "U", "L"): "D",
(30, 110, "U", "R"): "D",
(32, 74, "B", "D"): "U",
(32, 74, "B", "L"): "U",
(32, 74, "B", "R"): "U",
(32, 74, "B", "U"): "U",
(32, 74, "D", "B"): "D",
(32, 74, "D", "F"): "D",
(32, 74, "D", "L"): "D",
(32, 74, "D", "R"): "D",
(32, 74, "F", "D"): "U",
(32, 74, "F", "L"): "U",
(32, 74, "F", "R"): "U",
(32, 74, "F", "U"): "U",
(32, 74, "L", "B"): "D",
(32, 74, "L", "D"): "U",
(32, 74, "L", "F"): "D",
(32, 74, "L", "U"): "U",
(32, 74, "R", "B"): "D",
(32, 74, "R", "D"): "U",
(32, 74, "R", "F"): "D",
(32, 74, "R", "U"): "U",
(32, 74, "U", "B"): "D",
(32, 74, "U", "F"): "D",
(32, 74, "U", "L"): "D",
(32, 74, "U", "R"): "D",
(33, 75, "B", "D"): "U",
(33, 75, "B", "L"): "U",
(33, 75, "B", "R"): "U",
(33, 75, "B", "U"): "U",
(33, 75, "D", "B"): "D",
(33, 75, "D", "F"): "D",
(33, 75, "D", "L"): "D",
(33, 75, "D", "R"): "D",
(33, 75, "F", "D"): "U",
(33, 75, "F", "L"): "U",
(33, 75, "F", "R"): "U",
(33, 75, "F", "U"): "U",
(33, 75, "L", "B"): "D",
(33, 75, "L", "D"): "U",
(33, 75, "L", "F"): "D",
(33, 75, "L", "U"): "U",
(33, 75, "R", "B"): "D",
(33, 75, "R", "D"): "U",
(33, 75, "R", "F"): "D",
(33, 75, "R", "U"): "U",
(33, 75, "U", "B"): "D",
(33, 75, "U", "F"): "D",
(33, 75, "U", "L"): "D",
(33, 75, "U", "R"): "D",
(34, 76, "B", "D"): "D",
(34, 76, "B", "L"): "D",
(34, 76, "B", "R"): "D",
(34, 76, "B", "U"): "D",
(34, 76, "D", "B"): "U",
(34, 76, "D", "F"): "U",
(34, 76, "D", "L"): "U",
(34, 76, "D", "R"): "U",
(34, 76, "F", "D"): "D",
(34, 76, "F", "L"): "D",
(34, 76, "F", "R"): "D",
(34, 76, "F", "U"): "D",
(34, 76, "L", "B"): "U",
(34, 76, "L", "D"): "D",
(34, 76, "L", "F"): "U",
(34, 76, "L", "U"): "D",
(34, 76, "R", "B"): "U",
(34, 76, "R", "D"): "D",
(34, 76, "R", "F"): "U",
(34, 76, "R", "U"): "D",
(34, 76, "U", "B"): "U",
(34, 76, "U", "F"): "U",
(34, 76, "U", "L"): "U",
(34, 76, "U", "R"): "U",
(35, 77, "B", "D"): "D",
(35, 77, "B", "L"): "D",
(35, 77, "B", "R"): "D",
(35, 77, "B", "U"): "D",
(35, 77, "D", "B"): "U",
(35, 77, "D", "F"): "U",
(35, 77, "D", "L"): "U",
(35, 77, "D", "R"): "U",
(35, 77, "F", "D"): "D",
(35, 77, "F", "L"): "D",
(35, 77, "F", "R"): "D",
(35, 77, "F", "U"): "D",
(35, 77, "L", "B"): "U",
(35, 77, "L", "D"): "D",
(35, 77, "L", "F"): "U",
(35, 77, "L", "U"): "D",
(35, 77, "R", "B"): "U",
(35, 77, "R", "D"): "D",
(35, 77, "R", "F"): "U",
(35, 77, "R", "U"): "D",
(35, 77, "U", "B"): "U",
(35, 77, "U", "F"): "U",
(35, 77, "U", "L"): "U",
(35, 77, "U", "R"): "U",
(38, 7, "B", "D"): "D",
(38, 7, "B", "L"): "D",
(38, 7, "B", "R"): "D",
(38, 7, "B", "U"): "D",
(38, 7, "D", "B"): "U",
(38, 7, "D", "F"): "U",
(38, 7, "D", "L"): "U",
(38, 7, "D", "R"): "U",
(38, 7, "F", "D"): "D",
(38, 7, "F", "L"): "D",
(38, 7, "F", "R"): "D",
(38, 7, "F", "U"): "D",
(38, 7, "L", "B"): "U",
(38, 7, "L", "D"): "D",
(38, 7, "L", "F"): "U",
(38, 7, "L", "U"): "D",
(38, 7, "R", "B"): "U",
(38, 7, "R", "D"): "D",
(38, 7, "R", "F"): "U",
(38, 7, "R", "U"): "D",
(38, 7, "U", "B"): "U",
(38, 7, "U", "F"): "U",
(38, 7, "U", "L"): "U",
(38, 7, "U", "R"): "U",
(39, 13, "B", "D"): "D",
(39, 13, "B", "L"): "D",
(39, 13, "B", "R"): "D",
(39, 13, "B", "U"): "D",
(39, 13, "D", "B"): "U",
(39, 13, "D", "F"): "U",
(39, 13, "D", "L"): "U",
(39, 13, "D", "R"): "U",
(39, 13, "F", "D"): "D",
(39, 13, "F", "L"): "D",
(39, 13, "F", "R"): "D",
(39, 13, "F", "U"): "D",
(39, 13, "L", "B"): "U",
(39, 13, "L", "D"): "D",
(39, 13, "L", "F"): "U",
(39, 13, "L", "U"): "D",
(39, 13, "R", "B"): "U",
(39, 13, "R", "D"): "D",
(39, 13, "R", "F"): "U",
(39, 13, "R", "U"): "D",
(39, 13, "U", "B"): "U",
(39, 13, "U", "F"): "U",
(39, 13, "U", "L"): "U",
(39, 13, "U", "R"): "U",
(40, 19, "B", "D"): "U",
(40, 19, "B", "L"): "U",
(40, 19, "B", "R"): "U",
(40, 19, "B", "U"): "U",
(40, 19, "D", "B"): "D",
(40, 19, "D", "F"): "D",
(40, 19, "D", "L"): "D",
(40, 19, "D", "R"): "D",
(40, 19, "F", "D"): "U",
(40, 19, "F", "L"): "U",
(40, 19, "F", "R"): "U",
(40, 19, "F", "U"): "U",
(40, 19, "L", "B"): "D",
(40, 19, "L", "D"): "U",
(40, 19, "L", "F"): "D",
(40, 19, "L", "U"): "U",
(40, 19, "R", "B"): "D",
(40, 19, "R", "D"): "U",
(40, 19, "R", "F"): "D",
(40, 19, "R", "U"): "U",
(40, 19, "U", "B"): "D",
(40, 19, "U", "F"): "D",
(40, 19, "U", "L"): "D",
(40, 19, "U", "R"): "D",
(41, 25, "B", "D"): "U",
(41, 25, "B", "L"): "U",
(41, 25, "B", "R"): "U",
(41, 25, "B", "U"): "U",
(41, 25, "D", "B"): "D",
(41, 25, "D", "F"): "D",
(41, 25, "D", "L"): "D",
(41, 25, "D", "R"): "D",
(41, 25, "F", "D"): "U",
(41, 25, "F", "L"): "U",
(41, 25, "F", "R"): "U",
(41, 25, "F", "U"): "U",
(41, 25, "L", "B"): "D",
(41, 25, "L", "D"): "U",
(41, 25, "L", "F"): "D",
(41, 25, "L", "U"): "U",
(41, 25, "R", "B"): "D",
(41, 25, "R", "D"): "U",
(41, 25, "R", "F"): "D",
(41, 25, "R", "U"): "U",
(41, 25, "U", "B"): "D",
(41, 25, "U", "F"): "D",
(41, 25, "U", "L"): "D",
(41, 25, "U", "R"): "D",
(43, 156, "B", "D"): "U",
(43, 156, "B", "L"): "U",
(43, 156, "B", "R"): "U",
(43, 156, "B", "U"): "U",
(43, 156, "D", "B"): "D",
(43, 156, "D", "F"): "D",
(43, 156, "D", "L"): "D",
(43, 156, "D", "R"): "D",
(43, 156, "F", "D"): "U",
(43, 156, "F", "L"): "U",
(43, 156, "F", "R"): "U",
(43, 156, "F", "U"): "U",
(43, 156, "L", "B"): "D",
(43, 156, "L", "D"): "U",
(43, 156, "L", "F"): "D",
(43, 156, "L", "U"): "U",
(43, 156, "R", "B"): "D",
(43, 156, "R", "D"): "U",
(43, 156, "R", "F"): "D",
(43, 156, "R", "U"): "U",
(43, 156, "U", "B"): "D",
(43, 156, "U", "F"): "D",
(43, 156, "U", "L"): "D",
(43, 156, "U", "R"): "D",
(48, 79, "B", "D"): "D",
(48, 79, "B", "L"): "D",
(48, 79, "B", "R"): "D",
(48, 79, "B", "U"): "D",
(48, 79, "D", "B"): "U",
(48, 79, "D", "F"): "U",
(48, 79, "D", "L"): "U",
(48, 79, "D", "R"): "U",
(48, 79, "F", "D"): "D",
(48, 79, "F", "L"): "D",
(48, 79, "F", "R"): "D",
(48, 79, "F", "U"): "D",
(48, 79, "L", "B"): "U",
(48, 79, "L", "D"): "D",
(48, 79, "L", "F"): "U",
(48, 79, "L", "U"): "D",
(48, 79, "R", "B"): "U",
(48, 79, "R", "D"): "D",
(48, 79, "R", "F"): "U",
(48, 79, "R", "U"): "D",
(48, 79, "U", "B"): "U",
(48, 79, "U", "F"): "U",
(48, 79, "U", "L"): "U",
(48, 79, "U", "R"): "U",
(49, 162, "B", "D"): "U",
(49, 162, "B", "L"): "U",
(49, 162, "B", "R"): "U",
(49, 162, "B", "U"): "U",
(49, 162, "D", "B"): "D",
(49, 162, "D", "F"): "D",
(49, 162, "D", "L"): "D",
(49, 162, "D", "R"): "D",
(49, 162, "F", "D"): "U",
(49, 162, "F", "L"): "U",
(49, 162, "F", "R"): "U",
(49, 162, "F", "U"): "U",
(49, 162, "L", "B"): "D",
(49, 162, "L", "D"): "U",
(49, 162, "L", "F"): "D",
(49, 162, "L", "U"): "U",
(49, 162, "R", "B"): "D",
(49, 162, "R", "D"): "U",
(49, 162, "R", "F"): "D",
(49, 162, "R", "U"): "U",
(49, 162, "U", "B"): "D",
(49, 162, "U", "F"): "D",
(49, 162, "U", "L"): "D",
(49, 162, "U", "R"): "D",
(54, 85, "B", "D"): "D",
(54, 85, "B", "L"): "D",
(54, 85, "B", "R"): "D",
(54, 85, "B", "U"): "D",
(54, 85, "D", "B"): "U",
(54, 85, "D", "F"): "U",
(54, 85, "D", "L"): "U",
(54, 85, "D", "R"): "U",
(54, 85, "F", "D"): "D",
(54, 85, "F", "L"): "D",
(54, 85, "F", "R"): "D",
(54, 85, "F", "U"): "D",
(54, 85, "L", "B"): "U",
(54, 85, "L", "D"): "D",
(54, 85, "L", "F"): "U",
(54, 85, "L", "U"): "D",
(54, 85, "R", "B"): "U",
(54, 85, "R", "D"): "D",
(54, 85, "R", "F"): "U",
(54, 85, "R", "U"): "D",
(54, 85, "U", "B"): "U",
(54, 85, "U", "F"): "U",
(54, 85, "U", "L"): "U",
(54, 85, "U", "R"): "U",
(55, 168, "B", "D"): "D",
(55, 168, "B", "L"): "D",
(55, 168, "B", "R"): "D",
(55, 168, "B", "U"): "D",
(55, 168, "D", "B"): "U",
(55, 168, "D", "F"): "U",
(55, 168, "D", "L"): "U",
(55, 168, "D", "R"): "U",
(55, 168, "F", "D"): "D",
(55, 168, "F", "L"): "D",
(55, 168, "F", "R"): "D",
(55, 168, "F", "U"): "D",
(55, 168, "L", "B"): "U",
(55, 168, "L", "D"): "D",
(55, 168, "L", "F"): "U",
(55, 168, "L", "U"): "D",
(55, 168, "R", "B"): "U",
(55, 168, "R", "D"): "D",
(55, 168, "R", "F"): "U",
(55, 168, "R", "U"): "D",
(55, 168, "U", "B"): "U",
(55, 168, "U", "F"): "U",
(55, 168, "U", "L"): "U",
(55, 168, "U", "R"): "U",
(60, 91, "B", "D"): "U",
(60, 91, "B", "L"): "U",
(60, 91, "B", "R"): "U",
(60, 91, "B", "U"): "U",
(60, 91, "D", "B"): "D",
(60, 91, "D", "F"): "D",
(60, 91, "D", "L"): "D",
(60, 91, "D", "R"): "D",
(60, 91, "F", "D"): "U",
(60, 91, "F", "L"): "U",
(60, 91, "F", "R"): "U",
(60, 91, "F", "U"): "U",
(60, 91, "L", "B"): "D",
(60, 91, "L", "D"): "U",
(60, 91, "L", "F"): "D",
(60, 91, "L", "U"): "U",
(60, 91, "R", "B"): "D",
(60, 91, "R", "D"): "U",
(60, 91, "R", "F"): "D",
(60, 91, "R", "U"): "U",
(60, 91, "U", "B"): "D",
(60, 91, "U", "F"): "D",
(60, 91, "U", "L"): "D",
(60, 91, "U", "R"): "D",
(61, 174, "B", "D"): "D",
(61, 174, "B", "L"): "D",
(61, 174, "B", "R"): "D",
(61, 174, "B", "U"): "D",
(61, 174, "D", "B"): "U",
(61, 174, "D", "F"): "U",
(61, 174, "D", "L"): "U",
(61, 174, "D", "R"): "U",
(61, 174, "F", "D"): "D",
(61, 174, "F", "L"): "D",
(61, 174, "F", "R"): "D",
(61, 174, "F", "U"): "D",
(61, 174, "L", "B"): "U",
(61, 174, "L", "D"): "D",
(61, 174, "L", "F"): "U",
(61, 174, "L", "U"): "D",
(61, 174, "R", "B"): "U",
(61, 174, "R", "D"): "D",
(61, 174, "R", "F"): "U",
(61, 174, "R", "U"): "D",
(61, 174, "U", "B"): "U",
(61, 174, "U", "F"): "U",
(61, 174, "U", "L"): "U",
(61, 174, "U", "R"): "U",
(66, 97, "B", "D"): "U",
(66, 97, "B", "L"): "U",
(66, 97, "B", "R"): "U",
(66, 97, "B", "U"): "U",
(66, 97, "D", "B"): "D",
(66, 97, "D", "F"): "D",
(66, 97, "D", "L"): "D",
(66, 97, "D", "R"): "D",
(66, 97, "F", "D"): "U",
(66, 97, "F", "L"): "U",
(66, 97, "F", "R"): "U",
(66, 97, "F", "U"): "U",
(66, 97, "L", "B"): "D",
(66, 97, "L", "D"): "U",
(66, 97, "L", "F"): "D",
(66, 97, "L", "U"): "U",
(66, 97, "R", "B"): "D",
(66, 97, "R", "D"): "U",
(66, 97, "R", "F"): "D",
(66, 97, "R", "U"): "U",
(66, 97, "U", "B"): "D",
(66, 97, "U", "F"): "D",
(66, 97, "U", "L"): "D",
(66, 97, "U", "R"): "D",
(68, 205, "B", "D"): "U",
(68, 205, "B", "L"): "U",
(68, 205, "B", "R"): "U",
(68, 205, "B", "U"): "U",
(68, 205, "D", "B"): "D",
(68, 205, "D", "F"): "D",
(68, 205, "D", "L"): "D",
(68, 205, "D", "R"): "D",
(68, 205, "F", "D"): "U",
(68, 205, "F", "L"): "U",
(68, 205, "F", "R"): "U",
(68, 205, "F", "U"): "U",
(68, 205, "L", "B"): "D",
(68, 205, "L", "D"): "U",
(68, 205, "L", "F"): "D",
(68, 205, "L", "U"): "U",
(68, 205, "R", "B"): "D",
(68, 205, "R", "D"): "U",
(68, 205, "R", "F"): "D",
(68, 205, "R", "U"): "U",
(68, 205, "U", "B"): "D",
(68, 205, "U", "F"): "D",
(68, 205, "U", "L"): "D",
(68, 205, "U", "R"): "D",
(69, 199, "B", "D"): "U",
(69, 199, "B", "L"): "U",
(69, 199, "B", "R"): "U",
(69, 199, "B", "U"): "U",
(69, 199, "D", "B"): "D",
(69, 199, "D", "F"): "D",
(69, 199, "D", "L"): "D",
(69, 199, "D", "R"): "D",
(69, 199, "F", "D"): "U",
(69, 199, "F", "L"): "U",
(69, 199, "F", "R"): "U",
(69, 199, "F", "U"): "U",
(69, 199, "L", "B"): "D",
(69, 199, "L", "D"): "U",
(69, 199, "L", "F"): "D",
(69, 199, "L", "U"): "U",
(69, 199, "R", "B"): "D",
(69, 199, "R", "D"): "U",
(69, 199, "R", "F"): "D",
(69, 199, "R", "U"): "U",
(69, 199, "U", "B"): "D",
(69, 199, "U", "F"): "D",
(69, 199, "U", "L"): "D",
(69, 199, "U", "R"): "D",
(70, 193, "B", "D"): "D",
(70, 193, "B", "L"): "D",
(70, 193, "B", "R"): "D",
(70, 193, "B", "U"): "D",
(70, 193, "D", "B"): "U",
(70, 193, "D", "F"): "U",
(70, 193, "D", "L"): "U",
(70, 193, "D", "R"): "U",
(70, 193, "F", "D"): "D",
(70, 193, "F", "L"): "D",
(70, 193, "F", "R"): "D",
(70, 193, "F", "U"): "D",
(70, 193, "L", "B"): "U",
(70, 193, "L", "D"): "D",
(70, 193, "L", "F"): "U",
(70, 193, "L", "U"): "D",
(70, 193, "R", "B"): "U",
(70, 193, "R", "D"): "D",
(70, 193, "R", "F"): "U",
(70, 193, "R", "U"): "D",
(70, 193, "U", "B"): "U",
(70, 193, "U", "F"): "U",
(70, 193, "U", "L"): "U",
(70, 193, "U", "R"): "U",
(71, 187, "B", "D"): "D",
(71, 187, "B", "L"): "D",
(71, 187, "B", "R"): "D",
(71, 187, "B", "U"): "D",
(71, 187, "D", "B"): "U",
(71, 187, "D", "F"): "U",
(71, 187, "D", "L"): "U",
(71, 187, "D", "R"): "U",
(71, 187, "F", "D"): "D",
(71, 187, "F", "L"): "D",
(71, 187, "F", "R"): "D",
(71, 187, "F", "U"): "D",
(71, 187, "L", "B"): "U",
(71, 187, "L", "D"): "D",
(71, 187, "L", "F"): "U",
(71, 187, "L", "U"): "D",
(71, 187, "R", "B"): "U",
(71, 187, "R", "D"): "D",
(71, 187, "R", "F"): "U",
(71, 187, "R", "U"): "D",
(71, 187, "U", "B"): "U",
(71, 187, "U", "F"): "U",
(71, 187, "U", "L"): "U",
(71, 187, "U", "R"): "U",
(74, 32, "B", "D"): "D",
(74, 32, "B", "L"): "D",
(74, 32, "B", "R"): "D",
(74, 32, "B", "U"): "D",
(74, 32, "D", "B"): "U",
(74, 32, "D", "F"): "U",
(74, 32, "D", "L"): "U",
(74, 32, "D", "R"): "U",
(74, 32, "F", "D"): "D",
(74, 32, "F", "L"): "D",
(74, 32, "F", "R"): "D",
(74, 32, "F", "U"): "D",
(74, 32, "L", "B"): "U",
(74, 32, "L", "D"): "D",
(74, 32, "L", "F"): "U",
(74, 32, "L", "U"): "D",
(74, 32, "R", "B"): "U",
(74, 32, "R", "D"): "D",
(74, 32, "R", "F"): "U",
(74, 32, "R", "U"): "D",
(74, 32, "U", "B"): "U",
(74, 32, "U", "F"): "U",
(74, 32, "U", "L"): "U",
(74, 32, "U", "R"): "U",
(75, 33, "B", "D"): "D",
(75, 33, "B", "L"): "D",
(75, 33, "B", "R"): "D",
(75, 33, "B", "U"): "D",
(75, 33, "D", "B"): "U",
(75, 33, "D", "F"): "U",
(75, 33, "D", "L"): "U",
(75, 33, "D", "R"): "U",
(75, 33, "F", "D"): "D",
(75, 33, "F", "L"): "D",
(75, 33, "F", "R"): "D",
(75, 33, "F", "U"): "D",
(75, 33, "L", "B"): "U",
(75, 33, "L", "D"): "D",
(75, 33, "L", "F"): "U",
(75, 33, "L", "U"): "D",
(75, 33, "R", "B"): "U",
(75, 33, "R", "D"): "D",
(75, 33, "R", "F"): "U",
(75, 33, "R", "U"): "D",
(75, 33, "U", "B"): "U",
(75, 33, "U", "F"): "U",
(75, 33, "U", "L"): "U",
(75, 33, "U", "R"): "U",
(76, 34, "B", "D"): "U",
(76, 34, "B", "L"): "U",
(76, 34, "B", "R"): "U",
(76, 34, "B", "U"): "U",
(76, 34, "D", "B"): "D",
(76, 34, "D", "F"): "D",
(76, 34, "D", "L"): "D",
(76, 34, "D", "R"): "D",
(76, 34, "F", "D"): "U",
(76, 34, "F", "L"): "U",
(76, 34, "F", "R"): "U",
(76, 34, "F", "U"): "U",
(76, 34, "L", "B"): "D",
(76, 34, "L", "D"): "U",
(76, 34, "L", "F"): "D",
(76, 34, "L", "U"): "U",
(76, 34, "R", "B"): "D",
(76, 34, "R", "D"): "U",
(76, 34, "R", "F"): "D",
(76, 34, "R", "U"): "U",
(76, 34, "U", "B"): "D",
(76, 34, "U", "F"): "D",
(76, 34, "U", "L"): "D",
(76, 34, "U", "R"): "D",
(77, 35, "B", "D"): "U",
(77, 35, "B", "L"): "U",
(77, 35, "B", "R"): "U",
(77, 35, "B", "U"): "U",
(77, 35, "D", "B"): "D",
(77, 35, "D", "F"): "D",
(77, 35, "D", "L"): "D",
(77, 35, "D", "R"): "D",
(77, 35, "F", "D"): "U",
(77, 35, "F", "L"): "U",
(77, 35, "F", "R"): "U",
(77, 35, "F", "U"): "U",
(77, 35, "L", "B"): "D",
(77, 35, "L", "D"): "U",
(77, 35, "L", "F"): "D",
(77, 35, "L", "U"): "U",
(77, 35, "R", "B"): "D",
(77, 35, "R", "D"): "U",
(77, 35, "R", "F"): "D",
(77, 35, "R", "U"): "U",
(77, 35, "U", "B"): "D",
(77, 35, "U", "F"): "D",
(77, 35, "U", "L"): "D",
(77, 35, "U", "R"): "D",
(79, 48, "B", "D"): "U",
(79, 48, "B", "L"): "U",
(79, 48, "B", "R"): "U",
(79, 48, "B", "U"): "U",
(79, 48, "D", "B"): "D",
(79, 48, "D", "F"): "D",
(79, 48, "D", "L"): "D",
(79, 48, "D", "R"): "D",
(79, 48, "F", "D"): "U",
(79, 48, "F", "L"): "U",
(79, 48, "F", "R"): "U",
(79, 48, "F", "U"): "U",
(79, 48, "L", "B"): "D",
(79, 48, "L", "D"): "U",
(79, 48, "L", "F"): "D",
(79, 48, "L", "U"): "U",
(79, 48, "R", "B"): "D",
(79, 48, "R", "D"): "U",
(79, 48, "R", "F"): "D",
(79, 48, "R", "U"): "U",
(79, 48, "U", "B"): "D",
(79, 48, "U", "F"): "D",
(79, 48, "U", "L"): "D",
(79, 48, "U", "R"): "D",
(84, 115, "B", "D"): "D",
(84, 115, "B", "L"): "D",
(84, 115, "B", "R"): "D",
(84, 115, "B", "U"): "D",
(84, 115, "D", "B"): "U",
(84, 115, "D", "F"): "U",
(84, 115, "D", "L"): "U",
(84, 115, "D", "R"): "U",
(84, 115, "F", "D"): "D",
(84, 115, "F", "L"): "D",
(84, 115, "F", "R"): "D",
(84, 115, "F", "U"): "D",
(84, 115, "L", "B"): "U",
(84, 115, "L", "D"): "D",
(84, 115, "L", "F"): "U",
(84, 115, "L", "U"): "D",
(84, 115, "R", "B"): "U",
(84, 115, "R", "D"): "D",
(84, 115, "R", "F"): "U",
(84, 115, "R", "U"): "D",
(84, 115, "U", "B"): "U",
(84, 115, "U", "F"): "U",
(84, 115, "U", "L"): "U",
(84, 115, "U", "R"): "U",
(85, 54, "B", "D"): "U",
(85, 54, "B", "L"): "U",
(85, 54, "B", "R"): "U",
(85, 54, "B", "U"): "U",
(85, 54, "D", "B"): "D",
(85, 54, "D", "F"): "D",
(85, 54, "D", "L"): "D",
(85, 54, "D", "R"): "D",
(85, 54, "F", "D"): "U",
(85, 54, "F", "L"): "U",
(85, 54, "F", "R"): "U",
(85, 54, "F", "U"): "U",
(85, 54, "L", "B"): "D",
(85, 54, "L", "D"): "U",
(85, 54, "L", "F"): "D",
(85, 54, "L", "U"): "U",
(85, 54, "R", "B"): "D",
(85, 54, "R", "D"): "U",
(85, 54, "R", "F"): "D",
(85, 54, "R", "U"): "U",
(85, 54, "U", "B"): "D",
(85, 54, "U", "F"): "D",
(85, 54, "U", "L"): "D",
(85, 54, "U", "R"): "D",
(90, 121, "B", "D"): "D",
(90, 121, "B", "L"): "D",
(90, 121, "B", "R"): "D",
(90, 121, "B", "U"): "D",
(90, 121, "D", "B"): "U",
(90, 121, "D", "F"): "U",
(90, 121, "D", "L"): "U",
(90, 121, "D", "R"): "U",
(90, 121, "F", "D"): "D",
(90, 121, "F", "L"): "D",
(90, 121, "F", "R"): "D",
(90, 121, "F", "U"): "D",
(90, 121, "L", "B"): "U",
(90, 121, "L", "D"): "D",
(90, 121, "L", "F"): "U",
(90, 121, "L", "U"): "D",
(90, 121, "R", "B"): "U",
(90, 121, "R", "D"): "D",
(90, 121, "R", "F"): "U",
(90, 121, "R", "U"): "D",
(90, 121, "U", "B"): "U",
(90, 121, "U", "F"): "U",
(90, 121, "U", "L"): "U",
(90, 121, "U", "R"): "U",
(91, 60, "B", "D"): "D",
(91, 60, "B", "L"): "D",
(91, 60, "B", "R"): "D",
(91, 60, "B", "U"): "D",
(91, 60, "D", "B"): "U",
(91, 60, "D", "F"): "U",
(91, 60, "D", "L"): "U",
(91, 60, "D", "R"): "U",
(91, 60, "F", "D"): "D",
(91, 60, "F", "L"): "D",
(91, 60, "F", "R"): "D",
(91, 60, "F", "U"): "D",
(91, 60, "L", "B"): "U",
(91, 60, "L", "D"): "D",
(91, 60, "L", "F"): "U",
(91, 60, "L", "U"): "D",
(91, 60, "R", "B"): "U",
(91, 60, "R", "D"): "D",
(91, 60, "R", "F"): "U",
(91, 60, "R", "U"): "D",
(91, 60, "U", "B"): "U",
(91, 60, "U", "F"): "U",
(91, 60, "U", "L"): "U",
(91, 60, "U", "R"): "U",
(96, 127, "B", "D"): "U",
(96, 127, "B", "L"): "U",
(96, 127, "B", "R"): "U",
(96, 127, "B", "U"): "U",
(96, 127, "D", "B"): "D",
(96, 127, "D", "F"): "D",
(96, 127, "D", "L"): "D",
(96, 127, "D", "R"): "D",
(96, 127, "F", "D"): "U",
(96, 127, "F", "L"): "U",
(96, 127, "F", "R"): "U",
(96, 127, "F", "U"): "U",
(96, 127, "L", "B"): "D",
(96, 127, "L", "D"): "U",
(96, 127, "L", "F"): "D",
(96, 127, "L", "U"): "U",
(96, 127, "R", "B"): "D",
(96, 127, "R", "D"): "U",
(96, 127, "R", "F"): "D",
(96, 127, "R", "U"): "U",
(96, 127, "U", "B"): "D",
(96, 127, "U", "F"): "D",
(96, 127, "U", "L"): "D",
(96, 127, "U", "R"): "D",
(97, 66, "B", "D"): "D",
(97, 66, "B", "L"): "D",
(97, 66, "B", "R"): "D",
(97, 66, "B", "U"): "D",
(97, 66, "D", "B"): "U",
(97, 66, "D", "F"): "U",
(97, 66, "D", "L"): "U",
(97, 66, "D", "R"): "U",
(97, 66, "F", "D"): "D",
(97, 66, "F", "L"): "D",
(97, 66, "F", "R"): "D",
(97, 66, "F", "U"): "D",
(97, 66, "L", "B"): "U",
(97, 66, "L", "D"): "D",
(97, 66, "L", "F"): "U",
(97, 66, "L", "U"): "D",
(97, 66, "R", "B"): "U",
(97, 66, "R", "D"): "D",
(97, 66, "R", "F"): "U",
(97, 66, "R", "U"): "D",
(97, 66, "U", "B"): "U",
(97, 66, "U", "F"): "U",
(97, 66, "U", "L"): "U",
(97, 66, "U", "R"): "U",
(102, 133, "B", "D"): "U",
(102, 133, "B", "L"): "U",
(102, 133, "B", "R"): "U",
(102, 133, "B", "U"): "U",
(102, 133, "D", "B"): "D",
(102, 133, "D", "F"): "D",
(102, 133, "D", "L"): "D",
(102, 133, "D", "R"): "D",
(102, 133, "F", "D"): "U",
(102, 133, "F", "L"): "U",
(102, 133, "F", "R"): "U",
(102, 133, "F", "U"): "U",
(102, 133, "L", "B"): "D",
(102, 133, "L", "D"): "U",
(102, 133, "L", "F"): "D",
(102, 133, "L", "U"): "U",
(102, 133, "R", "B"): "D",
(102, 133, "R", "D"): "U",
(102, 133, "R", "F"): "D",
(102, 133, "R", "U"): "U",
(102, 133, "U", "B"): "D",
(102, 133, "U", "F"): "D",
(102, 133, "U", "L"): "D",
(102, 133, "U", "R"): "D",
(104, 182, "B", "D"): "U",
(104, 182, "B", "L"): "U",
(104, 182, "B", "R"): "U",
(104, 182, "B", "U"): "U",
(104, 182, "D", "B"): "D",
(104, 182, "D", "F"): "D",
(104, 182, "D", "L"): "D",
(104, 182, "D", "R"): "D",
(104, 182, "F", "D"): "U",
(104, 182, "F", "L"): "U",
(104, 182, "F", "R"): "U",
(104, 182, "F", "U"): "U",
(104, 182, "L", "B"): "D",
(104, 182, "L", "D"): "U",
(104, 182, "L", "F"): "D",
(104, 182, "L", "U"): "U",
(104, 182, "R", "B"): "D",
(104, 182, "R", "D"): "U",
(104, 182, "R", "F"): "D",
(104, 182, "R", "U"): "U",
(104, 182, "U", "B"): "D",
(104, 182, "U", "F"): "D",
(104, 182, "U", "L"): "D",
(104, 182, "U", "R"): "D",
(105, 183, "B", "D"): "U",
(105, 183, "B", "L"): "U",
(105, 183, "B", "R"): "U",
(105, 183, "B", "U"): "U",
(105, 183, "D", "B"): "D",
(105, 183, "D", "F"): "D",
(105, 183, "D", "L"): "D",
(105, 183, "D", "R"): "D",
(105, 183, "F", "D"): "U",
(105, 183, "F", "L"): "U",
(105, 183, "F", "R"): "U",
(105, 183, "F", "U"): "U",
(105, 183, "L", "B"): "D",
(105, 183, "L", "D"): "U",
(105, 183, "L", "F"): "D",
(105, 183, "L", "U"): "U",
(105, 183, "R", "B"): "D",
(105, 183, "R", "D"): "U",
(105, 183, "R", "F"): "D",
(105, 183, "R", "U"): "U",
(105, 183, "U", "B"): "D",
(105, 183, "U", "F"): "D",
(105, 183, "U", "L"): "D",
(105, 183, "U", "R"): "D",
(106, 184, "B", "D"): "D",
(106, 184, "B", "L"): "D",
(106, 184, "B", "R"): "D",
(106, 184, "B", "U"): "D",
(106, 184, "D", "B"): "U",
(106, 184, "D", "F"): "U",
(106, 184, "D", "L"): "U",
(106, 184, "D", "R"): "U",
(106, 184, "F", "D"): "D",
(106, 184, "F", "L"): "D",
(106, 184, "F", "R"): "D",
(106, 184, "F", "U"): "D",
(106, 184, "L", "B"): "U",
(106, 184, "L", "D"): "D",
(106, 184, "L", "F"): "U",
(106, 184, "L", "U"): "D",
(106, 184, "R", "B"): "U",
(106, 184, "R", "D"): "D",
(106, 184, "R", "F"): "U",
(106, 184, "R", "U"): "D",
(106, 184, "U", "B"): "U",
(106, 184, "U", "F"): "U",
(106, 184, "U", "L"): "U",
(106, 184, "U", "R"): "U",
(107, 185, "B", "D"): "D",
(107, 185, "B", "L"): "D",
(107, 185, "B", "R"): "D",
(107, 185, "B", "U"): "D",
(107, 185, "D", "B"): "U",
(107, 185, "D", "F"): "U",
(107, 185, "D", "L"): "U",
(107, 185, "D", "R"): "U",
(107, 185, "F", "D"): "D",
(107, 185, "F", "L"): "D",
(107, 185, "F", "R"): "D",
(107, 185, "F", "U"): "D",
(107, 185, "L", "B"): "U",
(107, 185, "L", "D"): "D",
(107, 185, "L", "F"): "U",
(107, 185, "L", "U"): "D",
(107, 185, "R", "B"): "U",
(107, 185, "R", "D"): "D",
(107, 185, "R", "F"): "U",
(107, 185, "R", "U"): "D",
(107, 185, "U", "B"): "U",
(107, 185, "U", "F"): "U",
(107, 185, "U", "L"): "U",
(107, 185, "U", "R"): "U",
(110, 30, "B", "D"): "D",
(110, 30, "B", "L"): "D",
(110, 30, "B", "R"): "D",
(110, 30, "B", "U"): "D",
(110, 30, "D", "B"): "U",
(110, 30, "D", "F"): "U",
(110, 30, "D", "L"): "U",
(110, 30, "D", "R"): "U",
(110, 30, "F", "D"): "D",
(110, 30, "F", "L"): "D",
(110, 30, "F", "R"): "D",
(110, 30, "F", "U"): "D",
(110, 30, "L", "B"): "U",
(110, 30, "L", "D"): "D",
(110, 30, "L", "F"): "U",
(110, 30, "L", "U"): "D",
(110, 30, "R", "B"): "U",
(110, 30, "R", "D"): "D",
(110, 30, "R", "F"): "U",
(110, 30, "R", "U"): "D",
(110, 30, "U", "B"): "U",
(110, 30, "U", "F"): "U",
(110, 30, "U", "L"): "U",
(110, 30, "U", "R"): "U",
(111, 24, "B", "D"): "D",
(111, 24, "B", "L"): "D",
(111, 24, "B", "R"): "D",
(111, 24, "B", "U"): "D",
(111, 24, "D", "B"): "U",
(111, 24, "D", "F"): "U",
(111, 24, "D", "L"): "U",
(111, 24, "D", "R"): "U",
(111, 24, "F", "D"): "D",
(111, 24, "F", "L"): "D",
(111, 24, "F", "R"): "D",
(111, 24, "F", "U"): "D",
(111, 24, "L", "B"): "U",
(111, 24, "L", "D"): "D",
(111, 24, "L", "F"): "U",
(111, 24, "L", "U"): "D",
(111, 24, "R", "B"): "U",
(111, 24, "R", "D"): "D",
(111, 24, "R", "F"): "U",
(111, 24, "R", "U"): "D",
(111, 24, "U", "B"): "U",
(111, 24, "U", "F"): "U",
(111, 24, "U", "L"): "U",
(111, 24, "U", "R"): "U",
(112, 18, "B", "D"): "U",
(112, 18, "B", "L"): "U",
(112, 18, "B", "R"): "U",
(112, 18, "B", "U"): "U",
(112, 18, "D", "B"): "D",
(112, 18, "D", "F"): "D",
(112, 18, "D", "L"): "D",
(112, 18, "D", "R"): "D",
(112, 18, "F", "D"): "U",
(112, 18, "F", "L"): "U",
(112, 18, "F", "R"): "U",
(112, 18, "F", "U"): "U",
(112, 18, "L", "B"): "D",
(112, 18, "L", "D"): "U",
(112, 18, "L", "F"): "D",
(112, 18, "L", "U"): "U",
(112, 18, "R", "B"): "D",
(112, 18, "R", "D"): "U",
(112, 18, "R", "F"): "D",
(112, 18, "R", "U"): "U",
(112, 18, "U", "B"): "D",
(112, 18, "U", "F"): "D",
(112, 18, "U", "L"): "D",
(112, 18, "U", "R"): "D",
(113, 12, "B", "D"): "U",
(113, 12, "B", "L"): "U",
(113, 12, "B", "R"): "U",
(113, 12, "B", "U"): "U",
(113, 12, "D", "B"): "D",
(113, 12, "D", "F"): "D",
(113, 12, "D", "L"): "D",
(113, 12, "D", "R"): "D",
(113, 12, "F", "D"): "U",
(113, 12, "F", "L"): "U",
(113, 12, "F", "R"): "U",
(113, 12, "F", "U"): "U",
(113, 12, "L", "B"): "D",
(113, 12, "L", "D"): "U",
(113, 12, "L", "F"): "D",
(113, 12, "L", "U"): "U",
(113, 12, "R", "B"): "D",
(113, 12, "R", "D"): "U",
(113, 12, "R", "F"): "D",
(113, 12, "R", "U"): "U",
(113, 12, "U", "B"): "D",
(113, 12, "U", "F"): "D",
(113, 12, "U", "L"): "D",
(113, 12, "U", "R"): "D",
(115, 84, "B", "D"): "U",
(115, 84, "B", "L"): "U",
(115, 84, "B", "R"): "U",
(115, 84, "B", "U"): "U",
(115, 84, "D", "B"): "D",
(115, 84, "D", "F"): "D",
(115, 84, "D", "L"): "D",
(115, 84, "D", "R"): "D",
(115, 84, "F", "D"): "U",
(115, 84, "F", "L"): "U",
(115, 84, "F", "R"): "U",
(115, 84, "F", "U"): "U",
(115, 84, "L", "B"): "D",
(115, 84, "L", "D"): "U",
(115, 84, "L", "F"): "D",
(115, 84, "L", "U"): "U",
(115, 84, "R", "B"): "D",
(115, 84, "R", "D"): "U",
(115, 84, "R", "F"): "D",
(115, 84, "R", "U"): "U",
(115, 84, "U", "B"): "D",
(115, 84, "U", "F"): "D",
(115, 84, "U", "L"): "D",
(115, 84, "U", "R"): "D",
(120, 151, "B", "D"): "D",
(120, 151, "B", "L"): "D",
(120, 151, "B", "R"): "D",
(120, 151, "B", "U"): "D",
(120, 151, "D", "B"): "U",
(120, 151, "D", "F"): "U",
(120, 151, "D", "L"): "U",
(120, 151, "D", "R"): "U",
(120, 151, "F", "D"): "D",
(120, 151, "F", "L"): "D",
(120, 151, "F", "R"): "D",
(120, 151, "F", "U"): "D",
(120, 151, "L", "B"): "U",
(120, 151, "L", "D"): "D",
(120, 151, "L", "F"): "U",
(120, 151, "L", "U"): "D",
(120, 151, "R", "B"): "U",
(120, 151, "R", "D"): "D",
(120, 151, "R", "F"): "U",
(120, 151, "R", "U"): "D",
(120, 151, "U", "B"): "U",
(120, 151, "U", "F"): "U",
(120, 151, "U", "L"): "U",
(120, 151, "U", "R"): "U",
(121, 90, "B", "D"): "U",
(121, 90, "B", "L"): "U",
(121, 90, "B", "R"): "U",
(121, 90, "B", "U"): "U",
(121, 90, "D", "B"): "D",
(121, 90, "D", "F"): "D",
(121, 90, "D", "L"): "D",
(121, 90, "D", "R"): "D",
(121, 90, "F", "D"): "U",
(121, 90, "F", "L"): "U",
(121, 90, "F", "R"): "U",
(121, 90, "F", "U"): "U",
(121, 90, "L", "B"): "D",
(121, 90, "L", "D"): "U",
(121, 90, "L", "F"): "D",
(121, 90, "L", "U"): "U",
(121, 90, "R", "B"): "D",
(121, 90, "R", "D"): "U",
(121, 90, "R", "F"): "D",
(121, 90, "R", "U"): "U",
(121, 90, "U", "B"): "D",
(121, 90, "U", "F"): "D",
(121, 90, "U", "L"): "D",
(121, 90, "U", "R"): "D",
(126, 157, "B", "D"): "D",
(126, 157, "B", "L"): "D",
(126, 157, "B", "R"): "D",
(126, 157, "B", "U"): "D",
(126, 157, "D", "B"): "U",
(126, 157, "D", "F"): "U",
(126, 157, "D", "L"): "U",
(126, 157, "D", "R"): "U",
(126, 157, "F", "D"): "D",
(126, 157, "F", "L"): "D",
(126, 157, "F", "R"): "D",
(126, 157, "F", "U"): "D",
(126, 157, "L", "B"): "U",
(126, 157, "L", "D"): "D",
(126, 157, "L", "F"): "U",
(126, 157, "L", "U"): "D",
(126, 157, "R", "B"): "U",
(126, 157, "R", "D"): "D",
(126, 157, "R", "F"): "U",
(126, 157, "R", "U"): "D",
(126, 157, "U", "B"): "U",
(126, 157, "U", "F"): "U",
(126, 157, "U", "L"): "U",
(126, 157, "U", "R"): "U",
(127, 96, "B", "D"): "D",
(127, 96, "B", "L"): "D",
(127, 96, "B", "R"): "D",
(127, 96, "B", "U"): "D",
(127, 96, "D", "B"): "U",
(127, 96, "D", "F"): "U",
(127, 96, "D", "L"): "U",
(127, 96, "D", "R"): "U",
(127, 96, "F", "D"): "D",
(127, 96, "F", "L"): "D",
(127, 96, "F", "R"): "D",
(127, 96, "F", "U"): "D",
(127, 96, "L", "B"): "U",
(127, 96, "L", "D"): "D",
(127, 96, "L", "F"): "U",
(127, 96, "L", "U"): "D",
(127, 96, "R", "B"): "U",
(127, 96, "R", "D"): "D",
(127, 96, "R", "F"): "U",
(127, 96, "R", "U"): "D",
(127, 96, "U", "B"): "U",
(127, 96, "U", "F"): "U",
(127, 96, "U", "L"): "U",
(127, 96, "U", "R"): "U",
(132, 163, "B", "D"): "U",
(132, 163, "B", "L"): "U",
(132, 163, "B", "R"): "U",
(132, 163, "B", "U"): "U",
(132, 163, "D", "B"): "D",
(132, 163, "D", "F"): "D",
(132, 163, "D", "L"): "D",
(132, 163, "D", "R"): "D",
(132, 163, "F", "D"): "U",
(132, 163, "F", "L"): "U",
(132, 163, "F", "R"): "U",
(132, 163, "F", "U"): "U",
(132, 163, "L", "B"): "D",
(132, 163, "L", "D"): "U",
(132, 163, "L", "F"): "D",
(132, 163, "L", "U"): "U",
(132, 163, "R", "B"): "D",
(132, 163, "R", "D"): "U",
(132, 163, "R", "F"): "D",
(132, 163, "R", "U"): "U",
(132, 163, "U", "B"): "D",
(132, 163, "U", "F"): "D",
(132, 163, "U", "L"): "D",
(132, 163, "U", "R"): "D",
(133, 102, "B", "D"): "D",
(133, 102, "B", "L"): "D",
(133, 102, "B", "R"): "D",
(133, 102, "B", "U"): "D",
(133, 102, "D", "B"): "U",
(133, 102, "D", "F"): "U",
(133, 102, "D", "L"): "U",
(133, 102, "D", "R"): "U",
(133, 102, "F", "D"): "D",
(133, 102, "F", "L"): "D",
(133, 102, "F", "R"): "D",
(133, 102, "F", "U"): "D",
(133, 102, "L", "B"): "U",
(133, 102, "L", "D"): "D",
(133, 102, "L", "F"): "U",
(133, 102, "L", "U"): "D",
(133, 102, "R", "B"): "U",
(133, 102, "R", "D"): "D",
(133, 102, "R", "F"): "U",
(133, 102, "R", "U"): "D",
(133, 102, "U", "B"): "U",
(133, 102, "U", "F"): "U",
(133, 102, "U", "L"): "U",
(133, 102, "U", "R"): "U",
(138, 169, "B", "D"): "U",
(138, 169, "B", "L"): "U",
(138, 169, "B", "R"): "U",
(138, 169, "B", "U"): "U",
(138, 169, "D", "B"): "D",
(138, 169, "D", "F"): "D",
(138, 169, "D", "L"): "D",
(138, 169, "D", "R"): "D",
(138, 169, "F", "D"): "U",
(138, 169, "F", "L"): "U",
(138, 169, "F", "R"): "U",
(138, 169, "F", "U"): "U",
(138, 169, "L", "B"): "D",
(138, 169, "L", "D"): "U",
(138, 169, "L", "F"): "D",
(138, 169, "L", "U"): "U",
(138, 169, "R", "B"): "D",
(138, 169, "R", "D"): "U",
(138, 169, "R", "F"): "D",
(138, 169, "R", "U"): "U",
(138, 169, "U", "B"): "D",
(138, 169, "U", "F"): "D",
(138, 169, "U", "L"): "D",
(138, 169, "U", "R"): "D",
(140, 192, "B", "D"): "U",
(140, 192, "B", "L"): "U",
(140, 192, "B", "R"): "U",
(140, 192, "B", "U"): "U",
(140, 192, "D", "B"): "D",
(140, 192, "D", "F"): "D",
(140, 192, "D", "L"): "D",
(140, 192, "D", "R"): "D",
(140, 192, "F", "D"): "U",
(140, 192, "F", "L"): "U",
(140, 192, "F", "R"): "U",
(140, 192, "F", "U"): "U",
(140, 192, "L", "B"): "D",
(140, 192, "L", "D"): "U",
(140, 192, "L", "F"): "D",
(140, 192, "L", "U"): "U",
(140, 192, "R", "B"): "D",
(140, 192, "R", "D"): "U",
(140, 192, "R", "F"): "D",
(140, 192, "R", "U"): "U",
(140, 192, "U", "B"): "D",
(140, 192, "U", "F"): "D",
(140, 192, "U", "L"): "D",
(140, 192, "U", "R"): "D",
(141, 198, "B", "D"): "U",
(141, 198, "B", "L"): "U",
(141, 198, "B", "R"): "U",
(141, 198, "B", "U"): "U",
(141, 198, "D", "B"): "D",
(141, 198, "D", "F"): "D",
(141, 198, "D", "L"): "D",
(141, 198, "D", "R"): "D",
(141, 198, "F", "D"): "U",
(141, 198, "F", "L"): "U",
(141, 198, "F", "R"): "U",
(141, 198, "F", "U"): "U",
(141, 198, "L", "B"): "D",
(141, 198, "L", "D"): "U",
(141, 198, "L", "F"): "D",
(141, 198, "L", "U"): "U",
(141, 198, "R", "B"): "D",
(141, 198, "R", "D"): "U",
(141, 198, "R", "F"): "D",
(141, 198, "R", "U"): "U",
(141, 198, "U", "B"): "D",
(141, 198, "U", "F"): "D",
(141, 198, "U", "L"): "D",
(141, 198, "U", "R"): "D",
(142, 204, "B", "D"): "D",
(142, 204, "B", "L"): "D",
(142, 204, "B", "R"): "D",
(142, 204, "B", "U"): "D",
(142, 204, "D", "B"): "U",
(142, 204, "D", "F"): "U",
(142, 204, "D", "L"): "U",
(142, 204, "D", "R"): "U",
(142, 204, "F", "D"): "D",
(142, 204, "F", "L"): "D",
(142, 204, "F", "R"): "D",
(142, 204, "F", "U"): "D",
(142, 204, "L", "B"): "U",
(142, 204, "L", "D"): "D",
(142, 204, "L", "F"): "U",
(142, 204, "L", "U"): "D",
(142, 204, "R", "B"): "U",
(142, 204, "R", "D"): "D",
(142, 204, "R", "F"): "U",
(142, 204, "R", "U"): "D",
(142, 204, "U", "B"): "U",
(142, 204, "U", "F"): "U",
(142, 204, "U", "L"): "U",
(142, 204, "U", "R"): "U",
(143, 210, "B", "D"): "D",
(143, 210, "B", "L"): "D",
(143, 210, "B", "R"): "D",
(143, 210, "B", "U"): "D",
(143, 210, "D", "B"): "U",
(143, 210, "D", "F"): "U",
(143, 210, "D", "L"): "U",
(143, 210, "D", "R"): "U",
(143, 210, "F", "D"): "D",
(143, 210, "F", "L"): "D",
(143, 210, "F", "R"): "D",
(143, 210, "F", "U"): "D",
(143, 210, "L", "B"): "U",
(143, 210, "L", "D"): "D",
(143, 210, "L", "F"): "U",
(143, 210, "L", "U"): "D",
(143, 210, "R", "B"): "U",
(143, 210, "R", "D"): "D",
(143, 210, "R", "F"): "U",
(143, 210, "R", "U"): "D",
(143, 210, "U", "B"): "U",
(143, 210, "U", "F"): "U",
(143, 210, "U", "L"): "U",
(143, 210, "U", "R"): "U",
(146, 5, "B", "D"): "D",
(146, 5, "B", "L"): "D",
(146, 5, "B", "R"): "D",
(146, 5, "B", "U"): "D",
(146, 5, "D", "B"): "U",
(146, 5, "D", "F"): "U",
(146, 5, "D", "L"): "U",
(146, 5, "D", "R"): "U",
(146, 5, "F", "D"): "D",
(146, 5, "F", "L"): "D",
(146, 5, "F", "R"): "D",
(146, 5, "F", "U"): "D",
(146, 5, "L", "B"): "U",
(146, 5, "L", "D"): "D",
(146, 5, "L", "F"): "U",
(146, 5, "L", "U"): "D",
(146, 5, "R", "B"): "U",
(146, 5, "R", "D"): "D",
(146, 5, "R", "F"): "U",
(146, 5, "R", "U"): "D",
(146, 5, "U", "B"): "U",
(146, 5, "U", "F"): "U",
(146, 5, "U", "L"): "U",
(146, 5, "U", "R"): "U",
(147, 4, "B", "D"): "D",
(147, 4, "B", "L"): "D",
(147, 4, "B", "R"): "D",
(147, 4, "B", "U"): "D",
(147, 4, "D", "B"): "U",
(147, 4, "D", "F"): "U",
(147, 4, "D", "L"): "U",
(147, 4, "D", "R"): "U",
(147, 4, "F", "D"): "D",
(147, 4, "F", "L"): "D",
(147, 4, "F", "R"): "D",
(147, 4, "F", "U"): "D",
(147, 4, "L", "B"): "U",
(147, 4, "L", "D"): "D",
(147, 4, "L", "F"): "U",
(147, 4, "L", "U"): "D",
(147, 4, "R", "B"): "U",
(147, 4, "R", "D"): "D",
(147, 4, "R", "F"): "U",
(147, 4, "R", "U"): "D",
(147, 4, "U", "B"): "U",
(147, 4, "U", "F"): "U",
(147, 4, "U", "L"): "U",
(147, 4, "U", "R"): "U",
(148, 3, "B", "D"): "U",
(148, 3, "B", "L"): "U",
(148, 3, "B", "R"): "U",
(148, 3, "B", "U"): "U",
(148, 3, "D", "B"): "D",
(148, 3, "D", "F"): "D",
(148, 3, "D", "L"): "D",
(148, 3, "D", "R"): "D",
(148, 3, "F", "D"): "U",
(148, 3, "F", "L"): "U",
(148, 3, "F", "R"): "U",
(148, 3, "F", "U"): "U",
(148, 3, "L", "B"): "D",
(148, 3, "L", "D"): "U",
(148, 3, "L", "F"): "D",
(148, 3, "L", "U"): "U",
(148, 3, "R", "B"): "D",
(148, 3, "R", "D"): "U",
(148, 3, "R", "F"): "D",
(148, 3, "R", "U"): "U",
(148, 3, "U", "B"): "D",
(148, 3, "U", "F"): "D",
(148, 3, "U", "L"): "D",
(148, 3, "U", "R"): "D",
(149, 2, "B", "D"): "U",
(149, 2, "B", "L"): "U",
(149, 2, "B", "R"): "U",
(149, 2, "B", "U"): "U",
(149, 2, "D", "B"): "D",
(149, 2, "D", "F"): "D",
(149, 2, "D", "L"): "D",
(149, 2, "D", "R"): "D",
(149, 2, "F", "D"): "U",
(149, 2, "F", "L"): "U",
(149, 2, "F", "R"): "U",
(149, 2, "F", "U"): "U",
(149, 2, "L", "B"): "D",
(149, 2, "L", "D"): "U",
(149, 2, "L", "F"): "D",
(149, 2, "L", "U"): "U",
(149, 2, "R", "B"): "D",
(149, 2, "R", "D"): "U",
(149, 2, "R", "F"): "D",
(149, 2, "R", "U"): "U",
(149, 2, "U", "B"): "D",
(149, 2, "U", "F"): "D",
(149, 2, "U", "L"): "D",
(149, 2, "U", "R"): "D",
(151, 120, "B", "D"): "U",
(151, 120, "B", "L"): "U",
(151, 120, "B", "R"): "U",
(151, 120, "B", "U"): "U",
(151, 120, "D", "B"): "D",
(151, 120, "D", "F"): "D",
(151, 120, "D", "L"): "D",
(151, 120, "D", "R"): "D",
(151, 120, "F", "D"): "U",
(151, 120, "F", "L"): "U",
(151, 120, "F", "R"): "U",
(151, 120, "F", "U"): "U",
(151, 120, "L", "B"): "D",
(151, 120, "L", "D"): "U",
(151, 120, "L", "F"): "D",
(151, 120, "L", "U"): "U",
(151, 120, "R", "B"): "D",
(151, 120, "R", "D"): "U",
(151, 120, "R", "F"): "D",
(151, 120, "R", "U"): "U",
(151, 120, "U", "B"): "D",
(151, 120, "U", "F"): "D",
(151, 120, "U", "L"): "D",
(151, 120, "U", "R"): "D",
(156, 43, "B", "D"): "D",
(156, 43, "B", "L"): "D",
(156, 43, "B", "R"): "D",
(156, 43, "B", "U"): "D",
(156, 43, "D", "B"): "U",
(156, 43, "D", "F"): "U",
(156, 43, "D", "L"): "U",
(156, 43, "D", "R"): "U",
(156, 43, "F", "D"): "D",
(156, 43, "F", "L"): "D",
(156, 43, "F", "R"): "D",
(156, 43, "F", "U"): "D",
(156, 43, "L", "B"): "U",
(156, 43, "L", "D"): "D",
(156, 43, "L", "F"): "U",
(156, 43, "L", "U"): "D",
(156, 43, "R", "B"): "U",
(156, 43, "R", "D"): "D",
(156, 43, "R", "F"): "U",
(156, 43, "R", "U"): "D",
(156, 43, "U", "B"): "U",
(156, 43, "U", "F"): "U",
(156, 43, "U", "L"): "U",
(156, 43, "U", "R"): "U",
(157, 126, "B", "D"): "U",
(157, 126, "B", "L"): "U",
(157, 126, "B", "R"): "U",
(157, 126, "B", "U"): "U",
(157, 126, "D", "B"): "D",
(157, 126, "D", "F"): "D",
(157, 126, "D", "L"): "D",
(157, 126, "D", "R"): "D",
(157, 126, "F", "D"): "U",
(157, 126, "F", "L"): "U",
(157, 126, "F", "R"): "U",
(157, 126, "F", "U"): "U",
(157, 126, "L", "B"): "D",
(157, 126, "L", "D"): "U",
(157, 126, "L", "F"): "D",
(157, 126, "L", "U"): "U",
(157, 126, "R", "B"): "D",
(157, 126, "R", "D"): "U",
(157, 126, "R", "F"): "D",
(157, 126, "R", "U"): "U",
(157, 126, "U", "B"): "D",
(157, 126, "U", "F"): "D",
(157, 126, "U", "L"): "D",
(157, 126, "U", "R"): "D",
(162, 49, "B", "D"): "D",
(162, 49, "B", "L"): "D",
(162, 49, "B", "R"): "D",
(162, 49, "B", "U"): "D",
(162, 49, "D", "B"): "U",
(162, 49, "D", "F"): "U",
(162, 49, "D", "L"): "U",
(162, 49, "D", "R"): "U",
(162, 49, "F", "D"): "D",
(162, 49, "F", "L"): "D",
(162, 49, "F", "R"): "D",
(162, 49, "F", "U"): "D",
(162, 49, "L", "B"): "U",
(162, 49, "L", "D"): "D",
(162, 49, "L", "F"): "U",
(162, 49, "L", "U"): "D",
(162, 49, "R", "B"): "U",
(162, 49, "R", "D"): "D",
(162, 49, "R", "F"): "U",
(162, 49, "R", "U"): "D",
(162, 49, "U", "B"): "U",
(162, 49, "U", "F"): "U",
(162, 49, "U", "L"): "U",
(162, 49, "U", "R"): "U",
(163, 132, "B", "D"): "D",
(163, 132, "B", "L"): "D",
(163, 132, "B", "R"): "D",
(163, 132, "B", "U"): "D",
(163, 132, "D", "B"): "U",
(163, 132, "D", "F"): "U",
(163, 132, "D", "L"): "U",
(163, 132, "D", "R"): "U",
(163, 132, "F", "D"): "D",
(163, 132, "F", "L"): "D",
(163, 132, "F", "R"): "D",
(163, 132, "F", "U"): "D",
(163, 132, "L", "B"): "U",
(163, 132, "L", "D"): "D",
(163, 132, "L", "F"): "U",
(163, 132, "L", "U"): "D",
(163, 132, "R", "B"): "U",
(163, 132, "R", "D"): "D",
(163, 132, "R", "F"): "U",
(163, 132, "R", "U"): "D",
(163, 132, "U", "B"): "U",
(163, 132, "U", "F"): "U",
(163, 132, "U", "L"): "U",
(163, 132, "U", "R"): "U",
(168, 55, "B", "D"): "U",
(168, 55, "B", "L"): "U",
(168, 55, "B", "R"): "U",
(168, 55, "B", "U"): "U",
(168, 55, "D", "B"): "D",
(168, 55, "D", "F"): "D",
(168, 55, "D", "L"): "D",
(168, 55, "D", "R"): "D",
(168, 55, "F", "D"): "U",
(168, 55, "F", "L"): "U",
(168, 55, "F", "R"): "U",
(168, 55, "F", "U"): "U",
(168, 55, "L", "B"): "D",
(168, 55, "L", "D"): "U",
(168, 55, "L", "F"): "D",
(168, 55, "L", "U"): "U",
(168, 55, "R", "B"): "D",
(168, 55, "R", "D"): "U",
(168, 55, "R", "F"): "D",
(168, 55, "R", "U"): "U",
(168, 55, "U", "B"): "D",
(168, 55, "U", "F"): "D",
(168, 55, "U", "L"): "D",
(168, 55, "U", "R"): "D",
(169, 138, "B", "D"): "D",
(169, 138, "B", "L"): "D",
(169, 138, "B", "R"): "D",
(169, 138, "B", "U"): "D",
(169, 138, "D", "B"): "U",
(169, 138, "D", "F"): "U",
(169, 138, "D", "L"): "U",
(169, 138, "D", "R"): "U",
(169, 138, "F", "D"): "D",
(169, 138, "F", "L"): "D",
(169, 138, "F", "R"): "D",
(169, 138, "F", "U"): "D",
(169, 138, "L", "B"): "U",
(169, 138, "L", "D"): "D",
(169, 138, "L", "F"): "U",
(169, 138, "L", "U"): "D",
(169, 138, "R", "B"): "U",
(169, 138, "R", "D"): "D",
(169, 138, "R", "F"): "U",
(169, 138, "R", "U"): "D",
(169, 138, "U", "B"): "U",
(169, 138, "U", "F"): "U",
(169, 138, "U", "L"): "U",
(169, 138, "U", "R"): "U",
(174, 61, "B", "D"): "U",
(174, 61, "B", "L"): "U",
(174, 61, "B", "R"): "U",
(174, 61, "B", "U"): "U",
(174, 61, "D", "B"): "D",
(174, 61, "D", "F"): "D",
(174, 61, "D", "L"): "D",
(174, 61, "D", "R"): "D",
(174, 61, "F", "D"): "U",
(174, 61, "F", "L"): "U",
(174, 61, "F", "R"): "U",
(174, 61, "F", "U"): "U",
(174, 61, "L", "B"): "D",
(174, 61, "L", "D"): "U",
(174, 61, "L", "F"): "D",
(174, 61, "L", "U"): "U",
(174, 61, "R", "B"): "D",
(174, 61, "R", "D"): "U",
(174, 61, "R", "F"): "D",
(174, 61, "R", "U"): "U",
(174, 61, "U", "B"): "D",
(174, 61, "U", "F"): "D",
(174, 61, "U", "L"): "D",
(174, 61, "U", "R"): "D",
(176, 215, "B", "D"): "U",
(176, 215, "B", "L"): "U",
(176, 215, "B", "R"): "U",
(176, 215, "B", "U"): "U",
(176, 215, "D", "B"): "D",
(176, 215, "D", "F"): "D",
(176, 215, "D", "L"): "D",
(176, 215, "D", "R"): "D",
(176, 215, "F", "D"): "U",
(176, 215, "F", "L"): "U",
(176, 215, "F", "R"): "U",
(176, 215, "F", "U"): "U",
(176, 215, "L", "B"): "D",
(176, 215, "L", "D"): "U",
(176, 215, "L", "F"): "D",
(176, 215, "L", "U"): "U",
(176, 215, "R", "B"): "D",
(176, 215, "R", "D"): "U",
(176, 215, "R", "F"): "D",
(176, 215, "R", "U"): "U",
(176, 215, "U", "B"): "D",
(176, 215, "U", "F"): "D",
(176, 215, "U", "L"): "D",
(176, 215, "U", "R"): "D",
(177, 214, "B", "D"): "U",
(177, 214, "B", "L"): "U",
(177, 214, "B", "R"): "U",
(177, 214, "B", "U"): "U",
(177, 214, "D", "B"): "D",
(177, 214, "D", "F"): "D",
(177, 214, "D", "L"): "D",
(177, 214, "D", "R"): "D",
(177, 214, "F", "D"): "U",
(177, 214, "F", "L"): "U",
(177, 214, "F", "R"): "U",
(177, 214, "F", "U"): "U",
(177, 214, "L", "B"): "D",
(177, 214, "L", "D"): "U",
(177, 214, "L", "F"): "D",
(177, 214, "L", "U"): "U",
(177, 214, "R", "B"): "D",
(177, 214, "R", "D"): "U",
(177, 214, "R", "F"): "D",
(177, 214, "R", "U"): "U",
(177, 214, "U", "B"): "D",
(177, 214, "U", "F"): "D",
(177, 214, "U", "L"): "D",
(177, 214, "U", "R"): "D",
(178, 213, "B", "D"): "D",
(178, 213, "B", "L"): "D",
(178, 213, "B", "R"): "D",
(178, 213, "B", "U"): "D",
(178, 213, "D", "B"): "U",
(178, 213, "D", "F"): "U",
(178, 213, "D", "L"): "U",
(178, 213, "D", "R"): "U",
(178, 213, "F", "D"): "D",
(178, 213, "F", "L"): "D",
(178, 213, "F", "R"): "D",
(178, 213, "F", "U"): "D",
(178, 213, "L", "B"): "U",
(178, 213, "L", "D"): "D",
(178, 213, "L", "F"): "U",
(178, 213, "L", "U"): "D",
(178, 213, "R", "B"): "U",
(178, 213, "R", "D"): "D",
(178, 213, "R", "F"): "U",
(178, 213, "R", "U"): "D",
(178, 213, "U", "B"): "U",
(178, 213, "U", "F"): "U",
(178, 213, "U", "L"): "U",
(178, 213, "U", "R"): "U",
(179, 212, "B", "D"): "D",
(179, 212, "B", "L"): "D",
(179, 212, "B", "R"): "D",
(179, 212, "B", "U"): "D",
(179, 212, "D", "B"): "U",
(179, 212, "D", "F"): "U",
(179, 212, "D", "L"): "U",
(179, 212, "D", "R"): "U",
(179, 212, "F", "D"): "D",
(179, 212, "F", "L"): "D",
(179, 212, "F", "R"): "D",
(179, 212, "F", "U"): "D",
(179, 212, "L", "B"): "U",
(179, 212, "L", "D"): "D",
(179, 212, "L", "F"): "U",
(179, 212, "L", "U"): "D",
(179, 212, "R", "B"): "U",
(179, 212, "R", "D"): "D",
(179, 212, "R", "F"): "U",
(179, 212, "R", "U"): "D",
(179, 212, "U", "B"): "U",
(179, 212, "U", "F"): "U",
(179, 212, "U", "L"): "U",
(179, 212, "U", "R"): "U",
(182, 104, "B", "D"): "D",
(182, 104, "B", "L"): "D",
(182, 104, "B", "R"): "D",
(182, 104, "B", "U"): "D",
(182, 104, "D", "B"): "U",
(182, 104, "D", "F"): "U",
(182, 104, "D", "L"): "U",
(182, 104, "D", "R"): "U",
(182, 104, "F", "D"): "D",
(182, 104, "F", "L"): "D",
(182, 104, "F", "R"): "D",
(182, 104, "F", "U"): "D",
(182, 104, "L", "B"): "U",
(182, 104, "L", "D"): "D",
(182, 104, "L", "F"): "U",
(182, 104, "L", "U"): "D",
(182, 104, "R", "B"): "U",
(182, 104, "R", "D"): "D",
(182, 104, "R", "F"): "U",
(182, 104, "R", "U"): "D",
(182, 104, "U", "B"): "U",
(182, 104, "U", "F"): "U",
(182, 104, "U", "L"): "U",
(182, 104, "U", "R"): "U",
(183, 105, "B", "D"): "D",
(183, 105, "B", "L"): "D",
(183, 105, "B", "R"): "D",
(183, 105, "B", "U"): "D",
(183, 105, "D", "B"): "U",
(183, 105, "D", "F"): "U",
(183, 105, "D", "L"): "U",
(183, 105, "D", "R"): "U",
(183, 105, "F", "D"): "D",
(183, 105, "F", "L"): "D",
(183, 105, "F", "R"): "D",
(183, 105, "F", "U"): "D",
(183, 105, "L", "B"): "U",
(183, 105, "L", "D"): "D",
(183, 105, "L", "F"): "U",
(183, 105, "L", "U"): "D",
(183, 105, "R", "B"): "U",
(183, 105, "R", "D"): "D",
(183, 105, "R", "F"): "U",
(183, 105, "R", "U"): "D",
(183, 105, "U", "B"): "U",
(183, 105, "U", "F"): "U",
(183, 105, "U", "L"): "U",
(183, 105, "U", "R"): "U",
(184, 106, "B", "D"): "U",
(184, 106, "B", "L"): "U",
(184, 106, "B", "R"): "U",
(184, 106, "B", "U"): "U",
(184, 106, "D", "B"): "D",
(184, 106, "D", "F"): "D",
(184, 106, "D", "L"): "D",
(184, 106, "D", "R"): "D",
(184, 106, "F", "D"): "U",
(184, 106, "F", "L"): "U",
(184, 106, "F", "R"): "U",
(184, 106, "F", "U"): "U",
(184, 106, "L", "B"): "D",
(184, 106, "L", "D"): "U",
(184, 106, "L", "F"): "D",
(184, 106, "L", "U"): "U",
(184, 106, "R", "B"): "D",
(184, 106, "R", "D"): "U",
(184, 106, "R", "F"): "D",
(184, 106, "R", "U"): "U",
(184, 106, "U", "B"): "D",
(184, 106, "U", "F"): "D",
(184, 106, "U", "L"): "D",
(184, 106, "U", "R"): "D",
(185, 107, "B", "D"): "U",
(185, 107, "B", "L"): "U",
(185, 107, "B", "R"): "U",
(185, 107, "B", "U"): "U",
(185, 107, "D", "B"): "D",
(185, 107, "D", "F"): "D",
(185, 107, "D", "L"): "D",
(185, 107, "D", "R"): "D",
(185, 107, "F", "D"): "U",
(185, 107, "F", "L"): "U",
(185, 107, "F", "R"): "U",
(185, 107, "F", "U"): "U",
(185, 107, "L", "B"): "D",
(185, 107, "L", "D"): "U",
(185, 107, "L", "F"): "D",
(185, 107, "L", "U"): "U",
(185, 107, "R", "B"): "D",
(185, 107, "R", "D"): "U",
(185, 107, "R", "F"): "D",
(185, 107, "R", "U"): "U",
(185, 107, "U", "B"): "D",
(185, 107, "U", "F"): "D",
(185, 107, "U", "L"): "D",
(185, 107, "U", "R"): "D",
(187, 71, "B", "D"): "U",
(187, 71, "B", "L"): "U",
(187, 71, "B", "R"): "U",
(187, 71, "B", "U"): "U",
(187, 71, "D", "B"): "D",
(187, 71, "D", "F"): "D",
(187, 71, "D", "L"): "D",
(187, 71, "D", "R"): "D",
(187, 71, "F", "D"): "U",
(187, 71, "F", "L"): "U",
(187, 71, "F", "R"): "U",
(187, 71, "F", "U"): "U",
(187, 71, "L", "B"): "D",
(187, 71, "L", "D"): "U",
(187, 71, "L", "F"): "D",
(187, 71, "L", "U"): "U",
(187, 71, "R", "B"): "D",
(187, 71, "R", "D"): "U",
(187, 71, "R", "F"): "D",
(187, 71, "R", "U"): "U",
(187, 71, "U", "B"): "D",
(187, 71, "U", "F"): "D",
(187, 71, "U", "L"): "D",
(187, 71, "U", "R"): "D",
(192, 140, "B", "D"): "D",
(192, 140, "B", "L"): "D",
(192, 140, "B", "R"): "D",
(192, 140, "B", "U"): "D",
(192, 140, "D", "B"): "U",
(192, 140, "D", "F"): "U",
(192, 140, "D", "L"): "U",
(192, 140, "D", "R"): "U",
(192, 140, "F", "D"): "D",
(192, 140, "F", "L"): "D",
(192, 140, "F", "R"): "D",
(192, 140, "F", "U"): "D",
(192, 140, "L", "B"): "U",
(192, 140, "L", "D"): "D",
(192, 140, "L", "F"): "U",
(192, 140, "L", "U"): "D",
(192, 140, "R", "B"): "U",
(192, 140, "R", "D"): "D",
(192, 140, "R", "F"): "U",
(192, 140, "R", "U"): "D",
(192, 140, "U", "B"): "U",
(192, 140, "U", "F"): "U",
(192, 140, "U", "L"): "U",
(192, 140, "U", "R"): "U",
(193, 70, "B", "D"): "U",
(193, 70, "B", "L"): "U",
(193, 70, "B", "R"): "U",
(193, 70, "B", "U"): "U",
(193, 70, "D", "B"): "D",
(193, 70, "D", "F"): "D",
(193, 70, "D", "L"): "D",
(193, 70, "D", "R"): "D",
(193, 70, "F", "D"): "U",
(193, 70, "F", "L"): "U",
(193, 70, "F", "R"): "U",
(193, 70, "F", "U"): "U",
(193, 70, "L", "B"): "D",
(193, 70, "L", "D"): "U",
(193, 70, "L", "F"): "D",
(193, 70, "L", "U"): "U",
(193, 70, "R", "B"): "D",
(193, 70, "R", "D"): "U",
(193, 70, "R", "F"): "D",
(193, 70, "R", "U"): "U",
(193, 70, "U", "B"): "D",
(193, 70, "U", "F"): "D",
(193, 70, "U", "L"): "D",
(193, 70, "U", "R"): "D",
(198, 141, "B", "D"): "D",
(198, 141, "B", "L"): "D",
(198, 141, "B", "R"): "D",
(198, 141, "B", "U"): "D",
(198, 141, "D", "B"): "U",
(198, 141, "D", "F"): "U",
(198, 141, "D", "L"): "U",
(198, 141, "D", "R"): "U",
(198, 141, "F", "D"): "D",
(198, 141, "F", "L"): "D",
(198, 141, "F", "R"): "D",
(198, 141, "F", "U"): "D",
(198, 141, "L", "B"): "U",
(198, 141, "L", "D"): "D",
(198, 141, "L", "F"): "U",
(198, 141, "L", "U"): "D",
(198, 141, "R", "B"): "U",
(198, 141, "R", "D"): "D",
(198, 141, "R", "F"): "U",
(198, 141, "R", "U"): "D",
(198, 141, "U", "B"): "U",
(198, 141, "U", "F"): "U",
(198, 141, "U", "L"): "U",
(198, 141, "U", "R"): "U",
(199, 69, "B", "D"): "D",
(199, 69, "B", "L"): "D",
(199, 69, "B", "R"): "D",
(199, 69, "B", "U"): "D",
(199, 69, "D", "B"): "U",
(199, 69, "D", "F"): "U",
(199, 69, "D", "L"): "U",
(199, 69, "D", "R"): "U",
(199, 69, "F", "D"): "D",
(199, 69, "F", "L"): "D",
(199, 69, "F", "R"): "D",
(199, 69, "F", "U"): "D",
(199, 69, "L", "B"): "U",
(199, 69, "L", "D"): "D",
(199, 69, "L", "F"): "U",
(199, 69, "L", "U"): "D",
(199, 69, "R", "B"): "U",
(199, 69, "R", "D"): "D",
(199, 69, "R", "F"): "U",
(199, 69, "R", "U"): "D",
(199, 69, "U", "B"): "U",
(199, 69, "U", "F"): "U",
(199, 69, "U", "L"): "U",
(199, 69, "U", "R"): "U",
(204, 142, "B", "D"): "U",
(204, 142, "B", "L"): "U",
(204, 142, "B", "R"): "U",
(204, 142, "B", "U"): "U",
(204, 142, "D", "B"): "D",
(204, 142, "D", "F"): "D",
(204, 142, "D", "L"): "D",
(204, 142, "D", "R"): "D",
(204, 142, "F", "D"): "U",
(204, 142, "F", "L"): "U",
(204, 142, "F", "R"): "U",
(204, 142, "F", "U"): "U",
(204, 142, "L", "B"): "D",
(204, 142, "L", "D"): "U",
(204, 142, "L", "F"): "D",
(204, 142, "L", "U"): "U",
(204, 142, "R", "B"): "D",
(204, 142, "R", "D"): "U",
(204, 142, "R", "F"): "D",
(204, 142, "R", "U"): "U",
(204, 142, "U", "B"): "D",
(204, 142, "U", "F"): "D",
(204, 142, "U", "L"): "D",
(204, 142, "U", "R"): "D",
(205, 68, "B", "D"): "D",
(205, 68, "B", "L"): "D",
(205, 68, "B", "R"): "D",
(205, 68, "B", "U"): "D",
(205, 68, "D", "B"): "U",
(205, 68, "D", "F"): "U",
(205, 68, "D", "L"): "U",
(205, 68, "D", "R"): "U",
(205, 68, "F", "D"): "D",
(205, 68, "F", "L"): "D",
(205, 68, "F", "R"): "D",
(205, 68, "F", "U"): "D",
(205, 68, "L", "B"): "U",
(205, 68, "L", "D"): "D",
(205, 68, "L", "F"): "U",
(205, 68, "L", "U"): "D",
(205, 68, "R", "B"): "U",
(205, 68, "R", "D"): "D",
(205, 68, "R", "F"): "U",
(205, 68, "R", "U"): "D",
(205, 68, "U", "B"): "U",
(205, 68, "U", "F"): "U",
(205, 68, "U", "L"): "U",
(205, 68, "U", "R"): "U",
(210, 143, "B", "D"): "U",
(210, 143, "B", "L"): "U",
(210, 143, "B", "R"): "U",
(210, 143, "B", "U"): "U",
(210, 143, "D", "B"): "D",
(210, 143, "D", "F"): "D",
(210, 143, "D", "L"): "D",
(210, 143, "D", "R"): "D",
(210, 143, "F", "D"): "U",
(210, 143, "F", "L"): "U",
(210, 143, "F", "R"): "U",
(210, 143, "F", "U"): "U",
(210, 143, "L", "B"): "D",
(210, 143, "L", "D"): "U",
(210, 143, "L", "F"): "D",
(210, 143, "L", "U"): "U",
(210, 143, "R", "B"): "D",
(210, 143, "R", "D"): "U",
(210, 143, "R", "F"): "D",
(210, 143, "R", "U"): "U",
(210, 143, "U", "B"): "D",
(210, 143, "U", "F"): "D",
(210, 143, "U", "L"): "D",
(210, 143, "U", "R"): "D",
(212, 179, "B", "D"): "U",
(212, 179, "B", "L"): "U",
(212, 179, "B", "R"): "U",
(212, 179, "B", "U"): "U",
(212, 179, "D", "B"): "D",
(212, 179, "D", "F"): "D",
(212, 179, "D", "L"): "D",
(212, 179, "D", "R"): "D",
(212, 179, "F", "D"): "U",
(212, 179, "F", "L"): "U",
(212, 179, "F", "R"): "U",
(212, 179, "F", "U"): "U",
(212, 179, "L", "B"): "D",
(212, 179, "L", "D"): "U",
(212, 179, "L", "F"): "D",
(212, 179, "L", "U"): "U",
(212, 179, "R", "B"): "D",
(212, 179, "R", "D"): "U",
(212, 179, "R", "F"): "D",
(212, 179, "R", "U"): "U",
(212, 179, "U", "B"): "D",
(212, 179, "U", "F"): "D",
(212, 179, "U", "L"): "D",
(212, 179, "U", "R"): "D",
(213, 178, "B", "D"): "U",
(213, 178, "B", "L"): "U",
(213, 178, "B", "R"): "U",
(213, 178, "B", "U"): "U",
(213, 178, "D", "B"): "D",
(213, 178, "D", "F"): "D",
(213, 178, "D", "L"): "D",
(213, 178, "D", "R"): "D",
(213, 178, "F", "D"): "U",
(213, 178, "F", "L"): "U",
(213, 178, "F", "R"): "U",
(213, 178, "F", "U"): "U",
(213, 178, "L", "B"): "D",
(213, 178, "L", "D"): "U",
(213, 178, "L", "F"): "D",
(213, 178, "L", "U"): "U",
(213, 178, "R", "B"): "D",
(213, 178, "R", "D"): "U",
(213, 178, "R", "F"): "D",
(213, 178, "R", "U"): "U",
(213, 178, "U", "B"): "D",
(213, 178, "U", "F"): "D",
(213, 178, "U", "L"): "D",
(213, 178, "U", "R"): "D",
(214, 177, "B", "D"): "D",
(214, 177, "B", "L"): "D",
(214, 177, "B", "R"): "D",
(214, 177, "B", "U"): "D",
(214, 177, "D", "B"): "U",
(214, 177, "D", "F"): "U",
(214, 177, "D", "L"): "U",
(214, 177, "D", "R"): "U",
(214, 177, "F", "D"): "D",
(214, 177, "F", "L"): "D",
(214, 177, "F", "R"): "D",
(214, 177, "F", "U"): "D",
(214, 177, "L", "B"): "U",
(214, 177, "L", "D"): "D",
(214, 177, "L", "F"): "U",
(214, 177, "L", "U"): "D",
(214, 177, "R", "B"): "U",
(214, 177, "R", "D"): "D",
(214, 177, "R", "F"): "U",
(214, 177, "R", "U"): "D",
(214, 177, "U", "B"): "U",
(214, 177, "U", "F"): "U",
(214, 177, "U", "L"): "U",
(214, 177, "U", "R"): "U",
(215, 176, "B", "D"): "D",
(215, 176, "B", "L"): "D",
(215, 176, "B", "R"): "D",
(215, 176, "B", "U"): "D",
(215, 176, "D", "B"): "U",
(215, 176, "D", "F"): "U",
(215, 176, "D", "L"): "U",
(215, 176, "D", "R"): "U",
(215, 176, "F", "D"): "D",
(215, 176, "F", "L"): "D",
(215, 176, "F", "R"): "D",
(215, 176, "F", "U"): "D",
(215, 176, "L", "B"): "U",
(215, 176, "L", "D"): "D",
(215, 176, "L", "F"): "U",
(215, 176, "L", "U"): "D",
(215, 176, "R", "B"): "U",
(215, 176, "R", "D"): "D",
(215, 176, "R", "F"): "U",
(215, 176, "R", "U"): "D",
(215, 176, "U", "B"): "U",
(215, 176, "U", "F"): "U",
(215, 176, "U", "L"): "U",
(215, 176, "U", "R"): "U",
}
| dwalton76/rubiks-color-resolver | rubikscolorresolver/cube_666.py | Python | mit | 73,654 |
# Copyright (C) 2019 Dmitry Marakasov <[email protected]>
#
# This file is part of repology
#
# repology is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# repology is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with repology. If not, see <http://www.gnu.org/licenses/>.
from typing import Iterable, Tuple
from repology.logger import Logger
from repology.packagemaker import NameType, PackageFactory, PackageMaker
from repology.parsers import Parser
from repology.transformer import PackageTransformer


def _iter_index(path: str) -> Iterable[Tuple[str, str, int, str, str, str]]:
with open(path, encoding='latin-1') as listfile:
for line in listfile:
            # Skip ';'-prefixed lines (treated as comments in the index)
            if line.startswith(';'):
                continue
            # Each entry is six space-separated fields; the description field is prefixed with ':'
            category, filename, size, date, version, description = line.rstrip().split(' ', 5)
if not description.startswith(':'):
raise RuntimeError('cannot parse line: {}'.format(line))
            yield category, filename, int(size), date, version, description[1:]


class OS4DepotIndexParser(Parser):
def iter_parse(self, path: str, factory: PackageFactory, transformer: PackageTransformer) -> Iterable[PackageMaker]:
for category, filename, size, date, version, description in _iter_index(path):
with factory.begin(filename) as pkg:
pkg.set_extra_field('filename', filename)
pkg.add_name(filename.rsplit('.', 1)[0], NameType.OS4DEPOT_NAME)
if not version:
pkg.log('skipping, no version', Logger.ERROR)
continue
pkg.set_version(version)
pkg.set_summary(description)
pkg.add_categories(category)
yield pkg
| AMDmi3/repology | repology/parsers/parsers/os4depot.py | Python | gpl-3.0 | 2,191 |
# -*- coding: utf-8 -*-
r"""
.. _tut-imorting-eeg-data:
===============================
Importing data from EEG devices
===============================
MNE includes various functions and utilities for reading EEG data and electrode
locations.
.. _import-bv:
BrainVision (.vhdr, .vmrk, .eeg)
================================
The BrainVision file format consists of three separate files:
1. A text header file (``.vhdr``) containing meta data.
2. A text marker file (``.vmrk``) containing information about events in the
data.
3. A binary data file (``.eeg``) containing the voltage values of the EEG.
Both text files are based on the `INI format <https://en.wikipedia.org/wiki/INI_file>`_
consisting of
* sections marked as ``[square brackets]``,
* comments marked as ``; comment``,
* and key-value pairs marked as ``key=value``.
Brain Products provides documentation for their core BrainVision file format.
The format specification is hosted on the
`Brain Products website <https://www.brainproducts.com/productdetails.php?id=21&tab=5>`_.
BrainVision EEG files can be read using :func:`mne.io.read_raw_brainvision`,
passing the ``.vhdr`` header file as the argument.
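As a minimal sketch (the file name below is a placeholder), reading the header file is enough; the marker and data files are located automatically:

.. code-block:: python

    import mne

    # Pass the .vhdr file; the matching .vmrk and .eeg files are picked up automatically
    raw = mne.io.read_raw_brainvision('recording.vhdr', preload=True)

    # Markers from the .vmrk file are available as annotations
    print(raw.annotations)
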
.. warning:: Renaming BrainVision files can be problematic due to their
multi-file structure. See this
`example <https://mne.tools/mne-bids/stable/auto_examples/rename_brainvision_files.html#sphx-glr-auto-examples-rename-brainvision-files-py>`_
for instructions.
.. note:: For *writing* BrainVision files, you can use the Python package
`pybv <https://pypi.org/project/pybv/>`_.
.. _import-edf:
European data format (.edf)
===========================
`EDF <http://www.edfplus.info/specs/edf.html>`_ and
`EDF+ <http://www.edfplus.info/specs/edfplus.html>`_ files can be read using
:func:`mne.io.read_raw_edf`. Both variants are 16-bit formats.
EDF+ files may contain annotation channels which can be used to store trigger
and event information. These annotations are available in ``raw.annotations``.
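For example (placeholder file name), the annotations can be inspected and converted to events after reading:

.. code-block:: python

    import mne

    raw = mne.io.read_raw_edf('recording.edf', preload=True)

    # EDF+ annotation channels are exposed as Annotations
    print(raw.annotations)

    # Annotations can be converted to events for epoching
    events, event_id = mne.events_from_annotations(raw)
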
Writing EDF files is not supported natively yet. `This gist
<https://gist.github.com/skjerns/bc660ef59dca0dbd53f00ed38c42f6be>`__ or
`MNELAB <https://github.com/cbrnr/mnelab>`_ (both of which use
`pyedflib <https://github.com/holgern/pyedflib>`_ under the hood) can be used
to export any :class:`mne.io.Raw` object to EDF/EDF+/BDF/BDF+.
.. _import-biosemi:
BioSemi data format (.bdf)
==========================
The `BDF format <http://www.biosemi.com/faq/file_format.htm>`_ is a 24-bit
variant of the EDF format used by EEG systems manufactured by BioSemi. It can
be imported with :func:`mne.io.read_raw_bdf`.
BioSemi amplifiers do not perform "common mode noise rejection" automatically.
The signals in the EEG file are the voltages between each electrode and the CMS
active electrode, which still contain some CM noise (50 Hz, ADC reference
noise, etc.). The `BioSemi FAQ <https://www.biosemi.com/faq/cms&drl.htm>`__
provides more details on this topic.
Therefore, it is advisable to choose a reference (e.g., a single channel like Cz,
average of linked mastoids, average of all electrodes, etc.) after importing
BioSemi data to avoid losing signal information. The data can be re-referenced
later after cleaning if desired.
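A short sketch of this workflow (the file name and reference choice are illustrative only):

.. code-block:: python

    import mne

    raw = mne.io.read_raw_bdf('recording.bdf', preload=True)

    # Re-reference to the average of all EEG channels to suppress common-mode noise
    raw.set_eeg_reference(ref_channels='average')
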
.. warning:: Data samples in a BDF file are represented in a 3-byte
(24-bit) format. Since 3-byte raw data buffers are not presently
supported in the FIF format, these data will be changed to 4-byte
integers in the conversion.
.. _import-gdf:
General data format (.gdf)
==========================
GDF files can be read using :func:`mne.io.read_raw_gdf`.
`GDF (General Data Format) <https://arxiv.org/abs/cs/0608052>`_ is a flexible
format for biomedical signals that overcomes some of the limitations of the
EDF format. The original specification (GDF v1) includes a binary header
and uses an event table. An updated specification (GDF v2) was released in
2011 and adds fields for additional subject-specific information (gender,
age, etc.) and allows storing several physical units and other properties.
Both specifications are supported by MNE.
.. _import-cnt:
Neuroscan CNT (.cnt)
====================
CNT files can be read using :func:`mne.io.read_raw_cnt`.
Channel locations can be read from a montage or the file header. If read
from the header, the data channels (channels that are not assigned to EOG, ECG,
EMG or MISC) are fit to a sphere and assigned a z-value accordingly. If a
non-data channel does not fit to the sphere, it is assigned a z-value of 0.
.. warning::
Reading channel locations from the file header may be dangerous, as the
x_coord and y_coord in the ELECTLOC section of the header do not necessarily
translate to absolute locations. Furthermore, EEG electrode locations that
do not fit to a sphere will distort the layout when computing the z-values.
If you are not sure about the channel locations in the header, using a
montage is encouraged.
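If the header locations are in doubt, a montage can be applied explicitly (the file and
montage names below are placeholders):
.. code-block:: python
    import mne
    raw = mne.io.read_raw_cnt("recording.cnt", preload=True)
    raw.set_montage(mne.channels.make_standard_montage("standard_1020"))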
.. _import-egi:
EGI simple binary (.egi)
========================
EGI simple binary files can be read using :func:`mne.io.read_raw_egi`.
EGI raw files are simple binary files with a header and can be exported by the
EGI Netstation acquisition software.
.. _import-mff:
EGI MFF (.mff)
==============
EGI MFF files can be read with :func:`mne.io.read_raw_egi`.
.. _import-set:
EEGLAB files (.set, .fdt)
=========================
EEGLAB .set files (which sometimes come with a separate .fdt file) can be read
using :func:`mne.io.read_raw_eeglab` and :func:`mne.read_epochs_eeglab`.
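For instance (the file names below are placeholders):
.. code-block:: python
    import mne
    raw = mne.io.read_raw_eeglab("continuous.set", preload=True)
    epochs = mne.read_epochs_eeglab("epoched.set")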
.. _import-nicolet:
Nicolet (.data)
===============
These files can be read with :func:`mne.io.read_raw_nicolet`.
.. _import-nxe:
eXimia EEG data (.nxe)
======================
EEG data from the Nexstim eXimia system can be read with
:func:`mne.io.read_raw_eximia`.
.. _import-persyst:
Persyst EEG data (.lay, .dat)
=============================
EEG data from the Persyst system can be read with
:func:`mne.io.read_raw_persyst`.
Note that subject metadata may not be properly imported because Persyst
sometimes changes its specification from version to version. Please let us know
if you encounter a problem.
Nihon Kohden EEG data (.eeg, .21e, .pnt, .log)
==============================================
EEG data from the Nihon Kohden (NK) system can be read using the
:func:`mne.io.read_raw_nihon` function.
Files with the following extensions will be read:
- The ``.eeg`` file contains the actual raw EEG data.
- The ``.pnt`` file contains metadata related to the recording such as the
measurement date.
- The ``.log`` file contains annotations for the recording.
- The ``.21e`` file contains channel and electrode information.
Reading ``.11d``, ``.cmt``, ``.cn2``, and ``.edf`` files is currently not
supported.
Note that not all subject metadata may be properly read because NK changes the
specification sometimes from version to version. Please let us know if you
encounter a problem.
XDF data (.xdf, .xdfz)
======================
MNE-Python does not support loading
`XDF <https://github.com/sccn/xdf/wiki/Specifications>`_ files out of the box,
because the inherent flexibility of the XDF format makes it difficult to
provide a one-size-fits-all function. For example, XDF supports signals from
various modalities recorded with different sampling rates. However, it is
relatively straightforward to import only a specific stream (such as EEG
signals) using the `pyxdf <https://github.com/xdf-modules/pyxdf>`_ package.
See :ref:`ex-read-xdf` for a simple example.
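A bare-bones sketch using ``pyxdf`` (the stream index, channel types, and units are
assumptions about the particular file and will usually need adjusting):
.. code-block:: python
    import numpy as np
    import pyxdf
    import mne
    streams, header = pyxdf.load_xdf("recording.xdf")
    eeg = streams[0]  # assume the first stream holds the EEG data
    sfreq = float(eeg["info"]["nominal_srate"][0])
    data = np.asarray(eeg["time_series"]).T  # (n_channels, n_samples); may need unit scaling
    info = mne.create_info(ch_names=data.shape[0], sfreq=sfreq, ch_types="eeg")
    raw = mne.io.RawArray(data, info)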
A more sophisticated version, which supports selection of specific streams as
well as converting marker streams into annotations, is available in
`MNELAB <https://github.com/cbrnr/mnelab>`_. If you want to use this
functionality in a script, MNELAB records its history (View - History), which
contains all commands required to load an XDF file after successfully loading
that file with the graphical user interface.
Setting EEG references
======================
The preferred method for applying an EEG reference in MNE is
:func:`mne.set_eeg_reference`, or equivalent instance methods like
:meth:`raw.set_eeg_reference() <mne.io.Raw.set_eeg_reference>`. By default,
the data are assumed to already be properly referenced. See
:ref:`tut-set-eeg-ref` for more information.
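For instance (the file and channel names below are hypothetical):
.. code-block:: python
    import mne
    raw = mne.io.read_raw_brainvision("recording.vhdr", preload=True)
    raw.set_eeg_reference(ref_channels=["M1", "M2"])  # explicit reference channels
    # raw.set_eeg_reference("average")                # or an average reference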
Reading electrode locations and head shapes for EEG recordings
==============================================================
Some EEG formats (e.g., EGI, EDF/EDF+, BDF) contain neither electrode locations
nor head shape digitization information. Therefore, this information has to be
provided separately. For that purpose, all raw instances have a
:meth:`mne.io.Raw.set_montage` method to set electrode locations.
When using locations of fiducial points, the digitization data are converted to
the MEG head coordinate system employed in the MNE software, see
:ref:`coordinate_systems`.
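A minimal sketch (the electrode file is hypothetical and its labels must match the data's
channel names):
.. code-block:: python
    import mne
    montage = mne.channels.read_custom_montage("electrodes.sfp")
    raw = mne.io.read_raw_edf("recording.edf", preload=True)
    raw.set_montage(montage)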
""" # noqa:E501
# %%
| bloyl/mne-python | tutorials/io/20_reading_eeg_data.py | Python | bsd-3-clause | 8,987 |
# ubuntuone.storageprotocol.samples.ping_client - a ping client
#
# Author: Lucio Torre <[email protected]>
#
# Copyright 2009 Canonical Ltd.
#
# This program is free software: you can redistribute it and/or modify it
# under the terms of the GNU Affero General Public License version 3,
# as published by the Free Software Foundation.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranties of
# MERCHANTABILITY, SATISFACTORY QUALITY, or FITNESS FOR A PARTICULAR
# PURPOSE. See the GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
"""A simple ping client
"""
from twisted.internet import reactor
from ubuntuone.storageprotocol.client import (
StorageClientFactory, StorageClient)
class PingClient(StorageClient):
"""Simple client that calls a callback on connection."""
def connectionMade(self):
"""Setup and call callback."""
# pylint: disable=W0201
StorageClient.connectionMade(self)
print "Connection made."
d = self.ping()
def done(request):
"""We have the ping reply"""
print "Ping RTT:", request.rtt
reactor.stop()
def error(failure):
"""Something went wrong."""
print "Error:"
print failure.getTraceback()
reactor.stop()
d.addCallbacks(done, error)
class PingClientFactory(StorageClientFactory):
"""A test oriented protocol factory."""
# no init: pylint: disable=W0232
protocol = PingClient
def clientConnectionFailed(self, connector, reason):
"""We failed at connecting."""
print 'Connection failed. Reason:', reason
reactor.stop()
if __name__ == "__main__":
# these 3 lines show the different ways of connecting a client to the
# server
# using tcp
reactor.connectTCP('75.101.137.174', 80, PingClientFactory())
# using ssl
#reactor.connectSSL('localhost', 20101, StorageClientFactory(),
# ssl.ClientContextFactory())
# using ssl over a proxy
#from ubuntuone.storageprotocol import proxy_tunnel
#proxy_tunnel.connectHTTPS('localhost', 3128,
# 'localhost', 20101, StorageClientFactory(),
# user="test", passwd="test")
reactor.run()
| Alberto-Beralix/Beralix | i386-squashfs-root/usr/share/doc/python-ubuntuone-storageprotocol/examples/ping_client.py | Python | gpl-3.0 | 2,472 |
# -*- coding: UTF-8 -*-
# This file is part of Project-Stats
# Project-Stats is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# any later version.
#
# Project-Stats is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Project-Stats. If not, see <http://www.gnu.org/licenses/>.
#
# Source url (https://github.com/LuqueDaniel/ninja-project-stats)
"""
This module contain the main function of the plugin.
"""
#NINJA-IDE imports
from ninja_ide.core import plugin
#PyQt4.QtGui imports
from PyQt4.QtGui import (QMenu, QDialog, QLabel, QVBoxLayout, QTabWidget,
QTableWidgetItem, QTableWidget, QAbstractItemView, QGroupBox)
#PROJECT-STATS imports
from .get_stats import getStats
class projectStatsDialog(QDialog):
"""This class show project stats in a QDialog.
init Parameters:
projectInfo: Information of the current project.
Attributes:
projectStats: Contain stats of the project.
"""
def __init__(self, projectInfo):
' init projectStatsDialog class '
super(projectStatsDialog, self).__init__()
self.setWindowTitle('Project Stats - {}'.format(projectInfo.name))
self.setMinimumSize(500, 400)
self.setMaximumSize(0, 0)
#List contain the text of labels.
self.textLabels = ['Number of folders: {}', 'Number of files: {}',
'Total number of lines: {}', 'Number of .py files: {}',
'Number of .pyc files: {}', 'Total number of lines: {}']
#get project stats --> getStats
self.projectStats = getStats(projectInfo.path)
#Create tabMenu
tabMenu = QTabWidget()
tabMenu.tabCloseRequested.connect(tabMenu.removeTab)
tabMenu.setMovable(True)
tabMenu.setTabsClosable(True)
#LAYOUTS
#==Create central layout
vLayout = QVBoxLayout(self)
vLayout.setContentsMargins(15, 10, 15, 10)
#add label with project name
vLayout.addWidget(QLabel('<b>Project name:</b> {}'.format(
projectInfo.name)))
#==Create layoutTabGeneral and layoutTabs
layoutTabGeneral, layoutTabs = QVBoxLayout(), QVBoxLayout()
#Create labels for tabGeneral
self.generalNumberFolders = QLabel(self.textLabels[0].format(self.projectStats.info['numberFolders']))
self.generalNumberFiles = QLabel(self.textLabels[1].format(self.projectStats.info['numberFiles']))
self.generalNumberLines = QLabel(self.textLabels[2].format(self.projectStats.info['numberLines']))
#Create tablefilesgeneral
tableFilesGeneral = QTableWidget(0, 2)
self.__configTable(tableFilesGeneral, 'generalFilesLines')
#Add widgets to layoutTabGeneral
for each_widget in (self.generalNumberFolders, self.generalNumberFiles,
self.generalNumberLines, tableFilesGeneral):
layoutTabGeneral.addWidget(each_widget)
#==Create tabGeneral
tabGeneral = QGroupBox()
tabGeneral.setLayout(layoutTabGeneral)
#Add tabGeneral to tabMenu
tabMenu.addTab(tabGeneral, 'General')
#==if project contain py files add a tab
if self.projectStats.info['numberPyFiles'] != 0:
#Create layoutTabPy
layoutTabPy = QVBoxLayout()
#Create labels for tabPy
self.pyNumberFiles = QLabel(self.textLabels[3].format(self.projectStats.info['numberPyFiles']))
self.pyNumberFilesPyc = QLabel(self.textLabels[4].format(self.projectStats.info['numberPycFiles']))
self.pyNumberLines = QLabel(self.textLabels[5].format(self.projectStats.info['numberPyLines']))
#Create table tableFilesPy
tableFilesPy = QTableWidget(0, 2)
self.__configTable(tableFilesPy, 'pyFilesLines')
#Add widgets to layoutTabPy
for each_widget in (self.pyNumberFiles, self.pyNumberFilesPyc,
self.pyNumberLines, tableFilesPy):
layoutTabPy.addWidget(each_widget)
#Create tabPy
tabPy = QGroupBox()
tabPy.setLayout(layoutTabPy)
#add Widget TabPy to tabMenu
tabMenu.addTab(tabPy, '.py')
#Add tabMenu to layoutTabs
layoutTabs.addWidget(tabMenu)
#add tabMenu
vLayout.addLayout(layoutTabs)
def __configTable(self, table, dictKey):
"""This function configure a table.
Parameters:
table: Table to configure.
dictKey: The dictKey.
"""
self.tableHeaders = ('Path & File name', 'Number of lines')
table.setRowCount(len(self.projectStats.info[dictKey]))
table.setHorizontalHeaderLabels(self.tableHeaders)
#Disable edit items
table.setEditTriggers(QAbstractItemView.NoEditTriggers)
#Single selection
table.setSelectionMode(QTableWidget.SingleSelection)
#Select all columns
table.setSelectionBehavior(QAbstractItemView.SelectRows)
#Expand columns
table.horizontalHeader().setStretchLastSection(True)
#Set width of columns
table.setColumnWidth(0, 250)
#Set Alternating row colors
table.setAlternatingRowColors(True)
table.setStyleSheet("alternate-background-color: #222222;")
row = 0
for item in list(self.projectStats.info[dictKey].items()):
table.setItem(row, 0, QTableWidgetItem(item[1]['pathInProject']))
table.setItem(row, 1, QTableWidgetItem(str(item[1]['lines'])))
row += 1
class projectStatsMain(plugin.Plugin):
"""Main class of the plugin.
Attributes:
ex_locator: ninja-ide explorer service.
"""
def initialize(self):
"""This function start plugin"""
#Create plugin menu
menu = QMenu('Project Stats')
menu.addAction('Project Stats', lambda: self.projectStatsMenuAction())
#Add Project Stats menu
self.ex_locator = self.locator.get_service('explorer')
self.ex_locator.add_project_menu(menu)
def projectStatsMenuAction(self):
"""Init projectStatsDialog"""
#Get project properties
self.currentProject = self.ex_locator.get_tree_projects()._get_project_root()
#Instance projectStatDialog
self.projectStatsDialog = projectStatsDialog(self.currentProject)
self.projectStatsDialog.show()
| LuqueDaniel/ninja-project-stats | project_stats/project_stats.py | Python | gpl-3.0 | 6,879 |
# -*- coding: utf-8 -*-
from gluon import current
def config(settings):
"""
Template settings for Belize
- designed to be used in a Cascade with an application template
"""
#T = current.T
# Pre-Populate
settings.base.prepopulate.append("locations/BZ")
# Restrict to specific country/countries
settings.gis.countries.append("BZ")
    # Disable the Postcode selector in the LocationSelector
#settings.gis.postcode_selector = False
# L10n (Localization) settings
settings.L10n.languages["es"] = "Spanish"
settings.L10n.languages["bzj"] = "Kriol"
settings.L10n.languages["kek"] = "Q'eqchi'"
settings.L10n.languages["mop"] = "Mopan"
settings.L10n.languages["yua"] = "Yucatec Maya"
settings.L10n.languages["de"] = "German"
settings.L10n.languages["cab"] = "Garifuna"
settings.L10n.languages["zh"] = "Chinese"
# Default Language (put this in custom template if-required)
#settings.L10n.default_language = "es"
# Default timezone for users
settings.L10n.timezone = "America/Belize"
# Default Country Code for telephone numbers
settings.L10n.default_country_code = 501
settings.fin.currencies["BZD"] = "Belize Dollars"
settings.fin.currency_default = "BZD"
# END =========================================================================
| flavour/eden | modules/templates/locations/BZ/config.py | Python | mit | 1,352 |
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import functools
import glanceclient
import novaclient
from novaclient import api_versions
from novaclient import client as nova_client
from oslo_config import cfg
from oslo_log import log
from ceilometer import keystone_client
SERVICE_OPTS = [
cfg.StrOpt('nova',
default='compute',
help='Nova service type.'),
]
LOG = log.getLogger(__name__)
def logged(func):
@functools.wraps(func)
def with_logging(*args, **kwargs):
try:
return func(*args, **kwargs)
except Exception as e:
LOG.exception(e)
raise
return with_logging
class Client(object):
"""A client which gets information via python-novaclient."""
def __init__(self, conf):
"""Initialize a nova client object."""
creds = conf.service_credentials
ks_session = keystone_client.get_session(conf)
self.nova_client = nova_client.Client(
version=api_versions.APIVersion('2.1'),
session=ks_session,
# nova adapter options
region_name=creds.region_name,
endpoint_type=creds.interface,
service_type=conf.service_types.nova)
self.glance_client = glanceclient.Client(
version='2',
session=ks_session,
region_name=creds.region_name,
interface=creds.interface,
service_type=conf.service_types.glance)
def _with_flavor_and_image(self, instances):
flavor_cache = {}
image_cache = {}
for instance in instances:
self._with_flavor(instance, flavor_cache)
self._with_image(instance, image_cache)
return instances
def _with_flavor(self, instance, cache):
fid = instance.flavor['id']
if fid in cache:
flavor = cache.get(fid)
else:
try:
flavor = self.nova_client.flavors.get(fid)
except novaclient.exceptions.NotFound:
flavor = None
cache[fid] = flavor
attr_defaults = [('name', 'unknown-id-%s' % fid),
('vcpus', 0), ('ram', 0), ('disk', 0),
('ephemeral', 0)]
for attr, default in attr_defaults:
if not flavor:
instance.flavor[attr] = default
continue
instance.flavor[attr] = getattr(flavor, attr, default)
def _with_image(self, instance, cache):
try:
iid = instance.image['id']
except TypeError:
instance.image = None
instance.kernel_id = None
instance.ramdisk_id = None
return
if iid in cache:
image = cache.get(iid)
else:
try:
image = self.glance_client.images.get(iid)
except glanceclient.exc.HTTPNotFound:
image = None
cache[iid] = image
attr_defaults = [('kernel_id', None),
('ramdisk_id', None)]
instance.image['name'] = (
getattr(image, 'name') if image else 'unknown-id-%s' % iid)
image_metadata = getattr(image, 'metadata', None)
for attr, default in attr_defaults:
ameta = image_metadata.get(attr) if image_metadata else default
setattr(instance, attr, ameta)
@logged
def instance_get_all_by_host(self, hostname, since=None):
"""Returns list of instances on particular host.
If since is supplied, it will return the instances changed since that
datetime. since should be in ISO Format '%Y-%m-%dT%H:%M:%SZ'
"""
search_opts = {'host': hostname, 'all_tenants': True}
if since:
search_opts['changes-since'] = since
return self._with_flavor_and_image(self.nova_client.servers.list(
detailed=True,
search_opts=search_opts))
@logged
def instance_get_all(self, since=None):
"""Returns list of all instances.
If since is supplied, it will return the instances changes since that
datetime. since should be in ISO Format '%Y-%m-%dT%H:%M:%SZ'
"""
search_opts = {'all_tenants': True}
if since:
search_opts['changes-since'] = since
return self.nova_client.servers.list(
detailed=True,
search_opts=search_opts)
| openstack/ceilometer | ceilometer/nova_client.py | Python | apache-2.0 | 4,953 |
"""Routes bundles of ports (river routing).
"""
from typing import Callable, List, Optional
from gdsfactory.components.bend_euler import bend_euler
from gdsfactory.components.straight import straight
from gdsfactory.components.taper import taper as taper_function
from gdsfactory.cross_section import strip
from gdsfactory.port import Port
from gdsfactory.routing.get_bundle import (
_get_bundle_waypoints,
compute_ports_max_displacement,
)
from gdsfactory.routing.get_route import get_route_from_waypoints
from gdsfactory.routing.path_length_matching import path_length_matched_points
from gdsfactory.routing.sort_ports import sort_ports as sort_ports_function
from gdsfactory.types import ComponentFactory, CrossSectionFactory, Route
def get_bundle_path_length_match(
ports1: List[Port],
ports2: List[Port],
separation: float = 30.0,
end_straight_length: Optional[float] = None,
extra_length: float = 0.0,
nb_loops: int = 1,
modify_segment_i: int = -2,
bend: ComponentFactory = bend_euler,
straight: Callable = straight,
taper: Optional[Callable] = taper_function,
start_straight_length: float = 0.0,
route_filter: Callable = get_route_from_waypoints,
sort_ports: bool = True,
cross_section: CrossSectionFactory = strip,
**kwargs
) -> List[Route]:
"""Returns list of routes that are path length matched.
Args:
ports1: list of ports
ports2: list of ports
separation: between the loops
end_straight_length: if None tries to determine it
extra_length: distance added to all path length compensation.
            Useful if we want to add space for an extra taper on all branches
nb_loops: number of extra loops added in the path
        modify_segment_i: index of the segment that accommodates the new turns
default is next to last segment
bend: for bends
straight: for straights
taper:
start_straight_length:
route_filter: get_route_from_waypoints
sort_ports: sorts ports before routing
cross_section: factory
**kwargs: cross_section settings
Tips:
    - If path length matching modifies the wrong segments, change the `modify_segment_i` argument.
- Adjust `nb_loops` to avoid too short or too long segments
    - Adjust `separation` and `end_straight_length` to avoid compensation collisions
.. plot::
:include-source:
import gdsfactory as gf
c = gf.Component("path_length_match_sample")
dy = 2000.0
xs1 = [-500, -300, -100, -90, -80, -55, -35, 200, 210, 240, 500, 650]
pitch = 100.0
N = len(xs1)
xs2 = [-20 + i * pitch for i in range(N)]
a1 = 90
a2 = a1 + 180
ports1 = [gf.Port(f"top_{i}", (xs1[i], 0), 0.5, a1) for i in range(N)]
ports2 = [gf.Port(f"bottom_{i}", (xs2[i], dy), 0.5, a2) for i in range(N)]
routes = gf.routing.get_bundle_path_length_match(
ports1, ports2, extra_length=44
)
for route in routes:
c.add(route.references)
c.plot()
"""
extra_length = extra_length / 2
    # Heuristic to get a correct default end_straight_length to leave
    # enough space for path-length compensation
if sort_ports:
ports1, ports2 = sort_ports_function(ports1, ports2)
if end_straight_length is None:
if modify_segment_i == -2:
end_straight_length = (
compute_ports_max_displacement(ports1, ports2) / (2 * nb_loops)
+ separation
+ extra_length
)
else:
end_straight_length = 0
list_of_waypoints = _get_bundle_waypoints(
ports1=ports1,
ports2=ports2,
separation=separation,
end_straight_length=end_straight_length,
start_straight_length=start_straight_length,
cross_section=cross_section,
**kwargs,
)
list_of_waypoints = path_length_matched_points(
list_of_waypoints,
extra_length=extra_length,
bend=bend,
nb_loops=nb_loops,
modify_segment_i=modify_segment_i,
cross_section=cross_section,
**kwargs,
)
return [
route_filter(
waypoints,
bend=bend,
straight=straight,
taper=taper,
cross_section=cross_section,
**kwargs,
)
for waypoints in list_of_waypoints
]
if __name__ == "__main__":
import gdsfactory as gf
c = gf.Component()
c1 = c << gf.components.straight_array(spacing=50)
c2 = c << gf.components.straight_array(spacing=5)
c2.movex(200)
c1.y = 0
c2.y = 0
routes = gf.routing.get_bundle_path_length_match(
c1.get_ports_list(orientation=0),
c2.get_ports_list(orientation=180),
end_straight_length=0,
start_straight_length=0,
separation=50,
layer=(2, 0),
)
for route in routes:
c.add(route.references)
c.show()
| gdsfactory/gdsfactory | gdsfactory/routing/get_bundle_path_length_match.py | Python | mit | 4,998 |
#!/usr/bin/python
import sys,os
from sqlobject import *
PROTOCOL_VERSION_1_0 = '1.0'
PROTOCOL_VERSION_1_1 = '1.1'
sys.path.insert(0, os.path.join(os.path.dirname(__file__), "../.."))
from channel.channel_message import ChannelMessage as ChannelMessage
class AsteriskEvent(SQLObject):
added=DateTimeCol(default=sqlbuilder.func.NOW())
event = StringCol()
uniqueid = StringCol(default=None)
raw = StringCol(default=None)
#PhoneNumber.createTable(ifNotExists=True)
def send_message(stomp, message, agent):
print '='*80
print 'Agent:', agent
print 'Message:', message
print '='*80
conf = {}
#TODO: add message expiration
#conf={"expires":(int(time()) + int(connect(config.get('GENERAL', 'message_ttl'))) * 1000}
stomp.put(message, destination="/queue/messages/"+agent, persistent=False, conf=conf)
def handle_Dial(event, manager=None):
"""
{'CallerID': '1133', 'SrcUniqueID': '1306919118.7245', 'Destination': 'SIP/214-19bceeb0', 'DestUniqueID': '1306919118.7246', 'Source': 'SIP/1133-19ba80e0', 'CallerIDName': 'tamila', 'Privilege': 'call,all', 'Event': 'Dial'}
1.1
{'Destination': 'SIP/102-0000002a', 'CallerIDNum': '101', 'DestUniqueID': '1309439116.42', 'SubEvent': 'Begin', 'Dialstring': '102', 'UniqueID': '1309439116.41', 'CallerIDName': 'Andrew Kornilov', 'Privilege': 'call,all', 'Event': 'Dial', 'Channel': 'SIP/101-00000029'}
"""
if not isinstance(event, dict):
event = event.headers
print event
#TODO:
# - put into db
# - cleanup rule
AsteriskEvent(event=event['Event'], raw=str(event), uniqueid=event['DestUniqueID'])
# try:
# srcuniqueid=event.get_header('Uniqueid')
# except:
# srcuniqueid=None
#
# print event.get_header('Event'), event.headers
# AsteriskEvent(event=event.get_header('Event'), raw=str(event.headers), uniqueid=uniqueid)
def handle_Hangup(event, manager=None):
"""
{'Cause-txt': 'Unknown', 'Uniqueid': '1306918002.7160', 'Privilege': 'call,all', 'Cause': '0', 'Event': 'Hangup', 'Channel': 'SIP/1001-19a7e390'}
{'Cause-txt': 'User busy', 'Uniqueid': '1306918288.7182', 'Privilege': 'call,all', 'Cause': '17', 'Event': 'Hangup', 'Channel': 'SIP/1001-19bdadc0'}
{'Cause-txt': 'User alerting, no answer', 'Uniqueid': '1306918224.7179', 'Privilege': 'call,all', 'Cause': '19', 'Event': 'Hangup', 'Channel': 'SIP/1001-19b1a940'}
{'Cause-txt': 'Normal Clearing', 'Uniqueid': '1306919065.7238', 'Privilege': 'call,all', 'Cause': '16', 'Event': 'Hangup', 'Channel': 'SIP/1001-19b6ec20'}
{'Cause-txt': 'User busy', 'Uniqueid': '1306919079.7244', 'Privilege': 'call,all', 'Cause': '17', 'Event': 'Hangup', 'Channel': 'SIP/1001-19b746f0'}
"""
if not isinstance(event, dict):
event = event.headers
#if event['Cause-txt'] == 'Normal Clearing':
return handle_hangup_clearing(event, manager.stomp)
def handle_Link(event, manager=None):
pass
if not isinstance(event, dict):
event = event.headers
"""
{'Uniqueid2': '1306914758.6999', 'Uniqueid1': '1306914726.6994', 'Channel1': 'SIP/430913-19be0080', 'Channel2': 'SIP/1313-19ba26d0', 'CallerID2': '380352407040', 'Privilege': 'call,all', 'CallerID1': '430913', 'Event': 'Link'}
"""
message = ChannelMessage()
message.set_event(ChannelMessage.EVENT_LINK)
message.set_id(event['Uniqueid1'])
message.set_extension(event['CallerID1'])
message.set_caller(event['CallerID2'])
send_message(manager.stomp, message.dump_data_json(), getLocalNumber(event['Channel1']))
def handle_Bridge(event, manager=None):
if not isinstance(event, dict):
event = event.headers
"""
Event: Bridge
Privilege: call,all
Bridgestate: Link
Bridgetype: core
Channel1: SIP/101-00000058
Channel2: SIP/104-00000059
Uniqueid1: 1309443548.88
Uniqueid2: 1309443548.89
CallerID1: 101
CallerID2: 104
{'Uniqueid2': '1309506586.133', 'Uniqueid1': '1309506586.132', 'CallerID2': '104', 'Bridgestate': 'Link', 'CallerID1': '101', 'Channel2': 'SIP/104-00000085', 'Channel1': 'SIP/101-00000084', 'Bridgetype': 'core', 'Privilege': 'call,all', 'Event': 'Bridge'}
"""
message = ChannelMessage()
message.set_event(ChannelMessage.EVENT_LINK)
message.set_id(event['Uniqueid2'])
message.set_extension(event['CallerID2'])
message.set_caller(event['CallerID1'])
send_message(manager.stomp, message.dump_data_json(), getLocalNumber(event['Channel2']))
def handle_Newstate(event, manager=None):
"""
V 1.1
Event: Newstate
Privilege: call,all
Channel: SIP/102-00000023
ChannelState: 5
ChannelStateDesc: Ringing
CallerIDNum: 102
CallerIDName:
Uniqueid: 1309436568.35
"""
if not isinstance(event, dict):
event = event.headers
if event['ChannelStateDesc'] == 'Ringing':
return handle_newstate_ringing(event, manager.stomp, manager.version)
return None
def handle_Shutdown(event, manager):
print "Recieved shutdown event"
manager.close()
def getLocalNumber(channel):
return channel.split('-')[0]
def handle_newstate_ringing(event, stomp, protocol_version):
channel = event['Channel']
if channel == None:
return None
message = ChannelMessage()
message.set_event(ChannelMessage.EVENT_RINGING)
message.set_id(event['Uniqueid'])
try:
parent_event = AsteriskEvent.selectBy(event = 'Dial', uniqueid = event['Uniqueid'])[0]
except Exception as e:
print e
parent_event = None
if parent_event != None:
raw = eval(parent_event.raw)
else:
raw = None
if raw != None and protocol_version == PROTOCOL_VERSION_1_1:
caller = raw['CallerIDNum']
extension = event['CallerIDNum']
elif raw != None and protocol_version == PROTOCOL_VERSION_1_0:
caller = raw['CallerID']
extension = event['CallerID']
else:
caller = 'unknown'
extension = 'unknown'
message.set_extension(extension)
message.set_caller(caller)
send_message(stomp, message.dump_data_json(), getLocalNumber(channel))
def handle_hangup_clearing(event, stomp):
channel = event['Channel']
if channel == None:
return None
message = ChannelMessage()
message.set_event(ChannelMessage.EVENT_HANGUP_CLEANUP)
message.set_id(event['Uniqueid'])
send_message(stomp, message.dump_data_json(), getLocalNumber(channel))
global sqlhub
| gryzz/uCall | utils/asterisk-connector/handlers/commands_1_1.py | Python | gpl-3.0 | 7,989 |
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations, models
import django.db.models.deletion
import telemeta.models.fields
class Migration(migrations.Migration):
dependencies = [
('telemeta', '0002_auto_20170424_1110'),
]
operations = [
migrations.AlterField(
model_name='mediacollection',
name='acquisition_mode',
field=telemeta.models.fields.ForeignKey(related_name='collections', on_delete=django.db.models.deletion.SET_NULL, default=None, blank=True, to='telemeta.AcquisitionMode', null=True, verbose_name='mode of acquisition'),
),
migrations.AlterField(
model_name='mediacollection',
name='ad_conversion',
field=telemeta.models.fields.ForeignKey(related_name='collections', on_delete=django.db.models.deletion.SET_NULL, default=None, blank=True, to='telemeta.AdConversion', null=True, verbose_name='digitization'),
),
migrations.AlterField(
model_name='mediacollection',
name='copy_type',
field=telemeta.models.fields.ForeignKey(related_name='collections', on_delete=django.db.models.deletion.SET_NULL, default=None, blank=True, to='telemeta.CopyType', null=True, verbose_name='copy type'),
),
migrations.AlterField(
model_name='mediacollection',
name='legal_rights',
field=telemeta.models.fields.ForeignKey(related_name='collections', on_delete=django.db.models.deletion.SET_NULL, default=None, blank=True, to='telemeta.LegalRight', null=True, verbose_name='legal rights'),
),
migrations.AlterField(
model_name='mediacollection',
name='media_type',
field=telemeta.models.fields.ForeignKey(related_name='collections', on_delete=django.db.models.deletion.SET_NULL, default=None, blank=True, to='telemeta.MediaType', null=True, verbose_name='media type'),
),
migrations.AlterField(
model_name='mediacollection',
name='metadata_author',
field=telemeta.models.fields.ForeignKey(related_name='collections', on_delete=django.db.models.deletion.SET_NULL, default=None, blank=True, to='telemeta.MetadataAuthor', null=True, verbose_name='record author'),
),
migrations.AlterField(
model_name='mediacollection',
name='metadata_writer',
field=telemeta.models.fields.ForeignKey(related_name='collections', on_delete=django.db.models.deletion.SET_NULL, default=None, blank=True, to='telemeta.MetadataWriter', null=True, verbose_name='record writer'),
),
migrations.AlterField(
model_name='mediacollection',
name='original_format',
field=telemeta.models.fields.ForeignKey(related_name='collections', on_delete=django.db.models.deletion.SET_NULL, default=None, blank=True, to='telemeta.OriginalFormat', null=True, verbose_name='original format'),
),
migrations.AlterField(
model_name='mediacollection',
name='physical_format',
field=telemeta.models.fields.ForeignKey(related_name='collections', on_delete=django.db.models.deletion.SET_NULL, default=None, blank=True, to='telemeta.PhysicalFormat', null=True, verbose_name='archive format'),
),
migrations.AlterField(
model_name='mediacollection',
name='publisher',
field=telemeta.models.fields.ForeignKey(related_name='collections', on_delete=django.db.models.deletion.SET_NULL, default=None, blank=True, to='telemeta.Publisher', null=True, verbose_name='publisher'),
),
migrations.AlterField(
model_name='mediacollection',
name='publisher_collection',
field=telemeta.models.fields.ForeignKey(related_name='collections', on_delete=django.db.models.deletion.SET_NULL, default=None, blank=True, to='telemeta.PublisherCollection', null=True, verbose_name='publisher collection'),
),
migrations.AlterField(
model_name='mediacollection',
name='publishing_status',
field=telemeta.models.fields.ForeignKey(related_name='collections', on_delete=django.db.models.deletion.SET_NULL, default=None, blank=True, to='telemeta.PublishingStatus', null=True, verbose_name='secondary edition'),
),
migrations.AlterField(
model_name='mediacollection',
name='recording_context',
field=telemeta.models.fields.ForeignKey(related_name='collections', on_delete=django.db.models.deletion.SET_NULL, default=None, blank=True, to='telemeta.RecordingContext', null=True, verbose_name='recording context'),
),
migrations.AlterField(
model_name='mediacollection',
name='status',
field=telemeta.models.fields.ForeignKey(related_name='collections', on_delete=django.db.models.deletion.SET_NULL, default=None, blank=True, to='telemeta.Status', null=True, verbose_name='collection status'),
),
migrations.AlterField(
model_name='mediacollectionidentifier',
name='type',
field=telemeta.models.fields.ForeignKey(on_delete=django.db.models.deletion.SET_NULL, default=None, blank=True, to='telemeta.IdentifierType', null=True, verbose_name='type'),
),
migrations.AlterField(
model_name='mediaitem',
name='ethnic_group',
field=telemeta.models.fields.ForeignKey(related_name='items', on_delete=django.db.models.deletion.SET_NULL, default=None, blank=True, to='telemeta.EthnicGroup', null=True, verbose_name='population / social group'),
),
migrations.AlterField(
model_name='mediaitem',
name='generic_style',
field=telemeta.models.fields.ForeignKey(related_name='items', on_delete=django.db.models.deletion.SET_NULL, default=None, blank=True, to='telemeta.GenericStyle', null=True, verbose_name='generic style'),
),
migrations.AlterField(
model_name='mediaitem',
name='location',
field=telemeta.models.fields.ForeignKey(on_delete=django.db.models.deletion.SET_NULL, default=None, blank=True, to='telemeta.Location', null=True, verbose_name='location'),
),
migrations.AlterField(
model_name='mediaitem',
name='media_type',
field=telemeta.models.fields.ForeignKey(related_name='items', on_delete=django.db.models.deletion.SET_NULL, default=None, blank=True, to='telemeta.MediaType', null=True, verbose_name='media type'),
),
migrations.AlterField(
model_name='mediaitem',
name='organization',
field=telemeta.models.fields.ForeignKey(on_delete=django.db.models.deletion.SET_NULL, default=None, blank=True, to='telemeta.Organization', null=True, verbose_name='organization'),
),
migrations.AlterField(
model_name='mediaitem',
name='rights',
field=telemeta.models.fields.ForeignKey(on_delete=django.db.models.deletion.SET_NULL, default=None, blank=True, to='telemeta.Rights', null=True, verbose_name='rights'),
),
migrations.AlterField(
model_name='mediaitem',
name='topic',
field=telemeta.models.fields.ForeignKey(on_delete=django.db.models.deletion.SET_NULL, default=None, blank=True, to='telemeta.Topic', null=True, verbose_name='topic'),
),
migrations.AlterField(
model_name='mediaitem',
name='vernacular_style',
field=telemeta.models.fields.ForeignKey(related_name='items', on_delete=django.db.models.deletion.SET_NULL, default=None, blank=True, to='telemeta.VernacularStyle', null=True, verbose_name='vernacular style'),
),
migrations.AlterField(
model_name='mediaitemidentifier',
name='type',
field=telemeta.models.fields.ForeignKey(on_delete=django.db.models.deletion.SET_NULL, default=None, blank=True, to='telemeta.IdentifierType', null=True, verbose_name='type'),
),
migrations.AlterField(
model_name='mediaitemperformance',
name='alias',
field=telemeta.models.fields.ForeignKey(related_name='performances', on_delete=django.db.models.deletion.SET_NULL, default=None, blank=True, to='telemeta.InstrumentAlias', null=True, verbose_name='vernacular name'),
),
migrations.AlterField(
model_name='mediaitemperformance',
name='instrument',
field=telemeta.models.fields.ForeignKey(related_name='performances', on_delete=django.db.models.deletion.SET_NULL, default=None, blank=True, to='telemeta.Instrument', null=True, verbose_name='composition'),
),
]
| ANR-kamoulox/Telemeta | telemeta/migrations/0003_auto_20170718_1502.py | Python | agpl-3.0 | 8,878 |
#!/usr/bin/python
# -*- coding: utf-8 -*-
#
# Copyright: (c) 2017, F5 Networks Inc.
# GNU General Public License v3.0 (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import absolute_import, division, print_function
__metaclass__ = type
ANSIBLE_METADATA = {'metadata_version': '1.1',
'status': ['stableinterface'],
'supported_by': 'certified'}
DOCUMENTATION = r'''
---
module: bigip_gtm_pool
short_description: Manages F5 BIG-IP GTM pools
description:
- Manages F5 BIG-IP GTM pools.
version_added: 2.4
options:
state:
description:
- Pool state. When C(present), ensures that the pool is created and enabled.
When C(absent), ensures that the pool is removed from the system. When
C(enabled) or C(disabled), ensures that the pool is enabled or disabled
(respectively) on the remote device.
type: str
choices:
- present
- absent
- enabled
- disabled
default: present
preferred_lb_method:
description:
- The load balancing mode that the system tries first.
type: str
choices:
- round-robin
- return-to-dns
- ratio
- topology
- static-persistence
- global-availability
- virtual-server-capacity
- least-connections
- lowest-round-trip-time
- fewest-hops
- packet-rate
- cpu
- completion-rate
- quality-of-service
- kilobytes-per-second
- drop-packet
- fallback-ip
- virtual-server-score
alternate_lb_method:
description:
- The load balancing mode that the system tries if the
C(preferred_lb_method) is unsuccessful in picking a pool.
type: str
choices:
- round-robin
- return-to-dns
- none
- ratio
- topology
- static-persistence
- global-availability
- virtual-server-capacity
- packet-rate
- drop-packet
- fallback-ip
- virtual-server-score
fallback_lb_method:
description:
- The load balancing mode that the system tries if both the
C(preferred_lb_method) and C(alternate_lb_method)s are unsuccessful
in picking a pool.
type: str
choices:
- round-robin
- return-to-dns
- ratio
- topology
- static-persistence
- global-availability
- virtual-server-capacity
- least-connections
- lowest-round-trip-time
- fewest-hops
- packet-rate
- cpu
- completion-rate
- quality-of-service
- kilobytes-per-second
- drop-packet
- fallback-ip
- virtual-server-score
- none
fallback_ip:
description:
      - Specifies the IPv4 or IPv6 address of the server to which the system
directs requests when it cannot use one of its pools to do so.
Note that the system uses the fallback IP only if you select the
C(fallback_ip) load balancing method.
type: str
type:
description:
- The type of GTM pool that you want to create. On BIG-IP releases
prior to version 12, this parameter is not required. On later versions
of BIG-IP, this is a required parameter.
type: str
choices:
- a
- aaaa
- cname
- mx
- naptr
- srv
name:
description:
- Name of the GTM pool.
type: str
required: True
partition:
description:
- Device partition to manage resources on.
type: str
default: Common
version_added: 2.5
members:
description:
- Members to assign to the pool.
- The order of the members in this list is the order that they will be listed in the pool.
suboptions:
server:
description:
- Name of the server which the pool member is a part of.
type: str
required: True
virtual_server:
description:
- Name of the virtual server, associated with the server, that the pool member is a part of.
type: str
required: True
type: list
version_added: 2.6
monitors:
description:
- Specifies the health monitors that the system currently uses to monitor this resource.
- When C(availability_requirements.type) is C(require), you may only have a single monitor in the
C(monitors) list.
type: list
version_added: 2.6
availability_requirements:
description:
- Specifies, if you activate more than one health monitor, the number of health
monitors that must receive successful responses in order for the link to be
considered available.
suboptions:
type:
description:
- Monitor rule type when C(monitors) is specified.
- When creating a new pool, if this value is not specified, the default of 'all' will be used.
type: str
choices:
- all
- at_least
- require
at_least:
description:
- Specifies the minimum number of active health monitors that must be successful
before the link is considered up.
- This parameter is only relevant when a C(type) of C(at_least) is used.
- This parameter will be ignored if a type of either C(all) or C(require) is used.
type: int
number_of_probes:
description:
- Specifies the minimum number of probes that must succeed for this server to be declared up.
- When creating a new virtual server, if this parameter is specified, then the C(number_of_probers)
parameter must also be specified.
- The value of this parameter should always be B(lower) than, or B(equal to), the value of C(number_of_probers).
- This parameter is only relevant when a C(type) of C(require) is used.
- This parameter will be ignored if a type of either C(all) or C(at_least) is used.
type: int
number_of_probers:
description:
- Specifies the number of probers that should be used when running probes.
- When creating a new virtual server, if this parameter is specified, then the C(number_of_probes)
parameter must also be specified.
          - The value of this parameter should always be B(higher) than, or B(equal to), the value of C(number_of_probes).
- This parameter is only relevant when a C(type) of C(require) is used.
- This parameter will be ignored if a type of either C(all) or C(at_least) is used.
type: int
type: dict
version_added: 2.6
max_answers_returned:
description:
- Specifies the maximum number of available virtual servers that the system lists in a response.
- The maximum is 500.
type: int
version_added: 2.8
ttl:
description:
- Specifies the number of seconds that the IP address, once found, is valid.
type: int
version_added: 2.8
notes:
- Support for TMOS versions below v12.x has been deprecated for this module, and will be removed in Ansible 2.12.
extends_documentation_fragment: f5
author:
- Tim Rupp (@caphrim007)
- Wojciech Wypior (@wojtek0806)
'''
EXAMPLES = r'''
- name: Create a GTM pool
bigip_gtm_pool:
name: my_pool
provider:
user: admin
password: secret
server: lb.mydomain.com
delegate_to: localhost
- name: Disable pool
bigip_gtm_pool:
state: disabled
name: my_pool
provider:
user: admin
password: secret
server: lb.mydomain.com
delegate_to: localhost
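# Illustrative sketch only: the server, virtual server, and monitor names are assumptions.
- name: Create an A pool with members, monitors and availability requirements
  bigip_gtm_pool:
    name: my_pool
    type: a
    preferred_lb_method: round-robin
    members:
      - server: server1
        virtual_server: vs1
    monitors:
      - /Common/tcp
    availability_requirements:
      type: at_least
      at_least: 1
    provider:
      user: admin
      password: secret
      server: lb.mydomain.com
  delegate_to: localhost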
'''
RETURN = r'''
preferred_lb_method:
description: New preferred load balancing method for the pool.
returned: changed
type: str
sample: topology
alternate_lb_method:
description: New alternate load balancing method for the pool.
returned: changed
type: str
sample: drop-packet
fallback_lb_method:
description: New fallback load balancing method for the pool.
returned: changed
type: str
sample: fewest-hops
fallback_ip:
  description: New fallback IP used when load balancing using the C(fallback_ip) method.
returned: changed
type: str
sample: 10.10.10.10
monitors:
description: The new list of monitors for the resource.
returned: changed
type: list
sample: ['/Common/monitor1', '/Common/monitor2']
members:
description: List of members in the pool.
returned: changed
type: complex
contains:
server:
description: The name of the server portion of the member.
returned: changed
type: str
virtual_server:
description: The name of the virtual server portion of the member.
returned: changed
type: str
max_answers_returned:
description: The new Maximum Answers Returned value.
returned: changed
type: int
sample: 25
'''
import copy
import re
from ansible.module_utils.basic import AnsibleModule
from ansible.module_utils.basic import env_fallback
from distutils.version import LooseVersion
try:
from library.module_utils.network.f5.bigip import F5RestClient
from library.module_utils.network.f5.common import F5ModuleError
from library.module_utils.network.f5.common import AnsibleF5Parameters
from library.module_utils.network.f5.common import fq_name
from library.module_utils.network.f5.common import f5_argument_spec
from library.module_utils.network.f5.common import transform_name
from library.module_utils.network.f5.icontrol import tmos_version
from library.module_utils.network.f5.icontrol import module_provisioned
from library.module_utils.network.f5.ipaddress import is_valid_ip
except ImportError:
from ansible.module_utils.network.f5.bigip import F5RestClient
from ansible.module_utils.network.f5.common import F5ModuleError
from ansible.module_utils.network.f5.common import AnsibleF5Parameters
from ansible.module_utils.network.f5.common import fq_name
from ansible.module_utils.network.f5.common import f5_argument_spec
from ansible.module_utils.network.f5.common import transform_name
from ansible.module_utils.network.f5.icontrol import tmos_version
from ansible.module_utils.network.f5.icontrol import module_provisioned
from ansible.module_utils.network.f5.ipaddress import is_valid_ip
class Parameters(AnsibleF5Parameters):
api_map = {
'loadBalancingMode': 'preferred_lb_method',
'alternateMode': 'alternate_lb_method',
'fallbackMode': 'fallback_lb_method',
'verifyMemberAvailability': 'verify_member_availability',
'fallbackIpv4': 'fallback_ip',
'fallbackIpv6': 'fallback_ip',
'fallbackIp': 'fallback_ip',
'membersReference': 'members',
'monitor': 'monitors',
'maxAnswersReturned': 'max_answers_returned',
}
updatables = [
'alternate_lb_method',
'fallback_ip',
'fallback_lb_method',
'members',
'monitors',
'preferred_lb_method',
'state',
'max_answers_returned',
'ttl',
]
returnables = [
'alternate_lb_method',
'fallback_ip',
'fallback_lb_method',
'members',
'monitors',
'preferred_lb_method',
'enabled',
'disabled',
'availability_requirements',
'max_answers_returned',
'ttl',
]
api_attributes = [
'alternateMode',
'disabled',
'enabled',
'fallbackIp',
'fallbackIpv4',
'fallbackIpv6',
'fallbackMode',
'loadBalancingMode',
'members',
'verifyMemberAvailability',
'monitor',
'maxAnswersReturned',
'ttl',
]
@property
def type(self):
if self._values['type'] is None:
return None
return str(self._values['type'])
@property
def verify_member_availability(self):
if self._values['verify_member_availability'] is None:
return None
elif self._values['verify_member_availability']:
return 'enabled'
else:
return 'disabled'
@property
def fallback_ip(self):
if self._values['fallback_ip'] is None:
return None
if self._values['fallback_ip'] == 'any':
return 'any'
if self._values['fallback_ip'] == 'any6':
return 'any6'
if is_valid_ip(self._values['fallback_ip']):
return self._values['fallback_ip']
else:
raise F5ModuleError(
                'The provided fallback address is not a valid IPv4 or IPv6 address'
)
@property
def state(self):
if self._values['state'] == 'enabled':
return 'present'
return self._values['state']
@property
def enabled(self):
if self._values['enabled'] is None:
return None
return True
@property
def disabled(self):
if self._values['disabled'] is None:
return None
return True
class ApiParameters(Parameters):
@property
def members(self):
result = []
if self._values['members'] is None or 'items' not in self._values['members']:
return []
for item in self._values['members']['items']:
result.append(dict(item=item['fullPath'], order=item['memberOrder']))
result = [x['item'] for x in sorted(result, key=lambda k: k['order'])]
return result
@property
def availability_requirement_type(self):
if self._values['monitors'] is None:
return None
if 'min ' in self._values['monitors']:
return 'at_least'
elif 'require ' in self._values['monitors']:
return 'require'
else:
return 'all'
@property
def monitors_list(self):
if self._values['monitors'] is None:
return []
try:
result = re.findall(r'/\w+/[^\s}]+', self._values['monitors'])
result.sort()
return result
except Exception:
return self._values['monitors']
@property
def monitors(self):
if self._values['monitors'] is None:
return None
if self._values['monitors'] == 'default':
return 'default'
monitors = [fq_name(self.partition, x) for x in self.monitors_list]
if self.availability_requirement_type == 'at_least':
monitors = ' '.join(monitors)
result = 'min {0} of {{ {1} }}'.format(self.at_least, monitors)
elif self.availability_requirement_type == 'require':
monitors = ' '.join(monitors)
result = 'require {0} from {1} {{ {2} }}'.format(self.number_of_probes, self.number_of_probers, monitors)
else:
result = ' and '.join(monitors).strip()
return result
@property
def number_of_probes(self):
"""Returns the probes value from the monitor string.
The monitor string for a Require monitor looks like this.
require 1 from 2 { /Common/tcp }
        This method parses out the first of the numeric values. This value represents
the "probes" value that can be updated in the module.
Returns:
int: The probes value if found. None otherwise.
"""
if self._values['monitors'] is None:
return None
pattern = r'require\s+(?P<probes>\d+)\s+from'
matches = re.search(pattern, self._values['monitors'])
if matches is None:
return None
return matches.group('probes')
@property
def number_of_probers(self):
"""Returns the probers value from the monitor string.
The monitor string for a Require monitor looks like this.
require 1 from 2 { /Common/tcp }
        This method parses out the second of the numeric values. This value represents
the "probers" value that can be updated in the module.
Returns:
int: The probers value if found. None otherwise.
"""
if self._values['monitors'] is None:
return None
pattern = r'require\s+\d+\s+from\s+(?P<probers>\d+)\s+'
matches = re.search(pattern, self._values['monitors'])
if matches is None:
return None
return matches.group('probers')
@property
def at_least(self):
"""Returns the 'at least' value from the monitor string.
        The monitor string for an 'at_least' monitor rule looks like this.
min 1 of { /Common/gateway_icmp }
        This method parses out the numeric value. This value represents
the "at_least" value that can be updated in the module.
Returns:
int: The at_least value if found. None otherwise.
"""
if self._values['monitors'] is None:
return None
pattern = r'min\s+(?P<least>\d+)\s+of\s+'
matches = re.search(pattern, self._values['monitors'])
if matches is None:
return None
return matches.group('least')
class ModuleParameters(Parameters):
def _get_availability_value(self, type):
if self._values['availability_requirements'] is None:
return None
if self._values['availability_requirements'][type] is None:
return None
return int(self._values['availability_requirements'][type])
@property
def members(self):
if self._values['members'] is None:
return None
if len(self._values['members']) == 1 and self._values['members'][0] == '':
return []
result = []
for member in self._values['members']:
if 'server' not in member:
raise F5ModuleError(
"One of the provided members is missing a 'server' sub-option."
)
if 'virtual_server' not in member:
raise F5ModuleError(
"One of the provided members is missing a 'virtual_server' sub-option."
)
name = '{0}:{1}'.format(member['server'], member['virtual_server'])
name = fq_name(self.partition, name)
if name in result:
continue
result.append(name)
result = list(result)
return result
@property
def monitors_list(self):
if self._values['monitors'] is None:
return []
try:
result = re.findall(r'/\w+/[^\s}]+', self._values['monitors'])
result.sort()
return result
except Exception:
return self._values['monitors']
@property
def monitors(self):
if self._values['monitors'] is None:
return None
if len(self._values['monitors']) == 1 and self._values['monitors'][0] == '':
return 'default'
monitors = [fq_name(self.partition, x) for x in self.monitors_list]
if self.availability_requirement_type == 'at_least':
if self.at_least > len(self.monitors_list):
raise F5ModuleError(
"The 'at_least' value must not exceed the number of 'monitors'."
)
monitors = ' '.join(monitors)
result = 'min {0} of {{ {1} }}'.format(self.at_least, monitors)
elif self.availability_requirement_type == 'require':
monitors = ' '.join(monitors)
if self.number_of_probes > self.number_of_probers:
raise F5ModuleError(
"The 'number_of_probes' must not exceed the 'number_of_probers'."
)
result = 'require {0} from {1} {{ {2} }}'.format(self.number_of_probes, self.number_of_probers, monitors)
else:
result = ' and '.join(monitors).strip()
return result
@property
def availability_requirement_type(self):
if self._values['availability_requirements'] is None:
return None
return self._values['availability_requirements']['type']
@property
def number_of_probes(self):
return self._get_availability_value('number_of_probes')
@property
def number_of_probers(self):
return self._get_availability_value('number_of_probers')
@property
def at_least(self):
return self._get_availability_value('at_least')
class Changes(Parameters):
def to_return(self):
result = {}
for returnable in self.returnables:
result[returnable] = getattr(self, returnable)
result = self._filter_params(result)
return result
class UsableChanges(Changes):
@property
def monitors(self):
monitor_string = self._values['monitors']
if monitor_string is None:
return None
        if '{' in monitor_string and '}' in monitor_string:
tmp = monitor_string.strip('}').split('{')
monitor = ''.join(tmp).rstrip()
return monitor
return monitor_string
@property
def members(self):
results = []
if self._values['members'] is None:
return None
for idx, member in enumerate(self._values['members']):
result = dict(
name=member,
memberOrder=idx
)
results.append(result)
return results
class ReportableChanges(Changes):
@property
def members(self):
results = []
if self._values['members'] is None:
return None
for member in self._values['members']:
parts = member.split(':')
results.append(dict(
server=fq_name(self.partition, parts[0]),
virtual_server=fq_name(self.partition, parts[1])
))
return results
@property
def monitors(self):
if self._values['monitors'] is None:
return []
try:
result = re.findall(r'/\w+/[^\s}]+', self._values['monitors'])
result.sort()
return result
except Exception:
return self._values['monitors']
@property
def availability_requirement_type(self):
if self._values['monitors'] is None:
return None
if 'min ' in self._values['monitors']:
return 'at_least'
elif 'require ' in self._values['monitors']:
return 'require'
else:
return 'all'
@property
def number_of_probes(self):
"""Returns the probes value from the monitor string.
The monitor string for a Require monitor looks like this.
require 1 from 2 { /Common/tcp }
        This method parses out the first of the numeric values. This value represents
the "probes" value that can be updated in the module.
Returns:
int: The probes value if found. None otherwise.
"""
if self._values['monitors'] is None:
return None
pattern = r'require\s+(?P<probes>\d+)\s+from'
matches = re.search(pattern, self._values['monitors'])
if matches is None:
return None
return int(matches.group('probes'))
@property
def number_of_probers(self):
"""Returns the probers value from the monitor string.
The monitor string for a Require monitor looks like this.
require 1 from 2 { /Common/tcp }
        This method parses out the second of the numeric values. This value represents
the "probers" value that can be updated in the module.
Returns:
int: The probers value if found. None otherwise.
"""
if self._values['monitors'] is None:
return None
pattern = r'require\s+\d+\s+from\s+(?P<probers>\d+)\s+'
matches = re.search(pattern, self._values['monitors'])
if matches is None:
return None
return int(matches.group('probers'))
@property
def at_least(self):
"""Returns the 'at least' value from the monitor string.
        The monitor string for an 'at_least' monitor rule looks like this.
min 1 of { /Common/gateway_icmp }
        This method parses out the numeric value. This value represents
the "at_least" value that can be updated in the module.
Returns:
int: The at_least value if found. None otherwise.
"""
if self._values['monitors'] is None:
return None
pattern = r'min\s+(?P<least>\d+)\s+of\s+'
matches = re.search(pattern, self._values['monitors'])
if matches is None:
return None
return int(matches.group('least'))
@property
def availability_requirements(self):
if self._values['monitors'] is None:
return None
result = dict()
result['type'] = self.availability_requirement_type
result['at_least'] = self.at_least
result['number_of_probers'] = self.number_of_probers
result['number_of_probes'] = self.number_of_probes
return result
class Difference(object):
def __init__(self, want, have=None):
self.want = want
self.have = have
def compare(self, param):
try:
result = getattr(self, param)
return result
except AttributeError:
return self.__default(param)
def __default(self, param):
attr1 = getattr(self.want, param)
try:
attr2 = getattr(self.have, param)
if attr1 != attr2:
return attr1
except AttributeError:
return attr1
@property
def state(self):
if self.want.state == 'disabled' and self.have.enabled:
return dict(
disabled=True
)
elif self.want.state in ['present', 'enabled'] and self.have.disabled:
return dict(
enabled=True
)
@property
def monitors(self):
if self.want.monitors is None:
return None
if self.want.monitors == 'default' and self.have.monitors == 'default':
return None
if self.want.monitors == 'default' and self.have.monitors is None:
return None
if self.want.monitors == 'default' and len(self.have.monitors) > 0:
return 'default'
if self.have.monitors is None:
return self.want.monitors
if self.have.monitors != self.want.monitors:
return self.want.monitors
class ModuleManager(object):
def __init__(self, *args, **kwargs):
self.module = kwargs.get('module', None)
self.client = F5RestClient(**self.module.params)
self.kwargs = kwargs
def exec_module(self):
if not module_provisioned(self.client, 'gtm'):
raise F5ModuleError(
"GTM must be provisioned to use this module."
)
if self.version_is_less_than_12():
manager = self.get_manager('untyped')
else:
manager = self.get_manager('typed')
return manager.exec_module()
def get_manager(self, type):
if type == 'typed':
return TypedManager(**self.kwargs)
elif type == 'untyped':
return UntypedManager(**self.kwargs)
def version_is_less_than_12(self):
version = tmos_version(self.client)
if LooseVersion(version) < LooseVersion('12.0.0'):
return True
else:
return False
class BaseManager(object):
def __init__(self, *args, **kwargs):
self.module = kwargs.get('module', None)
self.client = F5RestClient(**self.module.params)
self.have = None
self.want = ModuleParameters(params=self.module.params)
self.changes = UsableChanges()
def _set_changed_options(self):
changed = {}
for key in Parameters.returnables:
if getattr(self.want, key) is not None:
changed[key] = getattr(self.want, key)
if changed:
self.changes = UsableChanges(params=changed)
def _update_changed_options(self):
diff = Difference(self.want, self.have)
updatables = Parameters.updatables
changed = dict()
for k in updatables:
change = diff.compare(k)
if change is None:
continue
else:
if isinstance(change, dict):
changed.update(change)
else:
changed[k] = change
if changed:
self.changes = UsableChanges(params=changed)
return True
return False
def exec_module(self):
changed = False
result = dict()
state = self.want.state
if state in ["present", "disabled"]:
changed = self.present()
elif state == "absent":
changed = self.absent()
reportable = ReportableChanges(params=self.changes.to_return())
changes = reportable.to_return()
result.update(**changes)
result.update(dict(changed=changed))
self._announce_deprecations(result)
return result
def _announce_deprecations(self, result):
warnings = result.pop('__warnings', [])
if self.version_is_less_than_12():
self._deprecate_v11(warnings)
for warning in warnings:
self.module.deprecate(
msg=warning['msg'],
version=warning['version']
)
def version_is_less_than_12(self):
version = tmos_version(self.client)
if LooseVersion(version) < LooseVersion('12.0.0'):
return True
else:
return False
def _deprecate_v11(self, result):
result.append(
dict(
msg='The support for this TMOS version is deprecated.',
version='2.12'
)
)
def present(self):
if self.exists():
return self.update()
else:
return self.create()
def absent(self):
if self.exists():
return self.remove()
return False
def should_update(self):
result = self._update_changed_options()
if result:
return True
return False
def update(self):
self.have = self.read_current_from_device()
if not self.should_update():
return False
if self.module.check_mode:
return True
self.update_on_device()
return True
def create(self):
if self.want.state == 'disabled':
self.want.update({'disabled': True})
elif self.want.state in ['present', 'enabled']:
self.want.update({'enabled': True})
self._set_changed_options()
if self.want.availability_requirement_type == 'require' and len(self.want.monitors_list) > 1:
raise F5ModuleError(
"Only one monitor may be specified when using an availability_requirement type of 'require'"
)
if self.module.check_mode:
return True
self.create_on_device()
if self.exists():
return True
else:
raise F5ModuleError("Failed to create the GTM pool")
def remove(self):
if self.module.check_mode:
return True
self.remove_from_device()
if self.exists():
raise F5ModuleError("Failed to delete the GTM pool")
return True
class TypedManager(BaseManager):
def __init__(self, *args, **kwargs):
super(TypedManager, self).__init__(**kwargs)
if self.want.type is None:
raise F5ModuleError(
"The 'type' option is required for BIG-IP instances "
"greater than or equal to 12.x"
)
def present(self):
types = [
'a', 'aaaa', 'cname', 'mx', 'naptr', 'srv'
]
if self.want.type is None:
raise F5ModuleError(
"A pool 'type' must be specified"
)
elif self.want.type not in types:
raise F5ModuleError(
"The specified pool type is invalid"
)
return super(TypedManager, self).present()
def exists(self):
uri = "https://{0}:{1}/mgmt/tm/gtm/pool/{2}/{3}".format(
self.client.provider['server'],
self.client.provider['server_port'],
self.want.type,
transform_name(self.want.partition, self.want.name)
)
resp = self.client.api.get(uri)
try:
response = resp.json()
except ValueError:
return False
if resp.status == 404 or 'code' in response and response['code'] == 404:
return False
return True
def update_on_device(self):
params = self.changes.api_params()
uri = "https://{0}:{1}/mgmt/tm/gtm/pool/{2}/{3}".format(
self.client.provider['server'],
self.client.provider['server_port'],
self.want.type,
transform_name(self.want.partition, self.want.name)
)
resp = self.client.api.patch(uri, json=params)
try:
response = resp.json()
except ValueError as ex:
raise F5ModuleError(str(ex))
if 'code' in response and response['code'] == 400:
if 'message' in response:
raise F5ModuleError(response['message'])
else:
raise F5ModuleError(resp.content)
def read_current_from_device(self):
uri = "https://{0}:{1}/mgmt/tm/gtm/pool/{2}/{3}".format(
self.client.provider['server'],
self.client.provider['server_port'],
self.want.type,
transform_name(self.want.partition, self.want.name)
)
query = '?expandSubcollections=true'
resp = self.client.api.get(uri + query)
try:
response = resp.json()
except ValueError as ex:
raise F5ModuleError(str(ex))
if 'code' in response and response['code'] == 400:
if 'message' in response:
raise F5ModuleError(response['message'])
else:
raise F5ModuleError(resp.content)
return ApiParameters(params=response)
def create_on_device(self):
params = self.changes.api_params()
params['name'] = self.want.name
params['partition'] = self.want.partition
uri = "https://{0}:{1}/mgmt/tm/gtm/pool/{2}".format(
self.client.provider['server'],
self.client.provider['server_port'],
self.want.type
)
resp = self.client.api.post(uri, json=params)
try:
response = resp.json()
except ValueError as ex:
raise F5ModuleError(str(ex))
if 'code' in response and response['code'] in [400, 403]:
if 'message' in response:
raise F5ModuleError(response['message'])
else:
raise F5ModuleError(resp.content)
return response['selfLink']
def remove_from_device(self):
uri = "https://{0}:{1}/mgmt/tm/gtm/pool/{2}/{3}".format(
self.client.provider['server'],
self.client.provider['server_port'],
self.want.type,
transform_name(self.want.partition, self.want.name)
)
response = self.client.api.delete(uri)
if response.status == 200:
return True
raise F5ModuleError(response.content)
class UntypedManager(BaseManager):
def exists(self):
uri = "https://{0}:{1}/mgmt/tm/gtm/pool/{2}".format(
self.client.provider['server'],
self.client.provider['server_port'],
transform_name(self.want.partition, self.want.name)
)
resp = self.client.api.get(uri)
try:
response = resp.json()
except ValueError:
return False
if resp.status == 404 or 'code' in response and response['code'] == 404:
return False
return True
def create_on_device(self):
params = self.changes.api_params()
params['name'] = self.want.name
params['partition'] = self.want.partition
uri = "https://{0}:{1}/mgmt/tm/gtm/pool/".format(
self.client.provider['server'],
self.client.provider['server_port']
)
resp = self.client.api.post(uri, json=params)
try:
response = resp.json()
except ValueError as ex:
raise F5ModuleError(str(ex))
if 'code' in response and response['code'] in [400, 403]:
if 'message' in response:
raise F5ModuleError(response['message'])
else:
raise F5ModuleError(resp.content)
return response['selfLink']
def update_on_device(self):
params = self.changes.api_params()
uri = "https://{0}:{1}/mgmt/tm/gtm/pool/{2}".format(
self.client.provider['server'],
self.client.provider['server_port'],
transform_name(self.want.partition, self.want.name)
)
resp = self.client.api.patch(uri, json=params)
try:
response = resp.json()
except ValueError as ex:
raise F5ModuleError(str(ex))
if 'code' in response and response['code'] == 400:
if 'message' in response:
raise F5ModuleError(response['message'])
else:
raise F5ModuleError(resp.content)
def read_current_from_device(self):
uri = "https://{0}:{1}/mgmt/tm/gtm/pool/{2}".format(
self.client.provider['server'],
self.client.provider['server_port'],
transform_name(self.want.partition, self.want.name)
)
resp = self.client.api.get(uri)
try:
response = resp.json()
except ValueError as ex:
raise F5ModuleError(str(ex))
if 'code' in response and response['code'] == 400:
if 'message' in response:
raise F5ModuleError(response['message'])
else:
raise F5ModuleError(resp.content)
return ApiParameters(params=response)
def remove_from_device(self):
uri = "https://{0}:{1}/mgmt/tm/gtm/pool/{2}".format(
self.client.provider['server'],
self.client.provider['server_port'],
transform_name(self.want.partition, self.want.name)
)
response = self.client.api.delete(uri)
if response.status == 200:
return True
raise F5ModuleError(response.content)
class ArgumentSpec(object):
def __init__(self):
self.states = ['absent', 'present', 'enabled', 'disabled']
self.preferred_lb_methods = [
'round-robin', 'return-to-dns', 'ratio', 'topology',
'static-persistence', 'global-availability',
'virtual-server-capacity', 'least-connections',
'lowest-round-trip-time', 'fewest-hops', 'packet-rate', 'cpu',
'completion-rate', 'quality-of-service', 'kilobytes-per-second',
'drop-packet', 'fallback-ip', 'virtual-server-score'
]
self.alternate_lb_methods = [
'round-robin', 'return-to-dns', 'none', 'ratio', 'topology',
'static-persistence', 'global-availability',
'virtual-server-capacity', 'packet-rate', 'drop-packet',
'fallback-ip', 'virtual-server-score'
]
self.fallback_lb_methods = copy.copy(self.preferred_lb_methods)
self.fallback_lb_methods.append('none')
self.types = [
'a', 'aaaa', 'cname', 'mx', 'naptr', 'srv'
]
self.supports_check_mode = True
argument_spec = dict(
name=dict(required=True),
state=dict(
default='present',
choices=self.states,
),
preferred_lb_method=dict(
choices=self.preferred_lb_methods,
),
fallback_lb_method=dict(
choices=self.fallback_lb_methods,
),
alternate_lb_method=dict(
choices=self.alternate_lb_methods,
),
fallback_ip=dict(),
type=dict(
choices=self.types
),
partition=dict(
default='Common',
fallback=(env_fallback, ['F5_PARTITION'])
),
members=dict(
type='list',
options=dict(
server=dict(required=True),
virtual_server=dict(required=True)
)
),
availability_requirements=dict(
type='dict',
options=dict(
type=dict(
choices=['all', 'at_least', 'require'],
required=True
),
at_least=dict(type='int'),
number_of_probes=dict(type='int'),
number_of_probers=dict(type='int')
),
mutually_exclusive=[
['at_least', 'number_of_probes'],
['at_least', 'number_of_probers'],
],
required_if=[
['type', 'at_least', ['at_least']],
['type', 'require', ['number_of_probes', 'number_of_probers']]
]
),
monitors=dict(type='list'),
max_answers_returned=dict(type='int'),
ttl=dict(type='int')
)
self.argument_spec = {}
self.argument_spec.update(f5_argument_spec)
self.argument_spec.update(argument_spec)
self.required_if = [
['preferred_lb_method', 'fallback-ip', ['fallback_ip']],
['fallback_lb_method', 'fallback-ip', ['fallback_ip']],
['alternate_lb_method', 'fallback-ip', ['fallback_ip']]
]
def main():
spec = ArgumentSpec()
module = AnsibleModule(
argument_spec=spec.argument_spec,
supports_check_mode=spec.supports_check_mode,
required_if=spec.required_if
)
try:
mm = ModuleManager(module=module)
results = mm.exec_module()
module.exit_json(**results)
except F5ModuleError as ex:
module.fail_json(msg=str(ex))
if __name__ == '__main__':
main()
| tersmitten/ansible | lib/ansible/modules/network/f5/bigip_gtm_pool.py | Python | gpl-3.0 | 42,441 |
#!/usr/bin/python
"""WARNING: Script is in beta and needs to be tested thoroughly.
The script generates a rudimentary appcache file based upon the content.opf file found in either
an uncompressed epub directory or a compressed epub file, and places it in the current directory.
Usage: acm_gen.py --input='/path/to/content.opf'  (the content.opf inside an uncompressed epub directory)
   OR: acm_gen.py --input='/path/to/book.epub'    (a compressed epub file)
"""
__author__ = 'Luis Aguilar'
__email__ = '[email protected]'
import os
import xml.etree.ElementTree as ET
import zipfile
import datetime
import epub
from optparse import OptionParser
def get_parameters():
"""
Parse the user input
"""
parser = OptionParser()
parser.add_option('-i', '--input', dest='input')
parser.add_option('-o', '--output', dest='output', default='.')
(options, args) = parser.parse_args()
    # Validate the input: non-empty path, a filename ending in .opf or .epub, and an existing file
if not options.input:
return parser.error('input path is empty, use --input="path.to.opf.or.epub.filename"')
elif not (options.input[-3:].lower() == 'pub' or options.input[-3:].lower() == 'opf'):
return parser.error('Please include opf or epub filename in path')
elif not os.path.isfile(options.input):
return parser.error('input epub or content.opf file could not be found, please verify path and filename')
else:
return {'input': options.input, 'output': options.output, 'file': options.input[-3:].lower()}
def process_extracted_opf(userParams):
"""
    Parse the content.opf file and return the manifest item hrefs.
    (Open question left by the original author: should the file used for
    ElementTree processing be closed explicitly?)
"""
namespaces = {'xmlns': 'http://www.idpf.org/2007/opf',
'dc':'http://purl.org/dc/elements/1.1/',
'dcterms':'http://purl.org/dc/terms/'}
print "Parsing content.opf file at " + userParams['input']
# return list
itemHrefs = []
# begin parsing content.opf
tree = ET.parse(userParams['input'])
root = tree.getroot()
# extract item hrefs and place in return list
for child in root.findall('xmlns:manifest/xmlns:item', namespaces=namespaces):
itemHrefs.append(child.attrib['href'])
return itemHrefs
def process_epub(userParams):
"""
Parse manifest items using epub library
"""
book = epub.open_epub(userParams['input'])
print "Parsing epub file at " + userParams['input']
itemHrefs = []
for item in book.opf.manifest.values():
itemHrefs.append(item.href)
return itemHrefs
def write_appcache(itemHrefs):
"""
Create offline_appcache with extracted hrefs
"""
fileName = 'epub.appcache'
cacheHeader = 'CACHE MANIFEST\n'
# open pointer to new appcache file
# will need to add functionality that checks for existing appcache
f_appcache = open(fileName, "w")
# write file
f_appcache.write(cacheHeader)
f_appcache.write('# '+ str(datetime.datetime.now()) + '\n')
for href in itemHrefs:
f_appcache.write(href + '\n')
# close file
f_appcache.close()
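# Illustrative sketch (editor addition, not part of the original tool): given
# itemHrefs like ['index.xhtml', 'styles/main.css'], write_appcache() produces
# an epub.appcache file of roughly this shape:
#
#   CACHE MANIFEST
#   # 2014-01-01 12:00:00.000000
#   index.xhtml
#   styles/main.css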
def main():
# get user defined parameters
userParams = get_parameters()
# process the epub or the content file extracted from an epub
if (userParams['file']=='pub'):
itemHrefs = process_epub(userParams)
elif(userParams['file']=='opf'):
itemHrefs = process_extracted_opf(userParams)
# take extracted items and generate the appcache
write_appcache(itemHrefs)
if __name__ == '__main__':
main() | ravi-sai/epub.js | tools/appcache.py | Python | bsd-2-clause | 3,659 |
from datetime import timedelta
import pytest
from django.urls import reverse
from django.utils import timezone
from model_bakery import baker
from pythonpro.cohorts.models import LiveClass
from pythonpro.memberkit.models import Subscription
@pytest.fixture
def live_class(db, cohort, fake) -> LiveClass:
now = timezone.now()
return baker.make(
LiveClass,
cohort=cohort,
vimeo_id='1212',
start=now + timedelta(days=1),
description=fake.paragraph(nb_sentences=3, variable_nb_sentences=True, ext_word_list=None),
memberkit_url='https://plataforma.dev.pro.br'
)
@pytest.fixture
def resp(client_with_user, live_class: LiveClass):
return client_with_user.get(reverse('cohorts:live_class', kwargs={'pk': live_class.id}))
def test_logged_user(resp):
assert resp.status_code == 302
assert resp.url == reverse('checkout:bootcamp_lp')
def test_link_unavailable_for_non_users(client):
resp = client.get(reverse('cohorts:live_class', kwargs={'pk': 1}))
assert resp.status_code == 302
def test_redirect_user_not_migrated_to_memberkit(client_with_user, live_class, logged_user):
baker.make(
Subscription,
subscriber=logged_user,
activated_at=None,
memberkit_user_id=None
)
resp = client_with_user.get(reverse('cohorts:live_class', kwargs={'pk': live_class.id}))
assert resp.status_code == 301
assert resp.url == reverse('migrate_to_memberkit')
def test_redirect_user_migrated_to_memberkit(client_with_user, live_class, logged_user):
baker.make(
Subscription,
status=Subscription.Status.ACTIVE,
subscriber=logged_user,
activated_at=timezone.now(),
memberkit_user_id=1
)
resp = client_with_user.get(reverse('cohorts:live_class', kwargs={'pk': live_class.id}))
assert resp.status_code == 301
assert resp.url == live_class.memberkit_url
| pythonprobr/pythonpro-website | pythonpro/cohorts/tests/test_live_class_detail.py | Python | agpl-3.0 | 1,924 |
#!/usr/bin/python2.5
#from __future__ import with_statement # Until Python 2.6
"""
Converts LaTeX math to png images.
Run latexmath2png.py --help for usage instructions.
"""
"""
Author:
Kamil Kisiel <[email protected]>
URL: http://www.kamilkisiel.net
Revision History:
2007/04/20 - Initial version
TODO:
- Make handling of bad input more graceful?
---
Some ideas borrowed from Kjell Fauske's article at http://fauskes.net/nb/htmleqII/
Licensed under the MIT License:
Copyright (c) 2007 Kamil Kisiel <[email protected]>
Permission is hereby granted, free of charge, to any person obtaining a copy
of this software and associated documentation files (the "Software"), to deal
in the Software without restriction, including without limitation the rights
to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the Software is
furnished to do so, subject to the following conditions:
The above copyright notice and this permission notice shall be included in
all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS
IN THE SOFTWARE.
"""
import os
import sys
import tempfile
import getopt
from StringIO import StringIO
from subprocess import *
# Default packages to use when generating output
default_packages = [
#'amsmath',
#'amsthm',
#'amssymb',
]
def __build_preamble(packages):
preamble = '\documentclass{article}\n'
for p in packages:
preamble += "\usepackage{%s}\n" % p
#preamble += "\usepackage[active]{preview}\n"
preamble += "\pagestyle{empty}\n\\begin{document}\n"
return preamble
def __write_output(infile, outdir, workdir = '.', prefix = '', dpi = 100):
try:
# Generate the DVI file
latexcmd = 'latex -file-line-error-style -interaction=nonstopmode -output-directory %s %s'\
% (workdir, infile)
p = Popen(latexcmd, shell=True, stdout=PIPE)
rc = p.wait()
# Something bad happened, abort
if rc != 0:
print p.stdout.read()
raise Exception('latex error')
# Convert the DVI file to PNG's
dvifile = infile.replace('.tex', '.dvi')
outprefix = os.path.join(outdir, prefix)
dvicmd = "dvipng --freetype0 -Q 8 --depth -q -T tight -D %i -z 3 -bg Transparent "\
"-o %s.png %s" % (dpi, outprefix, dvifile)
p = Popen(dvicmd, shell=True, stdout=PIPE)
rc = p.wait()
if rc != 0:
raise Exception('dvipng error')
depth = int(p.stdout.readlines()[-1].split('=')[-1])
finally:
# Cleanup temporaries
basefile = infile.replace('.tex', '')
tempext = [ '.aux', '.dvi', '.log' ]
for te in tempext:
t = basefile + te
if os.path.exists(t):
os.remove(t)
return depth
def math2png(eq, outdir, packages = default_packages, prefix = '', dpi = 100):
#try:
# Set the working directory
workdir = tempfile.gettempdir()
# Get a temporary file
fd, texfile = tempfile.mkstemp('.tex', '', workdir, True)
# Create the TeX document
#with os.fdopen(fd, 'w+') as f:
f = os.fdopen(fd, 'w')
f.write(__build_preamble(packages))
f.write("$%s$\n" % eq)
f.write('\end{document}')
f.close()
depth = __write_output(texfile, outdir, workdir, prefix, dpi)
#finally:
# pass
# #if os.path.exists(texfile):
# # os.remove(texfile)
return depth
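# Hedged usage sketch (editor addition): assuming `latex` and `dvipng` are
# installed and on the PATH, the call below would render the equation to
# eq0.png in the current directory and return the baseline depth reported by
# dvipng, which callers can use for vertical alignment in HTML:
#
#   depth = math2png(r'x = \frac{-b \pm \sqrt{b^2 - 4ac}}{2a}', os.getcwd(),
#                    prefix='eq0', dpi=120)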
def math2pngwl(eq, outdir, packages = default_packages, prefix = '', dpi = 100):
#try:
# Set the working directory
workdir = tempfile.gettempdir()
# Get a temporary file
fd, texfile = tempfile.mkstemp('.tex', '', workdir, True)
# Create the TeX document
#with os.fdopen(fd, 'w+') as f:
f = os.fdopen(fd, 'w')
f.write(__build_preamble(packages))
f.write("\\[%s\\]\n\\newpage\n" % eq)
f.write('\end{document}')
f.close()
depth = __write_output(texfile, outdir, workdir, prefix, dpi)
#finally:
# pass
# #if os.path.exists(texfile):
# # os.remove(texfile)
return depth
| liubenyuan/liubenyuan.github.io | latexmath2png.py | Python | mit | 4,617 |
"""
Implements the emitter decorator, class and desciptor.
"""
import weakref
from ._action import BaseDescriptor
def emitter(func):
""" Decorator to turn a method of a Component into an
:class:`Emitter <flexx.event.Emitter>`.
An emitter makes it easy to emit specific events, and is also a
placeholder for documenting an event.
.. code-block:: python
class MyObject(event.Component):
@emitter
def spam(self, v):
return dict(value=v)
m = MyObject()
m.spam(42) # emit the spam event
The method being decorated can have any number of arguments, and
should return a dictionary that represents the event to generate.
The method's docstring is used as the emitter's docstring.
"""
if not callable(func):
raise TypeError('The event.emitter() decorator needs a function.')
if getattr(func, '__self__', None) is not None: # builtin funcs have __self__
raise TypeError('Invalid use of emitter decorator.')
return EmitterDescriptor(func, func.__name__, func.__doc__)
class EmitterDescriptor(BaseDescriptor):
""" Placeholder for documentation and easy emitting of the event.
"""
def __init__(self, func, name, doc):
self._func = func
self._name = name
self.__doc__ = self._format_doc('emitter', name, doc, func)
def __get__(self, instance, owner):
if instance is None:
return self
private_name = '_' + self._name + '_emitter'
try:
emitter = getattr(instance, private_name)
except AttributeError:
emitter = Emitter(instance, self._func, self._name, self.__doc__)
setattr(instance, private_name, emitter)
emitter._use_once(self._func) # make super() work, see _action.py
return emitter
class Emitter:
""" Emitter objects are wrappers around Component methods. They take
care of emitting an event when called and function as a placeholder
for documenting an event. This class should not be instantiated
directly; use ``event.emitter()`` instead.
"""
def __init__(self, ob, func, name, doc):
assert callable(func)
# Store func, name, and docstring (e.g. for sphinx docs)
self._ob1 = weakref.ref(ob)
self._func = func
self._func_once = func
self._name = name
self.__doc__ = doc
def __repr__(self):
cname = self.__class__.__name__
return '<%s %r at 0x%x>' % (cname, self._name, id(self))
def _use_once(self, func):
""" To support super().
"""
self._func_once = func
def __call__(self, *args):
""" Emit the event.
"""
func = self._func_once
self._func_once = self._func
ob = self._ob1()
if ob is not None:
ev = func(ob, *args)
if ev is not None:
ob.emit(self._name, ev)
| zoofIO/flexx | flexx/event/_emitter.py | Python | bsd-2-clause | 2,960 |
patches = [
{
"op": "replace",
"path": "/ResourceTypes/AWS::FMS::Policy/Properties/ResourceTags/ItemType",
"value": "Tag",
},
{
"op": "remove",
"path": "/PropertyTypes/AWS::FMS::Policy.ResourceTag",
},
{
"op": "replace",
"path": "/ResourceTypes/AWS::FMS::Policy/Properties/Tags/ItemType",
"value": "Tag",
},
{
"op": "remove",
"path": "/PropertyTypes/AWS::FMS::Policy.PolicyTag",
},
]
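# Editor note (illustrative): these entries use JSON Patch style operations
# (op/path/value) against the CloudFormation resource specification. The
# "replace" ops point the ResourceTags/Tags item types of AWS::FMS::Policy at
# the shared "Tag" type, and the "remove" ops drop the then-redundant
# AWS::FMS::Policy.ResourceTag and .PolicyTag property definitions.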
| cloudtools/troposphere | scripts/patches/fms.py | Python | bsd-2-clause | 496 |
#!/usr/bin/python2.4
#
# Copyright 2009 Empeeric LTD. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from django.utils import simplejson
import urllib,urllib2
import urlparse
import string
BITLY_BASE_URL = "http://api.bit.ly/"
BITLY_API_VERSION = "2.0.1"
VERBS_PARAM = {
'shorten':'longUrl',
'expand':'shortUrl',
'info':'shortUrl',
'stats':'shortUrl',
'errors':'',
}
class BitlyError(Exception):
'''Base class for bitly errors'''
@property
def message(self):
'''Returns the first argument used to construct this error.'''
return self.args[0]
class Api():
""" API class for bit.ly """
def __init__(self, login, apikey):
self.login = login
self.apikey = apikey
self._urllib = urllib2
def shorten(self,longURL):
"""
Takes either:
A long URL string and returns shortened URL string
        Or a list of long URL strings and returns a list of shortened URL strings.
"""
if not isinstance(longURL, list):
longURL = [longURL]
for index,url in enumerate(longURL):
if not url.startswith("http"):
longURL[index] = "http://" + url
request = self._getURL("shorten",longURL)
result = self._fetchUrl(request)
json = simplejson.loads(result)
self._CheckForError(json)
res = []
for item in json['results'].values():
if item['shortKeywordUrl'] == "":
res.append(item['shortUrl'])
else:
res.append(item['shortKeywordUrl'])
if len(res) == 1:
return res[0]
else:
return res
def expand(self,shortURL):
""" Given a bit.ly url or hash, return long source url """
request = self._getURL("expand",shortURL)
result = self._fetchUrl(request)
json = simplejson.loads(result)
self._CheckForError(json)
return json['results'][string.split(shortURL, '/')[-1]]['longUrl']
def info(self,shortURL):
"""
Given a bit.ly url or hash,
return information about that page,
such as the long source url
"""
request = self._getURL("info",shortURL)
result = self._fetchUrl(request)
json = simplejson.loads(result)
self._CheckForError(json)
return json['results'][string.split(shortURL, '/')[-1]]
def stats(self,shortURL):
""" Given a bit.ly url or hash, return traffic and referrer data. """
request = self._getURL("stats",shortURL)
result = self._fetchUrl(request)
json = simplejson.loads(result)
self._CheckForError(json)
return Stats.NewFromJsonDict(json['results'])
def errors(self):
""" Get a list of bit.ly API error codes. """
request = self._getURL("errors","")
result = self._fetchUrl(request)
json = simplejson.loads(result)
self._CheckForError(json)
return json['results']
def setUrllib(self, urllib):
'''Override the default urllib implementation.
Args:
urllib: an instance that supports the same API as the urllib2 module
'''
self._urllib = urllib
def _getURL(self,verb,paramVal):
if not isinstance(paramVal, list):
paramVal = [paramVal]
params = [
('version',BITLY_API_VERSION),
('format','json'),
('login',self.login),
('apiKey',self.apikey),
]
verbParam = VERBS_PARAM[verb]
if verbParam:
for val in paramVal:
params.append(( verbParam,val ))
encoded_params = urllib.urlencode(params)
return "%s%s?%s" % (BITLY_BASE_URL,verb,encoded_params)
def _fetchUrl(self,url):
'''Fetch a URL
Args:
url: The URL to retrieve
Returns:
A string containing the body of the response.
'''
# Open and return the URL
url_data = self._urllib.urlopen(url).read()
return url_data
def _CheckForError(self, data):
"""Raises a BitlyError if bitly returns an error message.
Args:
data: A python dict created from the bitly json response
Raises:
BitlyError wrapping the bitly error message if one exists.
"""
# bitly errors are relatively unlikely, so it is faster
# to check first, rather than try and catch the exception
if 'ERROR' in data or data['statusCode'] == 'ERROR':
raise BitlyError, data['errorMessage']
for key in data['results']:
if type(data['results']) is dict and type(data['results'][key]) is dict:
if 'statusCode' in data['results'][key] and data['results'][key]['statusCode'] == 'ERROR':
raise BitlyError, data['results'][key]['errorMessage']
class Stats(object):
'''A class representing the Statistics returned by the bitly api.
The Stats structure exposes the following properties:
status.user_clicks # read only
status.clicks # read only
'''
def __init__(self,user_clicks=None,total_clicks=None):
self.user_clicks = user_clicks
self.total_clicks = total_clicks
@staticmethod
def NewFromJsonDict(data):
'''Create a new instance based on a JSON dict.
Args:
data: A JSON dict, as converted from the JSON in the bitly API
Returns:
A bitly.Stats instance
'''
return Stats(user_clicks=data.get('userClicks', None),
total_clicks=data.get('clicks', None))
if __name__ == '__main__':
testURL1="www.yahoo.com"
testURL2="www.cnn.com"
a=Api(login="pythonbitly",apikey="R_06871db6b7fd31a4242709acaf1b6648")
short=a.shorten(testURL1)
print "Short URL = %s" % short
urlList=[testURL1,testURL2]
shortList=a.shorten(urlList)
print "Short URL list = %s" % shortList
long=a.expand(short)
print "Expanded URL = %s" % long
info=a.info(short)
print "Info: %s" % info
stats=a.stats(short)
print "User clicks %s, total clicks: %s" % (stats.user_clicks,stats.total_clicks)
errors=a.errors()
print "Errors: %s" % errors | poeks/twitterbelle | lib/bitly.py | Python | apache-2.0 | 7,225 |
def create_skill_attributes_table(curs):
sql = '\n'.join([
"CREATE TABLE skill_attributes (",
" skill_attribute_id INTEGER PRIMARY KEY,",
" section_id INTEGER NOT NULL,",
" attribute TEXT,",
" armor_check_penalty INT,",
" trained_only INT",
")"])
curs.execute(sql)
def create_skill_attributes_index(curs):
sql = '\n'.join([
"CREATE INDEX skill_attributes_section_id",
" ON skill_attributes (section_id)"])
curs.execute(sql)
def insert_skill_attribute(curs, section_id, attribute, armor_check_penalty, trained_only):
values = [section_id, attribute]
if armor_check_penalty:
values.append(1)
else:
values.append(0)
if trained_only:
values.append(1)
else:
values.append(0)
sql = '\n'.join([
"INSERT INTO skill_attributes",
" (section_id, attribute, armor_check_penalty, trained_only)",
" VALUES",
" (?, ?, ?, ?)"])
curs.execute(sql, values)
def delete_skill_attribute(curs, section_id):
values = [section_id]
sql = '\n'.join([
"DELETE FROM skill_attributes",
" WHERE section_id = ?"])
curs.execute(sql, values)
def fetch_skill_attribute(curs, section_id):
values = [section_id]
sql = '\n'.join([
"SELECT *",
" FROM skill_attributes",
" WHERE section_id = ?"])
curs.execute(sql, values)
def fetch_skills_by_quality(curs, attribute=None, armor_check_penalty=None, trained_only=None):
values = []
sqla = [
"SELECT s.*",
" FROM sections s, skill_attributes sa",
" WHERE s.section_id = sa.section_id"]
if attribute:
sqla.append(" AND sa.attribute = ?")
values.append(attribute)
if armor_check_penalty != None:
sqla.append(" AND sa.armor_check_penalty = ?")
if armor_check_penalty:
values.append(1)
else:
values.append(0)
if trained_only != None:
sqla.append(" AND sa.trained_only = ?")
if trained_only:
values.append(1)
else:
values.append(0)
sql = '\n'.join(sqla)
curs.execute(sql, values)
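# Illustrative usage sketch (editor addition): assumes an sqlite3 database whose
# schema also contains the `sections` table referenced by fetch_skills_by_quality.
#
#   import sqlite3
#   conn = sqlite3.connect('psrd.db')
#   curs = conn.cursor()
#   fetch_skills_by_quality(curs, attribute='Dex', trained_only=False)
#   skills = curs.fetchall()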
| devonjones/PSRD-Parser | src/psrd/sql/skills.py | Python | gpl-3.0 | 1,898 |
import random
import textwrap
import cmd
import time
import combat
SCREEN_WIDTH = 80
NORTH = 'north'
SOUTH = 'south'
WEST = 'west'
EAST = 'east'
DESC = 'desc'
CHESTS = 'chests'
MOBS = 'mobs'
show_full_exits = True
chest_opened = False
mob_dead = False
gold_pouch = 0
def chest_gen():
return random.randint(0, 4) == 1
def spawn_mob():
spawn = (random.randint(0, 1) == 1)
_mobs = ['spider', 'rat', 'zombie', 'skeleton']
if spawn:
return _mobs[random.randint(0, 3)]
def gold_get():
global gold_pouch
print('You open the chest...')
time.sleep(1)
added_gold = random.randint(10, 100)
print('You find ', added_gold, 'in the chest! Wow!')
gold_pouch += added_gold
print('You have ', gold_pouch, ' gold in your pouch!')
return
def room_gen():
return random.randint(1, 11)
location = room_gen()
cave_rooms = {
1: {
DESC: 'There\'s a door in every direction!',
NORTH: room_gen(),
EAST: room_gen(),
SOUTH: room_gen(),
WEST: room_gen(),
CHESTS: chest_gen(),
MOBS: spawn_mob()},
2: {
DESC: 'There\'s a door to the north, east, and south of you!',
NORTH: room_gen(),
EAST: room_gen(),
SOUTH: room_gen(),
CHESTS: chest_gen(),
MOBS: spawn_mob()},
3: {
DESC: 'There\'s a door to the north and east of you!',
NORTH: room_gen(),
EAST: room_gen(),
CHESTS: chest_gen(),
MOBS: spawn_mob()},
4: {
DESC: 'There\'s a door to the north!',
NORTH: room_gen(),
CHESTS: chest_gen(),
MOBS: spawn_mob()},
5: {
DESC: 'There\'s a door to the north and west of you!',
NORTH: room_gen(),
WEST: room_gen(),
CHESTS: chest_gen(),
MOBS: spawn_mob()},
6: {
DESC: 'There\'s a door to the north and south of you!',
NORTH: room_gen(),
SOUTH: room_gen(),
CHESTS: chest_gen(),
MOBS: spawn_mob()},
7: {
        DESC: 'There\'s a door to the east and west of you!',
EAST: room_gen(),
WEST: room_gen(),
CHESTS: chest_gen(),
MOBS: spawn_mob()},
8: {
DESC: 'There\'s a door to the south and west of you!',
SOUTH: room_gen(),
WEST: room_gen(),
CHESTS: chest_gen(),
MOBS: spawn_mob()},
9: {
DESC: 'There\'s a door to your east!',
EAST: room_gen(),
CHESTS: chest_gen(),
MOBS: spawn_mob()},
10: {
DESC: 'There\'s a door to the south!',
SOUTH: room_gen(),
CHESTS: chest_gen(),
MOBS: spawn_mob()},
11: {
DESC: 'There\'s a door to the west!',
WEST: room_gen(),
CHESTS: chest_gen(),
MOBS: spawn_mob()},
}
def display_location(loc):
"""A helper function for displaying an area's description and exits."""
global mob_dead
# Print the room's description (using textwrap.wrap())
print('\n'.join(textwrap.wrap(cave_rooms[loc][DESC], SCREEN_WIDTH)))
# Print all chests in the area
if cave_rooms[loc][CHESTS]:
print('There\'s a chest!')
# Print mob in area
if cave_rooms[loc][MOBS] is None:
print('You got lucky! No monsters in here!')
else:
print('There\'s a %s in the room!' % (cave_rooms[loc][MOBS]))
def move_direction(direction):
"""A helper function that changes the location of the player."""
global location
global mob_dead
if mob_dead:
if direction in cave_rooms[location]:
mob_dead = False
print('You move to the %s.' % direction)
location = cave_rooms[location][direction]
display_location(location)
else:
print('You cannot move in that direction')
else:
print('There\'s a %s in the way!' % cave_rooms[location][MOBS])
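# Illustrative sketch (editor addition): the helpers above can be exercised
# outside the cmd loop, e.g.
#
#   display_location(location)   # describe the randomly chosen starting room
#   mob_dead = True              # pretend the current room has been cleared
#   move_direction(NORTH)        # follows cave_rooms[location][NORTH] if that exit exists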
class CaveCommands(cmd.Cmd):
prompt = '\n> '
# The default() method is called when none of the other do_*() command methods match.
def default(self, arg):
print('I do not understand that command. Type "help" for a list of commands.')
# A very simple "quit" command to terminate the program:
def do_quit(self, arg):
"""Quit th game"""
return True # This exits the cmd application loop in TextAdventureCmd.cmdloop()
def do_look(self, arg):
"""Print surrounding area"""
display_location(location)
def do_north(self, arg):
"""Move north"""
global chest_opened
chest_opened = False
move_direction('north')
def do_south(self, arg):
"""Move south"""
global chest_opened
chest_opened = False
move_direction('south')
def do_east(self, arg):
"""Move south"""
global chest_opened
chest_opened = False
move_direction('east')
def do_west(self, arg):
"""move west"""
global chest_opened
chest_opened = False
move_direction('west')
do_n = do_north
do_s = do_south
do_e = do_east
do_w = do_west
def do_open(self, args):
"""open <chest> - opens a chest."""
global chest_opened
what_to_open = args.lower()
if what_to_open == 'chest':
if cave_rooms[location][CHESTS]:
if not chest_opened:
gold_get()
chest_opened = True
else:
print('You\'ve already opened this chest!')
else:
print('Open what?')
def do_fight(self, args):
"""fight <mob> - fights a mob"""
global mob_dead
what_to_fight = args.lower()
mob = cave_rooms[location][MOBS]
if mob_dead:
print('The', mob, 'is already dead!')
elif what_to_fight == mob:
mob_dead = combat.fight(mob)
elif mob is None:
print('There\'s nothing to fight!')
else:
print('Fight what?')
def enter_cave():
print('You enter the cave...')
time.sleep(1)
display_location(location)
CaveCommands().cmdloop() | cjPrograms/miniQuest | cave.py | Python | gpl-2.0 | 6,132 |
import os
import requests
import time
import datetime
from sqlalchemy import or_, and_
from flask import jsonify, render_template, redirect,flash, request, url_for
from flask_login import login_required, logout_user, login_user, current_user
from werkzeug.utils import secure_filename
import bibtexparser
from bibtexparser.bparser import BibTexParser
from bibtexparser.customization import convert_to_unicode
from bibtexparser.bibdatabase import BibDatabase
from collections import defaultdict
from apps.models.models import *
from apps.bibtex import add_bibtex_string, add_xml_string
from apps import app, db, lm
from config import DB_NAME, HAL_QUERY_API, ALLOWED_EXTENSIONS, \
ITEMS_PER_PAGE
@app.errorhandler(404)
def not_found_error(error):
return render_template('404.html', title="Not found"), 404
@app.route('/', methods=['GET', 'POST'])
@app.route('/login', methods=["GET", "POST"])
def login():
"""Login to application."""
form = LoginForm()
if form.validate_on_submit():
user = User.query.filter_by(name=form.name.data).first()
if user:
user.authenticated = True
db.session.add(user)
db.session.commit()
login_user(user, remember=True)
flash("Login success")
return redirect(request.args.get('next') or "/index")
return redirect('/login')
return render_template('about.html', form=form, title="Log in")
@app.route('/logout')
def logout():
"""Log out from application."""
user = current_user
user.authenticated = False
db.session.add(user)
db.session.commit()
logout_user()
return redirect('/')
@app.route("/index", methods=["GET", "POST"])
def get_index():
# If a bibtex is being posted, process:
if request.method == 'POST':
if 'file' not in request.files:
flash('No file part')
return redirect(request.url)
file = request.files['file']
if file.filename == '':
flash('No selected file')
return redirect(request.url)
if file:
bibtexstr = file.read().decode("utf8")
if file.filename[-4:] == ".bib":
add_bibtex_string(bibtexstr)
elif file.filename[-4:] == ".xml":
add_xml_string(bibtexstr)
flash("{} has been added to database.".format(file.filename))
return redirect(request.url)
# then, display page:
form = ExtendedSearchForm()
activity = db.session.query(Event).all()
events = []
# Store events in a dictionnary
for p in activity:
date = datetime.datetime.fromtimestamp(p.time).strftime("%d/%m %H:%M")
events.append({
"author": p.author,
"article": p.article,
"date": date,
"type":p.event
})
num_entries = db.session.query(BiblioEntry).count()
return render_template("index.html",
title="Index",
form=form,
user=current_user.name,
events=events[::-1],
num_entries=num_entries)
@app.route('/request', methods=["POST"])
def follow_request():
form = ExtendedSearchForm()
if form.validate_on_submit():
print(form.source.data)
if form.source.data == "local":
redirect("/biblio/search", code=307)
elif form.source.data == "hal":
redirect("/hal/"+form.name.data)
else:
flash("Not implemented yet")
return redirect("/index")
@app.route('/biblio/search', methods=['GET', 'POST'])
@login_required
def search_biblio():
"""Search entries in biblio."""
# Get the form corresponding to the query:
form = ExtendedSearchForm()
if form.validate_on_submit():
if form.source.data == "local":
s = "%" + form.name.data + "%"
# Send request to database:
bibdat = convert_rows_to_dict(db.session.query(BiblioEntry)\
.filter(or_(BiblioEntry.authors.like(s),
BiblioEntry.title.like(s))))
# Format bibdat and sort by years:
templateVars = format_bibdatabase(bibdat)
if len(bibdat) == 0:
flash("No entry found")
return render_template("references.html", **templateVars)
elif form.source.data == "hal":
redirect("/hal/"+form.name.data)
else:
flash("Not implemented yet")
return redirect("/biblio")
@app.route('/bibmanager', methods=['GET'])
@login_required
def bibmanager():
return render_template("bibmanager.html", title="References")
@app.route('/biblio/addentry', methods=['GET', 'POST'])
@login_required
def add_entry():
"""Add a new entry to the bibliography."""
form = BiblioForm()
if form.validate_on_submit():
bib_entry = BiblioEntry(ID=form.ID.data,
ENTRYTYPE=form.typ.data,
authors=form.author.data,
title=form.title.data,
year=form.year.data,
school="",
publisher="",
keywords=form.keywords.data,
url=form.url.data,
journal=form.journal.data)
db.session.add(bib_entry)
user = current_user.name
event = Event(author=user, article=form.ID.data,
event="ADD", time=time.time())
db.session.add(event)
db.session.commit()
return redirect("/biblio")
return redirect("/biblio")
@app.route('/biblio/updateentry', methods=['GET', 'POST'])
@login_required
def update_entry():
"""Add a new entry to the bibliography."""
form = BiblioForm()
article_name = request.environ["HTTP_REFERER"].split("=")[-1]
if form.validate_on_submit():
article = BiblioEntry.query.filter_by(ID=form.ID.data).first()
article.ID = form.ID.data
article.ENTRYTYPE = form.typ.data
article.authors = form.author.data
article.title = form.title.data
article.year = form.year.data
article.journal = form.journal.data
article.school = form.school.data
article.url = form.url.data
article.keywords = form.keywords.data
article.tag = form.tag.data
db.session.add(article)
user = current_user.name
event = Event(author=user, article=form.ID.data,
event="UPDATE", time=time.time())
db.session.add(event)
db.session.commit()
return redirect("/biblio/article=" + article_name)
return redirect("/biblio")
@app.route('/biblio/postcomment', methods=['GET', 'POST'])
@login_required
def post_comment():
"""Add post to article."""
form = PostForm()
article = request.environ["HTTP_REFERER"].split("=")[-1]
tim = time.time()
user = current_user.name
post = Post(author=user, article=article,
message=form.message.data, time=tim)
db.session.add(post)
user = current_user.name
event = Event(author=user, article=article,
event="COMMENT", time=time.time())
db.session.add(event)
db.session.commit()
return redirect("/biblio/article=" + article)
@app.route('/bibtex=<string:idx>', methods=['GET'])
@login_required
def get_bibtex(idx):
"""Return bibtex entry with id *idx*."""
bibdat = BiblioEntry.query.filter_by(ID=idx).first()
result = bibdat.__dict__
del result["_sa_instance_state"]
return jsonify(result)
@app.route('/biblio/article=<string:idx>', methods=['GET'])
@login_required
def display_article(idx):
"""Return bibtex entry with id *idx*."""
bibdat = BiblioEntry.query.filter_by(ID=idx).first()
if bibdat:
try:
keyword = (bibdat.keywords).split(",")
except:
keyword = ""
posts = Post.query.filter_by(article=idx).all()
dposts = []
# Store posts in a dictionnary
for p in posts:
date = datetime.datetime.fromtimestamp(p.time).\
strftime("%d-%m-%Y %H:%M")
dposts.append({
"author": p.author,
"message": p.message,
"date": date
})
templateVars = {
"license_info": "Distributed under MIT license.",
"title": "Article",
"engine": "Powered by Flask",
"article": bibdat,
"keyword": keyword,
"bibform": BiblioForm(),
"commentform": PostForm(),
"posts": dposts
}
return render_template("article.html", **templateVars)
return render_template('404.html', title="Not found"), 404
@app.route('/biblio', methods=['GET'])
@app.route('/biblio/<int:page>', methods=['GET'])
@login_required
def get_all_biblio(page=1):
"""Return all bibliography, without filters."""
query = BiblioEntry.query.paginate(page, ITEMS_PER_PAGE, False).items
bibdat = convert_rows_to_dict(query)
years = [str(value.year)
for value in db.session.query(BiblioEntry.year).distinct()]
tags = [str(value.tag)
for value in db.session.query(BiblioEntry.tag).distinct()]
templateVars = format_bibdatabase(bibdat)
years.sort()
templateVars["years"] = years[::-1]
templateVars["tags"] = tags
templateVars["nentries"] = BiblioEntry.query.count()
return render_template("references.html", **templateVars)
@app.route('/biblio/query', methods=['GET'])
@login_required
def request_api():
"""Request given years and types"""
# Process arguments of query:
query = []
year = request.args.get("year")
if year:
query.append([BiblioEntry.year.like(yy) for yy in year.split(":")])
types = request.args.get("type")
if types:
query.append([BiblioEntry.ENTRYTYPE.like(tt) for tt in types.split(":")])
tags_str = request.args.get("tags")
if tags_str:
query.append([BiblioEntry.tag.like(tt) for tt in tags_str.split(":")])
fil = and_(*[or_(*q) for q in query])
rows = db.session.query(BiblioEntry).filter(fil)
bibdat = convert_rows_to_dict(rows)
years = [str(value.year)
for value in db.session.query(BiblioEntry.year).distinct()]
tags = [str(value.tag)
for value in db.session.query(BiblioEntry.tag).distinct()]
templateVars = format_bibdatabase(bibdat, type_filter=types)
years.sort(key=lambda x:int(x))
templateVars["years"] = years
templateVars["tags"] = tags
if year:
templateVars["checked"] = [str(y) for y in year.split(":")]
if types:
templateVars["checked"].extend(types.split(":"))
if tags_str:
templateVars["checked"].extend(tags_str.split(":"))
return render_template("references.html", **templateVars)
@app.route('/biblio/author=<string:auth>', methods=['GET'])
@login_required
def get_biblio_author(auth):
"""Return bibliography corresponding to given author."""
auth = "%" + auth + "%"
bibdat = convert_rows_to_dict(
db.session.query(BiblioEntry).\
filter(BiblioEntry.authors.like(auth)).\
all()
)
years = [str(value.year)
for value in db.session.query(BiblioEntry.year).distinct()]
tags = [str(value.tag)
for value in db.session.query(BiblioEntry.tag).distinct()]
templateVars = format_bibdatabase(bibdat)
years.sort()
templateVars["years"] = years[::-1]
templateVars["tags"] = tags
return render_template("references.html", **templateVars)
@app.route('/hal/<string:keywords>', methods=['GET'])
@login_required
def render_hal_biblio(keywords):
"""Send a query to HAL API and display returned bibtex entries."""
biblio = requests.get(HAL_QUERY_API.format(keywords)).text
parser = BibTexParser()
parser.customization = convert_to_unicode
bib_database = bibtexparser.loads(biblio, parser=parser)
bib_database.entries.sort(key=lambda x: x['year'], reverse=True)
templateVars = format_bibdatabase(bib_database.entries)
return render_template("hal.html", **templateVars)
def format_bibdatabase(bib_database, year_filter=None,
type_filter=None, type_author=None):
"""Format bibtex database and apply specified filters.
Parameters:
- bib_database (bibtexparser.BibtexDatabase)
Store all bibtex entries in a list
    - year_filter (str) - Default is None
If specified, filter entries by year
- type_filter (str) - Default is None
If specified, filter entries by types
(phdthesis, book, inproceedings, article)
- type_author (str) - Default is None
If specified, filter entries by author
"""
form = ExtendedSearchForm()
bibform = BiblioForm()
templateVars = {
"license_info": "Distributed under GNU license.",
"title": "References",
"form": form,
"bibform": bibform,
"engine": "Powered by Flask",
"years": [],
"references": [],
"authors": [],
"checked": [],
"nentries": 0,
"types": ["book", "article", "phdthesis",
"inproceedings", "misc", "techreport"]
}
base = defaultdict(list)
# TODO: clean application of different filters:
for bib in bib_database:
# Preprocess different type of entries:
if bib['ENTRYTYPE'] == "book":
bib['origin'] = bib.get('publisher', '').replace('\&', '&')
elif bib['ENTRYTYPE'] == "article":
bib['origin'] = bib.get('journal', '')
elif bib['ENTRYTYPE'] == "phdthesis":
bib['origin'] = "PhD Thesis, " + bib.get('school', '')
elif bib['ENTRYTYPE'] == "inproceedings":
bib['origin'] = bib.get('booktitle', '')
base[bib['year']].append(bib)
# process authors:
try:
authors = bib["authors"]
except:
authors = bib["author"]
bib["authors"] = bib["author"]
for auth in authors.split("and "):
name = auth.split(", ")[0]
if name not in templateVars["authors"]:
templateVars["authors"].append(name)
# process keywords:
if bib.get("keyword"):
bib["keywords"] = bib.get("keyword", "").split(";")
elif bib.get("keywords"):
keywords = bib.get("keywords")
keywords = keywords.replace(",", ";")
bib["keywords"] = keywords.split(";")
refsbyyear = []
for year in base.keys():
refsbyyear.append((year, base[year]))
refsbyyear.sort(key=lambda x: x[0], reverse=True)
# Update dictionnary to send to jinja template:
templateVars["references"] = refsbyyear
return templateVars
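# Illustrative sketch (editor addition): within a Flask request context (the
# WTForms constructors above need one), the helper can be exercised directly on
# parsed bibtex entries, e.g.
#
#   entries = [{'ID': 'knuth84', 'ENTRYTYPE': 'book', 'year': '1984',
#               'author': 'Knuth, Donald', 'title': 'The TeXbook',
#               'publisher': 'Addison-Wesley'}]
#   ctx = format_bibdatabase(entries)
#   ctx['references']   # -> [('1984', [<enriched entry dict>])]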
def convert_rows_to_dict(rows):
return [row.__dict__ for row in rows]
# upload file:
# (code taken from official flask documentation)
def allowed_file(filename):
return '.' in filename and \
filename.rsplit('.', 1)[1] in ALLOWED_EXTENSIONS
@lm.user_loader
def load_user(id):
return User.query.get(int(id))
| frapac/bibtex-browser | apps/views/views.py | Python | mit | 15,666 |
# -*- coding: utf-8 -*-
"""QGIS Unit tests for the postgres provider.
Note: to prepare the DB, you need to run the sql files specified in
tests/testdata/provider/testdata_pg.sh
Read tests/README.md about writing/launching tests with PostgreSQL.
Run with ctest -V -R PyQgsPostgresProvider
.. note:: This program is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2 of the License, or
(at your option) any later version.
"""
from builtins import next
__author__ = 'Matthias Kuhn'
__date__ = '2015-04-23'
__copyright__ = 'Copyright 2015, The QGIS Project'
import qgis # NOQA
import psycopg2
import os
import time
from datetime import datetime
from qgis.core import (
QgsVectorLayer,
QgsVectorLayerExporter,
QgsFeatureRequest,
QgsFeatureSource,
QgsFeature,
QgsFieldConstraints,
QgsDataProvider,
NULL,
QgsVectorLayerUtils,
QgsSettings,
QgsTransactionGroup,
QgsReadWriteContext,
QgsRectangle,
QgsDefaultValue,
QgsCoordinateReferenceSystem,
QgsProject,
QgsWkbTypes,
QgsGeometry,
QgsProviderRegistry,
QgsVectorDataProvider,
QgsDataSourceUri,
QgsProviderConnectionException,
)
from qgis.gui import QgsGui, QgsAttributeForm
from qgis.PyQt.QtCore import QDate, QTime, QDateTime, QVariant, QDir, QObject, QByteArray, QTemporaryDir
from qgis.PyQt.QtWidgets import QLabel
from qgis.testing import start_app, unittest
from qgis.PyQt.QtXml import QDomDocument
from utilities import unitTestDataPath, compareWkt
from providertestbase import ProviderTestCase
QGISAPP = start_app()
TEST_DATA_DIR = unitTestDataPath()
class TestPyQgsPostgresProvider(unittest.TestCase, ProviderTestCase):
@classmethod
def setUpClass(cls):
"""Run before all tests"""
cls.dbconn = 'service=qgis_test'
if 'QGIS_PGTEST_DB' in os.environ:
cls.dbconn = os.environ['QGIS_PGTEST_DB']
# Create test layers
cls.vl = QgsVectorLayer(
cls.dbconn +
' sslmode=disable key=\'pk\' srid=4326 type=POINT table="qgis_test"."someData" (geom) sql=',
'test', 'postgres')
assert cls.vl.isValid()
cls.source = cls.vl.dataProvider()
cls.poly_vl = QgsVectorLayer(
cls.dbconn +
' sslmode=disable key=\'pk\' srid=4326 type=POLYGON table="qgis_test"."some_poly_data" (geom) sql=',
'test', 'postgres')
assert cls.poly_vl.isValid()
cls.poly_provider = cls.poly_vl.dataProvider()
QgsGui.editorWidgetRegistry().initEditors()
cls.con = psycopg2.connect(cls.dbconn)
@classmethod
def tearDownClass(cls):
"""Run after all tests"""
def execSQLCommand(self, sql):
self.assertTrue(self.con)
cur = self.con.cursor()
self.assertTrue(cur)
cur.execute(sql)
cur.close()
self.con.commit()
def getSource(self):
# create temporary table for edit tests
self.execSQLCommand(
'DROP TABLE IF EXISTS qgis_test."editData" CASCADE')
self.execSQLCommand(
'CREATE TABLE qgis_test."editData" ( pk SERIAL NOT NULL PRIMARY KEY, cnt integer, name text, name2 text, num_char text, dt timestamp without time zone, "date" date, "time" time without time zone, geom public.geometry(Point, 4326))')
self.execSQLCommand("INSERT INTO qgis_test.\"editData\" (pk, cnt, name, name2, num_char, dt, \"date\", \"time\", geom) VALUES "
"(5, -200, NULL, 'NuLl', '5', TIMESTAMP '2020-05-04 12:13:14', '2020-05-02', '12:13:01', '0101000020E61000001D5A643BDFC751C01F85EB51B88E5340'),"
"(3, 300, 'Pear', 'PEaR', '3', NULL, NULL, NULL, NULL),"
"(1, 100, 'Orange', 'oranGe', '1', TIMESTAMP '2020-05-03 12:13:14', '2020-05-03', '12:13:14', '0101000020E61000006891ED7C3F9551C085EB51B81E955040'),"
"(2, 200, 'Apple', 'Apple', '2', TIMESTAMP '2020-05-04 12:14:14', '2020-05-04', '12:14:14', '0101000020E6100000CDCCCCCCCC0C51C03333333333B35140'),"
"(4, 400, 'Honey', 'Honey', '4', TIMESTAMP '2021-05-04 13:13:14', '2021-05-04', '13:13:14', '0101000020E610000014AE47E17A5450C03333333333935340')")
vl = QgsVectorLayer(
self.dbconn +
' sslmode=disable key=\'pk\' srid=4326 type=POINT table="qgis_test"."editData" (geom) sql=',
'test', 'postgres')
return vl
def getEditableLayer(self):
return self.getSource()
def getEditableLayerWithCheckConstraint(self):
"""Returns the layer for attribute change CHECK constraint violation"""
return QgsVectorLayer(self.dbconn + ' sslmode=disable key=\'id\' srid=4326 type=POINT table="public"."test_check_constraint" (geom) sql=', 'test_check_constraint', 'postgres')
def enableCompiler(self):
QgsSettings().setValue('/qgis/compileExpressions', True)
return True
def disableCompiler(self):
QgsSettings().setValue('/qgis/compileExpressions', False)
def uncompiledFilters(self):
return set(['"dt" = to_datetime(\'000www14ww13ww12www4ww5ww2020\',\'zzzwwwsswwmmwwhhwwwdwwMwwyyyy\')',
'"date" = to_date(\'www4ww5ww2020\',\'wwwdwwMwwyyyy\')',
'"time" = to_time(\'000www14ww13ww12www\',\'zzzwwwsswwmmwwhhwww\')'])
def partiallyCompiledFilters(self):
return set([])
def getGeneratedColumnsData(self):
"""
return a tuple with the generated column test layer and the expected generated value
"""
cur = self.con.cursor()
cur.execute("SHOW server_version_num")
pgversion = int(cur.fetchone()[0])
        # don't trigger this test on PostgreSQL versions earlier than 12.
if pgversion < 120000:
return (None, None)
else:
return (QgsVectorLayer(self.dbconn + ' sslmode=disable table="qgis_test"."generated_columns"', 'test', 'postgres'),
"""('test:'::text || ((pk)::character varying)::text)""")
# HERE GO THE PROVIDER SPECIFIC TESTS
def testDefaultValue(self):
self.source.setProviderProperty(
QgsDataProvider.EvaluateDefaultValues, True)
self.assertIsInstance(self.source.defaultValue(0), int)
self.assertEqual(self.source.defaultValue(1), NULL)
self.assertEqual(self.source.defaultValue(2), 'qgis')
self.source.setProviderProperty(
QgsDataProvider.EvaluateDefaultValues, False)
def testDefaultValueClause(self):
self.source.setProviderProperty(
QgsDataProvider.EvaluateDefaultValues, False)
self.assertEqual(self.source.defaultValueClause(
0), 'nextval(\'qgis_test."someData_pk_seq"\'::regclass)')
self.assertFalse(self.source.defaultValueClause(1))
self.assertEqual(self.source.defaultValueClause(2), '\'qgis\'::text')
def testDateTimeTypes(self):
vl = QgsVectorLayer('%s table="qgis_test"."date_times" sql=' % (
self.dbconn), "testdatetimes", "postgres")
self.assertTrue(vl.isValid())
fields = vl.dataProvider().fields()
self.assertEqual(fields.at(fields.indexFromName(
'date_field')).type(), QVariant.Date)
self.assertEqual(fields.at(fields.indexFromName(
'time_field')).type(), QVariant.Time)
self.assertEqual(fields.at(fields.indexFromName(
'datetime_field')).type(), QVariant.DateTime)
f = next(vl.getFeatures(QgsFeatureRequest()))
date_idx = vl.fields().lookupField('date_field')
self.assertIsInstance(f.attributes()[date_idx], QDate)
self.assertEqual(f.attributes()[date_idx], QDate(2004, 3, 4))
time_idx = vl.fields().lookupField('time_field')
self.assertIsInstance(f.attributes()[time_idx], QTime)
self.assertEqual(f.attributes()[time_idx], QTime(13, 41, 52))
datetime_idx = vl.fields().lookupField('datetime_field')
self.assertIsInstance(f.attributes()[datetime_idx], QDateTime)
self.assertEqual(f.attributes()[datetime_idx], QDateTime(
QDate(2004, 3, 4), QTime(13, 41, 52)))
def testBooleanType(self):
vl = QgsVectorLayer('{} table="qgis_test"."boolean_table" sql='.format(
self.dbconn), "testbool", "postgres")
self.assertTrue(vl.isValid())
fields = vl.dataProvider().fields()
self.assertEqual(
fields.at(fields.indexFromName('fld1')).type(), QVariant.Bool)
values = {feat['id']: feat['fld1'] for feat in vl.getFeatures()}
expected = {
1: True,
2: False,
3: NULL
}
self.assertEqual(values, expected)
def testByteaType(self):
vl = QgsVectorLayer('{} table="qgis_test"."byte_a_table" sql='.format(
self.dbconn), "testbytea", "postgres")
self.assertTrue(vl.isValid())
fields = vl.dataProvider().fields()
self.assertEqual(fields.at(fields.indexFromName(
'fld1')).type(), QVariant.ByteArray)
values = {feat['id']: feat['fld1'] for feat in vl.getFeatures()}
expected = {
1: QByteArray(b'YmludmFsdWU='),
2: QByteArray()
}
self.assertEqual(values, expected)
# editing binary values
self.execSQLCommand(
'DROP TABLE IF EXISTS qgis_test."byte_a_table_edit" CASCADE')
self.execSQLCommand(
'CREATE TABLE qgis_test."byte_a_table_edit" ( pk SERIAL NOT NULL PRIMARY KEY, blobby bytea)')
self.execSQLCommand("INSERT INTO qgis_test.\"byte_a_table_edit\" (pk, blobby) VALUES "
"(1, encode('bbb', 'base64')::bytea)")
vl = QgsVectorLayer(
self.dbconn + ' sslmode=disable table="qgis_test"."byte_a_table_edit" sql=',
'test', 'postgres')
self.assertTrue(vl.isValid())
values = {feat['pk']: feat['blobby'] for feat in vl.getFeatures()}
expected = {
1: QByteArray(b'YmJi')
}
self.assertEqual(values, expected)
# change attribute value
self.assertTrue(vl.dataProvider().changeAttributeValues(
{1: {1: QByteArray(b'bbbvx')}}))
values = {feat['pk']: feat['blobby'] for feat in vl.getFeatures()}
expected = {
1: QByteArray(b'bbbvx')
}
self.assertEqual(values, expected)
# add feature
f = QgsFeature()
f.setAttributes([2, QByteArray(b'cccc')])
self.assertTrue(vl.dataProvider().addFeature(f))
values = {feat['pk']: feat['blobby'] for feat in vl.getFeatures()}
expected = {
1: QByteArray(b'bbbvx'),
2: QByteArray(b'cccc')
}
self.assertEqual(values, expected)
# change feature
self.assertTrue(vl.dataProvider().changeFeatures(
{2: {1: QByteArray(b'dddd')}}, {}))
values = {feat['pk']: feat['blobby'] for feat in vl.getFeatures()}
expected = {
1: QByteArray(b'bbbvx'),
2: QByteArray(b'dddd')
}
self.assertEqual(values, expected)
def testCitextType(self):
vl = QgsVectorLayer('{} table="qgis_test"."citext_table" sql='.format(
self.dbconn), "testbytea", "postgres")
self.assertTrue(vl.isValid())
fields = vl.dataProvider().fields()
self.assertEqual(
fields.at(fields.indexFromName('fld1')).type(), QVariant.String)
values = {feat['id']: feat['fld1'] for feat in vl.getFeatures()}
expected = {
1: 'test val',
2: NULL
}
self.assertEqual(values, expected)
# editing citext values
self.execSQLCommand(
'DROP TABLE IF EXISTS qgis_test."citext_table_edit" CASCADE')
self.execSQLCommand(
'CREATE TABLE qgis_test."citext_table_edit" ( pk SERIAL NOT NULL PRIMARY KEY, txt citext)')
self.execSQLCommand("INSERT INTO qgis_test.\"citext_table_edit\" (pk, txt) VALUES "
"(1, 'text')")
vl = QgsVectorLayer(
self.dbconn + ' sslmode=disable table="qgis_test"."citext_table_edit" sql=',
'test', 'postgres')
self.assertTrue(vl.isValid())
values = {feat['pk']: feat['txt'] for feat in vl.getFeatures()}
expected = {
1: 'text'
}
self.assertEqual(values, expected)
# change attribute value
self.assertTrue(
vl.dataProvider().changeAttributeValues({1: {1: 'teeeext'}}))
values = {feat['pk']: feat['txt'] for feat in vl.getFeatures()}
expected = {
1: 'teeeext'
}
self.assertEqual(values, expected)
# add feature
f = QgsFeature()
f.setAttributes([2, 'teeeeeeeeeext'])
self.assertTrue(vl.dataProvider().addFeature(f))
values = {feat['pk']: feat['txt'] for feat in vl.getFeatures()}
expected = {
1: 'teeeext',
2: 'teeeeeeeeeext'
}
self.assertEqual(values, expected)
# change feature
self.assertTrue(vl.dataProvider().changeFeatures(
{2: {1: 'teeeeeeeeeeeeeeeeeeeeeeext'}}, {}))
values = {feat['pk']: feat['txt'] for feat in vl.getFeatures()}
expected = {
1: 'teeeext',
2: 'teeeeeeeeeeeeeeeeeeeeeeext'
}
self.assertEqual(values, expected)
def testQueryLayers(self):
def test_query(dbconn, query, key):
ql = QgsVectorLayer(
'%s srid=4326 table="%s" (geom) key=\'%s\' sql=' % (
dbconn, query.replace('"', '\\"'), key), "testgeom",
"postgres")
self.assertTrue(ql.isValid(), '{} ({})'.format(query, key))
test_query(self.dbconn,
'(SELECT NULL::integer "Id1", NULL::integer "Id2", NULL::geometry(Point, 4326) geom LIMIT 0)',
'"Id1","Id2"')
def testWkbTypes(self):
def test_table(dbconn, table_name, wkt):
vl = QgsVectorLayer('%s srid=4326 table="qgis_test".%s (geom) sql=' % (dbconn, table_name), "testgeom",
"postgres")
self.assertTrue(vl.isValid())
for f in vl.getFeatures():
self.assertEqual(f.geometry().asWkt(), wkt)
test_table(self.dbconn, 'p2d', 'Polygon ((0 0, 1 0, 1 1, 0 1, 0 0))')
test_table(self.dbconn, 'p3d',
'PolygonZ ((0 0 0, 1 0 0, 1 1 0, 0 1 0, 0 0 0))')
test_table(self.dbconn, 'triangle2d', 'Polygon ((0 0, 1 0, 1 1, 0 0))')
test_table(self.dbconn, 'triangle3d',
'PolygonZ ((0 0 0, 1 0 0, 1 1 0, 0 0 0))')
test_table(self.dbconn, 'tin2d',
'MultiPolygon (((0 0, 1 0, 1 1, 0 0)),((0 0, 0 1, 1 1, 0 0)))')
test_table(self.dbconn, 'tin3d',
'MultiPolygonZ (((0 0 0, 1 0 0, 1 1 0, 0 0 0)),((0 0 0, 0 1 0, 1 1 0, 0 0 0)))')
test_table(self.dbconn, 'ps2d',
'MultiPolygon (((0 0, 1 0, 1 1, 0 1, 0 0)))')
test_table(self.dbconn, 'ps3d',
'MultiPolygonZ (((0 0 0, 0 1 0, 1 1 0, 1 0 0, 0 0 0)),((0 0 1, 1 0 1, 1 1 1, 0 1 1, 0 0 1)),((0 0 0, 0 0 1, 0 1 1, 0 1 0, 0 0 0)),((0 1 0, 0 1 1, 1 1 1, 1 1 0, 0 1 0)),((1 1 0, 1 1 1, 1 0 1, 1 0 0, 1 1 0)),((1 0 0, 1 0 1, 0 0 1, 0 0 0, 1 0 0)))')
test_table(self.dbconn, 'mp3d',
'MultiPolygonZ (((0 0 0, 0 1 0, 1 1 0, 1 0 0, 0 0 0)),((0 0 1, 1 0 1, 1 1 1, 0 1 1, 0 0 1)),((0 0 0, 0 0 1, 0 1 1, 0 1 0, 0 0 0)),((0 1 0, 0 1 1, 1 1 1, 1 1 0, 0 1 0)),((1 1 0, 1 1 1, 1 0 1, 1 0 0, 1 1 0)),((1 0 0, 1 0 1, 0 0 1, 0 0 0, 1 0 0)))')
test_table(self.dbconn, 'pt2d', 'Point (0 0)')
test_table(self.dbconn, 'pt3d', 'PointZ (0 0 0)')
test_table(self.dbconn, 'ls2d', 'LineString (0 0, 1 1)')
test_table(self.dbconn, 'ls3d', 'LineStringZ (0 0 0, 1 1 1)')
test_table(self.dbconn, 'mpt2d', 'MultiPoint ((0 0),(1 1))')
test_table(self.dbconn, 'mpt3d', 'MultiPointZ ((0 0 0),(1 1 1))')
test_table(self.dbconn, 'mls2d',
'MultiLineString ((0 0, 1 1),(2 2, 3 3))')
test_table(self.dbconn, 'mls3d',
'MultiLineStringZ ((0 0 0, 1 1 1),(2 2 2, 3 3 3))')
test_table(self.dbconn, 'pt4d', 'PointZM (1 2 3 4)')
def testMetadata(self):
""" Test that metadata is correctly acquired from provider """
metadata = self.vl.metadata()
self.assertEqual(
metadata.crs(), QgsCoordinateReferenceSystem.fromEpsgId(4326))
self.assertEqual(metadata.type(), 'dataset')
self.assertEqual(metadata.abstract(), 'QGIS Test Table')
def testGetFeaturesUniqueId(self):
"""
Test tables with inheritance for unique ids
"""
def test_unique(features, num_features):
featureids = []
for f in features:
self.assertFalse(f.id() in featureids)
featureids.append(f.id())
self.assertEqual(len(features), num_features)
vl = QgsVectorLayer('%s srid=4326 table="qgis_test".%s (geom) sql=' % (self.dbconn, 'someData'), "testgeom",
"postgres")
self.assertTrue(vl.isValid())
# Test someData
test_unique([f for f in vl.getFeatures()], 5)
# Test base_table_bad: layer is invalid
vl = QgsVectorLayer('%s srid=4326 table="qgis_test".%s (geom) sql=' % (self.dbconn, 'base_table_bad'),
"testgeom", "postgres")
self.assertFalse(vl.isValid())
# Test base_table_bad with estimated metadata: layer is valid because the uniqueness test is skipped
vl = QgsVectorLayer(
'%s srid=4326 estimatedmetadata="true" table="qgis_test".%s (geom) sql=' % (
self.dbconn, 'base_table_bad'),
"testgeom", "postgres")
self.assertTrue(vl.isValid())
# Test base_table_good: layer is valid
vl = QgsVectorLayer('%s srid=4326 table="qgis_test".%s (geom) sql=' % (self.dbconn, 'base_table_good'),
"testgeom", "postgres")
self.assertTrue(vl.isValid())
test_unique([f for f in vl.getFeatures()], 4)
# Test base_table_good with estimated metadata: layer is valid
vl = QgsVectorLayer(
'%s srid=4326 estimatedmetadata="true" table="qgis_test".%s (geom) sql=' % (
self.dbconn, 'base_table_good'),
"testgeom", "postgres")
self.assertTrue(vl.isValid())
test_unique([f for f in vl.getFeatures()], 4)
# See https://github.com/qgis/QGIS/issues/22258
# TODO: accept multi-featured layers, and an array of values/fids
def testSignedIdentifiers(self):
def test_layer(ql, att, val, fidval):
self.assertTrue(ql.isValid())
features = ql.getFeatures()
att_idx = ql.fields().lookupField(att)
count = 0
for f in features:
count += 1
self.assertEqual(f.attributes()[att_idx], val)
self.assertEqual(f.id(), fidval)
self.assertEqual(count, 1)
def test(dbconn, query, att, val, fidval):
table = query.replace('"', '\\"')
uri = '%s table="%s" (g) key=\'%s\'' % (dbconn, table, att)
ql = QgsVectorLayer(uri, "t", "postgres")
test_layer(ql, att, val, fidval)
# now with estimated metadata
uri += ' estimatedmetadata="true"'
test_layer(ql, att, val, fidval)
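# For int2/int4 keys the key value is mapped directly onto the feature id,
# so negative keys appear as their unsigned 32-bit reinterpretation
# (e.g. -1 -> 4294967295); the expected fid values below encode exactly that.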
# --- INT16 ----
# zero
test(self.dbconn, '(SELECT 0::int2 i, NULL::geometry(Point) g)', 'i', 0, 0)
# low positive
test(self.dbconn, '(SELECT 1::int2 i, NULL::geometry(Point) g)', 'i', 1, 1)
# low negative
test(self.dbconn, '(SELECT -1::int2 i, NULL::geometry(Point) g)',
'i', -1, 4294967295)
# max positive signed 16bit integer
test(self.dbconn, '(SELECT 32767::int2 i, NULL::geometry(Point) g)',
'i', 32767, 32767)
# max negative signed 16bit integer
test(self.dbconn, '(SELECT (-32768)::int2 i, NULL::geometry(Point) g)',
'i', -32768, 4294934528)
# --- INT32 ----
# zero
test(self.dbconn, '(SELECT 0::int4 i, NULL::geometry(Point) g)', 'i', 0, 0)
# low positive
test(self.dbconn, '(SELECT 2::int4 i, NULL::geometry(Point) g)', 'i', 2, 2)
# low negative
test(self.dbconn, '(SELECT -2::int4 i, NULL::geometry(Point) g)',
'i', -2, 4294967294)
# max positive signed 32bit integer
test(self.dbconn, '(SELECT 2147483647::int4 i, NULL::geometry(Point) g)',
'i', 2147483647, 2147483647)
# max negative signed 32bit integer
test(self.dbconn, '(SELECT (-2147483648)::int4 i, NULL::geometry(Point) g)',
'i', -2147483648, 2147483648)
# --- INT64 (FIDs are always 1 because assigned ex-novo) ----
# zero
test(self.dbconn, '(SELECT 0::int8 i, NULL::geometry(Point) g)', 'i', 0, 1)
# low positive
test(self.dbconn, '(SELECT 3::int8 i, NULL::geometry(Point) g)', 'i', 3, 1)
# low negative
test(self.dbconn, '(SELECT -3::int8 i, NULL::geometry(Point) g)', 'i', -3, 1)
# max positive signed 64bit integer
test(self.dbconn, '(SELECT 9223372036854775807::int8 i, NULL::geometry(Point) g)',
'i', 9223372036854775807, 1)
# max negative signed 64bit integer
test(self.dbconn, '(SELECT (-9223372036854775808)::int8 i, NULL::geometry(Point) g)', 'i', -9223372036854775808,
1)
def testPktIntInsert(self):
vl = QgsVectorLayer('{} table="qgis_test"."{}" key="pk" sql='.format(self.dbconn, 'bikes_view'), "bikes_view",
"postgres")
self.assertTrue(vl.isValid())
f = QgsFeature(vl.fields())
f['pk'] = NULL
f['name'] = 'Cilo'
r, f = vl.dataProvider().addFeatures([f])
self.assertTrue(r)
self.assertNotEqual(f[0]['pk'], NULL, f[0].attributes())
vl.deleteFeatures([f[0].id()])
def testGeneratedFields(self):
"""Test if GENERATED geometry/geography columns are correctly handled by the provider."""
cur = self.con.cursor()
cur.execute("SHOW server_version_num")
pgversion = int(cur.fetchone()[0])
# GENERATED columns are unsupported by PostgreSQL versions earlier than 12.
if pgversion < 120000:
return
# Geometry columns
vl = QgsVectorLayer('{} table="qgis_test"."{}" (geom) srid=4326 type=POLYGON key="id" sql='.format(self.dbconn, "test_gen_col"), "test_gen_col", "postgres")
self.assertTrue(vl.isValid())
# writing geometry...
f = QgsFeature(vl.fields())
ix_name = f.fieldNameIndex('name')
f.setGeometry(QgsGeometry.fromWkt('Polygon ((-67 -2, -67 0, -68 0, -70 -1, -67 -2))'))
f.setAttribute(ix_name, 'QGIS-3')
self.assertTrue(vl.startEditing())
self.assertTrue(vl.addFeatures([f]))
self.assertTrue(vl.commitChanges())
# reading back to see if we saved the centroid correctly.
vl2 = QgsVectorLayer('{} table="qgis_test"."{}" (cent) srid=4326 type=POINT key="id" sql='.format(self.dbconn, "test_gen_col"), "test_gen_col", "postgres")
f2 = next(vl2.getFeatures(QgsFeatureRequest()))
generated_geometry = f2.geometry().asWkt()
expected_geometry = 'Point (-68.047619047619051 -0.90476190476190477)'
expected_area = 43069568296.34387
assert compareWkt(generated_geometry, expected_geometry), "Geometry mismatch! Expected:\n{}\nGot:\n{}\n".format(expected_geometry, generated_geometry)
self.assertAlmostEqual(f2['poly_area'], expected_area, places=4)
self.assertEqual(f2['name'], 'QGIS-3')
# Checking if we can correctly change values of an existing feature.
self.assertTrue(vl2.startEditing())
ix2_name = f2.fieldNameIndex('name')
fid2 = f2.id()
vl2.changeAttributeValue(fid2, ix2_name, 'New')
self.assertTrue(vl2.commitChanges())
# getting a brand new QgsVectorLayer
vl = QgsVectorLayer('{} table="qgis_test"."{}" (geom) srid=4326 type=POLYGON key="id" sql='.format(self.dbconn, "test_gen_col"), "test_gen_col", "postgres")
self.assertTrue(vl.isValid())
# checking if the name field was correctly updated
f = next(vl.getFeatures(QgsFeatureRequest()))
self.assertEqual(f['name'], 'New')
# Now, check if we can change the value of a GENERATED field (we shouldn't)
self.assertTrue(vl.startEditing())
ix_area = f.fieldNameIndex('poly_area')
fid = f.id()
vl.changeAttributeValue(fid, ix_area, 42)
self.assertTrue(vl.commitChanges())
# reading back
vl2 = QgsVectorLayer('{} table="qgis_test"."{}" (geom) srid=4326 type=POLYGON key="id" sql='.format(self.dbconn, "test_gen_col"), "test_gen_col", "postgres")
f2 = next(vl2.getFeatures(QgsFeatureRequest()))
self.assertAlmostEqual(f2['poly_area'], expected_area, places=4)
# now, getting a brand new QgsVectorLayer to check if changes (UPDATE) in the geometry are reflected in the generated fields
vl = QgsVectorLayer('{} table="qgis_test"."{}" (geom) srid=4326 type=POLYGON key="id" sql='.format(self.dbconn, "test_gen_col"), "test_gen_col", "postgres")
self.assertTrue(vl.isValid())
f = next(vl.getFeatures(QgsFeatureRequest()))
vl.startEditing()
fid = f.id()
vl.changeGeometry(fid, QgsGeometry.fromWkt('Polygon ((-67 -2, -65 0, -68 0, -70 -1, -67 -2))'))
vl.commitChanges()
# reading back...
vl2 = QgsVectorLayer('{} table="qgis_test"."{}" (cent) srid=4326 type=POINT key="id" sql='.format(self.dbconn, "test_gen_col"), "test_gen_col", "postgres")
f2 = next(vl2.getFeatures(QgsFeatureRequest()))
generated_geometry = f2.geometry().asWkt()
expected_geometry = 'Point (-67.42424242424242209 -0.81818181818181823)'
expected_area = 67718478405.28429
assert compareWkt(generated_geometry, expected_geometry), "Geometry mismatch! Expected:\n{}\nGot:\n{}\n".format(expected_geometry, generated_geometry)
self.assertAlmostEqual(f2['poly_area'], expected_area, places=4)
self.assertEqual(f2['name'], 'New')
# Geography columns
vl3 = QgsVectorLayer('{} table="qgis_test"."{}" (geog) srid=4326 type=POLYGON key="id" sql='.format(self.dbconn, "test_gen_geog_col"), "test_gen_geog_col", "postgres")
self.assertTrue(vl3.isValid())
# writing geography...
f3 = QgsFeature(vl3.fields())
f3.setGeometry(QgsGeometry.fromWkt('Polygon ((-67 -2, -67 0, -68 0, -70 -1, -67 -2))'))
self.assertTrue(vl3.startEditing())
self.assertTrue(vl3.addFeatures([f3]))
self.assertTrue(vl3.commitChanges())
# reading back geography and checking values
vl4 = QgsVectorLayer('{} table="qgis_test"."{}" (cent) srid=4326 type=POINT key="id" sql='.format(self.dbconn, "test_gen_geog_col"), "test_gen_geog_col", "postgres")
f4 = next(vl4.getFeatures(QgsFeatureRequest()))
generated_geometry = f4.geometry().asWkt()
expected_geometry = 'Point (-68.0477406158202 -0.904960604589168)'
expected_area = 43088884296.69713
assert compareWkt(generated_geometry, expected_geometry), "Geometry mismatch! Expected:\n{}\nGot:\n{}\n".format(expected_geometry, generated_geometry)
self.assertEqual(f4['poly_area'], expected_area)
def testNonPkBigintField(self):
"""Test if we can correctly insert, read and change attributes(fields) of type bigint and which are not PKs."""
vl = QgsVectorLayer(
'{} sslmode=disable srid=4326 key="pk" table="qgis_test".{} (geom)'.format(
self.dbconn, 'bigint_pk'),
"bigint_pk", "postgres")
self.assertTrue(vl.isValid())
flds = vl.fields()
# check if default values are correctly read back
f = next(vl.getFeatures(QgsFeatureRequest()))
bigint_with_default_idx = vl.fields().lookupField('bigint_attribute_def')
self.assertEqual(f.attributes()[bigint_with_default_idx], 42)
# check if NULL values are correctly read
bigint_def_null_idx = vl.fields().lookupField('bigint_attribute')
self.assertEqual(f.attributes()[bigint_def_null_idx], NULL)
# check if we can overwrite a default value
vl.startEditing()
vl.changeAttributeValue(f.id(), bigint_with_default_idx, 43)
pkidx = vl.fields().lookupField('pk')
editedid = f.attributes()[pkidx]
self.assertTrue(vl.commitChanges())
vl2 = QgsVectorLayer(
'{} sslmode=disable srid=4326 key="pk" table="qgis_test".{} (geom)'.format(
self.dbconn, 'bigint_pk'),
"bigint_pk", "postgres")
flds = vl2.fields()
self.assertTrue(vl2.isValid())
f = next(vl2.getFeatures(
QgsFeatureRequest().setFilterExpression('pk = ' + str(editedid))))
bigint_with_default_idx = vl2.fields().lookupField('bigint_attribute_def')
self.assertEqual(f.attributes()[bigint_with_default_idx], 43)
# check if we can insert a new value
dp = vl2.dataProvider()
dp.setProviderProperty(QgsDataProvider.EvaluateDefaultValues, 1)
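# with EvaluateDefaultValues switched on, defaultValue() is expected to hand
# back an already evaluated value (e.g. the next sequence number) that can be
# assigned to the new feature directly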
pkidx = vl2.fields().lookupField('pk')
vl2.startEditing()
f = QgsFeature(vl2.fields())
f['pk'] = NULL
f['value'] = 'The answer.'
f['bigint_attribute'] = 84
f.setAttribute(pkidx, vl2.dataProvider().defaultValue(pkidx))
f.setAttribute(bigint_with_default_idx,
vl2.dataProvider().defaultValue(bigint_with_default_idx))
r, f = vl2.dataProvider().addFeatures([f])
self.assertTrue(r)
vl2.commitChanges()
inserted_id = f[0]['pk']
f = next(vl2.getFeatures(
QgsFeatureRequest().setFilterExpression('pk = ' + str(inserted_id))))
self.assertEqual(f['bigint_attribute'], 84)
self.assertEqual(f['bigint_attribute_def'], 42)
def testPktUpdateBigintPk(self):
"""Test if we can update objects with positive, zero and negative bigint PKs."""
vl = QgsVectorLayer(
'{} sslmode=disable srid=4326 key="pk" table="qgis_test".{} (geom)'.format(
self.dbconn, 'bigint_pk'),
"bigint_pk", "postgres")
flds = vl.fields()
self.assertTrue(vl.isValid())
vl.startEditing()
statuses = [-1, -1, -1, -1]
# changing values...
for ft in vl.getFeatures():
if ft['value'] == 'first value':
vl.changeAttributeValue(
ft.id(), flds.indexOf('value'), '1st value')
statuses[0] = 0
elif ft['value'] == 'second value':
vl.changeAttributeValue(
ft.id(), flds.indexOf('value'), '2nd value')
statuses[1] = 0
elif ft['value'] == 'zero value':
vl.changeAttributeValue(
ft.id(), flds.indexOf('value'), '0th value')
statuses[2] = 0
elif ft['value'] == 'negative value':
vl.changeAttributeValue(
ft.id(), flds.indexOf('value'), '-1th value')
statuses[3] = 0
self.assertTrue(vl.commitChanges())
self.assertTrue(all(x == 0 for x in statuses))
# now, let's see if the values were changed
vl2 = QgsVectorLayer(
'{} sslmode=disable srid=4326 key="pk" table="qgis_test".{} (geom)'.format(
self.dbconn, 'bigint_pk'),
"bigint_pk", "postgres")
self.assertTrue(vl2.isValid())
for ft in vl2.getFeatures():
if ft['value'] == '1st value':
statuses[0] = 1
elif ft['value'] == '2nd value':
statuses[1] = 1
elif ft['value'] == '0th value':
statuses[2] = 1
elif ft['value'] == '-1th value':
statuses[3] = 1
self.assertTrue(all(x == 1 for x in statuses))
def testPktUpdateBigintPkNonFirst(self):
"""Test if we can update objects with positive, zero and negative bigint PKs in tables whose PK is not the first field"""
vl = QgsVectorLayer('{} sslmode=disable srid=4326 key="pk" table="qgis_test".{} (geom)'.format(self.dbconn,
'bigint_non_first_pk'),
"bigint_non_first_pk", "postgres")
flds = vl.fields()
self.assertTrue(vl.isValid())
vl.startEditing()
statuses = [-1, -1, -1, -1]
# changing values...
for ft in vl.getFeatures():
if ft['value'] == 'first value':
vl.changeAttributeValue(
ft.id(), flds.indexOf('value'), '1st value')
statuses[0] = 0
elif ft['value'] == 'second value':
vl.changeAttributeValue(
ft.id(), flds.indexOf('value'), '2nd value')
statuses[1] = 0
elif ft['value'] == 'zero value':
vl.changeAttributeValue(
ft.id(), flds.indexOf('value'), '0th value')
statuses[2] = 0
elif ft['value'] == 'negative value':
vl.changeAttributeValue(
ft.id(), flds.indexOf('value'), '-1th value')
statuses[3] = 0
self.assertTrue(vl.commitChanges())
self.assertTrue(all(x == 0 for x in statuses))
# now, let's see if the values were changed
vl2 = QgsVectorLayer(
'{} sslmode=disable srid=4326 key="pk" table="qgis_test".{} (geom)'.format(
self.dbconn, 'bigint_non_first_pk'),
"bigint_pk_nonfirst", "postgres")
self.assertTrue(vl2.isValid())
for ft in vl2.getFeatures():
if ft['value'] == '1st value':
statuses[0] = 1
elif ft['value'] == '2nd value':
statuses[1] = 1
elif ft['value'] == '0th value':
statuses[2] = 1
elif ft['value'] == '-1th value':
statuses[3] = 1
self.assertTrue(all(x == 1 for x in statuses))
def testPktComposite(self):
"""
Check that tables with PKs composed of many fields of different types are correctly read and written to
"""
vl = QgsVectorLayer('{} sslmode=disable srid=4326 key=\'"pk1","pk2"\' table="qgis_test"."tb_test_compound_pk" (geom)'.format(self.dbconn), "test_compound", "postgres")
self.assertTrue(vl.isValid())
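# A composite key cannot serve as the feature id itself; the provider is
# expected to keep an internal mapping between key tuples and feature ids,
# which is why the lookups below filter on the pk attributes instead of ids.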
fields = vl.fields()
f = next(vl.getFeatures(QgsFeatureRequest().setFilterExpression('pk1 = 1 AND pk2 = 2')))
# first of all: we must be able to fetch a valid feature
self.assertTrue(f.isValid())
self.assertEqual(f['pk1'], 1)
self.assertEqual(f['pk2'], 2)
self.assertEqual(f['value'], 'test 2')
# can we edit a field?
vl.startEditing()
vl.changeAttributeValue(f.id(), fields.indexOf('value'), 'Edited Test 2')
self.assertTrue(vl.commitChanges())
# Did we get it right? Let's create a new QgsVectorLayer and try to read back our changes:
vl2 = QgsVectorLayer('{} sslmode=disable srid=4326 table="qgis_test"."tb_test_compound_pk" (geom) key=\'"pk1","pk2"\' '.format(self.dbconn), "test_compound2", "postgres")
self.assertTrue(vl2.isValid())
f2 = next(vl2.getFeatures(QgsFeatureRequest().setFilterExpression('pk1 = 1 AND pk2 = 2')))
self.assertTrue(f2.isValid())
# Then, making sure we really did change our value.
self.assertEqual(f2['value'], 'Edited Test 2')
# How about inserting a new feature?
f3 = QgsFeature(vl2.fields())
f3['pk1'] = 4
f3['pk2'] = -9223372036854775800
f3['value'] = 'other test'
vl.startEditing()
res, f3 = vl.dataProvider().addFeatures([f3])
self.assertTrue(res)
self.assertTrue(vl.commitChanges())
# can we catch it on another layer?
f4 = next(vl2.getFeatures(QgsFeatureRequest().setFilterExpression('pk2 = -9223372036854775800')))
self.assertTrue(f4.isValid())
expected_attrs = [4, -9223372036854775800, 'other test']
self.assertEqual(f4.attributes(), expected_attrs)
# Finally, let's delete one of the features.
f5 = next(vl2.getFeatures(QgsFeatureRequest().setFilterExpression('pk1 = 2 AND pk2 = 1')))
vl2.startEditing()
vl2.deleteFeatures([f5.id()])
self.assertTrue(vl2.commitChanges())
# did we really delete? Let's try to get the deleted feature from the first layer.
f_iterator = vl.getFeatures(QgsFeatureRequest().setFilterExpression('pk1 = 2 AND pk2 = 1'))
got_feature = True
try:
f6 = next(f_iterator)
got_feature = f6.isValid()
except StopIteration:
got_feature = False
self.assertFalse(got_feature)
def testPktCompositeFloat(self):
"""
Check that tables with PKs composed of many fields of different types are correctly read and written to
"""
vl = QgsVectorLayer('{} sslmode=disable srid=4326 key=\'"pk1","pk2","pk3"\' table="qgis_test"."tb_test_composite_float_pk" (geom)'.format(self.dbconn), "test_composite_float", "postgres")
self.assertTrue(vl.isValid())
fields = vl.fields()
f = next(vl.getFeatures(QgsFeatureRequest().setFilterExpression("pk3 = '3.14159274'")))
# first of all: we must be able to fetch a valid feature
self.assertTrue(f.isValid())
self.assertEqual(f['pk1'], 1)
self.assertEqual(f['pk2'], 2)
self.assertAlmostEqual(f['pk3'], 3.14159274)
self.assertEqual(f['value'], 'test 2')
# can we edit a field?
vl.startEditing()
vl.changeAttributeValue(f.id(), fields.indexOf('value'), 'Edited Test 2')
self.assertTrue(vl.commitChanges())
# Did we get it right? Let's create a new QgsVectorLayer and try to read back our changes:
vl2 = QgsVectorLayer('{} sslmode=disable srid=4326 key=\'"pk1","pk2","pk3"\' table="qgis_test"."tb_test_composite_float_pk" (geom)'.format(self.dbconn), "test_composite_float2", "postgres")
self.assertTrue(vl2.isValid())
f2 = next(vl.getFeatures(QgsFeatureRequest().setFilterExpression("pk3 = '3.14159274'")))
self.assertTrue(f2.isValid())
# just making sure we have the correct feature
self.assertAlmostEqual(f2['pk3'], 3.14159274)
# Then, making sure we really did change our value.
self.assertEqual(f2['value'], 'Edited Test 2')
# How about inserting a new feature?
f3 = QgsFeature(vl2.fields())
f3['pk1'] = 4
f3['pk2'] = -9223372036854775800
f3['pk3'] = 7.29154
f3['value'] = 'other test'
vl.startEditing()
res, f3 = vl.dataProvider().addFeatures([f3])
self.assertTrue(res)
self.assertTrue(vl.commitChanges())
# can we catch it on another layer?
f4 = next(vl2.getFeatures(QgsFeatureRequest().setFilterExpression("pk2 = '-9223372036854775800'")))
self.assertTrue(f4.isValid())
expected_attrs = [4, -9223372036854775800, 7.29154, 'other test']
gotten_attrs = [f4['pk1'], f4['pk2'], f4['pk3'], f4['value']]
self.assertEqual(gotten_attrs[0], expected_attrs[0])
self.assertEqual(gotten_attrs[1], expected_attrs[1])
self.assertAlmostEqual(gotten_attrs[2], expected_attrs[2], places=4)
self.assertEqual(gotten_attrs[3], expected_attrs[3])
# Finally, let's delete one of the features.
f5 = next(vl2.getFeatures(QgsFeatureRequest().setFilterExpression("pk3 = '7.29154'")))
vl2.startEditing()
vl2.deleteFeatures([f5.id()])
self.assertTrue(vl2.commitChanges())
# did we really delete?
f_iterator = vl.getFeatures(QgsFeatureRequest().setFilterExpression("pk3 = '7.29154'"))
got_feature = True
try:
f6 = next(f_iterator)
got_feature = f6.isValid()
except StopIteration:
got_feature = False
self.assertFalse(got_feature)
def testPktFloatingPoint(self):
"""
Check if we can handle floating point/numeric primary keys correctly
"""
# 1. 32 bit float (PostgreSQL "REAL" type)
vl = QgsVectorLayer(self.dbconn + ' sslmode=disable srid=4326 key="pk" table="qgis_test"."tb_test_float_pk" (geom)', "test_float_pk", "postgres")
self.assertTrue(vl.isValid())
# 1.1. Retrieving
f = next(vl.getFeatures(QgsFeatureRequest().setFilterExpression("pk = '3.141592653589793238462643383279502884197169'")))
self.assertTrue(f.isValid())
self.assertEqual(f['value'], 'first teste')
# 1.2. Editing
self.assertTrue(vl.startEditing())
vl.changeAttributeValue(f.id(), vl.fields().indexOf('value'), 'Changed first')
self.assertTrue(vl.commitChanges())
# 1.2.1. Checking edit from another vector layer
vl2 = QgsVectorLayer(self.dbconn + ' sslmode=disable srid=4326 key="pk" table="qgis_test"."tb_test_float_pk" (geom)', "test_float_pk2", "postgres")
self.assertTrue(vl2.isValid())
f2 = next(vl2.getFeatures(QgsFeatureRequest().setFilterExpression("pk = '3.141592653589793238462643383279502884197169'")))
self.assertTrue(f2.isValid())
self.assertEqual(f2['value'], 'Changed first')
# 1.3. Deleting
f = next(vl.getFeatures(QgsFeatureRequest().setFilterExpression("pk = '2.718281828459045235360287471352662497757247'")))
vl.startEditing()
vl.deleteFeatures([f.id()])
self.assertTrue(vl.commitChanges())
# 1.3.1. Checking deletion
f_iterator = vl2.getFeatures(QgsFeatureRequest().setFilterExpression("pk = '2.718281828459045235360287471352662497757247'"))
got_feature = True
try:
f2 = next(f_iterator)
got_feature = f2.isValid()
except StopIteration:
got_feature = False
self.assertFalse(got_feature)
# 1.4. Inserting new feature
newpointwkt = 'Point(-47.751 -15.644)'
f = QgsFeature(vl.fields())
f['pk'] = 0.22222222222222222222222
f['value'] = 'newly inserted'
f.setGeometry(QgsGeometry.fromWkt(newpointwkt))
vl.startEditing()
res, f = vl.dataProvider().addFeatures([f])
self.assertTrue(res)
self.assertTrue(vl.commitChanges())
# 1.4.1. Checking insertion
f2 = next(vl2.getFeatures(QgsFeatureRequest().setFilterExpression("pk = '0.22222222222222222222222'")))
self.assertTrue(f2.isValid())
self.assertAlmostEqual(f2['pk'], 0.2222222222222222)
self.assertEqual(f2['value'], 'newly inserted')
assert compareWkt(f2.geometry().asWkt(), newpointwkt), "Geometry mismatch. Expected: {} Got: {} \n".format(f2.geometry().asWkt(), newpointwkt)
# One more check: can we retrieve the same row with the value that we got from this layer?
floatpk = f2['pk']
f3 = next(vl.getFeatures(QgsFeatureRequest().setFilterExpression("pk = '{}'".format(floatpk))))
self.assertTrue(f3.isValid())
self.assertEqual(f3['value'], 'newly inserted')
self.assertEqual(f3['pk'], floatpk)
# 2. 64 bit float (PostgreSQL "DOUBLE PRECISION" type)
vl = QgsVectorLayer(self.dbconn + ' sslmode=disable srid=4326 key="pk" table="qgis_test"."tb_test_double_pk" (geom)', "test_double_pk", "postgres")
self.assertTrue(vl.isValid())
# 2.1. Retrieving
f = next(vl.getFeatures(QgsFeatureRequest().setFilterExpression("pk = '3.141592653589793238462643383279502884197169'")))
self.assertTrue(f.isValid())
self.assertEqual(f['value'], 'first teste')
# 2.2. Editing
self.assertTrue(vl.startEditing())
vl.changeAttributeValue(f.id(), vl.fields().indexOf('value'), 'Changed first')
self.assertTrue(vl.commitChanges())
# 2.2.1. Checking edit from another vector layer
vl2 = QgsVectorLayer(self.dbconn + ' sslmode=disable srid=4326 key="pk" table="qgis_test"."tb_test_double_pk" (geom)', "test_double_pk2", "postgres")
self.assertTrue(vl2.isValid())
f2 = next(vl2.getFeatures(QgsFeatureRequest().setFilterExpression("pk = '3.141592653589793238462643383279502884197169'")))
self.assertTrue(f2.isValid())
self.assertEqual(f2['value'], 'Changed first')
# 2.3. Deleting
f = next(vl.getFeatures(QgsFeatureRequest().setFilterExpression("pk = '2.718281828459045235360287471352662497757247'")))
vl.startEditing()
vl.deleteFeatures([f.id()])
self.assertTrue(vl.commitChanges())
# 2.3.1. Checking deletion
f_iterator = vl2.getFeatures(QgsFeatureRequest().setFilterExpression("pk = '2.718281828459045235360287471352662497757247'"))
got_feature = True
try:
f2 = next(f_iterator)
got_feature = f2.isValid()
except StopIteration:
got_feature = False
self.assertFalse(got_feature)
# 2.4. Inserting new feature
newpointwkt = 'Point(-47.751 -15.644)'
f = QgsFeature(vl.fields())
f['pk'] = 0.22222222222222222222222
f['value'] = 'newly inserted'
f.setGeometry(QgsGeometry.fromWkt(newpointwkt))
vl.startEditing()
res, f = vl.dataProvider().addFeatures([f])
self.assertTrue(res)
self.assertTrue(vl.commitChanges())
# 2.4.1. Checking insertion
f2 = next(vl2.getFeatures(QgsFeatureRequest().setFilterExpression("pk = '0.22222222222222222222222'")))
self.assertTrue(f2.isValid())
self.assertAlmostEqual(f2['pk'], 0.2222222222222222, places=15)
self.assertEqual(f2['value'], 'newly inserted')
assert compareWkt(f2.geometry().asWkt(), newpointwkt), "Geometry mismatch. Expected: {} Got: {} \n".format(f2.geometry().asWkt(), newpointwkt)
# One more check: can we retrieve the same row with the value that we got from this layer?
doublepk = f2['pk']
f3 = next(vl.getFeatures(QgsFeatureRequest().setFilterExpression("pk = '{}'".format(doublepk))))
self.assertTrue(f3.isValid())
self.assertEqual(f3['value'], 'newly inserted')
self.assertEqual(f3['pk'], doublepk)
# no NUMERIC/DECIMAL checks here. NUMERIC primary keys are unsupported.
# TODO: implement NUMERIC primary keys/arbitrary precision arithmetic/fixed point math in QGIS.
def testPktMapInsert(self):
vl = QgsVectorLayer('{} table="qgis_test"."{}" key="obj_id" sql='.format(self.dbconn, 'oid_serial_table'),
"oid_serial", "postgres")
self.assertTrue(vl.isValid())
f = QgsFeature(vl.fields())
f['obj_id'] = vl.dataProvider().defaultValueClause(0)
f['name'] = 'Test'
r, f = vl.dataProvider().addFeatures([f])
self.assertTrue(r)
self.assertNotEqual(f[0]['obj_id'], NULL, f[0].attributes())
vl.deleteFeatures([f[0].id()])
def testNull(self):
"""
Asserts that 0, '' and NULL are treated as different values on insert
"""
vl = QgsVectorLayer(self.dbconn + ' sslmode=disable key=\'gid\' table="qgis_test"."constraints" sql=', 'test1',
'postgres')
self.assertTrue(vl.isValid())
QgsProject.instance().addMapLayer(vl)
tg = QgsTransactionGroup()
tg.addLayer(vl)
vl.startEditing()
def onError(message):
"""We should not get here. If we do, fail and say why"""
self.assertFalse(True, message)
vl.raiseError.connect(onError)
f = QgsFeature(vl.fields())
f['gid'] = 100
f['val'] = 0
f['name'] = ''
self.assertTrue(vl.addFeature(f))
feature = next(vl.getFeatures('"gid" = 100'))
self.assertEqual(f['val'], feature['val'])
self.assertEqual(f['name'], feature['name'])
def testNestedInsert(self):
tg = QgsTransactionGroup()
tg.addLayer(self.vl)
self.vl.startEditing()
it = self.vl.getFeatures()
f = next(it)
f['pk'] = NULL
self.vl.addFeature(f) # Should not deadlock during an active iteration
f = next(it)
def testTimeout(self):
"""
Asserts that we will not deadlock if more iterators are opened in parallel than
there are connections available in the connection pool
"""
request = QgsFeatureRequest()
request.setTimeout(1)
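# setTimeout() is in milliseconds: with a 1 ms budget an iterator that cannot
# obtain a pooled connection should give up instead of blocking forever.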
iterators = list()
for i in range(100):
iterators.append(self.vl.getFeatures(request))
def testTransactionDirtyName(self):
# create a vector layer based on postgres
vl = QgsVectorLayer(
self.dbconn +
' sslmode=disable key=\'pk\' srid=4326 type=POLYGON table="qgis_test"."some_poly_data" (geom) sql=',
'test', 'postgres')
self.assertTrue(vl.isValid())
# prepare a project with transactions enabled
p = QgsProject()
p.setAutoTransaction(True)
p.addMapLayers([vl])
vl.startEditing()
# update the data within the transaction
tr = vl.dataProvider().transaction()
sql = "update qgis_test.some_poly_data set pk=1 where pk=1"
name = "My Awesome Transaction!"
self.assertTrue(tr.executeSql(sql, True, name)[0])
# test name
self.assertEqual(vl.undoStack().command(0).text(), name)
# rollback
vl.rollBack()
def testTransactionDirty(self):
# create a vector layer based on postgres
vl = QgsVectorLayer(
self.dbconn +
' sslmode=disable key=\'pk\' srid=4326 type=POLYGON table="qgis_test"."some_poly_data" (geom) sql=',
'test', 'postgres')
self.assertTrue(vl.isValid())
# prepare a project with transactions enabled
p = QgsProject()
p.setAutoTransaction(True)
p.addMapLayers([vl])
vl.startEditing()
# check that the feature used for testing is ok
ft0 = vl.getFeatures('pk=1')
f = QgsFeature()
self.assertTrue(ft0.nextFeature(f))
# update the data within the transaction
tr = vl.dataProvider().transaction()
sql = "update qgis_test.some_poly_data set pk=33 where pk=1"
self.assertTrue(tr.executeSql(sql, True)[0])
# check that the pk of the feature has been changed
ft = vl.getFeatures('pk=1')
self.assertFalse(ft.nextFeature(f))
ft = vl.getFeatures('pk=33')
self.assertTrue(ft.nextFeature(f))
# underlying data has been modified and the layer is tagged as modified
self.assertTrue(vl.isModified())
# undo sql query
vl.undoStack().undo()
# check that the original feature with pk is back
ft0 = vl.getFeatures('pk=1')
self.assertTrue(ft0.nextFeature(f))
# redo
vl.undoStack().redo()
# check that the pk of the feature has been changed
ft1 = vl.getFeatures('pk=1')
self.assertFalse(ft1.nextFeature(f))
def testTransactionConstraints(self):
# create a vector layer based on postgres
vl = QgsVectorLayer(self.dbconn + ' sslmode=disable key=\'id\' table="qgis_test"."check_constraints" sql=',
'test', 'postgres')
self.assertTrue(vl.isValid())
# prepare a project with transactions enabled
p = QgsProject()
p.setAutoTransaction(True)
p.addMapLayers([vl])
# get feature
f = QgsFeature()
self.assertTrue(vl.getFeatures('id=1').nextFeature(f))
self.assertEqual(f.attributes(), [1, 4, 3])
# start editing
vl.startEditing()
# fill the attribute form with values that would violate the database
# CHECK constraint if the attributes were committed one at a time.
# Current feature: a = 4 / b = 3
# Update feature: a = 1 / b = 0
# If updated one at a time, '(a = 1) < (b = 3)' => FAIL!
form = QgsAttributeForm(vl, f)
for w in form.findChildren(QLabel):
if w.buddy():
spinBox = w.buddy()
if w.text() == 'a':
spinBox.setValue(1)
elif w.text() == 'b':
spinBox.setValue(0)
# save
form.save()
# check new values
self.assertTrue(vl.getFeatures('id=1').nextFeature(f))
self.assertEqual(f.attributes(), [1, 1, 0])
def testTransactionTuple(self):
# create a vector layer based on postgres
vl = QgsVectorLayer(
self.dbconn +
' sslmode=disable key=\'pk\' srid=4326 type=POLYGON table="qgis_test"."some_poly_data" (geom) sql=',
'test', 'postgres')
self.assertTrue(vl.isValid())
# prepare a project with transactions enabled
p = QgsProject()
p.setAutoTransaction(True)
p.addMapLayers([vl])
vl.startEditing()
# execute a query which returns a tuple
tr = vl.dataProvider().transaction()
sql = "select * from qgis_test.some_poly_data"
self.assertTrue(tr.executeSql(sql, False)[0])
# underlying data has not been modified
self.assertFalse(vl.isModified())
def testDomainTypes(self):
"""Test that domain types are correctly mapped"""
vl = QgsVectorLayer('%s table="qgis_test"."domains" sql=' %
(self.dbconn), "domains", "postgres")
self.assertTrue(vl.isValid())
fields = vl.dataProvider().fields()
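# Each domain column is expected to report the underlying base type, the
# schema-qualified domain name as typeName, and the length/precision declared
# in the domain definition.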
expected = {}
expected['fld_var_char_domain'] = {'type': QVariant.String, 'typeName': 'qgis_test.var_char_domain',
'length': -1}
expected['fld_var_char_domain_6'] = {'type': QVariant.String, 'typeName': 'qgis_test.var_char_domain_6',
'length': 6}
expected['fld_character_domain'] = {'type': QVariant.String, 'typeName': 'qgis_test.character_domain',
'length': 1}
expected['fld_character_domain_6'] = {'type': QVariant.String, 'typeName': 'qgis_test.character_domain_6',
'length': 6}
expected['fld_char_domain'] = {
'type': QVariant.String, 'typeName': 'qgis_test.char_domain', 'length': 1}
expected['fld_char_domain_6'] = {
'type': QVariant.String, 'typeName': 'qgis_test.char_domain_6', 'length': 6}
expected['fld_text_domain'] = {
'type': QVariant.String, 'typeName': 'qgis_test.text_domain', 'length': -1}
expected['fld_numeric_domain'] = {'type': QVariant.Double, 'typeName': 'qgis_test.numeric_domain', 'length': 10,
'precision': 4}
for f, e in list(expected.items()):
self.assertEqual(
fields.at(fields.indexFromName(f)).type(), e['type'])
self.assertEqual(fields.at(fields.indexFromName(f)
).typeName(), e['typeName'])
self.assertEqual(
fields.at(fields.indexFromName(f)).length(), e['length'])
if 'precision' in e:
self.assertEqual(
fields.at(fields.indexFromName(f)).precision(), e['precision'])
def testRenameAttributes(self):
''' Test renameAttributes() '''
vl = QgsVectorLayer('%s table="qgis_test"."rename_table" sql=' % (
self.dbconn), "renames", "postgres")
provider = vl.dataProvider()
provider.renameAttributes({1: 'field1', 2: 'field2'})
# bad rename
self.assertFalse(provider.renameAttributes({-1: 'not_a_field'}))
self.assertFalse(provider.renameAttributes({100: 'not_a_field'}))
# already exists
self.assertFalse(provider.renameAttributes({1: 'field2'}))
# rename one field
self.assertTrue(provider.renameAttributes({1: 'newname'}))
self.assertEqual(provider.fields().at(1).name(), 'newname')
vl.updateFields()
fet = next(vl.getFeatures())
self.assertEqual(fet.fields()[1].name(), 'newname')
# rename two fields
self.assertTrue(provider.renameAttributes(
{1: 'newname2', 2: 'another'}))
self.assertEqual(provider.fields().at(1).name(), 'newname2')
self.assertEqual(provider.fields().at(2).name(), 'another')
vl.updateFields()
fet = next(vl.getFeatures())
self.assertEqual(fet.fields()[1].name(), 'newname2')
self.assertEqual(fet.fields()[2].name(), 'another')
# close layer and reopen, then recheck to confirm that changes were saved to db
del vl
vl = None
vl = QgsVectorLayer('%s table="qgis_test"."rename_table" sql=' % (
self.dbconn), "renames", "postgres")
provider = vl.dataProvider()
self.assertEqual(provider.fields().at(1).name(), 'newname2')
self.assertEqual(provider.fields().at(2).name(), 'another')
fet = next(vl.getFeatures())
self.assertEqual(fet.fields()[1].name(), 'newname2')
self.assertEqual(fet.fields()[2].name(), 'another')
def testEditorWidgetTypes(self):
"""Test that editor widget types can be fetched from the qgis_editor_widget_styles table"""
vl = QgsVectorLayer('%s table="qgis_test"."widget_styles" sql=' % (
self.dbconn), "widget_styles", "postgres")
self.assertTrue(vl.isValid())
fields = vl.dataProvider().fields()
setup1 = fields.field("fld1").editorWidgetSetup()
self.assertFalse(setup1.isNull())
self.assertEqual(setup1.type(), "FooEdit")
self.assertEqual(setup1.config(), {"param1": "value1", "param2": "2"})
best1 = QgsGui.editorWidgetRegistry().findBest(vl, "fld1")
self.assertEqual(best1.type(), "FooEdit")
self.assertEqual(best1.config(), setup1.config())
self.assertTrue(fields.field("fld2").editorWidgetSetup().isNull())
best2 = QgsGui.editorWidgetRegistry().findBest(vl, "fld2")
self.assertEqual(best2.type(), "TextEdit")
def testHstore(self):
vl = QgsVectorLayer('%s table="qgis_test"."dict" sql=' %
(self.dbconn), "testhstore", "postgres")
self.assertTrue(vl.isValid())
fields = vl.dataProvider().fields()
self.assertEqual(
fields.at(fields.indexFromName('value')).type(), QVariant.Map)
f = next(vl.getFeatures(QgsFeatureRequest()))
value_idx = vl.fields().lookupField('value')
self.assertIsInstance(f.attributes()[value_idx], dict)
self.assertEqual(f.attributes()[value_idx], {'a': 'b', '1': '2'})
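# round-trip a map whose values need escaping inside the hstore literal
# (double quotes, single quotes and a backslash)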
new_f = QgsFeature(vl.fields())
new_f['pk'] = NULL
new_f['value'] = {'simple': '1', 'doubleQuote': '"y"',
'quote': "'q'", 'backslash': '\\'}
r, fs = vl.dataProvider().addFeatures([new_f])
self.assertTrue(r)
new_pk = fs[0]['pk']
self.assertNotEqual(new_pk, NULL, fs[0].attributes())
try:
read_back = vl.getFeature(new_pk)
self.assertEqual(read_back['pk'], new_pk)
self.assertEqual(read_back['value'], new_f['value'])
finally:
self.assertTrue(vl.startEditing())
self.assertTrue(vl.deleteFeatures([new_pk]))
self.assertTrue(vl.commitChanges())
def testJson(self):
vl = QgsVectorLayer('%s table="qgis_test"."json" sql=' %
(self.dbconn), "testjson", "postgres")
self.assertTrue(vl.isValid())
attrs = (
123,
1233.45,
None,
True,
False,
r"String literal with \"quotes\" 'and' other funny chars []{};#/èé*",
[1, 2, 3.4, None],
[True, False],
{'a': 123, 'b': 123.34, 'c': 'a string', 'd': [
1, 2, 3], 'e': {'a': 123, 'b': 123.45}}
)
attrs2 = (
246,
2466.91,
None,
True,
False,
r"Yet another string literal with \"quotes\" 'and' other funny chars: π []{};#/èé*",
[2, 4, 3.14159, None],
[True, False],
{'a': 246, 'b': 246.68, 'c': 'a rounded area: π × r²', 'd': [
1, 2, 3], 'e': {'a': 246, 'b': 246.91}}
)
json_idx = vl.fields().lookupField('jvalue')
jsonb_idx = vl.fields().lookupField('jbvalue')
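# Each test value is written to both the json and jsonb columns and read back
# through a freshly constructed layer, so the comparison goes to the database
# rather than to cached attribute values.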
for attr in attrs:
# Add a new feature
vl2 = QgsVectorLayer('%s table="qgis_test"."json" sql=' % (
self.dbconn), "testjson", "postgres")
self.assertTrue(vl2.startEditing())
f = QgsFeature(vl2.fields())
f.setAttributes([None, attr, attr])
self.assertTrue(vl2.addFeatures([f]))
self.assertTrue(vl2.commitChanges(), attr)
# Read back
vl2 = QgsVectorLayer('%s table="qgis_test"."json" sql=' % (
self.dbconn), "testjson", "postgres")
fid = [f.id() for f in vl2.getFeatures()][-1]
f = vl2.getFeature(fid)
self.assertEqual(f.attributes(), [fid, attr, attr])
# Change attribute values
vl2 = QgsVectorLayer('%s table="qgis_test"."json" sql=' % (
self.dbconn), "testjson", "postgres")
fid = [f.id() for f in vl2.getFeatures()][-1]
self.assertTrue(vl2.startEditing())
self.assertTrue(vl2.changeAttributeValues(
fid, {json_idx: attr, jsonb_idx: attr}))
self.assertTrue(vl2.commitChanges())
# Read back
vl2 = QgsVectorLayer('%s table="qgis_test"."json" sql=' % (
self.dbconn), "testjson", "postgres")
f = vl2.getFeature(fid)
self.assertEqual(f.attributes(), [fid, attr, attr])
# Let's check changeFeatures:
for attr in attrs2:
vl2 = QgsVectorLayer('%s table="qgis_test"."json" sql=' % (
self.dbconn), "testjson", "postgres")
fid = [f.id() for f in vl2.getFeatures()][-1]
self.assertTrue(vl2.startEditing())
self.assertTrue(vl2.dataProvider().changeFeatures({fid: {json_idx: attr, jsonb_idx: attr}}, {}))
self.assertTrue(vl2.commitChanges())
# Read back again
vl2 = QgsVectorLayer('%s table="qgis_test"."json" sql=' % (
self.dbconn), "testjson", "postgres")
f = vl2.getFeature(fid)
self.assertEqual(f.attributes(), [fid, attr, attr])
def testStringArray(self):
vl = QgsVectorLayer('%s table="qgis_test"."string_array" sql=' % (
self.dbconn), "teststringarray", "postgres")
self.assertTrue(vl.isValid())
fields = vl.dataProvider().fields()
self.assertEqual(fields.at(fields.indexFromName(
'value')).type(), QVariant.StringList)
self.assertEqual(fields.at(fields.indexFromName(
'value')).subType(), QVariant.String)
f = next(vl.getFeatures(QgsFeatureRequest()))
value_idx = vl.fields().lookupField('value')
self.assertIsInstance(f.attributes()[value_idx], list)
self.assertEqual(f.attributes()[value_idx], ['a', 'b', 'c'])
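# round-trip array elements that need escaping in the PostgreSQL array
# literal (double quotes, single quotes and a backslash)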
new_f = QgsFeature(vl.fields())
new_f['pk'] = NULL
new_f['value'] = ['simple', '"doubleQuote"', "'quote'", 'back\\slash']
r, fs = vl.dataProvider().addFeatures([new_f])
self.assertTrue(r)
new_pk = fs[0]['pk']
self.assertNotEqual(new_pk, NULL, fs[0].attributes())
try:
read_back = vl.getFeature(new_pk)
self.assertEqual(read_back['pk'], new_pk)
self.assertEqual(read_back['value'], new_f['value'])
finally:
self.assertTrue(vl.startEditing())
self.assertTrue(vl.deleteFeatures([new_pk]))
self.assertTrue(vl.commitChanges())
def testIntArray(self):
vl = QgsVectorLayer('%s table="qgis_test"."int_array" sql=' % (
self.dbconn), "testintarray", "postgres")
self.assertTrue(vl.isValid())
fields = vl.dataProvider().fields()
self.assertEqual(
fields.at(fields.indexFromName('value')).type(), QVariant.List)
self.assertEqual(fields.at(fields.indexFromName(
'value')).subType(), QVariant.Int)
f = next(vl.getFeatures(QgsFeatureRequest()))
value_idx = vl.fields().lookupField('value')
self.assertIsInstance(f.attributes()[value_idx], list)
self.assertEqual(f.attributes()[value_idx], [1, 2, -5])
def testDoubleArray(self):
vl = QgsVectorLayer('%s table="qgis_test"."double_array" sql=' % (
self.dbconn), "testdoublearray", "postgres")
self.assertTrue(vl.isValid())
fields = vl.dataProvider().fields()
self.assertEqual(
fields.at(fields.indexFromName('value')).type(), QVariant.List)
self.assertEqual(fields.at(fields.indexFromName(
'value')).subType(), QVariant.Double)
f = next(vl.getFeatures(QgsFeatureRequest()))
value_idx = vl.fields().lookupField('value')
self.assertIsInstance(f.attributes()[value_idx], list)
self.assertEqual(f.attributes()[value_idx], [1.1, 2, -5.12345])
def testNotNullConstraint(self):
vl = QgsVectorLayer('%s table="qgis_test"."constraints" sql=' % (
self.dbconn), "constraints", "postgres")
self.assertTrue(vl.isValid())
self.assertEqual(len(vl.fields()), 4)
# test some bad field indexes
self.assertEqual(vl.dataProvider().fieldConstraints(-1),
QgsFieldConstraints.Constraints())
self.assertEqual(vl.dataProvider().fieldConstraints(
1001), QgsFieldConstraints.Constraints())
self.assertTrue(vl.dataProvider().fieldConstraints(0) &
QgsFieldConstraints.ConstraintNotNull)
self.assertFalse(vl.dataProvider().fieldConstraints(1)
& QgsFieldConstraints.ConstraintNotNull)
self.assertTrue(vl.dataProvider().fieldConstraints(2) &
QgsFieldConstraints.ConstraintNotNull)
self.assertFalse(vl.dataProvider().fieldConstraints(3)
& QgsFieldConstraints.ConstraintNotNull)
# test that constraints have been saved to fields correctly
fields = vl.fields()
self.assertTrue(fields.at(0).constraints().constraints()
& QgsFieldConstraints.ConstraintNotNull)
self.assertEqual(fields.at(0).constraints().constraintOrigin(QgsFieldConstraints.ConstraintNotNull),
QgsFieldConstraints.ConstraintOriginProvider)
self.assertFalse(fields.at(1).constraints().constraints()
& QgsFieldConstraints.ConstraintNotNull)
self.assertTrue(fields.at(2).constraints().constraints()
& QgsFieldConstraints.ConstraintNotNull)
self.assertEqual(fields.at(2).constraints().constraintOrigin(QgsFieldConstraints.ConstraintNotNull),
QgsFieldConstraints.ConstraintOriginProvider)
self.assertFalse(fields.at(3).constraints().constraints()
& QgsFieldConstraints.ConstraintNotNull)
def testUniqueConstraint(self):
vl = QgsVectorLayer('%s table="qgis_test"."constraints" sql=' % (
self.dbconn), "constraints", "postgres")
self.assertTrue(vl.isValid())
self.assertEqual(len(vl.fields()), 4)
# test some bad field indexes
self.assertEqual(vl.dataProvider().fieldConstraints(-1),
QgsFieldConstraints.Constraints())
self.assertEqual(vl.dataProvider().fieldConstraints(
1001), QgsFieldConstraints.Constraints())
self.assertTrue(vl.dataProvider().fieldConstraints(0)
& QgsFieldConstraints.ConstraintUnique)
self.assertTrue(vl.dataProvider().fieldConstraints(1)
& QgsFieldConstraints.ConstraintUnique)
self.assertTrue(vl.dataProvider().fieldConstraints(2)
& QgsFieldConstraints.ConstraintUnique)
self.assertFalse(vl.dataProvider().fieldConstraints(3)
& QgsFieldConstraints.ConstraintUnique)
# test that constraints have been saved to fields correctly
fields = vl.fields()
self.assertTrue(fields.at(0).constraints().constraints()
& QgsFieldConstraints.ConstraintUnique)
self.assertEqual(fields.at(0).constraints().constraintOrigin(QgsFieldConstraints.ConstraintUnique),
QgsFieldConstraints.ConstraintOriginProvider)
self.assertTrue(fields.at(1).constraints().constraints()
& QgsFieldConstraints.ConstraintUnique)
self.assertEqual(fields.at(1).constraints().constraintOrigin(QgsFieldConstraints.ConstraintUnique),
QgsFieldConstraints.ConstraintOriginProvider)
self.assertTrue(fields.at(2).constraints().constraints()
& QgsFieldConstraints.ConstraintUnique)
self.assertEqual(fields.at(2).constraints().constraintOrigin(QgsFieldConstraints.ConstraintUnique),
QgsFieldConstraints.ConstraintOriginProvider)
self.assertFalse(fields.at(3).constraints().constraints()
& QgsFieldConstraints.ConstraintUnique)
def testConstraintOverwrite(self):
""" test that Postgres provider constraints can't be overwritten by vector layer method """
vl = QgsVectorLayer('%s table="qgis_test"."constraints" sql=' % (
self.dbconn), "constraints", "postgres")
self.assertTrue(vl.isValid())
self.assertTrue(vl.dataProvider().fieldConstraints(0) &
QgsFieldConstraints.ConstraintNotNull)
self.assertTrue(vl.fields().at(0).constraints().constraints()
& QgsFieldConstraints.ConstraintNotNull)
# add a constraint at the layer level
vl.setFieldConstraint(0, QgsFieldConstraints.ConstraintUnique)
# should be no change at provider level
self.assertTrue(vl.dataProvider().fieldConstraints(0) &
QgsFieldConstraints.ConstraintNotNull)
# but layer should still keep provider constraints...
self.assertTrue(vl.fields().at(0).constraints().constraints()
& QgsFieldConstraints.ConstraintNotNull)
self.assertTrue(vl.fieldConstraints(
0) & QgsFieldConstraints.ConstraintNotNull)
# ...in addition to layer level constraint
self.assertTrue(vl.fields().at(0).constraints(
).constraints() & QgsFieldConstraints.ConstraintUnique)
self.assertTrue(vl.fieldConstraints(
0) & QgsFieldConstraints.ConstraintUnique)
def testVectorLayerUtilsUniqueWithProviderDefault(self):
vl = QgsVectorLayer('%s table="qgis_test"."someData" sql=' %
(self.dbconn), "someData", "postgres")
default_clause = 'nextval(\'qgis_test."someData_pk_seq"\'::regclass)'
vl.dataProvider().setProviderProperty(
QgsDataProvider.EvaluateDefaultValues, False)
self.assertEqual(
vl.dataProvider().defaultValueClause(0), default_clause)
self.assertTrue(QgsVectorLayerUtils.valueExists(vl, 0, 4))
vl.startEditing()
f = QgsFeature(vl.fields())
f.setAttribute(0, default_clause)
self.assertFalse(
QgsVectorLayerUtils.valueExists(vl, 0, default_clause))
self.assertTrue(vl.addFeatures([f]))
# the default value clause should exist...
self.assertTrue(QgsVectorLayerUtils.valueExists(vl, 0, default_clause))
# but it should not prevent the attribute being validated
self.assertTrue(QgsVectorLayerUtils.validateAttribute(vl, f, 0))
vl.rollBack()
def testSkipConstraintCheck(self):
vl = QgsVectorLayer('%s table="qgis_test"."someData" sql=' %
(self.dbconn), "someData", "postgres")
default_clause = 'nextval(\'qgis_test."someData_pk_seq"\'::regclass)'
vl.dataProvider().setProviderProperty(
QgsDataProvider.EvaluateDefaultValues, False)
self.assertTrue(vl.dataProvider().skipConstraintCheck(
0, QgsFieldConstraints.ConstraintUnique, default_clause))
self.assertFalse(vl.dataProvider().skipConstraintCheck(
0, QgsFieldConstraints.ConstraintUnique, 59))
def testVectorLayerUtilsCreateFeatureWithProviderDefault(self):
vl = QgsVectorLayer('%s table="qgis_test"."someData" sql=' %
(self.dbconn), "someData", "postgres")
default_clause = 'nextval(\'qgis_test."someData_pk_seq"\'::regclass)'
self.assertEqual(
vl.dataProvider().defaultValueClause(0), default_clause)
# If an attribute map is provided, QgsVectorLayerUtils.createFeature must
# respect it, otherwise default values from provider are checked.
# User's choice will not be respected if the value violates unique constraints.
# See https://github.com/qgis/QGIS/issues/27758
f = QgsVectorLayerUtils.createFeature(vl, attributes={1: 5, 3: 'map'})
# changed so that createFeature respects user choice
self.assertEqual(f.attributes(), [
default_clause, 5, "'qgis'::text", 'map', None, None, None, None, None])
vl.setDefaultValueDefinition(3, QgsDefaultValue("'mappy'"))
# test ignore vector layer default value expression overrides postgres provider default clause,
# due to user's choice
f = QgsVectorLayerUtils.createFeature(vl, attributes={1: 5, 3: 'map'})
self.assertEqual(f.attributes(), [
default_clause, 5, "'qgis'::text", 'map', None, None, None, None, None])
# Since the user did not provide a value for field 3, createFeature must fall back to the layer default value
f = QgsVectorLayerUtils.createFeature(vl, attributes={1: 5})
self.assertEqual(f.attributes(), [
default_clause, 5, "'qgis'::text", 'mappy', None, None, None, None, None])
# See https://github.com/qgis/QGIS/issues/23127
def testNumericPrecision(self):
uri = 'point?field=f1:int'
uri += '&field=f2:double(6,4)'
uri += '&field=f3:string(20)'
lyr = QgsVectorLayer(uri, "x", "memory")
self.assertTrue(lyr.isValid())
f = QgsFeature(lyr.fields())
f['f1'] = 1
f['f2'] = 123.456
f['f3'] = '12345678.90123456789'
lyr.dataProvider().addFeatures([f])
uri = '%s table="qgis_test"."b18155" (g) key=\'f1\'' % (self.dbconn)
self.execSQLCommand('DROP TABLE IF EXISTS qgis_test.b18155')
err = QgsVectorLayerExporter.exportLayer(
lyr, uri, "postgres", lyr.crs())
self.assertEqual(err[0], QgsVectorLayerExporter.NoError,
'unexpected import error {0}'.format(err))
lyr = QgsVectorLayer(uri, "y", "postgres")
self.assertTrue(lyr.isValid())
f = next(lyr.getFeatures())
self.assertEqual(f['f1'], 1)
self.assertEqual(f['f2'], 123.456)
self.assertEqual(f['f3'], '12345678.90123456789')
# See https://github.com/qgis/QGIS/issues/23163
def testImportKey(self):
uri = 'point?field=f1:int'
uri += '&field=F2:double(6,4)'
uri += '&field=f3:string(20)'
lyr = QgsVectorLayer(uri, "x", "memory")
self.assertTrue(lyr.isValid())
def testKey(lyr, key, kfnames):
self.execSQLCommand('DROP TABLE IF EXISTS qgis_test.import_test')
uri = '%s table="qgis_test"."import_test" (g)' % self.dbconn
if key is not None:
uri += ' key=\'%s\'' % key
err = QgsVectorLayerExporter.exportLayer(
lyr, uri, "postgres", lyr.crs())
self.assertEqual(err[0], QgsVectorLayerExporter.NoError,
'unexpected import error {0}'.format(err))
olyr = QgsVectorLayer(uri, "y", "postgres")
self.assertTrue(olyr.isValid())
flds = lyr.fields()
oflds = olyr.fields()
if key is None:
# if no key was given, an extra primary key column is generated
self.assertEqual(oflds.size(), flds.size() + 1)
self.assertEqual(oflds[0].name(), kfnames[0])
for i in range(flds.size()):
self.assertEqual(oflds[i + 1].name(), flds[i].name())
else:
# pkey was given, no extra field generated
self.assertEqual(oflds.size(), flds.size())
for i in range(oflds.size()):
self.assertEqual(oflds[i].name(), flds[i].name())
pks = olyr.primaryKeyAttributes()
self.assertEqual(len(pks), len(kfnames))
for i in range(0, len(kfnames)):
self.assertEqual(oflds[pks[i]].name(), kfnames[i])
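# exercise a plain key, quoted keys, composite keys and the auto-generated
# key column used when no key is given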
testKey(lyr, 'f1', ['f1'])
testKey(lyr, '"f1"', ['f1'])
testKey(lyr, '"f1","F2"', ['f1', 'F2'])
testKey(lyr, '"f1","F2","f3"', ['f1', 'F2', 'f3'])
testKey(lyr, None, ['id'])
# See https://github.com/qgis/QGIS/issues/25415
def testImportWithoutSchema(self):
def _test(table, schema=None):
self.execSQLCommand('DROP TABLE IF EXISTS %s CASCADE' % table)
uri = 'point?field=f1:int'
uri += '&field=F2:double(6,4)'
uri += '&field=f3:string(20)'
lyr = QgsVectorLayer(uri, "x", "memory")
self.assertTrue(lyr.isValid())
table = ("%s" % table) if schema is None else (
"\"%s\".\"%s\"" % (schema, table))
dest_uri = "%s sslmode=disable table=%s (geom) sql" % (
self.dbconn, table)
QgsVectorLayerExporter.exportLayer(
lyr, dest_uri, "postgres", lyr.crs())
olyr = QgsVectorLayer(dest_uri, "y", "postgres")
self.assertTrue(olyr.isValid(), "Failed URI: %s" % dest_uri)
# Test bug 17518
_test('b17518')
# Test fully qualified table (with schema)
_test("b17518", "qgis_test")
# Test empty schema
_test("b17518", "")
# Test public schema
_test("b17518", "public")
# Test fully qualified table (with wrong schema)
with self.assertRaises(AssertionError):
_test("b17518", "qgis_test_wrong")
def testStyle(self):
self.execSQLCommand('DROP TABLE IF EXISTS layer_styles CASCADE')
vl = self.getEditableLayer()
self.assertTrue(vl.isValid())
self.assertTrue(
vl.dataProvider().isSaveAndLoadStyleToDatabaseSupported())
self.assertTrue(vl.dataProvider().isDeleteStyleFromDatabaseSupported())
        # table layer_styles does not exist
related_count, idlist, namelist, desclist, errmsg = vl.listStylesInDatabase()
self.assertEqual(related_count, -1)
self.assertEqual(idlist, [])
self.assertEqual(namelist, [])
self.assertEqual(desclist, [])
self.assertNotEqual(errmsg, "")
qml, errmsg = vl.getStyleFromDatabase("1")
self.assertEqual(qml, "")
self.assertNotEqual(errmsg, "")
mFilePath = QDir.toNativeSeparators(
'%s/symbol_layer/%s.qml' % (unitTestDataPath(), "singleSymbol"))
status = vl.loadNamedStyle(mFilePath)
self.assertTrue(status)
# The style is saved as non-default
errorMsg = vl.saveStyleToDatabase(
"by day", "faded greens and elegant patterns", False, "")
self.assertEqual(errorMsg, "")
# the style id should be "1", not "by day"
qml, errmsg = vl.getStyleFromDatabase("by day")
self.assertEqual(qml, "")
self.assertNotEqual(errmsg, "")
related_count, idlist, namelist, desclist, errmsg = vl.listStylesInDatabase()
self.assertEqual(related_count, 1)
self.assertEqual(errmsg, "")
self.assertEqual(idlist, ["1"])
self.assertEqual(namelist, ["by day"])
self.assertEqual(desclist, ["faded greens and elegant patterns"])
qml, errmsg = vl.getStyleFromDatabase("100")
self.assertEqual(qml, "")
self.assertNotEqual(errmsg, "")
qml, errmsg = vl.getStyleFromDatabase("1")
self.assertTrue(qml.startswith('<!DOCTYPE qgis'), qml)
self.assertEqual(errmsg, "")
res, errmsg = vl.deleteStyleFromDatabase("100")
self.assertTrue(res)
self.assertEqual(errmsg, "")
res, errmsg = vl.deleteStyleFromDatabase("1")
self.assertTrue(res)
self.assertEqual(errmsg, "")
        # Now save the style twice more, making one of them the default
errorMsg = vl.saveStyleToDatabase(
"related style", "faded greens and elegant patterns", False, "")
self.assertEqual(errorMsg, "")
errorMsg = vl.saveStyleToDatabase(
"default style", "faded greens and elegant patterns", True, "")
self.assertEqual(errorMsg, "")
related_count, idlist, namelist, desclist, errmsg = vl.listStylesInDatabase()
self.assertEqual(related_count, 2)
self.assertEqual(errmsg, "")
self.assertEqual(idlist, ["3", "2"]) # Ids must be reversed.
self.assertEqual(namelist, ["default style", "related style"])
self.assertEqual(desclist, ["faded greens and elegant patterns"] * 2)
# We remove these 2 styles
res, errmsg = vl.deleteStyleFromDatabase("2")
self.assertTrue(res)
self.assertEqual(errmsg, "")
res, errmsg = vl.deleteStyleFromDatabase("3")
self.assertTrue(res)
self.assertEqual(errmsg, "")
        # table layer_styles does exist, but is now empty
related_count, idlist, namelist, desclist, errmsg = vl.listStylesInDatabase()
self.assertEqual(related_count, 0)
self.assertEqual(idlist, [])
self.assertEqual(namelist, [])
self.assertEqual(desclist, [])
self.assertEqual(errmsg, "")
def testStyleWithGeometryType(self):
"""Test saving styles with the additional geometry type
Layers are created from geometries_table
"""
myconn = 'service=\'qgis_test\''
if 'QGIS_PGTEST_DB' in os.environ:
myconn = os.environ['QGIS_PGTEST_DB']
# point layer
myPoint = QgsVectorLayer(
myconn +
' sslmode=disable srid=4326 type=POINT table="qgis_test"."geometries_table" (geom) sql=', 'Point',
'postgres')
self.assertTrue(myPoint.isValid())
myPoint.saveStyleToDatabase('myPointStyle', '', False, '')
# polygon layer
myPolygon = QgsVectorLayer(
myconn +
' sslmode=disable srid=4326 type=POLYGON table="qgis_test"."geometries_table" (geom) sql=', 'Poly',
'postgres')
        self.assertTrue(myPolygon.isValid())
myPolygon.saveStyleToDatabase('myPolygonStyle', '', False, '')
# how many
related_count, idlist, namelist, desclist, errmsg = myPolygon.listStylesInDatabase()
self.assertEqual(len(idlist), 2)
self.assertEqual(namelist, ['myPolygonStyle', 'myPointStyle'])
# raw psycopg2 query
self.assertTrue(self.con)
cur = self.con.cursor()
self.assertTrue(cur)
cur.execute("select stylename, type from layer_styles order by type")
self.assertEqual(cur.fetchall(), [
('myPointStyle', 'Point'), ('myPolygonStyle', 'Polygon')])
cur.close()
# delete them
myPolygon.deleteStyleFromDatabase(idlist[1])
myPolygon.deleteStyleFromDatabase(idlist[0])
styles = myPolygon.listStylesInDatabase()
ids = styles[1]
self.assertEqual(len(ids), 0)
def testSaveStyleInvalidXML(self):
self.execSQLCommand('DROP TABLE IF EXISTS layer_styles CASCADE')
vl = self.getEditableLayer()
self.assertTrue(vl.isValid())
self.assertTrue(
vl.dataProvider().isSaveAndLoadStyleToDatabaseSupported())
self.assertTrue(vl.dataProvider().isDeleteStyleFromDatabaseSupported())
mFilePath = QDir.toNativeSeparators(
'%s/symbol_layer/%s.qml' % (unitTestDataPath(), "fontSymbol"))
status = vl.loadNamedStyle(mFilePath)
self.assertTrue(status)
errorMsg = vl.saveStyleToDatabase(
"fontSymbol", "font with invalid utf8 char", False, "")
self.assertEqual(errorMsg, "")
qml, errmsg = vl.getStyleFromDatabase("1")
self.assertTrue('v="\u001E"' in qml)
self.assertEqual(errmsg, "")
# Test loadStyle from metadata
md = QgsProviderRegistry.instance().providerMetadata('postgres')
qml = md.loadStyle(self.dbconn + " type=POINT table=\"qgis_test\".\"editData\" (geom)", 'fontSymbol')
self.assertTrue(qml.startswith('<!DOCTYPE qgi'), qml)
self.assertTrue('v="\u001E"' in qml)
def testHasMetadata(self):
# views don't have metadata
vl = QgsVectorLayer('{} table="qgis_test"."{}" key="pk" sql='.format(self.dbconn, 'bikes_view'), "bikes_view",
"postgres")
self.assertTrue(vl.isValid())
self.assertFalse(vl.dataProvider().hasMetadata())
# ordinary tables have metadata
vl = QgsVectorLayer('%s table="qgis_test"."someData" sql=' %
(self.dbconn), "someData", "postgres")
self.assertTrue(vl.isValid())
self.assertTrue(vl.dataProvider().hasMetadata())
def testReadExtentOnView(self):
# vector layer based on view
vl0 = QgsVectorLayer(
self.dbconn +
' sslmode=disable key=\'pk\' srid=4326 type=POLYGON table="qgis_test"."some_poly_data_view" (geom) sql=',
'test', 'postgres')
self.assertTrue(vl0.isValid())
self.assertFalse(vl0.dataProvider().hasMetadata())
# set a custom extent
originalExtent = vl0.extent()
customExtent = QgsRectangle(-80, 80, -70, 90)
vl0.setExtent(customExtent)
# write xml
doc = QDomDocument("testdoc")
elem = doc.createElement("maplayer")
self.assertTrue(vl0.writeLayerXml(elem, doc, QgsReadWriteContext()))
# read xml with the custom extent. It should not be used by default
vl1 = QgsVectorLayer()
vl1.readLayerXml(elem, QgsReadWriteContext())
self.assertTrue(vl1.isValid())
self.assertEqual(vl1.extent(), originalExtent)
# read xml with custom extent with readExtent option. Extent read from
# xml document should be used because we have a view
vl2 = QgsVectorLayer()
vl2.setReadExtentFromXml(True)
vl2.readLayerXml(elem, QgsReadWriteContext())
self.assertTrue(vl2.isValid())
self.assertEqual(vl2.extent(), customExtent)
        # but a force update on extent should allow retrieving the data
# provider extent
vl2.updateExtents()
vl2.readLayerXml(elem, QgsReadWriteContext())
self.assertEqual(vl2.extent(), customExtent)
vl2.updateExtents(force=True)
vl2.readLayerXml(elem, QgsReadWriteContext())
self.assertEqual(vl2.extent(), originalExtent)
def testReadExtentOnTable(self):
# vector layer based on a standard table
vl0 = QgsVectorLayer(
self.dbconn +
' sslmode=disable key=\'pk\' srid=4326 type=POLYGON table="qgis_test"."some_poly_data" (geom) sql=',
'test', 'postgres')
self.assertTrue(vl0.isValid())
self.assertTrue(vl0.dataProvider().hasMetadata())
# set a custom extent
originalExtent = vl0.extent()
customExtent = QgsRectangle(-80, 80, -70, 90)
vl0.setExtent(customExtent)
# write xml
doc = QDomDocument("testdoc")
elem = doc.createElement("maplayer")
self.assertTrue(vl0.writeLayerXml(elem, doc, QgsReadWriteContext()))
# read xml with the custom extent. It should not be used by default
vl1 = QgsVectorLayer()
vl1.readLayerXml(elem, QgsReadWriteContext())
self.assertTrue(vl1.isValid())
self.assertEqual(vl1.extent(), originalExtent)
# read xml with custom extent with readExtent option. Extent read from
# xml document should NOT be used because we don't have a view or a
# materialized view
vl2 = QgsVectorLayer()
vl2.setReadExtentFromXml(True)
vl2.readLayerXml(elem, QgsReadWriteContext())
self.assertTrue(vl2.isValid())
self.assertEqual(vl2.extent(), originalExtent)
def testDeterminePkey(self):
"""Test primary key auto-determination"""
vl = QgsVectorLayer(self.dbconn + ' sslmode=disable srid=4326 type=POLYGON table="qgis_test"."authors" sql=',
'test', 'postgres')
self.assertTrue(vl.isValid())
self.assertTrue(vl.dataProvider().hasMetadata())
self.assertTrue("key='pk'" in vl.source())
def testCheckPkUnicityOnView(self):
# vector layer based on view
# This is valid
vl0 = QgsVectorLayer(
self.dbconn +
' checkPrimaryKeyUnicity=\'0\' sslmode=disable key=\'pk\' srid=0 type=POINT table="qgis_test"."b21839_pk_unicity_view" (geom) sql=',
'test', 'postgres')
self.assertTrue(vl0.isValid())
geom = vl0.getFeature(1).geometry().asWkt()
# This is NOT valid
vl0 = QgsVectorLayer(
self.dbconn +
' checkPrimaryKeyUnicity=\'1\' sslmode=disable key=\'an_int\' srid=0 type=POINT table="qgis_test"."b21839_pk_unicity_view" (geom) sql=',
'test', 'postgres')
self.assertFalse(vl0.isValid())
# This is NOT valid because the default is to check unicity
vl0 = QgsVectorLayer(
self.dbconn +
' sslmode=disable key=\'an_int\' srid=0 type=POINT table="qgis_test"."b21839_pk_unicity_view" (geom) sql=',
'test', 'postgres')
self.assertFalse(vl0.isValid())
# This is valid because the readExtentFromXml option is set
# loadDefaultStyle, readExtentFromXml
options = QgsVectorLayer.LayerOptions(True, True)
vl0 = QgsVectorLayer(
self.dbconn +
' sslmode=disable key=\'an_int\' srid=0 type=POINT table="qgis_test"."b21839_pk_unicity_view" (geom) sql=',
'test', 'postgres', options)
self.assertTrue(vl0.isValid())
# Valid because a_unique_int is unique and default is to check unicity
vl0 = QgsVectorLayer(
self.dbconn +
' sslmode=disable key=\'a_unique_int\' srid=0 type=POINT table="qgis_test"."b21839_pk_unicity_view" (geom) sql=',
'test', 'postgres')
self.assertEqual(vl0.getFeature(1).geometry().asWkt(), geom)
# Valid because a_unique_int is unique
vl0 = QgsVectorLayer(
self.dbconn +
' checkPrimaryKeyUnicity=\'1\' sslmode=disable key=\'a_unique_int\' srid=0 type=POINT table="qgis_test"."b21839_pk_unicity_view" (geom) sql=',
'test', 'postgres')
self.assertTrue(vl0.isValid())
self.assertEqual(vl0.getFeature(1).geometry().asWkt(), geom)
def testNotify(self):
vl0 = QgsVectorLayer(
self.dbconn +
' sslmode=disable key=\'pk\' srid=4326 type=POLYGON table="qgis_test"."some_poly_data" (geom) sql=',
'test', 'postgres')
vl0.dataProvider().setListening(True)
class Notified(QObject):
def __init__(self):
super(Notified, self).__init__()
self.received = ""
def receive(self, msg):
self.received = msg
notified = Notified()
vl0.dataProvider().notify.connect(notified.receive)
vl0.dataProvider().setListening(True)
cur = self.con.cursor()
ok = False
start = time.time()
while True:
cur.execute("NOTIFY qgis, 'my message'")
self.con.commit()
QGISAPP.processEvents()
if notified.received == "my message":
ok = True
break
if (time.time() - start) > 5: # timeout
break
vl0.dataProvider().notify.disconnect(notified.receive)
vl0.dataProvider().setListening(False)
self.assertTrue(ok)
def testStyleDatabaseWithService(self):
"""Test saving style in DB using a service file.
        To run this test, you first need to set up the test
database with tests/testdata/provider/testdata_pg.sh
"""
myconn = 'service=\'qgis_test\''
if 'QGIS_PGTEST_DB' in os.environ:
myconn = os.environ['QGIS_PGTEST_DB']
myvl = QgsVectorLayer(
myconn +
' sslmode=disable key=\'pk\' srid=4326 type=POINT table="qgis_test"."someData" (geom) sql=',
'test', 'postgres')
styles = myvl.listStylesInDatabase()
ids = styles[1]
self.assertEqual(len(ids), 0)
myvl.saveStyleToDatabase('mystyle', '', False, '')
styles = myvl.listStylesInDatabase()
ids = styles[1]
self.assertEqual(len(ids), 1)
myvl.deleteStyleFromDatabase(ids[0])
styles = myvl.listStylesInDatabase()
ids = styles[1]
self.assertEqual(len(ids), 0)
def testCurveToMultipolygon(self):
self.execSQLCommand(
'CREATE TABLE IF NOT EXISTS multicurve(pk SERIAL NOT NULL PRIMARY KEY, geom public.geometry(MultiPolygon, 4326))')
self.execSQLCommand('TRUNCATE multicurve')
vl = QgsVectorLayer(
self.dbconn +
' sslmode=disable key=\'pk\' srid=4326 type=MULTIPOLYGON table="multicurve" (geom) sql=',
'test', 'postgres')
f = QgsFeature(vl.fields())
f.setGeometry(QgsGeometry.fromWkt(
'CurvePolygon(CircularString (20 30, 50 30, 50 90, 10 50, 20 30))'))
self.assertTrue(vl.startEditing())
self.assertTrue(vl.addFeatures([f]))
self.assertTrue(vl.commitChanges())
f = next(vl.getFeatures(QgsFeatureRequest()))
g = f.geometry().constGet()
self.assertTrue(g)
self.assertEqual(g.wkbType(), QgsWkbTypes.MultiPolygon)
self.assertEqual(g.childCount(), 1)
self.assertTrue(g.childGeometry(0).vertexCount() > 3)
def testMassivePaste(self):
"""Speed test to compare createFeature and createFeatures, for regression #21303"""
import time
self.execSQLCommand(
'CREATE TABLE IF NOT EXISTS massive_paste(pk SERIAL NOT NULL PRIMARY KEY, geom public.geometry(Polygon, 4326))')
self.execSQLCommand('TRUNCATE massive_paste')
start_time = time.time()
vl = QgsVectorLayer(
self.dbconn +
' sslmode=disable key=\'pk\' srid=4326 type=POLYGON table="massive_paste" (geom) sql=',
'test_massive_paste', 'postgres')
self.assertTrue(vl.startEditing())
features = []
context = vl.createExpressionContext()
for i in range(4000):
features.append(
QgsVectorLayerUtils.createFeature(vl, QgsGeometry.fromWkt('Polygon ((7 44, 8 45, 8 46, 7 46, 7 44))'),
{0: i}, context))
self.assertTrue(vl.addFeatures(features))
self.assertTrue(vl.commitChanges())
self.assertEqual(vl.featureCount(), 4000)
print("--- %s seconds ---" % (time.time() - start_time))
self.execSQLCommand('TRUNCATE massive_paste')
start_time = time.time()
vl = QgsVectorLayer(
self.dbconn +
' sslmode=disable key=\'pk\' srid=4326 type=POLYGON table="massive_paste" (geom) sql=',
'test_massive_paste', 'postgres')
self.assertTrue(vl.startEditing())
features_data = []
context = vl.createExpressionContext()
for i in range(4000):
features_data.append(
QgsVectorLayerUtils.QgsFeatureData(QgsGeometry.fromWkt('Polygon ((7 44, 8 45, 8 46, 7 46, 7 44))'),
{0: i}))
features = QgsVectorLayerUtils.createFeatures(
vl, features_data, context)
self.assertTrue(vl.addFeatures(features))
self.assertTrue(vl.commitChanges())
self.assertEqual(vl.featureCount(), 4000)
print("--- %s seconds ---" % (time.time() - start_time))
def testFilterOnCustomBbox(self):
extent = QgsRectangle(-68, 70, -67, 80)
request = QgsFeatureRequest().setFilterRect(extent)
dbconn = 'service=qgis_test'
uri = '%s srid=4326 key="pk" sslmode=disable table="qgis_test"."some_poly_data_shift_bbox" (geom)' % (
dbconn)
def _test(vl, ids):
values = {feat['pk']: 'x' for feat in vl.getFeatures(request)}
expected = {x: 'x' for x in ids}
self.assertEqual(values, expected)
vl = QgsVectorLayer(uri, "testgeom", "postgres")
self.assertTrue(vl.isValid())
_test(vl, [2, 3])
vl = QgsVectorLayer(uri + ' bbox=shiftbox', "testgeom", "postgres")
self.assertTrue(vl.isValid())
_test(vl, [1, 3])
def testValidLayerDiscoverRelationsNone(self):
"""
        Test that the discover relations feature can be used on a layer that has no relations.
"""
vl = QgsVectorLayer(
self.dbconn +
' sslmode=disable key=\'pk\' srid=4326 type=POINT table="qgis_test"."someData" (geom) sql=',
'test', 'postgres')
self.assertTrue(vl.isValid())
self.assertEqual(vl.dataProvider().discoverRelations(vl, []), [])
def testInvalidLayerDiscoverRelations(self):
"""
        Test that the discover relations feature can be used on an invalid layer.
"""
vl = QgsVectorLayer('{} table="qgis_test"."invalid_layer" sql='.format(self.dbconn), "invalid_layer",
"postgres")
self.assertFalse(vl.isValid())
self.assertEqual(vl.dataProvider().discoverRelations(vl, []), [])
def testValidLayerDiscoverRelations(self):
"""
        Test implicit relations that can be discovered between tables, based on declared foreign keys.
The test also checks that two distinct relations can be discovered when two foreign keys are declared (see #41138).
"""
vl = QgsVectorLayer(
self.dbconn +
' sslmode=disable key=\'pk\' checkPrimaryKeyUnicity=\'1\' table="qgis_test"."referencing_layer"',
'referencing_layer', 'postgres')
vls = [
QgsVectorLayer(
self.dbconn +
' sslmode=disable key=\'pk_ref_1\' checkPrimaryKeyUnicity=\'1\' table="qgis_test"."referenced_layer_1"',
'referenced_layer_1', 'postgres'),
QgsVectorLayer(
self.dbconn +
' sslmode=disable key=\'pk_ref_2\' checkPrimaryKeyUnicity=\'1\' table="qgis_test"."referenced_layer_2"',
'referenced_layer_2', 'postgres'),
vl
]
for lyr in vls:
self.assertTrue(lyr.isValid())
QgsProject.instance().addMapLayer(lyr)
relations = vl.dataProvider().discoverRelations(vl, vls)
self.assertEqual(len(relations), 2)
for i, r in enumerate(relations):
self.assertEqual(r.referencedLayer(), vls[i])
def testCheckTidPkOnViews(self):
"""Test vector layer based on a view with `ctid` as a key"""
# This is valid
vl0 = QgsVectorLayer(
self.dbconn +
' checkPrimaryKeyUnicity=\'0\' sslmode=disable key=\'ctid\' srid=4326 type=POINT table="qgis_test"."b31799_test_view_ctid" (geom) sql=',
'test', 'postgres')
self.assertTrue(vl0.isValid())
self.assertEqual(vl0.featureCount(), 10)
for f in vl0.getFeatures():
self.assertNotEqual(f.attribute(0), NULL)
def testFeatureCountEstimatedOnTable(self):
"""
Test feature count on table when estimated data is enabled
"""
vl = QgsVectorLayer(
self.dbconn +
' sslmode=disable key=\'pk\' estimatedmetadata=true srid=4326 type=POINT table="qgis_test"."someData" (geom) sql=',
'test', 'postgres')
self.assertTrue(vl.isValid())
self.assertTrue(vl.featureCount() > 0)
def testFeatureCountEstimatedOnView(self):
"""
Test feature count on view when estimated data is enabled
"""
self.execSQLCommand('DROP VIEW IF EXISTS qgis_test.somedataview')
self.execSQLCommand(
'CREATE VIEW qgis_test.somedataview AS SELECT * FROM qgis_test."someData"')
vl = QgsVectorLayer(
self.dbconn +
' sslmode=disable key=\'pk\' estimatedmetadata=true srid=4326 type=POINT table="qgis_test"."somedataview" (geom) sql=',
'test', 'postgres')
self.assertTrue(vl.isValid())
self.assertTrue(vl.featureCount() > 0)
def testIdentityPk(self):
"""Test a table with identity pk, see GH #29560"""
vl = QgsVectorLayer(
self.dbconn +
' sslmode=disable key=\'gid\' srid=4326 type=POLYGON table="qgis_test"."b29560"(geom) sql=',
'testb29560', 'postgres')
self.assertTrue(vl.isValid())
feature = QgsFeature(vl.fields())
geom = QgsGeometry.fromWkt('POLYGON EMPTY')
feature.setGeometry(geom)
self.assertTrue(vl.dataProvider().addFeature(feature))
del (vl)
# Verify
vl = QgsVectorLayer(
self.dbconn +
' sslmode=disable key=\'gid\' srid=4326 type=POLYGON table="qgis_test"."b29560"(geom) sql=',
'testb29560', 'postgres')
self.assertTrue(vl.isValid())
feature = next(vl.getFeatures())
self.assertIsNotNone(feature.id())
@unittest.skipIf(os.environ.get('TRAVIS', '') == 'true', 'Test flaky')
def testDefaultValuesAndClauses(self):
"""Test whether default values like CURRENT_TIMESTAMP or
        now() are respected. See GH #33383"""
# Create the test table
vl = QgsVectorLayer(self.dbconn + ' sslmode=disable table="public"."test_table_default_values" sql=', 'test',
'postgres')
self.assertTrue(vl.isValid())
dp = vl.dataProvider()
# Clean the table
dp.deleteFeatures(dp.allFeatureIds())
# Save it for the test
now = datetime.now()
# Test default values
dp.setProviderProperty(QgsDataProvider.EvaluateDefaultValues, 1)
# FIXME: spatialite provider (and OGR) return a NULL here and the following passes
# self.assertTrue(dp.defaultValue(0).isNull())
self.assertIsNotNone(dp.defaultValue(0))
self.assertIsNone(dp.defaultValue(1))
self.assertTrue(dp.defaultValue(
2).startswith(now.strftime('%Y-%m-%d')))
self.assertTrue(dp.defaultValue(
3).startswith(now.strftime('%Y-%m-%d')))
self.assertEqual(dp.defaultValue(4), 123)
self.assertEqual(dp.defaultValue(5), 'My default')
# FIXME: the provider should return the clause definition
# regardless of the EvaluateDefaultValues setting
dp.setProviderProperty(QgsDataProvider.EvaluateDefaultValues, 0)
self.assertEqual(dp.defaultValueClause(
0), "nextval('test_table_default_values_id_seq'::regclass)")
self.assertEqual(dp.defaultValueClause(1), '')
self.assertEqual(dp.defaultValueClause(2), "now()")
self.assertEqual(dp.defaultValueClause(3), "CURRENT_TIMESTAMP")
self.assertEqual(dp.defaultValueClause(4), '123')
self.assertEqual(dp.defaultValueClause(5), "'My default'::text")
# FIXME: the test fails if the value is not reset to 1
dp.setProviderProperty(QgsDataProvider.EvaluateDefaultValues, 1)
feature = QgsFeature(vl.fields())
for idx in range(vl.fields().count()):
default = vl.dataProvider().defaultValue(idx)
if default is not None:
feature.setAttribute(idx, default)
else:
feature.setAttribute(idx, 'A comment')
self.assertTrue(vl.dataProvider().addFeature(feature))
del (vl)
# Verify
vl2 = QgsVectorLayer(self.dbconn + ' sslmode=disable table="public"."test_table_default_values" sql=', 'test',
'postgres')
self.assertTrue(vl2.isValid())
feature = next(vl2.getFeatures())
self.assertEqual(feature.attribute(1), 'A comment')
self.assertTrue(feature.attribute(
2).startswith(now.strftime('%Y-%m-%d')))
self.assertTrue(feature.attribute(
3).startswith(now.strftime('%Y-%m-%d')))
self.assertEqual(feature.attribute(4), 123)
self.assertEqual(feature.attribute(5), 'My default')
def testEncodeDecodeUri(self):
"""Test PG encode/decode URI"""
md = QgsProviderRegistry.instance().providerMetadata('postgres')
self.assertEqual(md.decodeUri(
'dbname=\'qgis_tests\' host=localhost port=5432 user=\'myuser\' sslmode=disable estimatedmetadata=true srid=3067 table="public"."basic_map_tiled" (rast)'),
{'dbname': 'qgis_tests',
'estimatedmetadata': True,
'geometrycolumn': 'rast',
'host': 'localhost',
'port': '5432',
'schema': 'public',
'srid': '3067',
'sslmode': 1,
'table': 'basic_map_tiled',
'username': 'myuser'})
self.assertEqual(md.decodeUri(
'dbname=\'qgis_tests\' host=localhost port=5432 user=\'myuser\' sslmode=disable key=\'id\' estimatedmetadata=true srid=3763 type=MultiPolygon checkPrimaryKeyUnicity=\'1\' table="public"."copas1" (geom)'),
{'dbname': 'qgis_tests',
'estimatedmetadata': True,
'geometrycolumn': 'geom',
'host': 'localhost',
'key': 'id',
'port': '5432',
'schema': 'public',
'srid': '3763',
'sslmode': 1,
'table': 'copas1',
'type': 6,
'username': 'myuser'})
self.assertEqual(md.encodeUri({'dbname': 'qgis_tests',
'estimatedmetadata': True,
'geometrycolumn': 'geom',
'host': 'localhost',
'key': 'id',
'port': '5432',
'schema': 'public',
'srid': '3763',
'sslmode': 1,
'table': 'copas1',
'type': 6,
'username': 'myuser'}),
"dbname='qgis_tests' user='myuser' srid=3763 estimatedmetadata='true' host='localhost' key='id' port='5432' sslmode='disable' type='MultiPolygon' table=\"public\".\"copas1\" (geom)")
self.assertEqual(md.encodeUri({'dbname': 'qgis_tests',
'estimatedmetadata': True,
'geometrycolumn': 'rast',
'host': 'localhost',
'port': '5432',
'schema': 'public',
'srid': '3067',
'sslmode': 1,
'table': 'basic_map_tiled',
'username': 'myuser'}),
"dbname='qgis_tests' user='myuser' srid=3067 estimatedmetadata='true' host='localhost' port='5432' sslmode='disable' table=\"public\".\"basic_map_tiled\" (rast)")
def _round_trip(uri):
decoded = md.decodeUri(uri)
self.assertEqual(decoded, md.decodeUri(md.encodeUri(decoded)))
uri = self.dbconn + \
' sslmode=disable key=\'gid\' srid=3035 table="public"."my_pg_vector" sql='
decoded = md.decodeUri(uri)
self.assertEqual(decoded, {
'key': 'gid',
'schema': 'public',
'service': 'qgis_test',
'srid': '3035',
'sslmode': QgsDataSourceUri.SslDisable,
'table': 'my_pg_vector',
})
_round_trip(uri)
uri = self.dbconn + \
' sslmode=prefer key=\'gid\' srid=3035 temporalFieldIndex=2 ' + \
'authcfg=afebeff username=\'my username\' password=\'my secret password=\' ' + \
'table="public"."my_pg_vector" (the_geom) sql="a_field" != 1223223'
_round_trip(uri)
decoded = md.decodeUri(uri)
self.assertEqual(decoded, {
'authcfg': 'afebeff',
'geometrycolumn': 'the_geom',
'key': 'gid',
'password': 'my secret password=',
'schema': 'public',
'service': 'qgis_test',
'sql': '"a_field" != 1223223',
'srid': '3035',
'sslmode': QgsDataSourceUri.SslPrefer,
'table': 'my_pg_vector',
'username': 'my username',
})
def testHasSpatialIndex(self):
for layer_name in ('hspi_table', 'hspi_materialized_view'):
columns = {'geom_without_index': QgsFeatureSource.SpatialIndexNotPresent, 'geom_with_index': QgsFeatureSource.SpatialIndexPresent}
for (geometry_column, spatial_index) in columns.items():
conn = 'service=\'qgis_test\''
if 'QGIS_PGTEST_DB' in os.environ:
conn = os.environ['QGIS_PGTEST_DB']
vl = QgsVectorLayer(
conn +
' sslmode=disable key=\'id\' srid=4326 type=\'Polygon\' table="qgis_test"."{n}" ({c}) sql='.format(n=layer_name, c=geometry_column),
'test', 'postgres')
self.assertTrue(vl.isValid())
self.assertEqual(vl.hasSpatialIndex(), spatial_index)
class TestPyQgsPostgresProviderCompoundKey(unittest.TestCase, ProviderTestCase):
@classmethod
def setUpClass(cls):
"""Run before all tests"""
cls.dbconn = 'service=qgis_test'
if 'QGIS_PGTEST_DB' in os.environ:
cls.dbconn = os.environ['QGIS_PGTEST_DB']
# Create test layers
cls.vl = QgsVectorLayer(
cls.dbconn +
' sslmode=disable key=\'"key1","key2"\' srid=4326 type=POINT table="qgis_test"."someDataCompound" (geom) sql=',
'test', 'postgres')
assert cls.vl.isValid()
cls.source = cls.vl.dataProvider()
@classmethod
def tearDownClass(cls):
"""Run after all tests"""
def enableCompiler(self):
QgsSettings().setValue('/qgis/compileExpressions', True)
return True
def disableCompiler(self):
QgsSettings().setValue('/qgis/compileExpressions', False)
def uncompiledFilters(self):
return set(['"dt" = to_datetime(\'000www14ww13ww12www4ww5ww2020\',\'zzzwwwsswwmmwwhhwwwdwwMwwyyyy\')',
'"date" = to_date(\'www4ww5ww2020\',\'wwwdwwMwwyyyy\')',
'"time" = to_time(\'000www14ww13ww12www\',\'zzzwwwsswwmmwwhhwww\')'])
def partiallyCompiledFilters(self):
return set([])
def testConstraints(self):
for key in ["key1", "key2"]:
idx = self.vl.dataProvider().fieldNameIndex(key)
self.assertTrue(idx >= 0)
self.assertFalse(self.vl.dataProvider().fieldConstraints(
idx) & QgsFieldConstraints.ConstraintUnique)
def testCompoundPkChanges(self):
""" Check if fields with compound primary keys can be changed """
vl = self.vl
self.assertTrue(vl.isValid())
idx_key1 = vl.fields().lookupField('key1')
idx_key2 = vl.fields().lookupField('key2')
# the name "pk" for this datasource is misleading;
        # the primary key is actually composed of the fields key1 and key2
idx_pk = vl.fields().lookupField('pk')
idx_name = vl.fields().lookupField('name')
idx_name2 = vl.fields().lookupField('name2')
geomwkt = 'Point(-47.945 -15.812)'
# start editing ordinary attribute.
ft1 = next(vl.getFeatures(QgsFeatureRequest().setFilterExpression("key1 = 2 AND key2 = 2")))
self.assertTrue(ft1.isValid())
original_geometry = ft1.geometry().asWkt()
vl.startEditing()
self.assertTrue(vl.changeAttributeValues(ft1.id(), {idx_name: 'Rose'}))
self.assertTrue(vl.commitChanges())
# check change
ft2 = next(vl.getFeatures(QgsFeatureRequest().setFilterExpression("key1 = 2 AND key2 = 2")))
self.assertEqual(ft2['name'], 'Rose')
self.assertEqual(ft2['name2'], 'Apple')
self.assertEqual(ft2['pk'], 2)
# now, start editing one of the PK field components
vl.startEditing()
self.assertTrue(vl.dataProvider().changeFeatures({ft2.id(): {idx_key2: 42, idx_name: 'Orchid', idx_name2: 'Daisy'}}, {ft2.id(): QgsGeometry.fromWkt(geomwkt)}))
self.assertTrue(vl.commitChanges())
# let's check if we still have the same fid...
ft2 = next(vl.getFeatures(QgsFeatureRequest().setFilterFid(ft2.id())))
self.assertEqual(ft2['key2'], 42)
self.assertEqual(ft2['name'], 'Orchid')
self.assertEqual(ft2['name2'], 'Daisy')
self.assertTrue(vl.startEditing())
vl.changeAttributeValues(ft2.id(), {idx_key1: 21, idx_name2: 'Hibiscus'})
self.assertTrue(vl.commitChanges())
ft2 = next(vl.getFeatures(QgsFeatureRequest().setFilterFid(ft2.id())))
self.assertEqual(ft2['key1'], 21)
self.assertEqual(ft2['name2'], 'Hibiscus')
        # let's get a brand new feature and check how it went...
ft3 = next(vl.getFeatures(QgsFeatureRequest().setFilterExpression('pk = 2')))
self.assertEqual(ft3['name'], 'Orchid')
self.assertEqual(ft3['key1'], 21)
self.assertEqual(ft3['key2'], 42)
assert compareWkt(ft3.geometry().asWkt(), geomwkt), "Geometry mismatch. Expected: {} Got: {}\n".format(ft3.geometry().asWkt(), geomwkt)
# Now, we leave the record as we found it, so further tests can proceed
vl.startEditing()
self.assertTrue(vl.dataProvider().changeFeatures({ft3.id(): {idx_key1: 2, idx_key2: 2, idx_pk: 2, idx_name: 'Apple', idx_name2: 'Apple'}}, {ft3.id(): QgsGeometry.fromWkt(original_geometry)}))
self.assertTrue(vl.commitChanges())
class TestPyQgsPostgresProviderBigintSinglePk(unittest.TestCase, ProviderTestCase):
@classmethod
def setUpClass(cls):
"""Run before all tests"""
cls.dbconn = 'service=qgis_test'
if 'QGIS_PGTEST_DB' in os.environ:
cls.dbconn = os.environ['QGIS_PGTEST_DB']
# Create test layers
cls.vl = QgsVectorLayer(
cls.dbconn +
' sslmode=disable key=\'"pk"\' srid=4326 type=POINT table="qgis_test"."provider_bigint_single_pk" (geom) sql=',
'bigint_pk', 'postgres')
assert cls.vl.isValid()
cls.source = cls.vl.dataProvider()
cls.con = psycopg2.connect(cls.dbconn)
@classmethod
def tearDownClass(cls):
"""Run after all tests"""
def getSource(self):
""" drops/recreates the test data anew, like TestPyQgsPostgresProvider::getSource above. """
self.execSqlCommand(
"DROP TABLE IF EXISTS qgis_test.provider_edit_bigint_single_pk")
self.execSqlCommand(
"CREATE TABLE qgis_test.provider_edit_bigint_single_pk ( pk bigserial PRIMARY KEY, cnt integer, name text DEFAULT 'qgis', name2 text DEFAULT 'qgis', num_char text, dt timestamp without time zone, \"date\" date, \"time\" time without time zone, geom public.geometry(Point,4326), key1 integer, key2 integer)")
self.execSqlCommand(
"INSERT INTO qgis_test.provider_edit_bigint_single_pk ( key1, key2, pk, cnt, name, name2, num_char, dt, \"date\", \"time\", geom) VALUES"
"(1, 1, 5, -200, NULL, 'NuLl', '5', TIMESTAMP '2020-05-04 12:13:14', '2020-05-02', '12:13:01', '0101000020E61000001D5A643BDFC751C01F85EB51B88E5340'),"
"(1, 2, 3, 300, 'Pear', 'PEaR', '3', NULL, NULL, NULL, NULL),"
"(2, 1, 1, 100, 'Orange', 'oranGe', '1', TIMESTAMP '2020-05-03 12:13:14', '2020-05-03', '12:13:14', '0101000020E61000006891ED7C3F9551C085EB51B81E955040'),"
"(2, 2, 2, 200, 'Apple', 'Apple', '2', TIMESTAMP '2020-05-04 12:14:14', '2020-05-04', '12:14:14', '0101000020E6100000CDCCCCCCCC0C51C03333333333B35140'),"
"(2, 3, 4, 400, 'Honey', 'Honey', '4', TIMESTAMP '2021-05-04 13:13:14', '2021-05-04', '13:13:14', '0101000020E610000014AE47E17A5450C03333333333935340')")
vl = QgsVectorLayer(
self.dbconn +
' sslmode=disable key=\'"pk"\' srid=4326 type=POINT table="qgis_test"."provider_edit_bigint_single_pk" (geom) sql=',
'edit_bigint_pk', 'postgres')
return vl
def getEditableLayer(self):
return self.getSource()
def execSqlCommand(self, sql):
self.assertTrue(self.con)
cur = self.con.cursor()
self.assertTrue(cur)
cur.execute(sql)
cur.close()
self.con.commit()
def enableCompiler(self):
QgsSettings().setValue('/qgis/compileExpressions', True)
return True
def disableCompiler(self):
QgsSettings().setValue('/qgis/compileExpressions', False)
def uncompiledFilters(self):
return set(['"dt" = to_datetime(\'000www14ww13ww12www4ww5ww2020\',\'zzzwwwsswwmmwwhhwwwdwwMwwyyyy\')',
'"date" = to_date(\'www4ww5ww2020\',\'wwwdwwMwwyyyy\')',
'"time" = to_time(\'000www14ww13ww12www\',\'zzzwwwsswwmmwwhhwww\')'])
def partiallyCompiledFilters(self):
return set([])
def testConstraints(self):
idx = self.vl.dataProvider().fieldNameIndex("pk")
self.assertTrue(idx >= 0)
def testGetFeaturesFidTests(self):
fids = [f.id() for f in self.source.getFeatures()]
assert len(fids) == 5, 'Expected 5 features, got {} instead'.format(
len(fids))
for id in fids:
features = [f for f in self.source.getFeatures(
QgsFeatureRequest().setFilterFid(id))]
self.assertEqual(len(features), 1)
feature = features[0]
self.assertTrue(feature.isValid())
result = [feature.id()]
expected = [id]
assert result == expected, 'Expected {} and got {} when testing for feature ID filter'.format(expected,
result)
# test that results match QgsFeatureRequest.acceptFeature
request = QgsFeatureRequest().setFilterFid(id)
for f in self.source.getFeatures():
self.assertEqual(request.acceptFeature(f), f.id() == id)
# TODO: bad features are not tested because the PostgreSQL provider
# doesn't mark explicitly set invalid features as such.
def testGetFeatures(self, source=None, extra_features=[], skip_features=[], changed_attributes={},
changed_geometries={}):
""" Test that expected results are returned when fetching all features """
# IMPORTANT - we do not use `for f in source.getFeatures()` as we are also
# testing that existing attributes & geometry in f are overwritten correctly
# (for f in ... uses a new QgsFeature for every iteration)
if not source:
source = self.source
it = source.getFeatures()
f = QgsFeature()
attributes = {}
geometries = {}
while it.nextFeature(f):
# expect feature to be valid
self.assertTrue(f.isValid())
# some source test datasets will include additional attributes which we ignore,
# so cherry pick desired attributes
attrs = [f['pk'], f['cnt'], f['name'], f['name2'], f['num_char']]
# DON'T force the num_char attribute to be text - some sources (e.g., delimited text) will
# automatically detect that this attribute contains numbers and set it as a numeric
# field
# TODO: PostgreSQL 12 won't accept conversion from integer to text.
# attrs[4] = str(attrs[4])
attributes[f['pk']] = attrs
geometries[f['pk']] = f.hasGeometry() and f.geometry().asWkt()
expected_attributes = {5: [5, -200, NULL, 'NuLl', '5'],
3: [3, 300, 'Pear', 'PEaR', '3'],
1: [1, 100, 'Orange', 'oranGe', '1'],
2: [2, 200, 'Apple', 'Apple', '2'],
4: [4, 400, 'Honey', 'Honey', '4']}
expected_geometries = {1: 'Point (-70.332 66.33)',
2: 'Point (-68.2 70.8)',
3: None,
4: 'Point(-65.32 78.3)',
5: 'Point(-71.123 78.23)'}
for f in extra_features:
expected_attributes[f[0]] = f.attributes()
if f.hasGeometry():
expected_geometries[f[0]] = f.geometry().asWkt()
else:
expected_geometries[f[0]] = None
for i in skip_features:
del expected_attributes[i]
del expected_geometries[i]
for i, a in changed_attributes.items():
for attr_idx, v in a.items():
expected_attributes[i][attr_idx] = v
for i, g, in changed_geometries.items():
if g:
expected_geometries[i] = g.asWkt()
else:
expected_geometries[i] = None
self.assertEqual(attributes, expected_attributes, 'Expected {}, got {}'.format(
expected_attributes, attributes))
self.assertEqual(len(expected_geometries), len(geometries))
for pk, geom in list(expected_geometries.items()):
if geom:
assert compareWkt(geom, geometries[pk]), "Geometry {} mismatch Expected:\n{}\nGot:\n{}\n".format(pk,
geom,
geometries[
pk])
else:
self.assertFalse(
geometries[pk], 'Expected null geometry for {}'.format(pk))
def testAddFeatureExtraAttributes(self):
if not getattr(self, 'getEditableLayer', None):
return
l = self.getEditableLayer()
self.assertTrue(l.isValid())
if not l.dataProvider().capabilities() & QgsVectorDataProvider.AddFeatures:
return
# test that adding features with too many attributes drops these attributes
        # we'll be more tricky and also add a valid feature to stress test the provider
f1 = QgsFeature()
f1.setAttributes([6, -220, 'qgis', 'String', '15'])
f2 = QgsFeature()
f2.setAttributes([7, -230, 'qgis', 'String', '15', 15, 16, 17])
result, added = l.dataProvider().addFeatures([f1, f2])
self.assertTrue(result,
'Provider returned False to addFeatures with extra attributes. Providers should accept these features but truncate the extra attributes.')
# make sure feature was added correctly
added = [f for f in l.dataProvider().getFeatures() if f['pk'] == 7][0]
# TODO: The PostgreSQL provider doesn't truncate extra attributes!
self.assertNotEqual(added.attributes(), [7, -230, 'qgis', 'String', '15'],
'The PostgreSQL provider doesn\'t truncate extra attributes.')
def testAddFeatureMissingAttributes(self):
if not getattr(self, 'getEditableLayer', None):
return
l = self.getEditableLayer()
self.assertTrue(l.isValid())
if not l.dataProvider().capabilities() & QgsVectorDataProvider.AddFeatures:
return
# test that adding features with missing attributes pads out these
# attributes with NULL values to the correct length.
# changed from ProviderTestBase.testAddFeatureMissingAttributes: we use
# 'qgis' instead of NULL below.
# TODO: Only unmentioned attributes get filled with the DEFAULT table
# value; if the attribute is present, the saved value will be NULL if
# that is indicated, or the value mentioned by the user; there is no
# implicit conversion of PyQGIS::NULL to PostgreSQL DEFAULT.
f1 = QgsFeature()
f1.setAttributes([6, -220, 'qgis', 'String'])
f2 = QgsFeature()
f2.setAttributes([7, 330])
result, added = l.dataProvider().addFeatures([f1, f2])
self.assertTrue(result,
'Provider returned False to addFeatures with missing attributes. Providers should accept these features but add NULL attributes to the end of the existing attributes to the required field length.')
f1.setId(added[0].id())
f2.setId(added[1].id())
# check result - feature attributes MUST be padded out to required number of fields
f1.setAttributes([6, -220, 'qgis', 'String', NULL])
f2.setAttributes([7, 330, 'qgis', 'qgis', NULL])
self.testGetFeatures(l.dataProvider(), [f1, f2])
def testAddFeature(self):
if not getattr(self, 'getEditableLayer', None):
return
l = self.getEditableLayer()
self.assertTrue(l.isValid())
f1 = QgsFeature()
# changed from ProviderTestBase.testAddFeature: we use 'qgis' instead
# of NULL below.
# TODO: Only unmentioned attributes get filled with the DEFAULT table
# value; if the attribute is present, the saved value will be NULL if
# that is indicated, or the value mentioned by the user; there is no
# implicit conversion of PyQGIS::NULL to PostgreSQL DEFAULT.
f1.setAttributes([6, -220, 'qgis', 'String', '15'])
f1.setGeometry(QgsGeometry.fromWkt('Point (-72.345 71.987)'))
f2 = QgsFeature()
f2.setAttributes([7, 330, 'Coconut', 'CoCoNut', '13'])
if l.dataProvider().capabilities() & QgsVectorDataProvider.AddFeatures:
# expect success
result, added = l.dataProvider().addFeatures([f1, f2])
self.assertTrue(
result, 'Provider reported AddFeatures capability, but returned False to addFeatures')
f1.setId(added[0].id())
f2.setId(added[1].id())
# check result
self.testGetFeatures(l.dataProvider(), [f1, f2])
# add empty list, should return true for consistency
self.assertTrue(l.dataProvider().addFeatures([]))
# ensure that returned features have been given the correct id
f = next(l.getFeatures(
QgsFeatureRequest().setFilterFid(added[0].id())))
self.assertTrue(f.isValid())
self.assertEqual(f['cnt'], -220)
f = next(l.getFeatures(
QgsFeatureRequest().setFilterFid(added[1].id())))
self.assertTrue(f.isValid())
self.assertEqual(f['cnt'], 330)
else:
# expect fail
self.assertFalse(l.dataProvider().addFeatures([f1, f2]),
'Provider reported no AddFeatures capability, but returned true to addFeatures')
def testModifyPk(self):
""" Check if we can modify a primary key value. Since this PK is bigint, we also exercise the mapping between fid and values """
vl = self.getEditableLayer()
self.assertTrue(vl.isValid())
geomwkt = 'Point(-47.945 -15.812)'
feature = next(vl.getFeatures(QgsFeatureRequest().setFilterExpression('pk = 4')))
self.assertTrue(feature.isValid())
self.assertTrue(vl.startEditing())
idxpk = vl.fields().lookupField('pk')
self.assertTrue(vl.dataProvider().changeFeatures({feature.id(): {idxpk: 42}}, {feature.id(): QgsGeometry.fromWkt(geomwkt)}))
self.assertTrue(vl.commitChanges())
# read back
ft = next(vl.getFeatures(QgsFeatureRequest().setFilterExpression('pk = 42')))
self.assertTrue(ft.isValid())
self.assertEqual(ft['name'], 'Honey')
assert compareWkt(ft.geometry().asWkt(), geomwkt), "Geometry mismatch. Expected: {} Got: {}\n".format(ft.geometry().asWkt(), geomwkt)
    def testDuplicatedFieldNamesInQueryLayers(self):
        """Test regression GH #36205"""
vl = QgsVectorLayer(self.dbconn + ' sslmode=disable key=\'__rid__\' table="(SELECT row_number() OVER () AS __rid__, * FROM (SELECT * from qgis_test.some_poly_data a, qgis_test.some_poly_data b where ST_Intersects(a.geom,b.geom)) as foo)" sql=', 'test_36205', 'postgres')
self.assertTrue(vl.isValid())
self.assertEqual(vl.featureCount(), 3)
# This fails because the "geom" field and "pk" fields are ambiguous
# There is no easy fix: all duplicated fields should be explicitly aliased
# and the query internally rewritten
# feature = next(vl.getFeatures())
# self.assertTrue(vl.isValid())
def testUnrestrictedGeometryType(self):
"""Test geometry column with no explicit geometry type, regression GH #38565"""
md = QgsProviderRegistry.instance().providerMetadata("postgres")
conn = md.createConnection(self.dbconn, {})
# Cleanup if needed
try:
conn.dropVectorTable('qgis_test', 'test_unrestricted_geometry')
except QgsProviderConnectionException:
pass
conn.executeSql('''
CREATE TABLE "qgis_test"."test_unrestricted_geometry" (
gid serial primary key,
geom geometry(Geometry, 4326)
);''')
points = QgsVectorLayer(self.dbconn + ' sslmode=disable key=\'gid\' srid=4326 type=POINT table="qgis_test"."test_unrestricted_geometry" (geom) sql=', 'test_points', 'postgres')
lines = QgsVectorLayer(self.dbconn + ' sslmode=disable key=\'gid\' srid=4326 type=LINESTRING table="qgis_test"."test_unrestricted_geometry" (geom) sql=', 'test_lines', 'postgres')
polygons = QgsVectorLayer(self.dbconn + ' sslmode=disable key=\'gid\' srid=4326 type=POLYGON table="qgis_test"."test_unrestricted_geometry" (geom) sql=', 'test_polygons', 'postgres')
self.assertTrue(points.isValid())
self.assertTrue(lines.isValid())
self.assertTrue(polygons.isValid())
f = QgsFeature(points.fields())
f.setGeometry(QgsGeometry.fromWkt('point(9 45)'))
self.assertTrue(points.dataProvider().addFeatures([f]))
self.assertEqual(points.featureCount(), 1)
self.assertEqual(lines.featureCount(), 0)
self.assertEqual(polygons.featureCount(), 0)
# Fetch from iterator
self.assertTrue(compareWkt(next(points.getFeatures()).geometry().asWkt(), 'point(9 45)'))
with self.assertRaises(StopIteration):
next(lines.getFeatures())
with self.assertRaises(StopIteration):
next(polygons.getFeatures())
f.setGeometry(QgsGeometry.fromWkt('linestring(9 45, 10 46)'))
self.assertTrue(lines.dataProvider().addFeatures([f]))
self.assertEqual(points.featureCount(), 1)
self.assertEqual(lines.featureCount(), 1)
self.assertEqual(polygons.featureCount(), 0)
# Fetch from iterator
self.assertTrue(compareWkt(next(points.getFeatures()).geometry().asWkt(), 'point(9 45)'))
self.assertTrue(compareWkt(next(lines.getFeatures()).geometry().asWkt(), 'linestring(9 45, 10 46)'))
with self.assertRaises(StopIteration):
next(polygons.getFeatures())
# Test regression GH #38567 (no SRID requested in the data source URI)
# Cleanup if needed
conn.executeSql('DELETE FROM "qgis_test"."test_unrestricted_geometry" WHERE \'t\'')
points = QgsVectorLayer(self.dbconn + ' sslmode=disable key=\'gid\' type=POINT table="qgis_test"."test_unrestricted_geometry" (geom) sql=', 'test_points', 'postgres')
lines = QgsVectorLayer(self.dbconn + ' sslmode=disable key=\'gid\' type=LINESTRING table="qgis_test"."test_unrestricted_geometry" (geom) sql=', 'test_lines', 'postgres')
polygons = QgsVectorLayer(self.dbconn + ' sslmode=disable key=\'gid\' type=POLYGON table="qgis_test"."test_unrestricted_geometry" (geom) sql=', 'test_polygons', 'postgres')
self.assertTrue(points.isValid())
self.assertTrue(lines.isValid())
self.assertTrue(polygons.isValid())
def testTrustFlag(self):
"""Test regression https://github.com/qgis/QGIS/issues/38809"""
vl = QgsVectorLayer(
self.dbconn +
' sslmode=disable key=\'pk\' srid=4326 type=POINT table="qgis_test"."editData" (geom) sql=',
'testTrustFlag', 'postgres')
self.assertTrue(vl.isValid())
p = QgsProject.instance()
d = QTemporaryDir()
dir_path = d.path()
self.assertTrue(p.addMapLayers([vl]))
project_path = os.path.join(dir_path, 'testTrustFlag.qgs')
self.assertTrue(p.write(project_path))
del vl
p.clear()
self.assertTrue(p.read(project_path))
vl = p.mapLayersByName('testTrustFlag')[0]
self.assertTrue(vl.isValid())
self.assertFalse(p.trustLayerMetadata())
# Set the trust flag
p.setTrustLayerMetadata(True)
self.assertTrue(p.write(project_path))
# Re-read
p.clear()
self.assertTrue(p.read(project_path))
self.assertTrue(p.trustLayerMetadata())
vl = p.mapLayersByName('testTrustFlag')[0]
self.assertTrue(vl.isValid())
def testQueryLayerDuplicatedFields(self):
"""Test that duplicated fields from a query layer are returned"""
def _get_layer(sql):
return QgsVectorLayer(
self.dbconn +
' sslmode=disable key=\'__rid__\' table=\'(SELECT row_number() OVER () AS __rid__, * FROM (' + sql + ') as foo)\' sql=',
'test', 'postgres')
l = _get_layer('SELECT 1, 2')
self.assertEqual(l.fields().count(), 3)
self.assertEqual([f.name() for f in l.fields()], ['__rid__', '?column?', '?column? (2)'])
l = _get_layer('SELECT 1 as id, 2 as id')
self.assertEqual(l.fields().count(), 3)
self.assertEqual([f.name() for f in l.fields()], ['__rid__', 'id', 'id (2)'])
    def testInsertOnlyFieldIsEditable(self):
        """Test issue #40922 where a user with INSERT-only permissions could not insert a new feature"""
md = QgsProviderRegistry.instance().providerMetadata("postgres")
conn = md.createConnection(self.dbconn, {})
conn.executeSql('DROP TABLE IF EXISTS public.insert_only_points')
conn.executeSql('DROP USER IF EXISTS insert_only_user')
conn.executeSql('CREATE USER insert_only_user WITH PASSWORD \'insert_only_user\'')
conn.executeSql('CREATE TABLE insert_only_points (id SERIAL PRIMARY KEY, name VARCHAR(64))')
conn.executeSql("SELECT AddGeometryColumn('public', 'insert_only_points', 'geom', 4326, 'POINT', 2 )")
conn.executeSql('GRANT SELECT ON "public"."insert_only_points" TO insert_only_user')
uri = QgsDataSourceUri(self.dbconn +
                               ' sslmode=disable key=\'id\' srid=4326 type=POINT table="public"."insert_only_points" (geom) sql=')
uri.setUsername('insert_only_user')
uri.setPassword('insert_only_user')
vl = QgsVectorLayer(uri.uri(), 'test', 'postgres')
self.assertTrue(vl.isValid())
self.assertFalse(vl.startEditing())
feature = QgsFeature(vl.fields())
self.assertFalse(QgsVectorLayerUtils.fieldIsEditable(vl, 0, feature))
self.assertFalse(QgsVectorLayerUtils.fieldIsEditable(vl, 1, feature))
conn.executeSql('GRANT INSERT ON "public"."insert_only_points" TO insert_only_user')
vl = QgsVectorLayer(uri.uri(), 'test', 'postgres')
feature = QgsFeature(vl.fields())
self.assertTrue(vl.startEditing())
self.assertTrue(QgsVectorLayerUtils.fieldIsEditable(vl, 0, feature))
self.assertTrue(QgsVectorLayerUtils.fieldIsEditable(vl, 1, feature))
if __name__ == '__main__':
unittest.main()
| ghtmtt/QGIS | tests/src/python/test_provider_postgres.py | Python | gpl-2.0 | 138,650 |
"""
==========================
FastICA on 2D point clouds
==========================
This example illustrates visually, in the feature space, a comparison of
the results of two different component analysis techniques:
:ref:`ICA` vs :ref:`PCA`.
Representing ICA in the feature space gives the view of 'geometric ICA':
ICA is an algorithm that finds directions in the feature space
corresponding to projections with high non-Gaussianity. These directions
need not be orthogonal in the original feature space, but they are
orthogonal in the whitened feature space, in which all directions
correspond to the same variance.
PCA, on the other hand, finds orthogonal directions in the raw feature
space that correspond to directions accounting for maximum variance.
Here we simulate independent sources using a highly non-Gaussian
process: two Student's t distributions with a low number of degrees of
freedom (top left figure). We mix them to create observations (top right
figure).
In this raw observation space, directions identified by PCA are
represented by orange vectors. We represent the signal in the PCA space,
after whitening by the variance corresponding to the PCA vectors (lower
left). Running ICA corresponds to finding a rotation in this space to
identify the directions of largest non-Gaussianity (lower right).
"""
# Authors: Alexandre Gramfort, Gael Varoquaux
# License: BSD 3 clause
import numpy as np
import matplotlib.pyplot as plt
from sklearn.decomposition import PCA, FastICA
# #############################################################################
# Generate sample data
rng = np.random.RandomState(42)
S = rng.standard_t(1.5, size=(20000, 2))
S[:, 0] *= 2.0
# Mix data
A = np.array([[1, 1], [0, 2]]) # Mixing matrix
X = np.dot(S, A.T) # Generate observations
pca = PCA()
S_pca_ = pca.fit(X).transform(X)
ica = FastICA(random_state=rng)
S_ica_ = ica.fit(X).transform(X) # Estimate the sources
S_ica_ /= S_ica_.std(axis=0)
# #############################################################################
# Plot results
def plot_samples(S, axis_list=None):
plt.scatter(
S[:, 0], S[:, 1], s=2, marker="o", zorder=10, color="steelblue", alpha=0.5
)
if axis_list is not None:
colors = ["orange", "red"]
for color, axis in zip(colors, axis_list):
axis /= axis.std()
x_axis, y_axis = axis
# Trick to get legend to work
plt.plot(0.1 * x_axis, 0.1 * y_axis, linewidth=2, color=color)
plt.quiver(
(0, 0),
(0, 0),
x_axis,
y_axis,
zorder=11,
width=0.01,
scale=6,
color=color,
)
plt.hlines(0, -3, 3)
plt.vlines(0, -3, 3)
plt.xlim(-3, 3)
plt.ylim(-3, 3)
plt.xlabel("x")
plt.ylabel("y")
plt.figure()
plt.subplot(2, 2, 1)
plot_samples(S / S.std())
plt.title("True Independent Sources")
axis_list = [pca.components_.T, ica.mixing_]
plt.subplot(2, 2, 2)
plot_samples(X / np.std(X), axis_list=axis_list)
legend = plt.legend(["PCA", "ICA"], loc="upper right")
legend.set_zorder(100)
plt.title("Observations")
plt.subplot(2, 2, 3)
plot_samples(S_pca_ / np.std(S_pca_, axis=0))
plt.title("PCA recovered signals")
plt.subplot(2, 2, 4)
plot_samples(S_ica_ / np.std(S_ica_))
plt.title("ICA recovered signals")
plt.subplots_adjust(0.09, 0.04, 0.94, 0.94, 0.26, 0.36)
plt.show()
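# A minimal numerical sanity check to accompany the plots (an illustrative
# addition, not part of the original example): ICA recovers the independent
# sources only up to permutation, sign and scale, so each estimated component
# should be strongly correlated with exactly one true source. Every row of the
# matrix below should therefore contain one entry close to 1 and one close to 0.
cross_corr = np.abs(np.corrcoef(S.T, S_ica_.T)[:2, 2:])
print("|correlation| between true sources and ICA estimates:")
print(cross_corr)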
| manhhomienbienthuy/scikit-learn | examples/decomposition/plot_ica_vs_pca.py | Python | bsd-3-clause | 3,446 |
# vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright 2013 Red Hat, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import collections
import functools
from nova import notifier
NOTIFICATIONS = []
def reset():
del NOTIFICATIONS[:]
FakeMessage = collections.namedtuple('Message',
['publisher_id', 'priority',
'event_type', 'payload'])
class FakeNotifier(object):
def __init__(self, publisher_id):
self.publisher_id = publisher_id
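        # Dynamically create one helper per priority (self.debug, self.info,
        # ...), each bound to _notify with the upper-cased priority string.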
for priority in ['debug', 'info', 'warn', 'error', 'critical']:
setattr(self, priority,
functools.partial(self._notify, priority.upper()))
def prepare(self, publisher_id=None):
if publisher_id is None:
publisher_id = self.publisher_id
return self.__class__(publisher_id)
def _notify(self, priority, ctxt, event_type, payload):
msg = FakeMessage(self.publisher_id, priority, event_type, payload)
NOTIFICATIONS.append(msg)
def stub_notifier(stubs):
stubs.Set(notifier, 'Notifier', FakeNotifier)
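if __name__ == '__main__':
    # Minimal self-check sketch (an illustrative addition, not part of the
    # original module): exercise FakeNotifier directly and verify that the
    # emitted message is captured in NOTIFICATIONS with the expected priority.
    # The publisher id, context dict and payload are placeholder values.
    demo = FakeNotifier('compute.demo-host')
    demo.info({'request_id': 'demo'}, 'compute.instance.create.end', {'id': 1})
    assert NOTIFICATIONS[0].priority == 'INFO'
    assert NOTIFICATIONS[0].event_type == 'compute.instance.create.end'
    reset()
    assert not NOTIFICATIONS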
| ntt-sic/nova | nova/tests/fake_notifier.py | Python | apache-2.0 | 1,657 |
import bot
bot.run_bot()
| git-commit/iot-gatekeeper | gatekeeper/test_app.py | Python | mit | 25 |
def find_superbubble(G, G_rev, s):
    """Check whether vertex ``s`` is the entrance of a superbubble in the
    directed graph ``G``. ``G_rev`` must be ``G`` with every edge reversed,
    so that ``G_rev.neighbors(u)`` yields the parents of ``u`` in ``G``.
    Returns ``(True, t)`` with the exit vertex ``t`` when a superbubble
    starting at ``s`` is found, otherwise ``(False, reason)`` where
    ``reason`` is one of "tip", "cycle" or "end".
    """
queue = set([s])
labels = {}
while True:
v = queue.pop()
labels[v] = "visited"
v_children = list(G.neighbors(v))
if len(v_children) == 0:
return False, "tip"
for u in v_children:
if u == s:
return False, "cycle"
labels[u] = "seen"
u_parents = list(G_rev.neighbors(u))
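            # u becomes ready to visit once all of its parents have been visited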
if all(labels.get(parent, "") == "visited" for parent in u_parents):
queue.add(u)
if len(queue) == 1:
t = list(queue)[0]
if "seen" not in [labels[k] for k in set(labels.keys()) - set([t])]:
if not G.has_edge(s, t):
return True, t
else:
return False, "cycle"
if len(queue) == 0:
break
return False, "end"
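if __name__ == "__main__":
    # Minimal usage sketch (an illustrative addition; it assumes the graphs are
    # networkx DiGraphs, which matches the neighbors()/has_edge() interface used
    # above but is not stated in the original module). The graph below contains
    # the diamond-shaped superbubble s -> {a, b} -> t, so calling the function
    # on the entrance s should report the exit t.
    import networkx as nx
    G = nx.DiGraph()
    G.add_edges_from([("s", "a"), ("s", "b"), ("a", "t"), ("b", "t")])
    G_rev = G.reverse()  # parents of a node in G are its neighbors in G_rev
    print(find_superbubble(G, G_rev, "s"))  # expected: (True, 't')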
| snurk/meta-strains | final_algo/superbubbles.py | Python | mit | 891 |
# Copyright (c) 2013-2016 Jeffrey Pfau
#
# This Source Code Form is subject to the terms of the Mozilla Public
# License, v. 2.0. If a copy of the MPL was not distributed with this
# file, You can obtain one at http://mozilla.org/MPL/2.0/.
from ._pylib import ffi, lib
from . import tile
from cached_property import cached_property
def find(path):
core = lib.mCoreFind(path.encode('UTF-8'))
if core == ffi.NULL:
return None
return Core._init(core)
def findVF(vf):
core = lib.mCoreFindVF(vf.handle)
if core == ffi.NULL:
return None
return Core._init(core)
def loadPath(path):
core = find(path)
if not core or not core.loadFile(path):
return None
return core
def loadVF(vf):
core = findVF(vf)
if not core or not core.loadROM(vf):
return None
return core
def needsReset(f):
def wrapper(self, *args, **kwargs):
if not self._wasReset:
raise RuntimeError("Core must be reset first")
return f(self, *args, **kwargs)
return wrapper
class Core(object):
if hasattr(lib, 'PLATFORM_GBA'):
PLATFORM_GBA = lib.PLATFORM_GBA
if hasattr(lib, 'PLATFORM_GB'):
PLATFORM_GB = lib.PLATFORM_GB
def __init__(self, native):
self._core = native
self._wasReset = False
@cached_property
def tiles(self):
return tile.TileView(self)
@classmethod
def _init(cls, native):
core = ffi.gc(native, native.deinit)
success = bool(core.init(core))
if not success:
raise RuntimeError("Failed to initialize core")
if hasattr(cls, 'PLATFORM_GBA') and core.platform(core) == cls.PLATFORM_GBA:
from .gba import GBA
return GBA(core)
if hasattr(cls, 'PLATFORM_GB') and core.platform(core) == cls.PLATFORM_GB:
from .gb import GB
return GB(core)
return Core(core)
def _deinit(self):
self._core.deinit(self._core)
def loadFile(self, path):
return bool(lib.mCoreLoadFile(self._core, path.encode('UTF-8')))
def isROM(self, vf):
return bool(self._core.isROM(vf.handle))
def loadROM(self, vf):
return bool(self._core.loadROM(self._core, vf.handle))
def loadSave(self, vf):
return bool(self._core.loadSave(self._core, vf.handle))
def loadTemporarySave(self, vf):
return bool(self._core.loadTemporarySave(self._core, vf.handle))
def loadPatch(self, vf):
return bool(self._core.loadPatch(self._core, vf.handle))
def autoloadSave(self):
return bool(lib.mCoreAutoloadSave(self._core))
def autoloadPatch(self):
return bool(lib.mCoreAutoloadPatch(self._core))
def platform(self):
return self._core.platform(self._core)
def desiredVideoDimensions(self):
width = ffi.new("unsigned*")
height = ffi.new("unsigned*")
self._core.desiredVideoDimensions(self._core, width, height)
return width[0], height[0]
def setVideoBuffer(self, image):
self._core.setVideoBuffer(self._core, image.buffer, image.stride)
def reset(self):
self._core.reset(self._core)
self._wasReset = True
@needsReset
def runFrame(self):
self._core.runFrame(self._core)
@needsReset
def runLoop(self):
self._core.runLoop(self._core)
@needsReset
def step(self):
self._core.step(self._core)
@staticmethod
def _keysToInt(*args, **kwargs):
keys = 0
if 'raw' in kwargs:
keys = kwargs['raw']
for key in args:
keys |= 1 << key
return keys
def setKeys(self, *args, **kwargs):
self._core.setKeys(self._core, self._keysToInt(*args, **kwargs))
def addKeys(self, *args, **kwargs):
self._core.addKeys(self._core, self._keysToInt(*args, **kwargs))
def clearKeys(self, *args, **kwargs):
self._core.clearKeys(self._core, self._keysToInt(*args, **kwargs))
@needsReset
def frameCounter(self):
return self._core.frameCounter(self._core)
def frameCycles(self):
return self._core.frameCycles(self._core)
def frequency(self):
return self._core.frequency(self._core)
def getGameTitle(self):
title = ffi.new("char[16]")
self._core.getGameTitle(self._core, title)
return ffi.string(title, 16).decode("ascii")
def getGameCode(self):
code = ffi.new("char[12]")
self._core.getGameCode(self._core, code)
return ffi.string(code, 12).decode("ascii")
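if __name__ == "__main__":
    # Minimal usage sketch (an illustrative addition, not part of the original
    # module). The ROM path below is a placeholder and must point to a real
    # file supported by one of the available cores; a full application would
    # typically also attach a video buffer via setVideoBuffer() before running
    # frames.
    demo_core = loadPath("demo_rom.gba")
    if demo_core is not None:
        demo_core.reset()
        for _ in range(60):  # advance roughly one second of emulation
            demo_core.runFrame()
        print(demo_core.getGameTitle())
        print(demo_core.frameCounter())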
| fr500/mgba | src/platform/python/mgba/core.py | Python | mpl-2.0 | 4,578 |
#!/usr/bin/env python
#=============================================================================================
# MODULE DOCSTRING
#=============================================================================================
"""
Datasets and parameter sets for testing physical property estimation.
Authors
-------
* John D. Chodera <[email protected]>
TODO
----
* Implement methods
"""
#=============================================================================================
# GLOBAL IMPORTS
#=============================================================================================
import os
import sys
import time
import copy
import numpy as np
from simtk import openmm, unit
#=============================================================================================
# TEST DATASETS
#=============================================================================================
from openforcefield import PhysicalPropertyDataset
TestDataset = PhysicalPropertyDataset()
#=============================================================================================
# TEST PARAMETER SETS
#=============================================================================================
from openforcefield import ParameterSet
TestParameterSet = ParameterSet()
| bmanubay/open-forcefield-tools | openforcefield/tests/testsystems.py | Python | mit | 1,301 |
from st2actions.runners.pythonrunner import Action
from libcloud.compute.providers import Provider as ComputeProvider
from libcloud.compute.providers import get_driver as get_compute_driver
from libcloud.storage.providers import Provider as StorageProvider
from libcloud.storage.providers import get_driver as get_storage_driver
from msrestazure.azure_active_directory import ServicePrincipalCredentials
__all__ = [
'AzureBaseComputeAction',
'AzureBaseStorageAction',
'AzureBaseResourceManagerAction'
]
class AzureBaseComputeAction(Action):
def __init__(self, config):
super(AzureBaseComputeAction, self).__init__(config=config)
config = self.config['compute']
subscription_id = config['subscription_id']
key_file = config['cert_file']
self._driver = self._get_driver(subscription_id=subscription_id,
key_file=key_file)
def _get_driver(self, subscription_id, key_file):
cls = get_compute_driver(ComputeProvider.AZURE)
driver = cls(subscription_id=subscription_id, key_file=key_file)
return driver
class AzureBaseStorageAction(Action):
def __init__(self, config):
super(AzureBaseStorageAction, self).__init__(config=config)
config = self.config['storage']
name = config['name']
access_key = config['access_key']
self._driver = self._get_driver(name=name, access_key=access_key)
def _get_driver(self, name, access_key):
cls = get_storage_driver(StorageProvider.AZURE_BLOBS)
driver = cls(key=name, secret=access_key)
return driver
class AzureBaseResourceManagerAction(Action):
def __init__(self, config):
super(AzureBaseResourceManagerAction, self).__init__(config=config)
resource_config = self.config['resource_manager']
self.credentials = ServicePrincipalCredentials(
client_id=resource_config['client_id'],
secret=resource_config['secret'],
tenant=resource_config['tenant']
)
| armab/st2contrib | packs/azure/actions/lib/base.py | Python | apache-2.0 | 2,059 |
import argparse
import os
import numpy as np
from multiprocessing import Process, Queue
from Queue import Empty
import ioutils
from vecanalysis.representations.explicit import Explicit
from statutils.fastfreqdist import CachedFreqDist
SAMPLE_MAX = 1e9
def worker(proc_num, queue, out_dir, in_dir, count_dir, valid_words, num_words, min_count, sample=1e-5):
while True:
try:
year = queue.get(block=False)
except Empty:
break
print proc_num, "Getting counts and matrix year", year
embed = Explicit.load(in_dir + str(year) + ".bin", normalize=False)
year_words = valid_words[year][:num_words]
count_words = set(ioutils.words_above_count(count_dir, year, min_count))
freq = CachedFreqDist(ioutils.load_pickle(count_dir + str(year) + "-counts.pkl"))
use_words = list(count_words.intersection(year_words))
embed = embed.get_subembed(use_words, restrict_context=True)
sample_corr = min(SAMPLE_MAX / freq.N(), 1.0)
print "Sample correction..", sample_corr
embed.m = embed.m * sample_corr
mat = embed.m.tocoo()
print proc_num, "Outputing pairs for year", year
with open(out_dir + str(year) + ".tmp.txt", "w") as fp:
for i in xrange(len(mat.data)):
if i % 10000 == 0:
print "Done ", i, "of", len(mat.data)
word = embed.iw[mat.row[i]]
context = embed.ic[mat.col[i]]
if sample != 0:
prop_keep = min(np.sqrt(sample / freq.freq(word)), 1.0)
prop_keep *= min(np.sqrt(sample / freq.freq(context)), 1.0)
else:
prop_keep = 1.0
word = word.encode("utf-8")
context = context.encode("utf-8")
line = word + " " + context + "\n"
for j in xrange(int(mat.data[i] * prop_keep)):
fp.write(line)
mat = mat.tocsr()
print proc_num, "Outputing vocab for year", year
with open(out_dir + str(year) + ".vocab", "w") as fp:
for word in year_words:
if not word in count_words:
print >>fp, word.encode("utf-8"), 1
else:
print >>fp, word.encode("utf-8"), int(mat[embed.wi[word], :].sum())
print "shuf " + out_dir + str(year) + ".tmp.txt" " > " + out_dir + str(year) + ".txt"
os.system("shuf " + out_dir + str(year) + ".tmp.txt" + " > " + out_dir + str(year) + ".txt")
os.remove(out_dir + str(year) + ".tmp.txt")
def run_parallel(num_procs, out_dir, in_dir, count_dir, years, words, num_words, min_count, sample):
queue = Queue()
for year in years:
queue.put(year)
procs = [Process(target=worker, args=[i, queue, out_dir, in_dir, count_dir, words, num_words, min_count, sample]) for i in range(num_procs)]
for p in procs:
p.start()
for p in procs:
p.join()
if __name__ == '__main__':
parser = argparse.ArgumentParser(description="Computes various frequency statistics.")
parser.add_argument("out_dir")
parser.add_argument("in_dir")
parser.add_argument("count_dir")
parser.add_argument("word_file")
parser.add_argument("--workers", type=int, default=10)
parser.add_argument("--num-words", type=int, default=None)
parser.add_argument("--start-year", type=int, help="start year (inclusive)", default=1800)
parser.add_argument("--end-year", type=int, help="end year (inclusive)", default=2000)
parser.add_argument("--year-inc", type=int, help="end year (inclusive)", default=1)
parser.add_argument("--min-count", type=int, default=100)
parser.add_argument("--sample", type=float, default=1e-5)
args = parser.parse_args()
years = range(args.start_year, args.end_year + 1, args.year_inc)
words = ioutils.load_year_words(args.word_file, years)
ioutils.mkdir(args.out_dir)
run_parallel(args.workers, args.out_dir + "/", args.in_dir + "/", args.count_dir + "/", years, words, args.num_words, args.min_count, args.sample)
| williamleif/histwords | sgns/makecorpus.py | Python | apache-2.0 | 4,138 |
from mot.lib.cl_function import SimpleCLFunctionParameter
from .parameter_functions.numdiff_info import SimpleNumDiffInfo
from .parameter_functions.priors import UniformWithinBoundsPrior
from .parameter_functions.transformations import IdentityTransform
import numpy as np
__author__ = 'Robbert Harms'
__date__ = "2016-10-03"
__maintainer__ = "Robbert Harms"
__email__ = "[email protected]"
class InputDataParameter(SimpleCLFunctionParameter):
def __init__(self, declaration, value):
"""These parameters signal are meant to be contain data loaded from the input data object.
In contrast to free parameters which are being optimized (or fixed to values), these parameters are
meant to be loaded from the input data. They can contain scalars, vectors or matrices with values
to use for each problem instance and each data point.
Args:
declaration (str): the declaration of this parameter. For example ``global int foo``.
value (double or ndarray): The value used if no value is given in the input data.
"""
super().__init__(declaration)
self.value = value
class ProtocolParameter(InputDataParameter):
def __init__(self, declaration, value=None):
"""Caries data per observation.
Values for this parameter type are typically loaded from the input data. A default can be provided in the case
that there is no suitable value in the input data.
Args:
declaration (str): the declaration of this parameter. For example ``global int foo``.
value (None or float or ndarray): The value used if no value is given in the input data.
"""
super().__init__(declaration, value=value)
class FreeParameter(SimpleCLFunctionParameter):
def __init__(self, declaration, fixed, value, lower_bound, upper_bound,
parameter_transform=None, sampling_proposal_std=None,
sampling_prior=None, numdiff_info=None):
"""This are the kind of parameters that are generally meant to be optimized.
Args:
declaration (str): the declaration of this parameter. For example ``global int foo``.
fixed (boolean): If this parameter is fixed to the value given
value (double or ndarray): A single value for all problems or a list of values for each problem.
lower_bound (double): The lower bound of this parameter
upper_bound (double): The upper bound of this parameter
parameter_transform (mdt.model_building.parameter_functions.transformations.AbstractTransformation):
The parameter transformation function
            sampling_proposal_std (float): The proposal standard deviation, used in some MCMC sampling routines
            sampling_prior (mdt.model_building.parameter_functions.priors.ParameterPrior): The prior function for
                use in model sampling
numdiff_info (mdt.model_building.parameter_functions.numdiff_info.NumDiffInfo): the information
for taking the numerical derivative with respect to this parameter.
"""
super().__init__(declaration)
self._value = value
self._lower_bound = lower_bound
self._upper_bound = upper_bound
self._fixed = fixed
self._parameter_transform = parameter_transform or IdentityTransform()
self._sampling_proposal_std = sampling_proposal_std or 1
self._sampling_prior = sampling_prior or UniformWithinBoundsPrior()
self._numdiff_info = numdiff_info or SimpleNumDiffInfo()
@property
def value(self):
return self._value
@property
def lower_bound(self):
return self._lower_bound
@property
def upper_bound(self):
return self._upper_bound
@property
def fixed(self):
return self._fixed
@property
def parameter_transform(self):
"""Get the parameter transformation function used during optimization.
Returns:
mdt.model_building.parameter_functions.transformations.AbstractTransformation: the transformation method
"""
return self._parameter_transform
@property
def sampling_proposal_std(self):
"""Get the initial proposal standard deviation for this parameter.
Returns:
float: the initial default proposal standard deviation for use in MCMC sampling
"""
return self._sampling_proposal_std
@property
def sampling_prior(self):
"""Get the prior for this parameter, this is used in MCMC sampling.
Returns:
mdt.model_building.parameter_functions.priors.ParameterPrior: the prior for this parameter
"""
return self._sampling_prior
@property
def numdiff_info(self):
"""Specifies how to numerically differentiate this parameter.
Returns:
mdt.model_building.parameter_functions.numdiff_info.NumDiffInfo: the numerical differentiation information
"""
return self._numdiff_info
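# Illustrative sketch (not part of the original source): constructing a free
# parameter with explicit bounds. The declaration string and numeric values
# below are invented for demonstration only.
#
#     theta = FreeParameter('mot_float_type theta', fixed=False, value=0.5,
#                           lower_bound=0.0, upper_bound=3.14)
#     theta.fixed        # -> False
#     theta.upper_bound  # -> 3.14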
class SphericalCoordinateParameter(FreeParameter):
"""Template base class for spherical coordinate parameters.
These are meant to be inherited by the polar angle class and the azimuth angle class.
"""
class PolarAngleParameter(SphericalCoordinateParameter):
"""Polar angle for use in spherical coordinate systems.
If a compartment uses both a :class:`PolarAngleParameter` and :class:`AzimuthAngleParameter`,
the composite model will ensure that the resulting cartesian coordinates are within the right spherical hemisphere.
This is possible since diffusion is symmetric.
    In the background, we limit both the polar angle and the azimuth angle to the range [0, pi]
    by projecting any other angle combination onto the right spherical hemisphere.
"""
class AzimuthAngleParameter(SphericalCoordinateParameter):
"""Azimuth angle for use in spherical coordinate systems.
If a compartment uses both a :class:`PolarAngleParameter` and :class:`AzimuthAngleParameter`,
the composite model will ensure that the resulting cartesian coordinates are within the right spherical hemisphere.
This is possible since diffusion is symmetric.
    In the background, we limit both the polar angle and the azimuth angle to the range [0, pi]
    by projecting any other angle combination onto the right spherical hemisphere.
"""
class RotationalAngleParameter(FreeParameter):
def __init__(self, *args, modulus=np.pi, **kwargs):
"""Base class for parameters for which we want to enforce a modulus range.
Parameters of this type are essentially unbounded, but their range is restricted to [0, modulus] using a modulo
transformation. The modulus can be provided as an argument. This parameter class is recognized by the
composite model which adds the necessary functions to the optimization and sampling routines.
"""
super().__init__(*args, **kwargs)
self.modulus = modulus
class NoiseStdFreeParameter(FreeParameter):
"""Specifies that this parameter should be set to the current noise standard deviation estimate.
Parameters of this type are only meant to be used in the likelihood functions. They indicate the
parameter to use for the initialization of the noise standard deviation.
"""
class NoiseStdInputParameter(SimpleCLFunctionParameter):
def __init__(self, name='noise_std'):
"""Parameter indicating that this parameter should be fixed to the current value of the noise std.
Parameters of this type are meant to be used in compartment models specifying that we should use the
noise standard deviation value from the likelihood function as input to this function.
"""
super().__init__('double ' + name)
class LibraryParameter(SimpleCLFunctionParameter):
"""Parameters of this type are used inside library functions. They are not meant to be used in Model functions.
"""
class CurrentObservationParam(SimpleCLFunctionParameter):
def __init__(self, name='observation'):
"""This parameter indicates that the model should inject the current observation value in the model.
Sometimes during model linearization or other mathematical operations the current observation appears on
        both sides of the optimization equation. That is, it sometimes happens that you want to use the current observation
to model that same observation. This parameter is a signal to the model builder to inject the current
observation.
You can use this parameter by adding it to your model and then use the current name in your model equation.
"""
super().__init__('float ' + name)
class AllObservationsParam(SimpleCLFunctionParameter):
def __init__(self, name='observations'):
"""This parameter indicates that the model should inject the observations array into the model.
Some models need to convolve a signal over part of the observations array. To do so, this parameter can be used.
You can use this parameter by adding it to your model and then use the current name in your model equation.
"""
super().__init__('global float *' + name)
class ObservationIndexParam(SimpleCLFunctionParameter):
def __init__(self, name='observation_ind'):
"""Injects the current observation index into the compartment model.
Some models need to convolve a signal over part of the observations array. To do so, they typically require
all the observations (see :class:`AllObservationsParam`), the current observation index and the total number
of observations (see :class:`NmrObservationsParam`).
You can use this parameter by adding it to your model and then use the current name in your model equation.
"""
super().__init__('uint ' + name)
class NmrObservationsParam(SimpleCLFunctionParameter):
def __init__(self, name='nmr_observations'):
"""Injects the total number of observation into the compartment model.
Some models need to convolve a signal over part of the observations array. To do so, they typically require
all the observations (see :class:`AllObservationsParam`), the current observation index
(see :class:`ObservationIndexParam`) and the total number of observations.
You can use this parameter by adding it to your model and then use the current name in your model equation.
"""
super().__init__('uint ' + name)
class CurrentModelSignalParam(SimpleCLFunctionParameter):
def __init__(self, name='model_signal'):
"""This parameter indicates that the model should inject here the current signal value of the model.
Parameters of this type can only be used by the signal noise and the likelihood functions.
"""
super().__init__('double ' + name)
class DataCacheParameter(SimpleCLFunctionParameter):
def __init__(self, compartment_name, name):
"""This class provides a subclass for checking instance types.
Args:
compartment_name (str): the name of the compartment holding this parameter.
This parameter will make sure it gets named to the correct caching struct type.
name (str): the name of this parameter in the function
"""
super().__init__('{}_cache* {}'.format(compartment_name, name))
| cbclab/MDT | mdt/model_building/parameters.py | Python | lgpl-3.0 | 11,543 |
# Copyright 2015 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Tests for tensorflow.ops.gradients."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import warnings
import numpy as np
import tensorflow as tf
from tensorflow.python.framework import constant_op
from tensorflow.python.framework import dtypes
from tensorflow.python.framework import function
from tensorflow.python.framework import ops
from tensorflow.python.framework import test_ops
from tensorflow.python.framework import test_util
from tensorflow.python.framework.constant_op import constant
from tensorflow.python.ops import array_grad # pylint: disable=unused-import
from tensorflow.python.ops import array_ops
from tensorflow.python.ops import data_flow_grad # pylint: disable=unused-import
from tensorflow.python.ops import data_flow_ops # pylint: disable=unused-import
from tensorflow.python.ops import gradients
from tensorflow.python.ops import gradients_impl
from tensorflow.python.ops import math_grad # pylint: disable=unused-import
from tensorflow.python.ops import math_ops
from tensorflow.python.ops import nn_grad # pylint: disable=unused-import
from tensorflow.python.ops import state_grad # pylint: disable=unused-import
from tensorflow.python.ops import functional_ops # pylint: disable=unused-import
from tensorflow.python.ops.nn_ops import bias_add
from tensorflow.python.platform import googletest
def _OpsBetween(graph, to_ops, from_ops):
"""Build the list of operations between two lists of Operations.
Args:
graph: a Graph.
to_ops: list of Operations.
from_ops: list of Operations.
Returns:
The list of operations between "from_ops" and "to_ops", sorted by
decreasing operation id. This list contains all elements of to_ops.
TODO(touts): Think about returning an empty list if from_ops are not
reachable from to_ops. Presently it returns to_ops in that case.
"""
# List of booleans, indexed by operation id, indicating if
# an op is reached from the output of "input_ops".
reached_ops = [False] * (graph._last_id + 1)
# We only care to reach up to "output_ops" so we mark the
# output ops as reached to avoid recursing past them.
for op in to_ops:
reached_ops[op._id] = True
gradients_impl._MarkReachedOps(from_ops, reached_ops)
between_ops = gradients_impl._GatherInputs(to_ops, reached_ops)
between_ops.sort(key=lambda x: -x._id)
return between_ops
class GradientsTest(test_util.TensorFlowTestCase):
def _OpNames(self, op_list):
return ["%s/%d" % (str(op.name), op._id) for op in op_list]
def _assertOpListEqual(self, ops1, ops2):
self.assertEquals(self._OpNames(ops1), self._OpNames(ops2))
def testOpsBetweenSimple(self):
with ops.Graph().as_default() as g:
t1 = constant(1.0)
t2 = constant(2.0)
t3 = array_ops.pack([t1, t2])
# Full graph
self._assertOpListEqual([t3.op, t2.op, t1.op],
_OpsBetween(g, [t3.op], [t1.op, t2.op]))
# Only t1, t3.
self._assertOpListEqual([t3.op, t1.op],
_OpsBetween(g, [t3.op], [t1.op]))
def testOpsBetweenUnreachable(self):
with ops.Graph().as_default() as g:
t1 = constant(1.0)
t2 = constant(2.0)
_ = array_ops.pack([t1, t2])
t4 = constant(1.0)
t5 = constant(2.0)
t6 = array_ops.pack([t4, t5])
# Elements of to_ops are always listed.
self._assertOpListEqual([t6.op], _OpsBetween(g, [t6.op], [t1.op]))
def testOpsBetweenCut(self):
with ops.Graph().as_default() as g:
t1 = constant(1.0)
t2 = constant(2.0)
t3 = array_ops.pack([t1, t2])
t4 = constant([1.0])
t5 = array_ops.concat(0, [t4, t3])
t6 = constant([2.0])
t7 = array_ops.concat(0, [t5, t6])
self._assertOpListEqual([t7.op, t5.op, t4.op],
_OpsBetween(g, [t7.op], [t4.op]))
def testOpsBetweenCycle(self):
with ops.Graph().as_default() as g:
t1 = constant(1.0)
t2 = constant(2.0)
t3 = array_ops.pack([t1, t2])
t4 = array_ops.concat(0, [t3, t3, t3])
t5 = constant([1.0])
t6 = array_ops.concat(0, [t4, t5])
t7 = array_ops.concat(0, [t6, t3])
self._assertOpListEqual([t6.op, t4.op, t3.op],
_OpsBetween(g, [t6.op], [t3.op]))
self._assertOpListEqual([t7.op, t6.op, t5.op, t4.op, t3.op, t1.op],
_OpsBetween(g, [t7.op], [t1.op, t5.op]))
self._assertOpListEqual([t6.op, t5.op, t4.op, t3.op, t2.op],
_OpsBetween(g, [t6.op], [t2.op, t5.op]))
def testGradients(self):
with ops.Graph().as_default():
inp = constant(1.0, shape=[32, 100], name="in")
w = constant(1.0, shape=[100, 10], name="w")
b = constant(1.0, shape=[10], name="b")
xw = math_ops.matmul(inp, w, name="xw")
h = bias_add(xw, b, name="h")
w_grad = gradients.gradients(h, w)[0]
self.assertEquals("MatMul", w_grad.op.type)
self.assertEquals(w_grad.op._original_op, xw.op)
self.assertTrue(w_grad.op.get_attr("transpose_a"))
self.assertFalse(w_grad.op.get_attr("transpose_b"))
def testUnusedOutput(self):
with ops.Graph().as_default():
w = constant(1.0, shape=[2, 2])
x = constant(1.0, shape=[2, 2])
wx = math_ops.matmul(w, x)
split_wx = array_ops.split(0, 2, wx)
c = math_ops.reduce_sum(split_wx[1])
gw = gradients.gradients(c, [w])[0]
self.assertEquals("MatMul", gw.op.type)
def testColocateGradients(self):
with ops.Graph().as_default() as g:
w = constant(1.0, shape=[1, 1])
x = constant(1.0, shape=[1, 2])
with g.device("/gpu:0"):
wx = math_ops.matmul(w, x)
gw = gradients.gradients(wx, [w], colocate_gradients_with_ops=True)[0]
self.assertEqual(gw.op.colocation_groups(), wx.op.colocation_groups())
def testColocateGradientsWithAggregation(self):
with ops.Graph().as_default() as g:
with g.device("/gpu:1"):
w = constant(1.0, shape=[1, 1])
x = constant(1.0, shape=[1, 2])
y = constant(1.0, shape=[1, 2])
wx = math_ops.matmul(w, x)
wy = math_ops.matmul(w, y)
with g.device("/gpu:0"):
z = wx + wy
gw1 = gradients.gradients(z, [w], colocate_gradients_with_ops=True)[0]
self.assertEqual(gw1.op.colocation_groups(), wx.op.colocation_groups())
gw2 = gradients.gradients(z, [w], colocate_gradients_with_ops=False)[0]
self.assertTrue(wx.op.colocation_groups() != gw2.op.colocation_groups())
def testColocateGradientsWithAggregationInMultipleDevices(self):
with ops.Graph().as_default() as g:
with g.device("/gpu:1"):
w = constant(1.0, shape=[1, 1])
x = constant(1.0, shape=[1, 2])
y = constant(1.0, shape=[1, 2])
with g.device("/task:1"):
wx = math_ops.matmul(w, x)
with g.device("/task:2"):
wy = math_ops.matmul(w, y)
with g.device("/gpu:0"):
z = wx + wy
gw1 = gradients.gradients(z, [w], colocate_gradients_with_ops=True)[0]
self.assertEqual(gw1.op.colocation_groups(), w.op.colocation_groups())
gw2 = gradients.gradients(z, [w], colocate_gradients_with_ops=False)[0]
self.assertTrue(w.op.colocation_groups() != gw2.op.colocation_groups())
def testBoundaryStop(self):
# Test that we don't differentiate 'x'. The gradient function for 'x' is
# set explicitly to None so we will get an exception if the gradient code
# tries to differentiate 'x'.
with ops.Graph().as_default() as g:
c = constant(1.0)
x = array_ops.identity(c)
y = x + 1.0
z = y + 1
grads = gradients.gradients(z, [x])
self.assertTrue(all(x is not None for x in grads))
def testBoundaryContinue(self):
# Test that we differentiate both 'x' and 'y' correctly when x is a
# predecessor of y.
with self.test_session():
x = constant(1.0)
y = x * 2.0
z = y * 3.0
grads = gradients.gradients(z, [x, y])
self.assertTrue(all(x is not None for x in grads))
self.assertEqual(6.0, grads[0].eval())
def testAggregationMethodAccumulateN(self):
with self.test_session():
x = constant(1.0)
y = x * 2.0
z = y + y + y + y + y + y + y + y + y + y
grads = gradients.gradients(
z,
[x, y],
aggregation_method=
gradients.AggregationMethod.EXPERIMENTAL_ACCUMULATE_N)
self.assertTrue(all(x is not None for x in grads))
self.assertEqual(20.0, grads[0].eval())
self.assertEqual(10.0, grads[1].eval())
def testAggregationMethodAddN(self):
with self.test_session():
x = constant(1.0)
y = x * 2.0
z = y + y + y + y + y + y + y + y + y + y
grads = gradients.gradients(
z,
[x, y],
aggregation_method=gradients.AggregationMethod.ADD_N)
self.assertTrue(all(x is not None for x in grads))
self.assertEqual(20.0, grads[0].eval())
self.assertEqual(10.0, grads[1].eval())
def testAggregationMethodTree(self):
with self.test_session():
x = constant(1.0)
y = x * 2.0
z = y + y + y + y + y + y + y + y + y + y
grads = gradients.gradients(
z,
[x, y],
aggregation_method=gradients.AggregationMethod.EXPERIMENTAL_TREE)
self.assertTrue(all(x is not None for x in grads))
self.assertEqual(20.0, grads[0].eval())
self.assertEqual(10.0, grads[1].eval())
def testNoGradientForStringOutputs(self):
with ops.Graph().as_default():
def _TestOpGrad(_, float_grad, string_grad):
"""Gradient function for TestStringOutput."""
self.assertEquals(float_grad.dtype, dtypes.float32)
self.assertFalse(string_grad)
return float_grad
ops.RegisterGradient("TestStringOutput")(_TestOpGrad)
c = constant(1.0)
x, _ = test_ops.test_string_output(c)
z = x * 2.0
w = z * 3.0
grads = gradients.gradients(z, [c])
self.assertTrue(isinstance(grads[0], ops.Tensor))
grads = gradients.gradients(w, [c])
self.assertTrue(isinstance(grads[0], ops.Tensor))
def testSingletonIndexedSlices(self):
with ops.Graph().as_default():
x = tf.placeholder(tf.float32)
y = tf.identity(x)
dy = tf.IndexedSlices(tf.placeholder(tf.float32),
tf.placeholder(tf.int32))
dx, = gradients.gradients(y, x, grad_ys=dy)
# The gradient of tf.identity should pass the value through unchanged.
# A previous version of the code did this only for tf.Tensor, not
# tf.IndexedSlices.
self.assertEqual(dx, dy)
class FunctionGradientsTest(test_util.TensorFlowTestCase):
@classmethod
def XSquarePlusB(cls, x, b):
return x * x + b
@classmethod
def XSquarePlusBGradient(cls, x, b, g):
# Perturb gradients (multiply by 2), so we can test that this was called.
g *= 2.0
return g * 2.0 * x, g
@classmethod
def _PythonGradient(cls, op, grad):
# Perturb gradients (multiply by 3), so we can test that this was called.
grad *= 3.0
return grad * op.inputs[0] * 2.0, grad
@classmethod
def _GetFunc(cls, **kwargs):
return function.Defun(tf.float32, tf.float32, **kwargs)(
cls.XSquarePlusB)
def _GetFuncGradients(self, f, x_value, b_value):
x = tf.constant(x_value, name="x")
b = tf.constant(b_value, name="b")
y = f(x, b)
grads = gradients.gradients(y, [x, b])
with self.test_session() as sess:
return sess.run(grads)
def testFunctionGradientsBasic(self):
g = ops.Graph()
with g.as_default():
f = self._GetFunc()
# Get gradients (should add SymbolicGradient node for function).
grads = self._GetFuncGradients(f, [2.0], [1.0])
self.assertAllEqual([4.0], grads[0])
self.assertAllEqual([1.0], grads[1])
def testFunctionGradientsComposition(self):
with ops.Graph().as_default():
f = self._GetFunc()
x = tf.constant([2.0], name="x")
b1 = tf.constant([1.0], name="b1")
b2 = tf.constant([1.0], name="b2")
y = f(f(x, b1), b2)
# Build gradient graph (should add SymbolicGradient node for function).
grads = gradients.gradients(y, [x, b1])
with self.test_session() as sess:
self.assertAllEqual([40.0], sess.run(grads)[0])
self.assertAllEqual([10.0], sess.run(grads)[1])
def testFunctionGradientsWithGradFunc(self):
g = ops.Graph()
with g.as_default():
grad_func = function.Defun(tf.float32, tf.float32, tf.float32)(
self.XSquarePlusBGradient)
f = self._GetFunc(grad_func=grad_func)
# Get gradients (should add SymbolicGradient node for function, which
# uses the grad_func above, which multiplies all gradients by 2).
grads = self._GetFuncGradients(f, [2.0], [1.0])
self.assertAllEqual([4.0 * 2], grads[0])
self.assertAllEqual([1.0 * 2], grads[1])
def testFunctionGradientWithRegistration(self):
g = ops.Graph()
with g.as_default():
f = self._GetFunc(python_grad_func=self._PythonGradient)
# Get gradients, using the python gradient function. It multiplies the
# gradients by 3.
grads = self._GetFuncGradients(f, [2.0], [1.0])
self.assertAllEqual([4.0 * 3], grads[0])
self.assertAllEqual([1.0 * 3], grads[1])
def testFunctionGradientWithGradFuncAndRegistration(self):
g = ops.Graph()
with g.as_default():
grad_func = function.Defun(tf.float32, tf.float32, tf.float32)(
self.XSquarePlusBGradient)
with self.assertRaisesRegexp(ValueError, "Gradient defined twice"):
f = self._GetFunc(grad_func=grad_func,
python_grad_func=self._PythonGradient)
f.add_to_graph(tf.Graph())
class StopGradientTest(test_util.TensorFlowTestCase):
def testStopGradient(self):
with ops.Graph().as_default():
inp = constant(1.0, shape=[100, 32], name="in")
out = array_ops.stop_gradient(inp)
igrad = gradients.gradients(out, inp)[0]
assert igrad is None
class HessianVectorProductTest(test_util.TensorFlowTestCase):
def testHessianVectorProduct(self):
# Manually compute the Hessian explicitly for a low-dimensional problem
# and check that HessianVectorProduct matches multiplication by the
# explicit Hessian.
# Specifically, the Hessian of f(x) = x^T A x is
# H = A + A^T.
# We expect HessianVectorProduct(f(x), x, v) to be H v.
m = 4
rng = np.random.RandomState([1, 2, 3])
mat_value = rng.randn(m, m).astype("float32")
v_value = rng.randn(m, 1).astype("float32")
x_value = rng.randn(m, 1).astype("float32")
hess_value = mat_value + mat_value.T
hess_v_value = np.dot(hess_value, v_value)
for use_gpu in [False, True]:
with self.test_session(use_gpu=use_gpu):
mat = constant_op.constant(mat_value)
v = constant_op.constant(v_value)
x = constant_op.constant(x_value)
mat_x = math_ops.matmul(mat, x, name="Ax")
x_mat_x = math_ops.matmul(array_ops.transpose(x), mat_x, name="xAx")
hess_v = gradients_impl._hessian_vector_product(x_mat_x, [x], [v])[0]
hess_v_actual = hess_v.eval()
self.assertAllClose(hess_v_value, hess_v_actual)
class HessianTest(test_util.TensorFlowTestCase):
def testHessian1D(self):
# Manually compute the Hessian explicitly for a low-dimensional problem
# and check that `hessian` matches. Specifically, the Hessian of
# f(x) = x^T A x is H = A + A^T.
m = 4
rng = np.random.RandomState([1, 2, 3])
mat_value = rng.randn(m, m).astype("float32")
x_value = rng.randn(m).astype("float32")
hess_value = mat_value + mat_value.T
with self.test_session(use_gpu=True):
mat = constant_op.constant(mat_value)
x = constant_op.constant(x_value)
x_mat_x = math_ops.reduce_sum(x[:, None] * mat * x[None, :])
hess = gradients.hessians(x_mat_x, x)[0]
hess_actual = hess.eval()
self.assertAllClose(hess_value, hess_actual)
def testHessian1D_multi(self):
# Test the computation of the hessian with respect to multiple tensors
m = 4
n = 3
rng = np.random.RandomState([1, 2, 3])
mat_values = [rng.randn(m, m).astype("float32") for _ in range(n)]
x_values = [rng.randn(m).astype("float32") for _ in range(n)]
hess_values = [mat_value + mat_value.T for mat_value in mat_values]
with self.test_session(use_gpu=True):
mats = [constant_op.constant(mat_value) for mat_value in mat_values]
xs = [constant_op.constant(x_value) for x_value in x_values]
xs_mats_xs = [math_ops.reduce_sum(x[:, None] * mat * x[None, :])
for x, mat in zip(xs, mats)]
hessians = gradients.hessians(xs_mats_xs, xs)
hessians_actual = [hess.eval() for hess in hessians]
for hess_value, hess_actual in zip(hess_values, hessians_actual):
self.assertAllClose(hess_value, hess_actual)
def testHessianInvalidDimension(self):
for shape in [(10, 10), None]:
with self.test_session(use_gpu=True):
x = array_ops.placeholder(tf.float32, shape)
# Expect a ValueError because the dimensions are wrong
with self.assertRaises(ValueError):
gradients.hessians(x, x)
class IndexedSlicesToTensorTest(test_util.TensorFlowTestCase):
def testIndexedSlicesToTensor(self):
with self.test_session():
np_val = np.random.rand(4, 4, 4, 4).astype(np.float32)
c = constant_op.constant(np_val)
c_sparse = math_ops._as_indexed_slices(c)
self.assertAllEqual(np_val.shape, c_sparse.dense_shape.eval())
c_dense = math_ops.mul(c_sparse, 1.0)
self.assertAllClose(np_val, c_dense.eval())
def testIndexedSlicesToTensorList(self):
with self.test_session():
numpy_list = []
dense_list = []
sparse_list = []
for _ in range(3):
np_val = np.random.rand(4, 4, 4, 4).astype(np.float32)
c = constant_op.constant(np_val)
c_sparse = math_ops._as_indexed_slices(c)
numpy_list.append(np_val)
dense_list.append(c)
sparse_list.append(c_sparse)
packed_dense = array_ops.pack(dense_list)
packed_sparse = array_ops.pack(sparse_list)
self.assertAllClose(packed_dense.eval(), packed_sparse.eval())
def testInt64Indices(self):
with self.test_session():
np_val = np.random.rand(4, 4, 4, 4).astype(np.float32)
c = constant_op.constant(np_val)
c_sparse = math_ops._as_indexed_slices(c)
c_sparse = ops.IndexedSlices(
c_sparse.values, math_ops.cast(c_sparse.indices, dtypes.int64),
c_sparse.dense_shape)
self.assertAllEqual(np_val.shape, c_sparse.dense_shape.eval())
c_dense = math_ops.mul(c_sparse, 1.0)
self.assertAllClose(np_val, c_dense.eval())
def testWarnings(self):
# Smaller than the threshold: no warning.
c_sparse = ops.IndexedSlices(array_ops.placeholder(dtypes.float32),
array_ops.placeholder(dtypes.int32),
constant([4, 4, 4, 4]))
with warnings.catch_warnings(record=True) as w:
math_ops.mul(c_sparse, 1.0)
self.assertEqual(0, len(w))
# Greater than or equal to the threshold: warning.
c_sparse = ops.IndexedSlices(array_ops.placeholder(dtypes.float32),
array_ops.placeholder(dtypes.int32),
constant([100, 100, 100, 100]))
with warnings.catch_warnings(record=True) as w:
math_ops.mul(c_sparse, 1.0)
self.assertEqual(1, len(w))
self.assertTrue(
"with 100000000 elements. This may consume a large amount of memory."
in str(w[0].message))
# Unknown dense shape: warning.
c_sparse = ops.IndexedSlices(array_ops.placeholder(dtypes.float32),
array_ops.placeholder(dtypes.int32),
array_ops.placeholder(dtypes.int32))
with warnings.catch_warnings(record=True) as w:
math_ops.mul(c_sparse, 1.0)
self.assertEqual(1, len(w))
self.assertTrue(
"of unknown shape. This may consume a large amount of memory."
in str(w[0].message))
if __name__ == "__main__":
googletest.main()
| laosiaudi/tensorflow | tensorflow/python/ops/gradients_test.py | Python | apache-2.0 | 20,925 |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
class SkipNode(object):
__slots__ = ('key', 'payload', 'next', 'level')
def __init__(self, key, level=1, payload=None):
self.key = key
self.payload = payload
self.next = [None] * level
def __str__(self):
return "SkipNode(%s)" % self.key
| hongta/practice-python | data_structures/skip_list/skip_node.py | Python | mit | 344 |
# Copyright 2011 OpenStack Foundation.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from oslo.config import cfg
from nova.openstack.common import context as req_context
from nova.openstack.common.gettextutils import _
from nova.openstack.common import log as logging
from nova.openstack.common import rpc
LOG = logging.getLogger(__name__)
notification_topic_opt = cfg.ListOpt(
'notification_topics', default=['notifications', ],
help='AMQP topic used for openstack notifications')
CONF = cfg.CONF
CONF.register_opt(notification_topic_opt)
def notify(context, message):
"""Sends a notification via RPC"""
if not context:
context = req_context.get_admin_context()
priority = message.get('priority',
CONF.default_notification_level)
priority = priority.lower()
for topic in CONF.notification_topics:
topic = '%s.%s' % (topic, priority)
try:
rpc.notify(context, topic, message)
except Exception:
LOG.exception(_("Could not send notification to %(topic)s. "
"Payload=%(message)s"), locals())
| zestrada/nova-cs498cc | nova/openstack/common/notifier/rpc_notifier.py | Python | apache-2.0 | 1,685 |
"""
Project Euler functions
"""
def sumcd(num):
"""
    sum of consecutive integers: returns the sum of all integers from 1 through num
    ex: 5 would be 15 (1 + 2 + 3 + 4 + 5)
    param: num: positive integer up to which the consecutive integers are summed
    return: sum of the consecutive integers from 1 through num
"""
return num * (num + 1) // 2
def sumpf(num):
"""
sum of the positive factors
:param num: an integer greater than 1
:return: sum of all the positive factors of num (excluding num itself)
"""
results = set()
results.add(1)
for i in range(2, int(num**0.5)+1):
if not num % i:
results.add(i)
results.add(num//i)
return sum(results)
def prime_sieve(n):
    """Return the primes below n, computed with a sieve over the odd numbers."""
sieve = [True] * (n//2)
for i in range(3, int(n**0.5)+1, 2):
if sieve[i//2]:
sieve[i*i//2::i] = [False] * ((n-i*i-1)//(2*i)+1)
return [2] + [2*i+1 for i in range(1, n//2) if sieve[i]]
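# Illustrative self-check (not part of the original module) exercising the
# helpers above; expected values are noted in the comments.
if __name__ == "__main__":
    print(sumcd(5))         # 15, i.e. 1 + 2 + 3 + 4 + 5
    print(sumpf(12))        # 16, the proper divisors 1 + 2 + 3 + 4 + 6
    print(prime_sieve(20))  # [2, 3, 5, 7, 11, 13, 17, 19]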
| GHMusicalCoder/project_euler | Euler_funcs.py | Python | apache-2.0 | 912 |
from datetime import datetime, timedelta, date, time
import random
from django.test import TestCase as DjangoTestCase
from django.test.client import RequestFactory
from ..models import Event
def hours_ago(num):
return datetime.now() - timedelta(hours=num)
def hours_ahead(num):
return datetime.now() + timedelta(hours=num)
def start_of_day():
return datetime.combine(date.today(), time())
def end_of_day():
return datetime.combine(date.today(), time(23, 59))
def generate_random_event(start_date, end_date):
slug = 'random-slug-%s' % random.randint(100, 1000)
title = 'Random title %s' % random.randint(100, 1000)
location = 'Random lugar %s' % random.randint(100, 1000)
pub_date = datetime.now()
pub_status = 'P'
return Event.objects.create(slug=slug, title=title, start_date=start_date,
end_date=end_date, location=location, pub_date=pub_date,
pub_status=pub_status)
class TestCase(DjangoTestCase):
def setUp(self):
self.factory = RequestFactory()
def assertInContext(self, var_name, other, template_or_context):
# TODO: support passing in a straight "context" (i.e., dict)
context = template_or_context.context_data
self.assertTrue(var_name in context,
msg="`%s` not in provided context" % var_name)
self.assertEqual(context[var_name], other)
| armstrong/armstrong.apps.events | armstrong/apps/events/tests/_utils.py | Python | apache-2.0 | 1,387 |
from django.core import validators
from django.forms import CharField, ChoiceField, MultiValueField
from django.utils.translation import ugettext_lazy as _
from django.core.exceptions import ValidationError
from proposal.widgets import TimeDurationWidget, TimeDurationHiddenWidget
TIME_DURATION_CHOICES = (
('hours', _(u'Hours')),
('minutes', _(u'Minutes')),
)
class TimeDurationField(MultiValueField):
widget = TimeDurationWidget(choices=TIME_DURATION_CHOICES)
hidden_widget = TimeDurationHiddenWidget
default_error_messages = {
'required': _(u'Enter a duration value.'),
}
def __init__(self, *args, **kwargs):
errors = self.default_error_messages.copy()
if 'error_messages' in kwargs:
errors.update(kwargs['error_messages'])
fields = (CharField(error_messages={'required': errors['required']}),
ChoiceField(choices=TIME_DURATION_CHOICES))
super(TimeDurationField, self).__init__(fields, *args, **kwargs)
def compress(self, data_list):
if data_list:
if data_list[0] in validators.EMPTY_VALUES:
raise ValidationError(self.error_messages['required'])
return ' '.join(data_list)
return None
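# Illustrative sketch (not part of the original file): what compress() returns
# for already-cleaned values coming from the two sub-fields.
#
#     field = TimeDurationField()
#     field.compress(['30', 'minutes'])   # -> '30 minutes'
#     field.compress([])                  # -> None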
| arscariosus/django-mango | mango/apps/proposal/fields.py | Python | isc | 1,254 |
import unittest
import shutil
import os
from mock import MagicMock
try:
import queue
except ImportError:
import Queue as queue # flake8: noqa
from pax.parallel import multiprocess_locally
from pax.plugins.io.Queues import PullFromQueue, PushToQueue, NO_MORE_EVENTS, REGISTER_PUSHER, PUSHER_DONE
from pax.datastructure import Event
def fake_events(n):
result = []
for i in range(n):
e = Event(n_channels=1, start_time=0, length=100, sample_duration=10)
e.event_number = i
e.block_id = i // 10
result.append(e)
return result
class TestMultiprocessing(unittest.TestCase):
def test_ordered_pull(self):
# Test pulling from a queue in order. Queue is here just a local (non-multiprocessing) queue
q = queue.Queue()
p = PullFromQueue(dict(queue=q, ordered_pull=True), processor=MagicMock())
events = fake_events(20)
q.put((2, events[20:]))
q.put((0, events[:10]))
q.put((1, events[10:20]))
q.put((NO_MORE_EVENTS, None))
for i, e in enumerate(p.get_events()):
self.assertEqual(e.event_number, i)
def test_pull_multiple(self):
q = queue.Queue()
p = PullFromQueue(dict(queue=q, ordered_pull=True), processor=MagicMock())
events = fake_events(20)
q.put((REGISTER_PUSHER, 'gast'))
q.put((REGISTER_PUSHER, 'gozer'))
q.put((2, events[20:]))
q.put((0, events[:10]))
q.put((PUSHER_DONE, 'gozer'))
q.put((1, events[10:20]))
q.put((PUSHER_DONE, 'gast'))
for i, e in enumerate(p.get_events()):
self.assertEqual(e.event_number, i)
def test_push(self):
# Test pushing to a local (non-multiprocessing) queue
q = queue.Queue()
p = PushToQueue(dict(queue=q), processor=MagicMock())
# Submit a series of fake events, then shut down
events = fake_events(22)
for e in events:
p.write_event(e)
p.shutdown()
blocks_out = []
try:
while True:
blocks_out.append(q.get(timeout=1))
except queue.Empty:
pass
# No more events message pushed to queue
self.assertEqual(blocks_out[-1], (NO_MORE_EVENTS, None))
blocks_out = blocks_out[:-1]
# Block ids must be correct
self.assertEqual([x[0] for x in blocks_out], [0, 1, 2])
# Block sizes are correct
self.assertEqual([len(x[1]) for x in blocks_out], [10, 10, 2])
def test_push_preserveid(self):
q = queue.Queue()
p = PushToQueue(dict(queue=q, preserve_ids=True), processor=MagicMock())
events = fake_events(22)
for e in events:
p.write_event(e)
p.shutdown()
blocks_out = []
try:
while True:
blocks_out.append(q.get(timeout=1))
except queue.Empty:
pass
# No more events message pushed to queue
self.assertEqual(blocks_out[-1], (NO_MORE_EVENTS, None))
blocks_out = blocks_out[:-1]
# Block ids must be correct
self.assertEqual([x[0] for x in blocks_out], [0, 1, 2])
# Block sizes are correct
self.assertEqual([len(x[1]) for x in blocks_out], [10, 10, 2])
def test_multiprocessing(self):
multiprocess_locally(n_cpus=2,
config_names='XENON100',
config_dict=dict(pax=dict(stop_after=10)))
def test_process_event_list_multiprocessing(self):
"""Take a list of event numbers from a file, and process them on two cores
"""
with open('temp_eventlist.txt', mode='w') as outfile:
outfile.write("0\n7\n")
config = {'pax': {'event_numbers_file': 'temp_eventlist.txt',
'plugin_group_names': ['input', 'output'],
'output_name': 'test_output',
'encoder_plugin': None,
'output': 'Table.TableWriter'},
'Table.TableWriter': {'output_format': 'csv'}}
multiprocess_locally(n_cpus=2, config_names='XENON100', config_dict=config)
# Check we actually wrote two events (and a header row)
self.assertTrue(os.path.exists('test_output'))
self.assertTrue(os.path.exists('test_output/Event.csv'))
with open('test_output/Event.csv') as infile:
self.assertEqual(len(infile.readlines()), 3)
# Cleanup
shutil.rmtree('test_output')
os.remove('temp_eventlist.txt')
if __name__ == '__main__':
# import logging
# import sys
# logging.basicConfig(stream=sys.stderr, level=logging.DEBUG)
unittest.main()
| XENON1T/pax | tests/test_multiprocessing.py | Python | bsd-3-clause | 4,745 |
from math import log, log1p, exp
__author__ = 'Deepak Borania'
def pv_factor(irate, period=1):
return (1 + irate) ** (-period)
def convert_effective_rate(irate, src_prd_length, target_prd_length):
return ((1 + irate) ** (float(target_prd_length) / src_prd_length)) - 1
def irate_nominal_to_effective(ip, p):
return ((1 + (float(ip) / p)) ** p) - 1
def irate_effective_to_nominal(i, p):
return p * (((1 + i) ** (1.0 / p)) - 1)
def drate_nominal_to_effective(dp, p):
return 1 - ((1 - (float(dp) / p)) ** p)
def drate_effective_to_nominal(d, p):
return p * (1 - ((1 - d) ** (1.0 / p)))
def irate_to_force(i):
return log1p(i)
def irate_to_v(i):
return pv_factor(i)
def irate_to_discrate(i):
return i * pv_factor(i)
def force_to_irate(delta):
return exp(delta) - 1
def force_to_v(delta):
return exp(-delta)
def force_to_discrate(delta):
return 1 - exp(-delta)
def v_to_force(v):
return -log(v)
def v_to_irate(v):
return (float(v) ** (-1)) - 1
def v_to_discount_rate(v):
return 1 - v
def discount_rate_to_force(d):
return -log(1 - d)
def discount_rate_to_irate(d):
return ((1 - d) ** (-1)) - 1
def discount_rate_to_v(d):
return 1 - d
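# Illustrative self-check (not part of the original module): a 6% effective
# annual rate expressed through a few of the converters above.
if __name__ == "__main__":
    i = 0.06
    print(irate_to_v(i))                       # discount factor v = 1 / (1 + i)
    print(irate_to_force(i))                   # force of interest, ln(1 + i)
    print(irate_effective_to_nominal(i, 12))   # nominal rate convertible monthly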
| antiface/pyActuary | pyactuary/rateconverters.py | Python | mit | 1,238 |
# -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
{
'name': 'Audit Trail',
'version': '1.0',
'category': 'Tools',
'description': """
This module lets administrator track every user operation on all the objects of the system.
===========================================================================================
The administrator can subscribe to rules for read, write and
delete on objects and can check logs.
""",
'author': 'OpenERP SA',
'website': 'http://www.openerp.com',
'depends': ['base'],
'init_xml': [],
'update_xml': [
'wizard/audittrail_view_log_view.xml',
'audittrail_view.xml',
'security/ir.model.access.csv',
'security/audittrail_security.xml'
],
'demo_xml': ['audittrail_demo.xml'],
'installable': True,
'auto_install': False,
'certificate': '0062572348749',
'images': ['images/audittrail1.jpeg','images/audittrail2.jpeg','images/audittrail3.jpeg'],
}
# vim:expandtab:smartindent:tabstop=4:softtabstop=4:shiftwidth=4:
| Johnzero/erp | openerp/addons/audittrail/__openerp__.py | Python | agpl-3.0 | 1,966 |
#!/usr/bin/env python
# OpenCenter(TM) is Copyright 2013 by Rackspace US, Inc.
##############################################################################
#
# OpenCenter is licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License. This
# version of OpenCenter includes Rackspace trademarks and logos, and in
# accordance with Section 6 of the License, the provision of commercial
# support services in conjunction with a version of OpenCenter which includes
# Rackspace trademarks and logos is prohibited. OpenCenter source code and
# details are available at: # https://github.com/rcbops/opencenter or upon
# written request.
#
# You may obtain a copy of the License at
# http://www.apache.org/licenses/LICENSE-2.0 and a copy, including this
# notice, is available in the LICENSE file accompanying this software.
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the # specific language governing permissions and limitations
# under the License.
#
##############################################################################
#
import os
import subprocess
import sys
import time
name = 'service'
def setup(config={}):
LOG.debug('Setting up service "service"')
register_action('service_start', service_action)
register_action('service_stop', service_action)
register_action('service_restart', service_action)
def service_action(input_data):
payload = input_data['payload']
action = input_data['action']
full_restart = False
sleep = 0
if not 'service' in payload:
return _return(1, 'no "service" in payload')
if 'sleep' in payload:
sleep = payload['sleep']
service = payload['service']
service_action = action.split('_')[1]
command = ['service', service, service_action]
LOG.debug('preparing to run service command: "%s"' % (' ').join(command))
result = subprocess.call(command, shell=False)
if sleep:
time.sleep(int(sleep))
return _return(result, os.strerror(result))
def _return(result_code, result_str, result_data=None):
if result_data is None:
result_data = {}
return {'result_code': result_code,
'result_str': result_str,
'result_data': result_data}
def _success(result_str='success', result_data=None):
if result_data is None:
result_data = {}
return _return(0, result_str, result_data)
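# Illustrative sketch (not part of the original plugin): the input_data shape
# that service_action() expects. The service name and sleep value are
# hypothetical, and LOG/register_action are injected by the agent at runtime.
#
#     input_data = {'action': 'service_restart',
#                   'payload': {'service': 'nginx', 'sleep': 2}}
#     service_action(input_data)   # runs "service nginx restart", then sleeps 2s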
| rcbops/opencenter-agent | opencenteragent/plugins/output/plugin_service.py | Python | apache-2.0 | 2,640 |
# -*- coding: utf-8 -*-
# Copyright (C) 2020 Matthias Luescher
#
# Authors:
# Matthias Luescher
#
# This file is part of edi.
#
# edi is free software: you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# edi is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with edi. If not, see <http://www.gnu.org/licenses/>.
import os
import argparse
import logging
from edi.commands.documentation import Documentation
from edi.lib.helpers import print_success
from edi.lib.documentationsteprunner import DocumentationStepRunner
def readable_directory(directory):
if not os.path.isdir(directory):
raise argparse.ArgumentTypeError("directory '{}' does not exist".format(directory))
if not os.access(directory, os.R_OK):
raise argparse.ArgumentTypeError("directory '{}' is not readable".format(directory))
return directory
def valid_output_directory(directory):
if not os.path.isdir(directory):
raise argparse.ArgumentTypeError("output directory '{}' does not exist".format(directory))
if not os.access(directory, os.W_OK):
raise argparse.ArgumentTypeError("output directory '{}' is not writable".format(directory))
return directory
class Render(Documentation):
def __init__(self):
super().__init__()
self.raw_input = None
self.rendered_output = None
@classmethod
def advertise(cls, subparsers):
help_text = "render the project documentation"
description_text = "Render the project documentation."
parser = subparsers.add_parser(cls._get_short_command_name(),
help=help_text,
description=description_text)
exclusive_group = cls._offer_options(parser, introspection=True, clean=False)
exclusive_group.add_argument('--clean', action="store_true",
help='clean the artifacts that got produced by this command')
parser.add_argument('raw_input', type=readable_directory,
help="directory containing the input files")
parser.add_argument('rendered_output', type=valid_output_directory,
help="directory receiving the output files")
cls._require_config_file(parser)
@staticmethod
def _unpack_cli_args(cli_args):
return [cli_args.raw_input, cli_args.rendered_output, cli_args.config_file]
def run_cli(self, cli_args):
self._dispatch(*self._unpack_cli_args(cli_args), run_method=self._get_run_method(cli_args))
def dry_run(self, raw_input, rendered_output, config_file):
return self._dispatch(raw_input, rendered_output, config_file, run_method=self._dry_run)
def _dry_run(self):
plugins = DocumentationStepRunner(self.config, self.raw_input, self._result()).get_plugin_report()
return plugins
def run(self, raw_input, rendered_output, config_file):
return self._dispatch(raw_input, rendered_output, config_file, run_method=self._run)
def _run(self):
print("Going to render project documentation to '{}'.".format(self._result()))
documentation_step_runner = DocumentationStepRunner(self.config, self.raw_input, self._result())
documentation_step_runner.check_for_absence_of_output_files()
documentation_step_runner.run_all()
print_success("Rendered project documentation to '{}'.".format(self._result()))
return self._result()
def clean_recursive(self, raw_input, rendered_output, config_file, _):
self._dispatch(raw_input, rendered_output, config_file, run_method=self._clean)
def _clean(self):
documentation_step_runner = DocumentationStepRunner(self.config, self.raw_input, self._result())
documentation_step_runner.clean()
def _dispatch(self, raw_input, rendered_output, config_file, run_method):
self._setup_parser(config_file)
self.raw_input = os.path.abspath(raw_input)
self.rendered_output = os.path.abspath(rendered_output)
if os.getuid() == 0:
            logging.warning('You should not use the render command as root!')
return run_method()
def _result(self):
return self.rendered_output
| lueschem/edi | edi/commands/documentationcommands/render.py | Python | lgpl-3.0 | 4,683 |
# coding: utf-8
import functions
def datasets(*args):
if args[0] == "0":
raise functions.OperatorError("DATASET", "Dataset does not exist")
else:
return 1
datasets.registered = True
if not ('.' in __name__):
"""
This is needed to be able to test the function, put it at the end of every
new function you create
"""
import sys
from functions import *
testfunction()
if __name__ == "__main__":
reload(sys)
sys.setdefaultencoding('utf-8')
import doctest
doctest.testmod()
| madgik/exareme | Exareme-Docker/src/exareme/exareme-tools/madis/src/functions/row/datasets.py | Python | mit | 564 |
"""
======================================
Generalized hyperfine component fitter
======================================
.. moduleauthor:: Adam Ginsburg <[email protected]>
"""
import numpy as np
import model
import fitter
from astropy import units as u
# should be imported in the future
ckms = 2.99792458e5
class hyperfinemodel(object):
"""
Wrapper for the hyperfine model class. Specify the offsets and relative
strengths when initializing, then you've got yourself a hyperfine modeler.
There are a wide variety of different fitter attributes, each designed to
free a different subset of the parameters. Their purposes should be
evident from their names.
"""
def __init__(self, line_names, voff_lines_dict, freq_dict,
line_strength_dict, relative_strength_total_degeneracy):
"""
Initialize the various parameters defining the hyperfine transitions
Parameters
----------
line_names: list
list of the line names to be used as indices for the dictionaries
voff_lines_dict: dict
a linename:v_off dictionary of velocity offsets for the hyperfine
components. Technically, this is redundant with freq_dict
freq_dict: dict
            frequencies of the individual transitions
line_strength_dict: dict
Relative strengths of the hyperfine components, usually determined
by their degeneracy and Einstein A coefficients
"""
self.line_names = line_names
self.voff_lines_dict = voff_lines_dict
self.freq_dict = freq_dict
self.line_strength_dict = line_strength_dict
self.relative_strength_total_degeneracy = relative_strength_total_degeneracy
self.fitter = model.SpectralModel(self,4,
parnames=['Tex','tau','center','width'],
parlimited=[(False,False), (True,False), (False,False), (True,False)],
parlimits=[(0,0), (0,0), (0,0), (0,0)],
shortvarnames=("T_{ex}","\\tau","v","\\sigma"), # specify the parameter names (TeX is OK)
fitunits='Hz' )
self.nlines = len(line_names)
self.varyhf_fitter = model.SpectralModel(self.hyperfine_varyhf,3+self.nlines,
parnames=['Tex','center','width']+['tau%s' % k for k in self.line_names],
parlimited=[(False,False), (False,False), (True,False)] + [(True,False),]*self.nlines,
parlimits=[(0,0), (0,0), (0,0)]+[(0,0),]*self.nlines,
shortvarnames=("T_{ex}","v","\\sigma") + tuple(("\\tau(\\mathrm{%s})" % k for k in self.line_names)),
# specify the parameter names (TeX is OK)
fitunits='Hz')
self.varyhf_amp_fitter = model.SpectralModel(self.hyperfine_varyhf_amp, 2+self.nlines,
parnames=['center','width']+['amp%s' % k for k in self.line_names],
parlimited=[(False,False), (True,False)] + [(True,False),]*self.nlines,
parlimits=[(0,0), (0,0)]+[(0,0),]*self.nlines,
shortvarnames=("v","\\sigma") + tuple(("amp(\\mathrm{%s})" % k for k in self.line_names)),
# specify the parameter names (TeX is OK)
fitunits='Hz')
self.varyhf_amp_width_fitter = model.SpectralModel(self.hyperfine_varyhf_amp_width,1+self.nlines*2,
parnames=['center']+['amp%s' % k for k in self.line_names]+['width%s' % k for k in self.line_names],
parlimited=[(False,False)] + [(True,False),]*self.nlines + [(True,False)]*self.nlines,
parlimits=[(0,0)]+[(0,0),]*self.nlines*2,
shortvarnames=(("v",) +
tuple(("amp(\\mathrm{%s})" % k for k in self.line_names)) +
tuple(("\\sigma(\\mathrm{%s})" % k for k in self.line_names))),
# specify the parameter names (TeX is OK)
fitunits='Hz' )
self.vheight_fitter = model.SpectralModel(fitter.vheightmodel(self),5,
parnames=['height','Tex','tau','center','width'],
parlimited=[(False,False), (False,False), (True,False), (False,False), (True,False)],
parlimits=[(0,0), (0,0), (0,0), (0,0), (0,0)],
shortvarnames=("H","T_{ex}","\\tau","v","\\sigma"), # specify the parameter names (TeX is OK)
fitunits='Hz' )
self.background_fitter = model.SpectralModel(self.hyperfine_addbackground,5,
parnames=['Tbackground','Tex','tau','center','width'],
              parlimited=[(True,False), (False,False), (True,False), (False,False), (True,False)],
              parlimits=[(0,0), (0,0), (0,0), (0,0), (0,0)],
shortvarnames=('T_{BG}',"T_{ex}","\\tau","v","\\sigma"), # specify the parameter names (TeX is OK)
fitunits='Hz')
self.ampfitter = model.SpectralModel(self.hyperfine_amp,3,
parnames=['amp','center','width'],
parlimited=[(False,False), (False,False), (True,False)],
parlimits=[(0,0), (0,0), (0,0)],
shortvarnames=("amp","v","\\sigma"), # specify the parameter names (TeX is OK)
fitunits='Hz' )
self.taufitter = model.SpectralModel(self.hyperfine_tau,3,
parnames=['tau','center','width'],
parlimited=[(True,False), (False,False), (True,False)],
parlimits=[(0,0), (0,0), (0,0)],
shortvarnames=(r'\tau',"v","\\sigma"), # specify the parameter names (TeX is OK)
fitunits='Hz')
self.totaltaufitter = model.SpectralModel(self.hyperfine_tau_total,3,
parnames=['tau','center','width'],
parlimited=[(True,False), (False,False), (True,False)],
parlimits=[(0,0), (0,0), (0,0)],
shortvarnames=(r'\tau',"v","\\sigma"), # specify the parameter names (TeX is OK)
fitunits='Hz')
def __call__(self, *args, **kwargs):
"""
Generate a model spectrum given an excitation temperature, optical depth, offset velocity, and velocity width.
"""
return self.hyperfine(*args,**kwargs)
def hyperfine_amp(self, xarr, amp=None, xoff_v=0.0, width=1.0,
return_hyperfine_components=False, Tbackground=2.73,
Tex=5.0, tau=0.1):
"""
wrapper of self.hyperfine with order of arguments changed
"""
return self.hyperfine(xarr, amp=amp, Tex=Tex, tau=tau, xoff_v=xoff_v,
width=width, return_hyperfine_components=return_hyperfine_components,
Tbackground=Tbackground)
def hyperfine_tau(self, xarr, tau, xoff_v, width, **kwargs):
""" same as hyperfine, but with arguments in a different order, AND
tau is returned instead of exp(-tau)"""
return self.hyperfine(xarr, tau=tau, xoff_v=xoff_v, width=width,
return_tau=True, **kwargs)
def hyperfine_tau_total(self, xarr, tau_total, xoff_v, width, **kwargs):
""" same as hyperfine, but with arguments in a different order, AND
tau is returned instead of exp(-tau), AND the *peak* tau is used"""
return self.hyperfine(xarr, tau_total=tau_total, xoff_v=xoff_v, width=width,
return_tau=True, **kwargs)
def hyperfine_varyhf(self, xarr, Tex, xoff_v, width, *args, **kwargs):
""" Wrapper of hyperfine for using a variable number of peaks with specified
tau """
return self.hyperfine(xarr, Tex=Tex, xoff_v=xoff_v, width=width,
tau=dict(zip(self.line_names,args)),
vary_hyperfine_tau=True, **kwargs)
def hyperfine_varyhf_amp(self, xarr, xoff_v, width, *args, **kwargs):
""" Wrapper of hyperfine for using a variable number of peaks with specified
amplitude (rather than tau). Uses some opaque tricks: Tex is basically ignored,
and return_tau means you're actually returning the amplitude,
which is just passed in as tau"""
return self.hyperfine(xarr, xoff_v=xoff_v, width=width,
tau=dict(zip(self.line_names,args)),
vary_hyperfine_tau=True,
return_tau=True, **kwargs)
def hyperfine_varyhf_amp_width(self, xarr, xoff_v, *args, **kwargs):
""" Wrapper of hyperfine for using a variable number of peaks with specified
amplitude (rather than tau). Uses some opaque tricks: Tex is basically ignored,
and return_tau means you're actually returning the amplitude,
which is just passed in as tau"""
if len(args) % 2 != 0:
raise ValueError("Incorrect number of arguments for varying amplitude"
" and width. Need N amplitudes, N widths.")
return self.hyperfine(xarr, xoff_v=xoff_v,
tau=dict(zip(self.line_names,args[:len(args)/2])),
width=dict(zip(self.line_names,args[len(args)/2:])),
vary_hyperfine_tau=True,
vary_hyperfine_width=True,
return_tau=True, **kwargs)
def hyperfine_addbackground(self, xarr, Tbackground=2.73, Tex=5.0, tau=0.1,
xoff_v=0.0, width=1.0, return_tau=False,
**kwargs):
"""
Identical to hyperfine, but adds Tbackground as a constant continuum
level
"""
if return_tau:
raise ValueError("Cannot return tau when adding a continuum background.")
return (self.hyperfine(xarr, Tbackground=Tbackground, Tex=Tex, tau=tau,
xoff_v=xoff_v, width=width, return_tau=False,
**kwargs)
+ Tbackground)
def hyperfine(self, xarr, Tex=5.0, tau=0.1, xoff_v=0.0, width=1.0,
return_hyperfine_components=False, Tbackground=2.73, amp=None,
return_tau=False, tau_total=None, vary_hyperfine_tau=False,
vary_hyperfine_width=False):
"""
Generate a model spectrum given an excitation temperature, optical depth, offset velocity, and velocity width.
Parameters
----------
return_tau : bool
If specified, return just the tau spectrum, ignoring Tex
tau_total : bool
If specified, use this *instead of tau*, and it tries to normalize
to the *peak of the line*
vary_hyperfine_tau : bool
If set to true, allows the hyperfine transition amplitudes to vary and
does not use the line_strength_dict. If set, `tau` must be a dict
"""
# Convert X-units to frequency in Hz
xarr = xarr.as_unit('Hz').value
# Ensure parameters are scalar / have no extra dims
if not np.isscalar(Tex): Tex = Tex.squeeze()
if not np.isscalar(xoff_v): xoff_v = xoff_v.squeeze()
if vary_hyperfine_width:
if not isinstance(width, dict):
raise TypeError("If varying the amplitude of the hyperfine lines, must specify tau as a dict")
else:
if not np.isscalar(width): width = width.squeeze()
if vary_hyperfine_tau:
if not isinstance(tau, dict):
raise TypeError("If varying the amplitude of the hyperfine lines, must specify tau as a dict")
else:
if not np.isscalar(tau): tau = tau.squeeze()
# Generate an optical depth spectrum as a function of the X-axis
tau_nu_cumul = np.zeros(len(xarr))
# Error check: inputing NANs results in meaningless output - return without computing a model
if (np.any(np.isnan((Tex,xoff_v))) or
((not vary_hyperfine_tau) and np.isnan(tau)) or
((not vary_hyperfine_width) and np.isnan(width))):
if return_hyperfine_components:
return [tau_nu_cumul] * len(self.line_names)
else:
return tau_nu_cumul
if tau_total is not None:
tau = 1
components =[]
for linename in self.line_names:
voff_lines = np.array(self.voff_lines_dict[linename])
lines = (1-voff_lines/ckms)*self.freq_dict[linename]
if not vary_hyperfine_width and width == 0:
tau_nu = xarr*0
else:
if vary_hyperfine_width:
nuwidth = np.abs(width[linename]/ckms*lines)
else:
nuwidth = np.abs(width/ckms*lines)
nuoff = xoff_v/ckms*lines
if vary_hyperfine_tau:
tau_line = tau[linename]
else:
# the total optical depth, which is being fitted, should be the sum of the components
tau_line = (tau * np.array(self.line_strength_dict[linename])/
np.array(self.relative_strength_total_degeneracy[linename]))
tau_nu = np.array(tau_line *
np.exp(-(xarr+nuoff-self.freq_dict[linename])**2 /
(2.0*nuwidth**2)))
tau_nu[tau_nu!=tau_nu] = 0 # avoid nans
components.append(tau_nu)
tau_nu_cumul += tau_nu
# add a list of the individual 'component' spectra to the total components...
if tau_total is not None:
tau_max = tau_nu_cumul.max() # danger of undersampling...
tau_nu_cumul *= tau_total/tau_max
for c in components:
c *= tau_total/tau_max
if return_hyperfine_components:
if return_tau:
return components
elif amp is None:
return (1.0-np.exp(-np.array(components)))*(Tex-Tbackground)
else:
comps = (1.0-np.exp(-np.array(components)))*(Tex-Tbackground)
return comps/comps.max() * amp
if return_tau:
return tau_nu_cumul
else:
# This is not the full equation of radiative transfer, but a
# background-subtracted version
# With "background" function B_nu = CMB, S_nu = absorber, and I_nu = received:
# I_nu = B_nu * exp(-tau) + (1-exp(-tau)) * S_nu
# This is a very good approximation for Rohlfs & Wilson eqn 15.29:
spec = (1.0-np.exp(-np.array(tau_nu_cumul)))*(Tex-Tbackground)
# This is the equation of radiative transfer using the RJ definitions
# (eqn 1.37 in Rohlfs)
# It is identical, except without T_background subtracted
# spec = Tex+(np.exp(-np.array(tau_nu_cumul)))*(Tbackground-Tex)
if amp is None:
return spec
else:
return spec/spec.max() * amp
| dinossimpson/pyspeckit | pyspeckit/spectrum/models/hyperfine.py | Python | mit | 14,893 |
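A minimal sketch of constructing a model from the five dictionaries described in `__init__`; the line names, frequencies, offsets, and strengths below are made-up illustrative values, not a real species.
line_names = ['F1', 'F2']
voff_lines_dict = {'F1': 0.0, 'F2': 7.4}            # velocity offsets in km/s
freq_dict = {'F1': 23.694e9, 'F2': 23.722e9}        # rest frequencies in Hz
line_strength_dict = {'F1': 0.6, 'F2': 0.4}         # relative line strengths
relative_strength_total_degeneracy = {'F1': 1.0, 'F2': 1.0}
hf = hyperfinemodel(line_names, voff_lines_dict, freq_dict,
                    line_strength_dict, relative_strength_total_degeneracy)
# hf.fitter fits (Tex, tau, center, width); hf.taufitter, hf.ampfitter, etc.
# free the alternative parameter subsets described in the class docstring.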
#!/usr/bin/env python
import sys
import cv2
import matplotlib.pyplot as plt
import numpy as np
import rospy as rp
import argparse
import tms_ss_ssd.srv
from sensor_msgs.msg import CompressedImage
def parse_args():
parser = argparse.ArgumentParser(description='Single-Shot Multibox Detector demo client')
parser.add_argument('--image', dest='image',
help='Image to detect',
default='/home/kazuto/Desktop/test.png')
return parser.parse_args()
class SSDClient:
def __init__(self):
rp.wait_for_service('ssd')
try:
self._client = rp.ServiceProxy('ssd', tms_ss_ssd.srv.obj_detection)
img = cv2.imread(parse_args().image, cv2.IMREAD_COLOR)
req = self._convert2msg(img)
res = self._client(req)
self._visualize(img, res)
except rp.ServiceException, e:
print 'Service call failed: %s' % e
@staticmethod
def _convert2msg(img):
req = CompressedImage()
req.header.stamp = rp.Time.now()
req.format = "jpeg"
req.data = np.array(cv2.imencode('.jpg', img)[1]).tostring()
return req
@staticmethod
def _visualize(img, res):
img = img[:, :, (2, 1, 0)]
fig, ax = plt.subplots(figsize=(12, 12))
ax.imshow(img, aspect='equal')
for obj in res.objects:
ax.add_patch(
plt.Rectangle((obj.region.x_offset, obj.region.y_offset),
obj.region.width,
obj.region.height,
fill=False, edgecolor='red', linewidth=3.5)
)
ax.text(obj.region.x_offset, obj.region.y_offset - 2,
'{:s} {:.3f}'.format(obj.class_name, obj.score),
bbox=dict(facecolor='blue', alpha=0.5),
fontsize=14, color='white')
plt.axis('off')
plt.tight_layout()
plt.draw()
plt.show()
def main(args):
rp.init_node('ssd_client', anonymous=True)
ic = SSDClient()
try:
rp.spin()
except KeyboardInterrupt:
print "Shutting down ROS Image feature detector module"
cv2.destroyAllWindows()
if __name__ == '__main__':
main(sys.argv)
| kazuto1011/rcnn-server | tms_ss_ssd/nodes/client/client_loaded_image.py | Python | mit | 2,314 |
from __future__ import absolute_import, unicode_literals
from django import template
register = template.Library()
@register.simple_tag(takes_context=True)
def if_url(context, url_name, yes, no):
"""
Example:
%li{ class:"{% if_url 'contacts.contact_read' 'active' '' %}" }
"""
current = context['request'].resolver_match.url_name
return yes if url_name == current else no
| peterayeni/dash | dash/utils/templatetags/utils.py | Python | bsd-3-clause | 405 |
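A hypothetical template fragment using the tag above; the URL name is taken from the docstring example, the surrounding markup is an assumption, and the library loads under its module name, `utils`.
{% load utils %}
<li class="{% if_url 'contacts.contact_read' 'active' '' %}">Contacts</li>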
#!/usr/bin/env python
ARCH_EXCLUDE = ['armel', 'alpha', 'hurd-i386', 'ia64', 'kfreebsd-amd64', 'kfreebsd-i386', 'mips', 'powerpc', 'ppc64el', 's390', 's390x', 'sparc']
CONTENT_EXCLUDE = ['binary-{arch}', 'installer-{arch}', 'Contents-{arch}.gz', 'Contents-udeb-{arch}.gz', 'Contents-{arch}.diff', 'arch-{arch}.files', 'arch-{arch}.list.gz', '*_{arch}.deb', '*_{arch}.udeb', '*_{arch}.changes']
with open("debian-exclude.txt", 'wb') as f:
f.write(".~tmp~/\n")
f.write(".*\n")
for arch in ARCH_EXCLUDE:
for content in CONTENT_EXCLUDE:
f.write(content.format(arch=arch))
f.write('\n')
| cubarco/tunasync | scripts/gen_debian_exclude.py | Python | gpl-3.0 | 597 |
# Copyright 2012 Nebula, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import copy
import json
from django import forms
from django import http
from django import shortcuts
from django.views import generic
import six
from horizon import exceptions
from horizon.forms import views as hz_views
from horizon.forms.views import ADD_TO_FIELD_HEADER # noqa
from horizon import messages
class WorkflowView(hz_views.ModalBackdropMixin, generic.TemplateView):
"""A generic class-based view which handles the intricacies of workflow
processing with minimal user configuration.
.. attribute:: workflow_class
The :class:`~horizon.workflows.Workflow` class which this view handles.
Required.
.. attribute:: template_name
The template to use when rendering this view via standard HTTP
requests. Required.
.. attribute:: ajax_template_name
The template to use when rendering the workflow for AJAX requests.
In general the default common template should be used. Defaults to
``"horizon/common/_workflow.html"``.
.. attribute:: context_object_name
The key which should be used for the workflow object in the template
context. Defaults to ``"workflow"``.
"""
workflow_class = None
template_name = 'horizon/common/_workflow_base.html'
context_object_name = "workflow"
ajax_template_name = 'horizon/common/_workflow.html'
step_errors = {}
def __init__(self):
super(WorkflowView, self).__init__()
if not self.workflow_class:
raise AttributeError("You must set the workflow_class attribute "
"on %s." % self.__class__.__name__)
def get_initial(self):
"""Returns initial data for the workflow. Defaults to using the GET
parameters to allow pre-seeding of the workflow context values.
"""
return copy.copy(self.request.GET)
def get_workflow_class(self):
"""Returns the workflow class"""
return self.workflow_class
def get_workflow(self):
"""Returns the instantiated workflow class."""
extra_context = self.get_initial()
entry_point = self.request.GET.get("step", None)
workflow = self.get_workflow_class()(self.request,
context_seed=extra_context,
entry_point=entry_point)
return workflow
def get_context_data(self, **kwargs):
"""Returns the template context, including the workflow class.
This method should be overridden in subclasses to provide additional
context data to the template.
"""
context = super(WorkflowView, self).get_context_data(**kwargs)
workflow = self.get_workflow()
context[self.context_object_name] = workflow
next = self.request.GET.get(workflow.redirect_param_name)
context['REDIRECT_URL'] = next
context['layout'] = self.get_layout()
# For consistency with Workflow class
context['modal'] = 'modal' in context['layout']
if ADD_TO_FIELD_HEADER in self.request.META:
context['add_to_field'] = self.request.META[ADD_TO_FIELD_HEADER]
return context
def get_layout(self):
"""returns classes for the workflow element in template based on
the workflow characteristics
"""
if self.request.is_ajax():
layout = ['modal', ]
if self.workflow_class.fullscreen:
layout += ['fullscreen', ]
else:
layout = ['static_page', ]
if self.workflow_class.wizard:
layout += ['wizard', ]
return layout
def get_template_names(self):
"""Returns the template name to use for this request."""
if self.request.is_ajax():
template = self.ajax_template_name
else:
template = self.template_name
return template
def get_object_id(self, obj):
return getattr(obj, "id", None)
def get_object_display(self, obj):
return getattr(obj, "name", None)
def add_error_to_step(self, error_msg, step):
self.step_errors[step] = error_msg
def set_workflow_step_errors(self, context):
workflow = context['workflow']
for step in self.step_errors:
error_msg = self.step_errors[step]
workflow.add_error_to_step(error_msg, step)
def get(self, request, *args, **kwargs):
"""Handler for HTTP GET requests."""
context = self.get_context_data(**kwargs)
self.set_workflow_step_errors(context)
return self.render_to_response(context)
def validate_steps(self, request, workflow, start, end):
"""Validates the workflow steps from ``start`` to ``end``, inclusive.
Returns a dict describing the validation state of the workflow.
"""
errors = {}
for step in workflow.steps[start:end + 1]:
if not step.action.is_valid():
errors[step.slug] = dict(
(field, [six.text_type(error) for error in errors])
for (field, errors) in six.iteritems(step.action.errors))
return {
'has_errors': bool(errors),
'workflow_slug': workflow.slug,
'errors': errors,
}
def render_next_steps(self, request, workflow, start, end):
"""render next steps
this allows change form content on the fly
"""
rendered = {}
request = copy.copy(self.request)
# patch request method, because we want render new form without
# validation
request.method = "GET"
new_workflow = self.get_workflow_class()(
request,
context_seed=workflow.context,
entry_point=workflow.entry_point)
for step in new_workflow.steps[end:]:
rendered[step.get_id()] = step.render()
return rendered
def post(self, request, *args, **kwargs):
"""Handler for HTTP POST requests."""
context = self.get_context_data(**kwargs)
workflow = context[self.context_object_name]
try:
# Check for the VALIDATE_STEP* headers, if they are present
# and valid integers, return validation results as JSON,
# otherwise proceed normally.
validate_step_start = int(self.request.META.get(
'HTTP_X_HORIZON_VALIDATE_STEP_START', ''))
validate_step_end = int(self.request.META.get(
'HTTP_X_HORIZON_VALIDATE_STEP_END', ''))
except ValueError:
# No VALIDATE_STEP* headers, or invalid values. Just proceed
# with normal workflow handling for POSTs.
pass
else:
# There are valid VALIDATE_STEP* headers, so only do validation
# for the specified steps and return results.
data = self.validate_steps(request, workflow,
validate_step_start,
validate_step_end)
next_steps = self.render_next_steps(request, workflow,
validate_step_start,
validate_step_end)
# append rendered next steps
data["rendered"] = next_steps
return http.HttpResponse(json.dumps(data),
content_type="application/json")
if not workflow.is_valid():
return self.render_to_response(context)
try:
success = workflow.finalize()
except forms.ValidationError:
return self.render_to_response(context)
except Exception:
success = False
exceptions.handle(request)
if success:
msg = workflow.format_status_message(workflow.success_message)
messages.success(request, msg)
else:
msg = workflow.format_status_message(workflow.failure_message)
messages.error(request, msg)
if "HTTP_X_HORIZON_ADD_TO_FIELD" in self.request.META:
field_id = self.request.META["HTTP_X_HORIZON_ADD_TO_FIELD"]
response = http.HttpResponse()
if workflow.object:
data = [self.get_object_id(workflow.object),
self.get_object_display(workflow.object)]
response.content = json.dumps(data)
response["X-Horizon-Add-To-Field"] = field_id
return response
next_url = self.request.POST.get(workflow.redirect_param_name)
return shortcuts.redirect(next_url or workflow.get_success_url())
| django-leonardo/horizon | horizon/workflows/views.py | Python | apache-2.0 | 9,305 |
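A minimal sketch of a concrete subclass wiring a workflow into this view; `MyWorkflow`, its import path, the template name, and the seeded key are all assumptions.
from horizon.workflows.views import WorkflowView

from myapp.workflows import MyWorkflow     # a horizon.workflows.Workflow subclass


class MyWorkflowView(WorkflowView):
    workflow_class = MyWorkflow
    template_name = 'myapp/workflow_create.html'

    def get_initial(self):
        # Pre-seed the workflow context on top of the GET parameters.
        initial = super(MyWorkflowView, self).get_initial()
        initial['user_id'] = self.request.user.id
        return initial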
from .base import Base
class Roles(Base):
endpoint = "/roles"
def get_role_by_id(self, role_id):
return self.client.get(self.endpoint + "/" + role_id)
def get_role_by_name(self, role_name):
return self.client.get(self.endpoint + "/name/" + role_name)
def patch_role(self, role_id, options=None):
return self.client.put(self.endpoint + "/" + role_id + "/patch", options=options)
def get_list_of_roles_by_name(self):
return self.client.get(
self.endpoint + "/names",
)
| Vaelor/python-mattermost-driver | src/mattermostdriver/endpoints/roles.py | Python | mit | 545 |
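A hypothetical call sequence through python-mattermost-driver, assuming the Driver exposes this endpoint as `driver.roles`; the server URL and token are placeholders.
from mattermostdriver import Driver

driver = Driver({'url': 'chat.example.com', 'token': 'xxxxxxxx',
                 'scheme': 'https', 'port': 443})
driver.login()
admin_role = driver.roles.get_role_by_name('system_admin')
role_names = driver.roles.get_list_of_roles_by_name()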
# Copyright (c) 2016 ARM Limited
# All rights reserved.
#
# The license below extends only to copyright in the software and shall
# not be construed as granting a license to any other intellectual
# property including but not limited to intellectual property relating
# to a hardware implementation of the functionality of the software
# licensed hereunder. You may use the software subject to the license
# terms below provided that you ensure that this notice is replicated
# unmodified and in its entirety in all distributions of the software,
# modified or unmodified, in source code or in binary form.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met: redistributions of source code must retain the above copyright
# notice, this list of conditions and the following disclaimer;
# redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution;
# neither the name of the copyright holders nor the names of its
# contributors may be used to endorse or promote products derived from
# this software without specific prior written permission.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
# "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
# LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
# A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
# OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
# SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
# LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
# DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
# THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
# (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
# OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
#
# Authors: Rekai Gonzalez
from m5.params import *
from m5.proxy import *
from m5.SimObject import SimObject
class VecRegRenameMode(Enum):
'''Enum for Rename Mode in rename map
Elem: Each native-elem in a vector register is renamed independently.
Full: Vectors are renamed as one unit.'''
vals = ['Full', 'Elem']
__all__ = ['VecRegRenameMode']
| rallylee/gem5 | src/arch/generic/ISACommon.py | Python | bsd-3-clause | 2,443 |
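A brief sketch of how such an enum is typically consumed: gem5 generates a matching `Param.VecRegRenameMode`, so a SimObject declaration can expose the rename mode as a configurable parameter. The class name, `cxx_header`, and parameter name below are assumptions for illustration.
class ExampleISA(SimObject):
    type = 'ExampleISA'
    cxx_header = "arch/example/isa.hh"
    vecRegRenameMode = Param.VecRegRenameMode(
        'Full', "Whether vector registers rename as full registers or per element")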
#!/usr/bin/env python
#
# Protein Engineering Analysis Tool DataBase (PEATDB)
# Copyright (C) 2010 Damien Farrell & Jens Erik Nielsen
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
# Contact information:
# Email: Jens.Nielsen_at_gmail.com
# Normal mail:
# Jens Nielsen
# SBBS, Conway Institute
# University College Dublin
# Dublin 4, Ireland
#
"""Class for dumping text files of current primer DB in DNAtool"""
import sys, os,copy
from Tkinter import *
class PDBDumper:
def __init__(self,parent,Dump2Dir,noDB=None):
from datetime import datetime
self.MaxDumpFiles = 3
self.parent = parent
#project name from DB_Main.. too many levels of reference!!
#should set the proj name from where this objected is created..
if noDB==None:
projName = self.parent.parent.parent.DB.meta['info']['project']
else:
projName = 'current'
now = datetime.today()
print " projname ",projName,"\n\n",str(now.time())," ",str(now.time()).split('.'),now.date(),
self.DumpName = projName+'.'+"PrimerDB"+"."+str(now.date())+str(now.time()).split('.')[0]+".csv"
self.DumpBase = projName+'.'+"PrimerDB"
print " DumpName "+self.DumpName
self.DIR = Dump2Dir
self.CheckDump()
#current primer db held in the parent DNAtool instance
if noDB==None:
self.primerDB = self.parent.parent.data['primer_dict']
else:
self.primerDB = []
return
# Does the file writing
def doDump(self,event=None):
"""Does the file writing"""
# if pdb is empty do nothing
try:
self.parent.parent.data.get('primer_dict')
except:
return
if event.widget == self.parent.pDB_win:
print "Doing dump.."
self.CheckDump()
list_csv = self.createCSV(self.primerDB)
#DFILE = self.DIR+'/'+self.DumpName
DFILE =os.path.join(self.DIR,self.DumpName)
#print "writing: ",list_csv
try:
fd=open(DFILE,'w')
for line in list_csv:
str = ''
for S in line:
str = str+S+","
fd.write(str+'\n')
fd.close()
print "File written ",DFILE
except:
print "could not write dump or pdb was empty"
# open file
#write file
return
# Creates the CSV file structure
def createCSV(self,Labbook):
"""Creates the CSV file structure"""
result = None
if self.parent.parent.data.has_key('primer_dict'):
PDB = self.parent.parent.data['primer_dict']
else:
return
primer_names=PDB.keys()
HeaderList = ['description', 'sequence']
ActualHeaderList = ['name','description', 'sequence']
DumpPDB = []
#DumpPDB.append(ActualHeaderList)
#print "Header list ",HeaderList
for entry in PDB:
#print "entry ",entry
tmp = []
tmp.append(entry)
for head in HeaderList:
#print PDB[entry][head]
tmp.append(PDB[entry][head])
DumpPDB.append(tmp)
result = DumpPDB
#print " DumpPDB ",DumpPDB
return result
# Checks to see which is oldest version present and overwrites it
def CheckDump(self):
import os,os.path
print "Checking Dump"
Files = os.listdir(self.DIR)
N_files = 0
Dump_files = []
for count,I in enumerate(Files):
if I.find(self.DumpBase) != -1:
#print "file ",self.DIR+"/"+I
Dump_files.append([I,os.path.getmtime(self.DIR+"/"+I)])
N_files += 1
Dump_files.sort(lambda x, y: cmp(x[1], y[1]))
Dump_files.reverse()
#print "Dumps ",Dump_files
# if we have more dump files than MaxDumpFiles, delete the oldest
# and create a new one
try:
#print "N ",len(Dump_files)
while len(Dump_files) >= self.MaxDumpFiles:
del_file = Dump_files.pop()
#print "remove dump file :",self.DIR+"/"+del_file[0],"\n\n"
rfile=os.path.join(self.DIR,del_file[0])
os.remove(rfile)
except:
print "Could not remove dump file "
#Dump_files.sort( os.getmtime( path) )
#getmtime( path)
return
| dmnfarrell/peat | PEATDB/DNAtool/PDBDumper.py | Python | mit | 5,131 |
from django.db import models
from django.conf import settings
from django.utils import timezone
from django.utils.timezone import utc
import urllib
import json
import datetime
class WeatherLocationManager(models.Manager):
def updateLocations(self):
apiKey = settings.MET_OFFICE_API_KEY
if apiKey != "":
try:
url = "http://datapoint.metoffice.gov.uk/public/data/val/wxfcs/all/json/sitelist?key=" + apiKey
page = urllib.urlopen(url).read()
jsonResult = json.loads(page)
locationItems = jsonResult['Locations']['Location']
for location in locationItems:
try:
weatherLocationObject = WeatherLocation.objects.get(locationId=location['id'])
except WeatherLocation.DoesNotExist:
weatherLocationObject = WeatherLocation()
weatherLocationObject.locationId = str(location['id'])
weatherLocationObject.name = location['name']
weatherLocationObject.longitude = location['longitude']
weatherLocationObject.latitude = location['latitude']
if location.has_key("region"):
weatherLocationObject.region = location['region']
if location.has_key("unitaryAuthArea"):
weatherLocationObject.unitaryAuthArea = location['unitaryAuthArea']
if location.has_key('elevation'):
weatherLocationObject.elevation = location['elevation']
weatherLocationObject.save()
except IOError, e:
pass
except AttributeError, e:
pass
class WeatherLocation(models.Model):
objects = WeatherLocationManager()
locationId = models.TextField()
name = models.TextField()
region = models.TextField(default="")
unitaryAuthArea = models.TextField(default="")
longitude = models.TextField()
latitude = models.TextField()
elevation = models.TextField(default="")
weather = models.OneToOneField('Weather', related_name='WeatherLocation', null=True)
def getWeather(self):
if self.weather == None:
#Get WEATHER FROM INTERNET HERE
weatherItem = Weather.objects.create()
weatherItem.save()
self.weather = weatherItem
self.save()
self.weather.update()
elif self.weather.LoadDate < datetime.datetime.utcnow().replace(tzinfo=utc) - datetime.timedelta(hours=1):
#UPDATE THE WEATHER ITEM
self.weather.update()
return self.weather
class Meta:
db_table = u'WeatherLocations'
class Weather(models.Model):
LoadDate = models.DateTimeField(auto_now=True)
MesurementUnits = models.TextField(default="[]")
FiveDayWeather = models.TextField(default="[]")
def setMesurementUnits(self, units):
self.MesurementUnits = json.dumps(units)
def getMesurementUnits(self):
if self.MesurementUnits == "":
return []
else:
return json.loads(self.MesurementUnits)
def setFiveDayWeather(self, weather):
self.FiveDayWeather = json.dumps(weather)
def getFiveDayWeather(self):
if self.FiveDayWeather == "":
return []
else:
return json.loads(self.FiveDayWeather)
def update(self):
try:
apiKey = settings.MET_OFFICE_API_KEY
fivedayurl = "http://datapoint.metoffice.gov.uk/public/data/val/wxfcs/all/json/" + self.WeatherLocation.locationId + "?res=daily&key=" + apiKey
weatherArray = json.loads(urllib.urlopen(fivedayurl).read())
self.setMesurementUnits(weatherArray['SiteRep']['Wx']['Param'])
self.setFiveDayWeather(weatherArray['SiteRep']['DV'])
self.LoadDate = timezone.now()
self.save()
except IOError, e:
pass
except AttributeError, e:
pass
def getTempUnits(self):
if self.getMesurementUnits()[0]['units'] == "F":
return "Fahrenheit"
else:
return "Celsius"
def getTempMax(self, day=0):
return self.getFiveDayWeather()['Location']['Period'][day]['Rep'][0]['Dm']
def getTempMin(self, day=0):
return self.getFiveDayWeather()['Location']['Period'][day]['Rep'][1]['Nm']
def getFeelsLikeTemp(self, day=0, forDay=True):
if forDay:
dayInt = 0
tempKey = "FDm"
else:
dayInt = 1
tempKey = "FNm"
return self.getFiveDayWeather()['Location']['Period'][day]['Rep'][dayInt][tempKey]
def getHumidity(self, day=0, forDay=True):
if forDay:
dayInt = 0
tempKey = "Hn"
else:
dayInt = 1
tempKey = "Hm"
return self.getFiveDayWeather()['Location']['Period'][day]['Rep'][dayInt][tempKey]
def getWindDirection(self, day=0):
return self.getFiveDayWeather()['Location']['Period'][day]['Rep'][0]['D']
def getWindSpeed(self, day=0):
return self.getFiveDayWeather()['Location']['Period'][day]['Rep'][0]['S']
def getWindGust(self, day=0, forDay=True):
if forDay:
dayInt = 0
tempKey = "Gn"
else:
dayInt = 1
tempKey = "Gm"
return self.getFiveDayWeather()['Location']['Period'][day]['Rep'][dayInt][tempKey]
def getWeatherTypeNum(self, day=0, isNight=0):
return self.getFiveDayWeather()['Location']['Period'][day]['Rep'][isNight]['W']
def getMaxUVIndex(self, day=0):
return self.getFiveDayWeather()['Location']['Period'][day]['Rep'][0]['U']
def getPrecipitationProbability(self, day=0, forDay=True):
if forDay:
dayInt = 0
tempKey = "PPd"
else:
dayInt = 1
tempKey = "PPn"
return self.getFiveDayWeather()['Location']['Period'][day]['Rep'][dayInt][tempKey]
def getVisibilityNum(self, day=0):
return self.getFiveDayWeather()['Location']['Period'][day]['Rep'][0]['V']
def getVisibilityText(self, day=0):
visabilityNum = self.getVisibilityNum(day)
if visabilityNum == "UN":
return "Unknown"
elif visabilityNum == "VP":
return "Less than 1km" #Very poor
elif visabilityNum == "PO":
return "Between 1-4 km" #Poor
elif visabilityNum == "MO":
return "Between 4-10 km" #Moderate
elif visabilityNum == "GO":
return "Between 10-20 km" #Good
elif visabilityNum == "VG":
return "Between 20-40 km" #Very good
elif visabilityNum == "EX":
return "More than 40 km" #Excellent
else:
return "Unavalable"
def getWeatherTypeText(self, day=0, isNight=0):
weatherNum = int(self.getWeatherTypeNum(day, isNight))
		if not (-1 < weatherNum < 31):
			return "Not available"
		else:
states = ["Clear night",
"Sunny day",
"Partly cloudy",
"Partly cloudy",
"Not available",
"Mist",
"Fog",
"Cloudy",
"Overcast",
"Light rain shower",
"Light rain shower",
"Drizzle",
"Light rain",
"Heavy rain shower",
"Heavy rain shower",
"Heavy rain",
"Sleet shower",
"Sleet shower",
"Sleet",
"Hail shower",
"Hail shower",
"Hail",
"Light snow shower",
"Light snow shower",
"Light snow",
"Heavy snow shower",
"Heavy snow shower",
"Heavy snow",
"Thunder shower",
"Thunder shower",
"Thunder"]
return states[weatherNum]
class Meta:
db_table = u'WeatherForcast'
| Tomcuzz/OctaHomeAutomation | OctaHomeCore/models/weather.py | Python | mit | 6,720 |
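A rough usage sketch, assuming a configured Django project with MET_OFFICE_API_KEY set and a stored location named 'London'; getWeather() re-fetches from DataPoint at most once per hour per location.
WeatherLocation.objects.updateLocations()             # sync the DataPoint site list
location = WeatherLocation.objects.get(name='London')
weather = location.getWeather()                       # cached Weather row, refreshed hourly
day_max = weather.getTempMax(day=0)                   # daytime maximum for today
units = weather.getTempUnits()                        # "Celsius" or "Fahrenheit"
summary = weather.getWeatherTypeText(day=0, isNight=0)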
"""Test for `cspreports.urls`."""
from django.test import SimpleTestCase
from django.urls import include, re_path
import cspreports.urls
class TestCSPReportsURLs(SimpleTestCase):
"""Basic tests of CSP reports urls."""
def test_nice_report_empty(self):
self.assertTrue(len(include('cspreports.urls')) > 0)
| adamalton/django-csp-reports | cspreports/tests/test_urls.py | Python | mit | 326 |
# Copyright Iris contributors
#
# This file is part of Iris and is released under the LGPL license.
# See COPYING and COPYING.LESSER in the root of the repository for full
# licensing details.
"""
A package for handling multi-dimensional data and associated metadata.
.. note ::
The Iris documentation has further usage information, including
a :ref:`user guide <user_guide_index>` which should be the first port of
call for new users.
The functions in this module provide the main way to load and/or save
your data.
The :func:`load` function provides a simple way to explore data from
the interactive Python prompt. It will convert the source data into
:class:`Cubes <iris.cube.Cube>`, and combine those cubes into
higher-dimensional cubes where possible.
The :func:`load_cube` and :func:`load_cubes` functions are similar to
:func:`load`, but they raise an exception if the number of cubes is not
what was expected. They are more useful in scripts, where they can
provide an early sanity check on incoming data.
The :func:`load_raw` function is provided for those occasions where the
automatic combination of cubes into higher-dimensional cubes is
undesirable. However, it is intended as a tool of last resort! If you
experience a problem with the automatic combination process then please
raise an issue with the Iris developers.
To persist a cube to the file-system, use the :func:`save` function.
All the load functions share very similar arguments:
* uris:
Either a single filename/URI expressed as a string or
:class:`pathlib.PurePath`, or an iterable of filenames/URIs.
Filenames can contain `~` or `~user` abbreviations, and/or
Unix shell-style wildcards (e.g. `*` and `?`). See the
standard library function :func:`os.path.expanduser` and
module :mod:`fnmatch` for more details.
.. warning::
If supplying a URL, only OPeNDAP Data Sources are supported.
* constraints:
Either a single constraint, or an iterable of constraints.
Each constraint can be either a string, an instance of
:class:`iris.Constraint`, or an instance of
:class:`iris.AttributeConstraint`. If the constraint is a string
it will be used to match against cube.name().
.. _constraint_egs:
For example::
# Load air temperature data.
load_cube(uri, 'air_temperature')
# Load data with a specific model level number.
load_cube(uri, iris.Constraint(model_level_number=1))
# Load data with a specific STASH code.
load_cube(uri, iris.AttributeConstraint(STASH='m01s00i004'))
* callback:
A function to add metadata from the originating field and/or URI which
obeys the following rules:
1. Function signature must be: ``(cube, field, filename)``.
2. Modifies the given cube inplace, unless a new cube is
returned by the function.
3. If the cube is to be rejected the callback must raise
an :class:`iris.exceptions.IgnoreCubeException`.
For example::
def callback(cube, field, filename):
# Extract ID from filenames given as: <prefix>__<exp_id>
experiment_id = filename.split('__')[1]
experiment_coord = iris.coords.AuxCoord(
experiment_id, long_name='experiment_id')
cube.add_aux_coord(experiment_coord)
"""
import contextlib
import glob
import itertools
import os.path
import pathlib
import threading
import iris._constraints
from iris._deprecation import IrisDeprecation, warn_deprecated
import iris.config
import iris.io
try:
import iris_sample_data
except ImportError:
iris_sample_data = None
# Iris revision.
__version__ = "3.3.dev0"
# Restrict the names imported when using "from iris import *"
__all__ = [
"AttributeConstraint",
"Constraint",
"FUTURE",
"Future",
"IrisDeprecation",
"NameConstraint",
"load",
"load_cube",
"load_cubes",
"load_raw",
"sample_data_path",
"save",
"site_configuration",
]
Constraint = iris._constraints.Constraint
AttributeConstraint = iris._constraints.AttributeConstraint
NameConstraint = iris._constraints.NameConstraint
class Future(threading.local):
"""Run-time configuration controller."""
def __init__(self):
"""
A container for run-time options controls.
To adjust the values simply update the relevant attribute from
within your code. For example::
iris.FUTURE.example_future_flag = False
If Iris code is executed with multiple threads, note the values of
these options are thread-specific.
.. note::
iris.FUTURE.example_future_flag does not exist. It is provided
as an example because there are currently no flags in
iris.Future.
"""
# The flag 'example_future_flag' is provided as a future reference
# for the structure of this class.
#
# self.__dict__['example_future_flag'] = example_future_flag
pass
def __repr__(self):
# msg = ('Future(example_future_flag={})')
# return msg.format(self.example_future_flag)
msg = "Future()"
return msg.format()
# deprecated_options = {'example_future_flag': 'warning',}
deprecated_options = {}
def __setattr__(self, name, value):
if name in self.deprecated_options:
level = self.deprecated_options[name]
if level == "error" and not value:
emsg = (
"setting the 'Future' property {prop!r} has been "
"deprecated to be removed in a future release, and "
"deprecated {prop!r} behaviour has been removed. "
"Please remove code that sets this property."
)
raise AttributeError(emsg.format(prop=name))
else:
msg = (
"setting the 'Future' property {!r} is deprecated "
"and will be removed in a future release. "
"Please remove code that sets this property."
)
warn_deprecated(msg.format(name))
if name not in self.__dict__:
msg = "'Future' object has no attribute {!r}".format(name)
raise AttributeError(msg)
self.__dict__[name] = value
@contextlib.contextmanager
def context(self, **kwargs):
"""
Return a context manager which allows temporary modification of
the option values for the active thread.
On entry to the `with` statement, all keyword arguments are
applied to the Future object. On exit from the `with`
statement, the previous state is restored.
For example::
with iris.FUTURE.context(example_future_flag=False):
# ... code that expects some past behaviour
.. note::
iris.FUTURE.example_future_flag does not exist and is
provided only as an example since there are currently no
flags in Future.
"""
# Save the current context
current_state = self.__dict__.copy()
# Update the state
for name, value in kwargs.items():
setattr(self, name, value)
try:
yield
finally:
# Return the state
self.__dict__.clear()
self.__dict__.update(current_state)
#: Object containing all the Iris run-time options.
FUTURE = Future()
# Initialise the site configuration dictionary.
#: Iris site configuration dictionary.
site_configuration = {}
try:
from iris.site_config import update as _update
except ImportError:
pass
else:
_update(site_configuration)
def _generate_cubes(uris, callback, constraints):
"""Returns a generator of cubes given the URIs and a callback."""
if isinstance(uris, (str, pathlib.PurePath)):
uris = [uris]
# Group collections of uris by their iris handler
# Create list of tuples relating schemes to part names
uri_tuples = sorted(iris.io.decode_uri(uri) for uri in uris)
for scheme, groups in itertools.groupby(uri_tuples, key=lambda x: x[0]):
# Call each scheme handler with the appropriate URIs
if scheme == "file":
part_names = [x[1] for x in groups]
for cube in iris.io.load_files(part_names, callback, constraints):
yield cube
elif scheme in ["http", "https"]:
urls = [":".join(x) for x in groups]
for cube in iris.io.load_http(urls, callback):
yield cube
else:
raise ValueError("Iris cannot handle the URI scheme: %s" % scheme)
def _load_collection(uris, constraints=None, callback=None):
from iris.cube import _CubeFilterCollection
try:
cubes = _generate_cubes(uris, callback, constraints)
result = _CubeFilterCollection.from_cubes(cubes, constraints)
except EOFError as e:
raise iris.exceptions.TranslationError(
"The file appears empty or incomplete: {!r}".format(str(e))
)
return result
def load(uris, constraints=None, callback=None):
"""
Loads any number of Cubes for each constraint.
For a full description of the arguments, please see the module
documentation for :mod:`iris`.
Args:
* uris:
One or more filenames/URIs, as a string or :class:`pathlib.PurePath`.
If supplying a URL, only OPeNDAP Data Sources are supported.
Kwargs:
* constraints:
One or more constraints.
* callback:
A modifier/filter function.
Returns:
An :class:`iris.cube.CubeList`. Note that there is no inherent order
to this :class:`iris.cube.CubeList` and it should be treated as if it
were random.
"""
return _load_collection(uris, constraints, callback).merged().cubes()
def load_cube(uris, constraint=None, callback=None):
"""
Loads a single cube.
For a full description of the arguments, please see the module
documentation for :mod:`iris`.
Args:
* uris:
One or more filenames/URIs, as a string or :class:`pathlib.PurePath`.
If supplying a URL, only OPeNDAP Data Sources are supported.
Kwargs:
* constraints:
A constraint.
* callback:
A modifier/filter function.
Returns:
An :class:`iris.cube.Cube`.
"""
constraints = iris._constraints.list_of_constraints(constraint)
if len(constraints) != 1:
raise ValueError("only a single constraint is allowed")
cubes = _load_collection(uris, constraints, callback).cubes()
try:
cube = cubes.merge_cube()
except iris.exceptions.MergeError as e:
raise iris.exceptions.ConstraintMismatchError(str(e))
except ValueError:
raise iris.exceptions.ConstraintMismatchError("no cubes found")
return cube
def load_cubes(uris, constraints=None, callback=None):
"""
Loads exactly one Cube for each constraint.
For a full description of the arguments, please see the module
documentation for :mod:`iris`.
Args:
* uris:
One or more filenames/URIs, as a string or :class:`pathlib.PurePath`.
If supplying a URL, only OPeNDAP Data Sources are supported.
Kwargs:
* constraints:
One or more constraints.
* callback:
A modifier/filter function.
Returns:
An :class:`iris.cube.CubeList`. Note that there is no inherent order
to this :class:`iris.cube.CubeList` and it should be treated as if it
were random.
"""
# Merge the incoming cubes
collection = _load_collection(uris, constraints, callback).merged()
# Make sure we have exactly one merged cube per constraint
bad_pairs = [pair for pair in collection.pairs if len(pair) != 1]
if bad_pairs:
fmt = " {} -> {} cubes"
bits = [fmt.format(pair.constraint, len(pair)) for pair in bad_pairs]
msg = "\n" + "\n".join(bits)
raise iris.exceptions.ConstraintMismatchError(msg)
return collection.cubes()
def load_raw(uris, constraints=None, callback=None):
"""
Loads non-merged cubes.
This function is provided for those occasions where the automatic
combination of cubes into higher-dimensional cubes is undesirable.
However, it is intended as a tool of last resort! If you experience
a problem with the automatic combination process then please raise
an issue with the Iris developers.
For a full description of the arguments, please see the module
documentation for :mod:`iris`.
Args:
* uris:
One or more filenames/URIs, as a string or :class:`pathlib.PurePath`.
If supplying a URL, only OPeNDAP Data Sources are supported.
Kwargs:
* constraints:
One or more constraints.
* callback:
A modifier/filter function.
Returns:
An :class:`iris.cube.CubeList`.
"""
from iris.fileformats.um._fast_load import _raw_structured_loading
with _raw_structured_loading():
return _load_collection(uris, constraints, callback).cubes()
save = iris.io.save
def sample_data_path(*path_to_join):
"""
Given the sample data resource, returns the full path to the file.
.. note::
This function is only for locating files in the iris sample data
collection (installed separately from iris). It is not needed or
appropriate for general file access.
"""
target = os.path.join(*path_to_join)
if os.path.isabs(target):
raise ValueError(
"Absolute paths, such as {!r}, are not supported.\n"
"NB. This function is only for locating files in the "
"iris sample data collection. It is not needed or "
"appropriate for general file access.".format(target)
)
if iris_sample_data is not None:
target = os.path.join(iris_sample_data.path, target)
else:
raise ImportError(
"Please install the 'iris-sample-data' package to "
"access sample data."
)
if not glob.glob(target):
raise ValueError(
"Sample data file(s) at {!r} not found.\n"
"NB. This function is only for locating files in the "
"iris sample data collection. It is not needed or "
"appropriate for general file access.".format(target)
)
return target
| SciTools/iris | lib/iris/__init__.py | Python | lgpl-3.0 | 14,621 |
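A short consolidation of the loading patterns shown in the module docstring; the filename is an assumption, while the constraint and callback follow the documented forms.
import iris

def attach_source(cube, field, filename):
    # Callback rule 2: modify the cube in place (or return a new one).
    cube.attributes['source_file'] = filename

constraint = iris.Constraint('air_temperature') & iris.Constraint(model_level_number=1)
cube = iris.load_cube('air_temp.pp', constraint, callback=attach_source)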
#!/usr/bin/env python
########################################################################
# $HeadURL$
# File : dirac-start-component
# Author : Ricardo Graciani
########################################################################
"""
Start DIRAC component using runsvctrl utility
"""
__RCSID__ = "$Id$"
#
from DIRAC.Core.Base import Script
Script.disableCS()
Script.setUsageMessage( '\n'.join( [ __doc__.split( '\n' )[1],
'Usage:',
' %s [option|cfgfile] ... [system [service|agent]]' % Script.scriptName,
'Arguments:',
' system: Name of the system for the component (default *: all)',
' service|agent: Name of the particular component (default *: all)' ] ) )
Script.parseCommandLine()
args = Script.getPositionalArgs()
if len( args ) > 2:
Script.showHelp()
exit( -1 )
system = '*'
component = '*'
if len( args ) > 0:
system = args[0]
if system != '*':
if len( args ) > 1:
component = args[1]
#
from DIRAC.Core.Utilities import InstallTools
#
InstallTools.exitOnError = True
#
result = InstallTools.runsvctrlComponent( system, component, 'u' )
if not result['OK']:
print 'ERROR:', result['Message']
exit( -1 )
InstallTools.printStartupStatus( result['Value'] )
| Sbalbp/DIRAC | Core/scripts/dirac-start-component.py | Python | gpl-3.0 | 1,384 |
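For illustration, `dirac-start-component WorkloadManagement` would bring up every component of that system via runsvctrl, while `dirac-start-component WorkloadManagement JobCleaningAgent` targets a single agent; the system and agent names here are assumptions.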