code | repo_name | path | language | license | size
---|---|---|---|---|---|
stringlengths 3–1.05M | stringlengths 5–104 | stringlengths 4–251 | stringclasses 1 value | stringclasses 15 values | int64 3–1.05M
# (C) British Crown Copyright 2014 - 2015, Met Office
#
# This file is part of Iris.
#
# Iris is free software: you can redistribute it and/or modify it under
# the terms of the GNU Lesser General Public License as published by the
# Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Iris is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with Iris. If not, see <http://www.gnu.org/licenses/>.
"""Unit tests for :func:`iris.fileformats.rules._make_cube`."""
from __future__ import (absolute_import, division, print_function)
from six.moves import (filter, input, map, range, zip) # noqa
# Import iris.tests first so that some things can be initialised before
# importing anything else.
import iris.tests as tests
from iris.fileformats.rules import _make_cube
import iris.fileformats.rules
from iris.tests import mock
class Test(tests.IrisTest):
def test_invalid_units(self):
# Mock converter() function that returns an invalid
# units string amongst the collection of other elements.
factories = None
references = None
standard_name = None
long_name = None
units = 'wibble' # Invalid unit.
attributes = dict(source='test')
cell_methods = None
dim_coords_and_dims = None
aux_coords_and_dims = None
metadata = iris.fileformats.rules.ConversionMetadata(
factories, references, standard_name, long_name, units, attributes,
cell_methods, dim_coords_and_dims, aux_coords_and_dims)
converter = mock.Mock(return_value=metadata)
field = mock.Mock()
with mock.patch('warnings.warn') as warn:
cube, factories, references = _make_cube(field, converter)
# Check attributes dictionary is correctly populated.
expected_attributes = attributes.copy()
expected_attributes['invalid_units'] = units
self.assertEqual(cube.attributes, expected_attributes)
# Check warning was raised.
self.assertEqual(warn.call_count, 1)
warning_msg = warn.call_args[0][0]
self.assertIn('invalid units', warning_msg)
if __name__ == "__main__":
tests.main()
| mo-g/iris | lib/iris/tests/unit/fileformats/rules/test__make_cube.py | Python | gpl-3.0 | 2,483 |
import unittest
from unittest.mock import Mock
from test import support
from worklog import *
import models
class WorklogTests(unittest.TestCase):
@classmethod
def setUpClass(self):
# running the setup method only once
self.tasks = Entry.select()
self.stdout = None
self.stdin = None
self.task_name = get_task_name('a')
self.task_name_edit = get_task_name('e')
self.employee_name = get_employee_name('a')
self.employee_name_edit = get_employee_name('e')
self.task_time = get_task_time('a')
self.task_time_edit = get_task_time('e')
self.notes = get_notes('a')
self.notes_edit = get_notes('e')
def test_display_tasks(self):
"""test to see if it returns a particular
string when there are no tasks to show"""
#task is an empty list
self.tasks = []
with support.captured_stdout() as self.stdout:
display_tasks(self.tasks)
self.check_assertion('Sorry no results to display!\n')
def test_view_entries(self):
"""check that display_tasks is called with no issues
when the default parameters are applied to view_tasks"""
view_tasks = Mock()
self.assertTrue(view_tasks.return_type(display_tasks([])))
def test_type_of_search(self):
"""check that the result type is
one of the desired options"""
result = type_of_search()
self.assertIn(result, "edsrt")
def test_get_task_name(self):
"""check that return value is not blank,
when a user is adding a task, but at the
same time test that the return value can be blank
or not in an edit context"""
self.assertNotEqual(self.task_name, '')
self.assertEqual(self.task_name_edit, '')
def test_get_task_time(self):
"""check that the return type is not blank when
a new task is getting created but can be blank
when in an edit context, also check that the return
type is the same function when the user enters blank"""
self.assertNotEqual(self.task_time, '')
self.assertEqual(self.task_time_edit, '')
def test_get_notes(self):
"""check that the return type is not blank when
a new task is getting created but can be blank
when in an edit context"""
self.assertNotEqual(self.notes, '')
self.assertEqual(self.notes_edit, '')
def test_get_date_range(self):
"""check that the list returned is not empty"""
result = get_date_range()
self.assertNotEqual(len(result), 0)
def test_get_employee_name_edit(self):
"""check that the return type is not blank when
a new task is getting created but can be blank
when in an edit context"""
self.assertNotEqual(self.employee_name, '')
self.assertEqual(self.employee_name_edit, '')
def test_not_valid_time(self):
"""check the function returns true
when the time format is wrong"""
user_input = not_valid_time('60:01')
self.assertTrue(user_input)
def test_valid_date(self):
"""Check to see if false is returned when
the date is in the wrong format"""
result = valid_date('2017-03-02')
self.assertFalse(result)
def test_get_date_options(self):
"""check that the dictionary returned is not
empty"""
date_options = get_date_options()
length = len(date_options)
self.assertNotEqual(length, 0)
def test_create_task(self):
"""test that a task gets created"""
with support.captured_stdout() as self.stdout:
create_task(self.employee_name, self.task_name,
self.task_time, self.notes)
self.check_assertion("Task created successfully!\n")
def test_edit_task(self):
"""test that a task gets updated"""
task = Entry.get(id='7')
with support.captured_stdout() as self.stdout:
edit_task(task, self.employee_name_edit, self.task_name_edit,
self.task_time_edit, self.notes_edit)
self.check_assertion("Task updated\n")
def test_delete_task(self):
"""test to see if a task gets deleted successfully"""
task = Entry.get(Entry.id=='6')
delete_task(task)
user_input = self.get_input()
if user_input == 'y':
with support.captured_stdout() as self.stdout:
delete_task(task)
self.check_assertion("Task deleted!\n")
def check_assertion(self, msg):
self.assertEqual(self.stdout.getvalue(), msg)
def test_main(self):
print("enter q when prompted!")
result = main()
self.assertEqual(result, None)
def get_input(self):
'''this method captures input from the console'''
with support.captured_stdin() as self.stdin:
return self.stdin.readline()
if __name__ == '__main__':
print("*"*50)
print("For each input request leave the second blank, for\n"
"options, enter a value in the options")
print("*"*50)
unittest.main()
| squadran2003/worklog-with-database | tests.py | Python | mit | 4,551 |
# coding: utf-8
from __future__ import absolute_import
from datetime import date, datetime # noqa: F401
from typing import List, Dict # noqa: F401
from data_explorer.models.base_model_ import Model
from data_explorer.models.facet import Facet # noqa: F401,E501
from data_explorer import util
class FacetsResponse(Model):
"""NOTE: This class is auto generated by the swagger code generator program.
Do not edit the class manually.
"""
def __init__(self,
facets=None,
count=None,
invalid_filter_facets=None,
invalid_extra_facets=None,
sql_query=None): # noqa: E501
"""FacetsResponse - a model defined in Swagger
:param facets: The facets of this FacetsResponse. # noqa: E501
:type facets: List[Facet]
:param count: The count of this FacetsResponse. # noqa: E501
:type count: int
:param invalid_filter_facets: The invalid_filter_facets of this FacetsResponse. # noqa: E501
:type invalid_filter_facets: List[str]
:param invalid_extra_facets: The invalid_extra_facets of this FacetsResponse. # noqa: E501
:type invalid_extra_facets: List[str]
:param sql_query: The sql_query of this FacetsResponse. # noqa: E501
:type sql_query: str
"""
self.swagger_types = {
'facets': List[Facet],
'count': int,
'invalid_filter_facets': List[str],
'invalid_extra_facets': List[str],
'sql_query': str
}
self.attribute_map = {
'facets': 'facets',
'count': 'count',
'invalid_filter_facets': 'invalid_filter_facets',
'invalid_extra_facets': 'invalid_extra_facets',
'sql_query': 'sql_query'
}
self._facets = facets
self._count = count
self._invalid_filter_facets = invalid_filter_facets
self._invalid_extra_facets = invalid_extra_facets
self._sql_query = sql_query
@classmethod
def from_dict(cls, dikt):
"""Returns the dict as a model
:param dikt: A dict.
:type: dict
:return: The FacetsResponse of this FacetsResponse. # noqa: E501
:rtype: FacetsResponse
"""
return util.deserialize_model(dikt, cls)
@property
def facets(self):
"""Gets the facets of this FacetsResponse.
:return: The facets of this FacetsResponse.
:rtype: List[Facet]
"""
return self._facets
@facets.setter
def facets(self, facets):
"""Sets the facets of this FacetsResponse.
:param facets: The facets of this FacetsResponse.
:type facets: List[Facet]
"""
self._facets = facets
@property
def count(self):
"""Gets the count of this FacetsResponse.
Number of entities represented by current facet selection. For example, this could be 40, representing 40 people. # noqa: E501
:return: The count of this FacetsResponse.
:rtype: int
"""
return self._count
@count.setter
def count(self, count):
"""Sets the count of this FacetsResponse.
Number of entities represented by current facet selection. For example, this could be 40, representing 40 people. # noqa: E501
:param count: The count of this FacetsResponse.
:type count: int
"""
self._count = count
@property
def invalid_filter_facets(self):
"""Gets the invalid_filter_facets of this FacetsResponse.
Facets that were passed in filter param that don't exist in Elasticsearch index. Example: - Data Explorer url contains filter=amppd.2019_v1_0101.demographics.sex=female which is valid. User saves a cohort with this filter - A new version of AMP PD is released. (Data explorer url remains the same.) The dataset 2019_v1_0101 is replaced by dataset 2019_v2_0401. - User won't be able to open saved cohort in DE; amppd.2019_v1_0101.demographics.sex is no longer in the Elasticsearch index. invalid_filter_facets will contain amppd.2019_v1_0101.demographics.sex # noqa: E501
:return: The invalid_filter_facets of this FacetsResponse.
:rtype: List[str]
"""
return self._invalid_filter_facets
@invalid_filter_facets.setter
def invalid_filter_facets(self, invalid_filter_facets):
"""Sets the invalid_filter_facets of this FacetsResponse.
Facets that were passed in filter param that don't exist in Elasticsearch index. Example: - Data Explorer url contains filter=amppd.2019_v1_0101.demographics.sex=female which is valid. User saves a cohort with this filter - A new version of AMP PD is released. (Data explorer url remains the same.) The dataset 2019_v1_0101 is replaced by dataset 2019_v2_0401. - User won't be able to open saved cohort in DE; amppd.2019_v1_0101.demographics.sex is no longer in the Elasticsearch index. invalid_filter_facets will contain amppd.2019_v1_0101.demographics.sex # noqa: E501
:param invalid_filter_facets: The invalid_filter_facets of this FacetsResponse.
:type invalid_filter_facets: List[str]
"""
self._invalid_filter_facets = invalid_filter_facets
@property
def invalid_extra_facets(self):
"""Gets the invalid_extra_facets of this FacetsResponse.
Facets that were passed in extraFacets param that don't exist in Elasticsearch index. Example: - Data Explorer url contains extraFacets=amppd.2019_v1_0101.demographics.sex which is valid. User saves a cohort with this extra facet - A new version of AMP PD is released. (Data explorer url remains the same.) The dataset 2019_v1_0101 is replaced by dataset 2019_v2_0401. - User won't be able to open saved cohort in DE; amppd.2019_v1_0101.demographics.sex is no longer in the Elasticsearch index. invalid_extra_facets will contain amppd.2019_v1_0101.demographics.sex # noqa: E501
:return: The invalid_extra_facets of this FacetsResponse.
:rtype: List[str]
"""
return self._invalid_extra_facets
@invalid_extra_facets.setter
def invalid_extra_facets(self, invalid_extra_facets):
"""Sets the invalid_extra_facets of this FacetsResponse.
Facets that were passed in extraFacets param that don't exist in Elasticsearch index. Example: - Data Explorer url contains extraFacets=amppd.2019_v1_0101.demographics.sex which is valid. User saves a cohort with this extra facet - A new version of AMP PD is released. (Data explorer url remains the same.) The dataset 2019_v1_0101 is replaced by dataset 2019_v2_0401. - User won't be able to open saved cohort in DE; amppd.2019_v1_0101.demographics.sex is no longer in the Elasticsearch index. invalid_extra_facets will contain amppd.2019_v1_0101.demographics.sex # noqa: E501
:param invalid_extra_facets: The invalid_extra_facets of this FacetsResponse.
:type invalid_extra_facets: List[str]
"""
self._invalid_extra_facets = invalid_extra_facets
@property
def sql_query(self):
"""Gets the sql_query of this FacetsResponse.
SQL query that can be used in BigQuery to get the cohort (list of participants) of the current filter. # noqa: E501
:return: The sql_query of this FacetsResponse.
:rtype: str
"""
return self._sql_query
@sql_query.setter
def sql_query(self, sql_query):
"""Sets the sql_query of this FacetsResponse.
SQL query that can be used in BigQuery to get the cohort (list of participants) of the current filter. # noqa: E501
:param sql_query: The sql_query of this FacetsResponse.
:type sql_query: str
"""
self._sql_query = sql_query
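# --- Editorial usage sketch (not part of the generated model) ---------------
# A FacetsResponse is normally rebuilt from a plain dict, for example one
# decoded from the API's JSON payload; the field values below are
# illustrative only:
#
#     response = FacetsResponse.from_dict({
#         'facets': [],
#         'count': 40,
#         'invalid_filter_facets': [],
#         'invalid_extra_facets': [],
#         'sql_query': 'SELECT participant_id FROM ...',
#     })
#     response.count  # -> 40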
| DataBiosphere/data-explorer | api/data_explorer/models/facets_response.py | Python | bsd-3-clause | 7,851 |
import os
import numpy as np
import matplotlib.pyplot as plt
import matplotlib.ticker as ticker
from astropy.io import fits
from astropy.time import Time
'''
This program makes awesome plots of APOGEE CCFs compared to our BFs.
The idea is for you to run it once for each target.
'''
#plt.rc('font',**{'family':'sans-serif','sans-serif':['Helvetica']})
#plt.rc('text', usetex=True)
#KIC = '5285607'; windowrows = 1; windowcols = 6
#KIC = '6864859'; windowrows = 4; windowcols = 7
#KIC = '6778289'; windowrows = 4; windowcols = 7
#KIC = '6449358'; windowrows = 4; windowcols = 7
#KIC = '4285087'; windowrows = 1; windowcols = 6
#KIC = '6781535'; windowrows = 4; windowcols = 7
KIC = '6131659'; windowrows = 4; windowcols = 7
dir = os.path.join('data', KIC)
# The name of the apStar file for a single target
#ccffile = 'apStar-r8-2M19390532+4027346.fits' #5285607
#ccffile = 'apStar-r6-2M19292405+4223363.fits' #6864859 #r8 is missing one visit
#ccffile = 'apStar-r8-2M19282456+4215080.fits' #6778289
#ccffile = 'apStar-r8-2M19353513+4149543.fits' #6449358
#ccffile = 'apStar-r8-2M19463571+3919069.fits' #4285087
#ccffile = 'apStar-r8-2M19321788+4216489.fits' #6781535
ccffile = 'apStar-r8-2M19370697+4126128.fits' #6131659
# The name of the file with relevant BF info for the same target
#bffile = '5285607BFOutAPstar.txt' # in the same order as APOGEE even though it's not chronologic
#bffile = '6864859BFOutALL.txt'
#bffile = '6778289BFOutJCAllVisits.txt'
#bffile = '6449358BFOutALL.txt'
#bffile = '4285087BFOut.txt'
#bffile = '6781535BFOutALL.txt'
bffile = '6131659BFOutALL.txt'
# The name of the bjdinfile used with BF_python
# (heliocentric/barycentric velocities in col=(2,); note the top row is for the template)
#BCVfile = '5285607bjdinfile.txt'
#BCVfile = '6864859bjdinfileALL.txt'
#BCVfile = '6778289bjdinfiles.txt'
#BCVfile = '6449358bjdinfile.txt'
#BCVfile = '4285087bjdinfile.txt' ## apogee visits aren't chronological WHY
#BCVfile = '6781535bjdinfileALL.txt'
BCVfile = '6131659bjdinfile.txt'
#ccfinfile = os.path.join(dir, ccffile)
ccfinfile = os.path.join('data', ccffile)
bfinfile = os.path.join(dir, bffile)
bjdinfile = os.path.join(dir, BCVfile)
# Read in relevant CCF info from apStar file
hdu = fits.open(ccfinfile)
# The CCF information lives in the 9th HDU, because of course it does
hdu9 = hdu[9].data
CCFvalues = hdu9['CCF'][0][2:]
CCFerrors = hdu9['CCFERR'][0][2:]
CCFxaxis = hdu9['CCFLAG'][0] # pixels, needs to be turned into RVs
CCF_delta = hdu9['CCFDW'][0] # Delta (log_10 lambda)
#print(hdu9['VHELIO'][0] - hdu9['VREL'][0])
# (the data model website says this log lambda spacing per lag step of
# 6.d-6 corresponds to 4.145 km/s)
# Get the systemic RV of the star according to APOGEE
apSystemic = 82 ## TODO: FIND ACTUAL SYSTEMIC RV PARAMETER !!!
# Get the barycentric velocities of each visit according to APOGEE
# This is called VHELIO for super obvious reasons, Jen Sobeck private communication, for reals
apBCVs = hdu9['VHELIO'][0]
CCF_rvaxis = [CCFxaxis * 4.145 + bcv for bcv in apBCVs]
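# Editorial cross-check (not part of the original analysis): the 4.145 km/s
# per lag step quoted above follows from the log10-lambda spacing, since
# dv = c * ln(10) * d(log10 lambda) ~ 299792.458 * 2.302585 * 6e-6 ~ 4.14 km/s.
# In principle the factor could be recomputed from the header value read above:
velocity_per_lagstep = 299792.458 * np.log(10.0) * CCF_delta  # km/s per CCF lag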
# Get the timestamp for each CCF visit from the 0th HDU
hdu0 = hdu[0].header
ccftimes = []
for idx in range(1, len(CCFvalues)+1):
headercard = 'HJD' + str(idx)
ccftimes.append(hdu0[headercard] + 2400000.)
ccftimesAstropy = []
for ccftime in ccftimes:
ccftimesAstropy.append(Time(ccftime, scale='utc', format='jd'))
# Set up the main figure
fig = plt.figure(1, figsize=(15,10))
ax = fig.add_subplot(111)
ax.tick_params(top='off', bottom='off', left='off', right='off')
ax.axes.xaxis.set_ticklabels([])
ax.axes.yaxis.set_ticklabels([])
ax.set_xlabel('Radial Velocity (km s$^{-1}$)', labelpad=20, size='x-large')
ax.set_ylabel('Arbitrary CCF or BF amplitude', labelpad=20, size='x-large')
ax.spines['top'].set_color('none')
ax.spines['bottom'].set_color('none')
ax.spines['left'].set_color('none')
ax.spines['right'].set_color('none')
plt.title('KIC ' + KIC, size='x-large')
#windowrows = 4
#windowcols = 7
#fig.text(0.5, 0.04, 'Radial Velocity (km s$^{-1}$)', ha='center', va='center', size='x-large')
#fig.text(0.07, 0.5, 'Arbitrary CCF or BF amplitude', ha='center', va='center', size='x-large', rotation='vertical')
axlims = [-140, 140, -0.06, 0.56]
ccfyoffset = -0.12
bfyamp = 9 # arbitrary factor to stretch BF values for clarity
# used 7 for 686 and 9 for 613
# Loop over and plot CCF data
for idx, CCFdata in enumerate(CCFvalues):
ax0 = fig.add_subplot(windowrows, windowcols, idx+1)
plt.axis(axlims)
plt.plot(CCF_rvaxis[idx], CCFvalues[idx] + ccfyoffset)
plt.text(23, 0.5, ccftimesAstropy[idx].iso[0:10], size=10)
## TURN ON FOR TIMESTAMP TROUBLESHOOTING
#plt.text(25, 0.5, '{0:.3f}'.format(ccftimes[idx] - 2450000.), size=10, color='C0')
#plt.text(25, 0.3, idx, size=10, color='C0')
plt.subplots_adjust(wspace=0, hspace=0)
# Read in relevant BF info from the BF infile
bfdata = np.loadtxt(bfinfile, comments='#', usecols=(0,1,2), unpack=True)
BFrvaxis = bfdata[0]
BFvalues = bfdata[1]
BFerrors = bfdata[2]
# Get the timestamp for each BF
with open(bfinfile) as bfinfo:
bftimes = [float(line[13:]) for line in bfinfo if 'timestamp' in line]
# Save the indices that separate each BF's visit in the input file
visitidx = 0
BFindices = []
for idx, (rv, value, error) in enumerate(zip(BFrvaxis, BFvalues, BFerrors)):
if np.abs(BFrvaxis[idx-1] - rv) > 100:
visitidx = visitidx + 1
BFindices.append(idx)
# Read in barycentric velocity correction info from the BJD infile
BCVdata = np.loadtxt(bjdinfile, comments='#', usecols=(2,), unpack=True)
BCVdata = BCVdata[1:]
# Loop over and plot BF data
for idx in range(0, len(bftimes)):
ax1 = fig.add_subplot(windowrows, windowcols, idx+1)
plt.axis(axlims)
ax1.yaxis.set_major_locator(ticker.MultipleLocator(0.2))
for tick in ax1.xaxis.get_major_ticks():
tick.label.set_fontsize(12)
for tick in ax1.yaxis.get_major_ticks():
tick.label.set_fontsize(12)
if (idx < 20): # set to 18 for 686 and 20 for 613
ax1.set_xticklabels(())
if (idx!=0 and idx!=7 and idx!=14 and idx!=21):
ax1.set_yticklabels(())
## TURN ON FOR TIMESTAMP TROUBLESHOOTING
#plt.text(25, 0.4, '{0:.3f}'.format(bftimes[idx] - 2450000.), size=10, color='C1')
#plt.text(25, 0.2, idx, size=10, color='C1')
try:
plt.plot(BFrvaxis[BFindices[idx]:BFindices[idx+1]] - apSystemic + BCVdata[idx],
bfyamp*BFvalues[BFindices[idx]:BFindices[idx+1]])
except: # handle the final case where there is no idx+1
try:
plt.plot(BFrvaxis[BFindices[idx]::] - apSystemic + BCVdata[idx],
bfyamp*BFvalues[BFindices[idx]::])
except:
print('You\'re missing a BF where an APOGEE CCF exists')
continue
plt.show()
| savvytruffle/cauldron | rvs/BF_CCF_plotter.py | Python | mit | 6,791 |
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Module to compute Trotter errors in the plane-wave dual basis."""
from __future__ import absolute_import
from future.utils import iteritems, itervalues
import numpy
import openfermion.hamiltonians
from openfermion.ops import FermionOperator
from openfermion.utils import count_qubits, normal_ordered
from openfermion.utils._commutators import (
double_commutator,
trivially_double_commutes_dual_basis,
trivially_double_commutes_dual_basis_using_term_info)
def low_depth_second_order_trotter_error_operator(
terms, indices=None, is_hopping_operator=None,
jellium_only=False, verbose=False):
"""Determine the difference between the exact generator of unitary
evolution and the approximate generator given by the second-order
Trotter-Suzuki expansion.
Args:
terms: a list of FermionOperators in the Hamiltonian in the
order in which they will be simulated.
indices: a set of indices the terms act on in the same order as terms.
is_hopping_operator: a list of whether each term is a hopping operator.
jellium_only: Whether the terms are from the jellium Hamiltonian only,
rather than the full dual basis Hamiltonian (i.e. whether
c_i = c for all number operators i^ i, or whether they
depend on i as is possible in the general case).
verbose: Whether to print percentage progress.
Returns:
The difference between the true and effective generators of time
evolution for a single Trotter step.
Notes: follows Equation 9 of Poulin et al.'s work in "The Trotter Step
Size Required for Accurate Quantum Simulation of Quantum Chemistry",
applied to the "stagger"-based Trotter step for detailed in
Kivlichan et al., "Quantum Simulation of Electronic Structure with
Linear Depth and Connectivity", arxiv:1711.04789.
"""
more_info = bool(indices)
n_terms = len(terms)
if verbose:
import time
start = time.time()
error_operator = FermionOperator.zero()
for beta in range(n_terms):
if verbose and beta % (n_terms // 30) == 0:
print('%4.3f percent done in' % (
(float(beta) / n_terms) ** 3 * 100), time.time() - start)
for alpha in range(beta + 1):
for alpha_prime in range(beta):
# If we have pre-computed info on indices, use it to determine
# trivial double commutation.
if more_info:
if (not
trivially_double_commutes_dual_basis_using_term_info(
indices[alpha], indices[beta],
indices[alpha_prime], is_hopping_operator[alpha],
is_hopping_operator[beta],
is_hopping_operator[alpha_prime], jellium_only)):
# Determine the result of the double commutator.
double_com = double_commutator(
terms[alpha], terms[beta], terms[alpha_prime],
indices[beta], indices[alpha_prime],
is_hopping_operator[beta],
is_hopping_operator[alpha_prime])
if alpha == beta:
double_com /= 2.0
error_operator += double_com
# If we don't have more info, check for trivial double
# commutation using the terms directly.
elif not trivially_double_commutes_dual_basis(
terms[alpha], terms[beta], terms[alpha_prime]):
double_com = double_commutator(
terms[alpha], terms[beta], terms[alpha_prime])
if alpha == beta:
double_com /= 2.0
error_operator += double_com
error_operator /= 12.0
return error_operator
def low_depth_second_order_trotter_error_bound(
terms, indices=None, is_hopping_operator=None,
jellium_only=False, verbose=False):
"""Numerically upper bound the error in the ground state energy
for the second-order Trotter-Suzuki expansion.
Args:
terms: a list of single-term FermionOperators in the Hamiltonian
to be simulated.
indices: a set of indices the terms act on in the same order as terms.
is_hopping_operator: a list of whether each term is a hopping operator.
jellium_only: Whether the terms are from the jellium Hamiltonian only,
rather than the full dual basis Hamiltonian (i.e. whether
c_i = c for all number operators i^ i, or whether they
depend on i as is possible in the general case).
verbose: Whether to print percentage progress.
Returns:
A float upper bound on norm of error in the ground state energy.
Notes:
Follows Equation 9 of Poulin et al.'s work in "The Trotter Step
Size Required for Accurate Quantum Simulation of Quantum
Chemistry" to calculate the error operator, for the "stagger"-based
Trotter step detailed in Kivlichan et al., "Quantum Simulation
of Electronic Structure with Linear Depth and Connectivity",
arxiv:1711.04789.
"""
# Return the 1-norm of the error operator (upper bound on error).
return numpy.sum(numpy.absolute(list(
low_depth_second_order_trotter_error_operator(
terms, indices, is_hopping_operator,
jellium_only, verbose).terms.values())))
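# --- Editorial usage sketch (not part of the original module) ----------------
# Minimal illustration of calling the bound above directly on hand-built
# terms: a toy two-mode Hamiltonian with one hopping term (1^ 0 + 0^ 1) and
# one number term (0^ 0), i.e. the only term types this module supports.
# The helper below is illustrative and is not called anywhere in this module.
def _toy_trotter_error_bound_example():
    """Bound the single-step Trotter error for a toy two-term Hamiltonian."""
    hopping = (FermionOperator(((1, 1), (0, 0)), 1.0) +
               FermionOperator(((0, 1), (1, 0)), 1.0))
    number = FermionOperator(((0, 1), (0, 0)), 0.5)
    # Without pre-computed index/hopping info the terms are inspected directly.
    return low_depth_second_order_trotter_error_bound([hopping, number])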
def simulation_ordered_grouped_low_depth_terms_with_info(
hamiltonian, input_ordering=None, external_potential_at_end=False):
"""Give terms from the dual basis Hamiltonian in simulated order.
Uses the simulation ordering, grouping terms into hopping
(i^ j + j^ i) and number (i^j^ i j + c_i i^ i + c_j j^ j) operators.
Pre-computes term information (indices each operator acts on, as
well as whether each operator is a hopping operator).
Args:
hamiltonian (FermionOperator): The Hamiltonian.
input_ordering (list): The initial Jordan-Wigner canonical order.
If no input ordering is specified, defaults to
[0..n_qubits] where n_qubits is the number of
qubits in the Hamiltonian.
external_potential_at_end (bool): Whether to include the rotations from
the external potential at the end of the Trotter step, or
intersperse them throughout it.
Returns:
A 3-tuple of terms from the Hamiltonian in order of simulation,
the indices they act on, and whether they are hopping operators
(both also in the same order).
Notes:
Follows the "stagger"-based simulation order discussed in Kivlichan
et al., "Quantum Simulation of Electronic Structure with Linear
Depth and Connectivity", arxiv:1711.04789; as such, the only
permitted types of terms are hopping (i^ j + j^ i) and potential
terms which are products of at most two number operators.
"""
n_qubits = count_qubits(hamiltonian)
hamiltonian = normal_ordered(hamiltonian)
ordered_terms = []
ordered_indices = []
ordered_is_hopping_operator = []
# If no input mode ordering is specified, default to range(n_qubits).
try:
input_ordering = list(input_ordering)
except TypeError:
input_ordering = list(range(n_qubits))
# Half a second-order Trotter step reverses the input ordering: this tells
# us how much we need to include in the ordered list of terms.
final_ordering = list(reversed(input_ordering))
# Follow odd-even transposition sort. In alternating steps, swap each even
# qubits with the odd qubit to its right, and in the next step swap each
# the odd qubits with the even qubit to its right. Do this until the input
# ordering has been reversed.
parity = 0
while input_ordering != final_ordering:
results = stagger_with_info(
hamiltonian, input_ordering, parity,
external_potential_at_end)
terms_in_layer, indices_in_layer, is_hopping_operator_in_layer = (
results)
ordered_terms.extend(terms_in_layer)
ordered_indices.extend(indices_in_layer)
ordered_is_hopping_operator.extend(is_hopping_operator_in_layer)
# Alternate even and odd steps of the reversal procedure.
parity = 1 - parity
# If all the rotations from the external potential are in the final layer,
# i.e. we don't intersperse them throughout the Trotter step.
if external_potential_at_end:
terms_in_final_layer = []
indices_in_final_layer = []
is_hopping_operator_in_final_layer = []
for qubit in range(n_qubits):
coeff = hamiltonian.terms.get(((qubit, 1), (qubit, 0)), 0.0)
if coeff:
terms_in_final_layer.append(
FermionOperator(((qubit, 1), (qubit, 0)), coeff))
indices_in_final_layer.append(set((qubit,)))
is_hopping_operator_in_final_layer.append(False)
ordered_terms.extend(terms_in_final_layer)
ordered_indices.extend(indices_in_final_layer)
ordered_is_hopping_operator.extend(is_hopping_operator_in_final_layer)
return (ordered_terms, ordered_indices, ordered_is_hopping_operator)
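# --- Editorial illustration (not part of the original module) ----------------
# The while-loop above relies on a property of odd-even transposition sorting:
# unconditionally swapping adjacent pairs, alternating between even and odd
# starting parity, reverses an n-element ordering after n layers of swaps.
# A minimal sketch of that swap pattern, stripped of all Hamiltonian
# bookkeeping (illustrative helper, not called anywhere in this module):
def _odd_even_reversal_layers(n_modes):
    """Return the layers of adjacent swaps that reverse range(n_modes)."""
    ordering = list(range(n_modes))
    target = list(reversed(ordering))
    layers = []
    parity = 0
    while ordering != target:
        layer = []
        for i in range(parity, n_modes - 1, 2):
            ordering[i], ordering[i + 1] = ordering[i + 1], ordering[i]
            layer.append((i, i + 1))
        layers.append(layer)
        parity = 1 - parity
    return layers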
def stagger_with_info(hamiltonian, input_ordering, parity,
external_potential_at_end=False):
"""Give terms simulated in a single stagger of a Trotter step.
Groups terms into hopping (i^ j + j^ i) and number
(i^j^ i j + c_i i^ i + c_j j^ j) operators.
Pre-computes term information (indices each operator acts on, as
well as whether each operator is a hopping operator).
Args:
hamiltonian (FermionOperator): The Hamiltonian.
input_ordering (list): The initial Jordan-Wigner canonical order.
parity (boolean): Whether to determine the terms from the next even
(False = 0) or odd (True = 1) stagger.
external_potential_at_end (bool): Whether to include the rotations from
the external potential at the end of the Trotter step, or
intersperse them throughout it.
Returns:
A 3-tuple of terms from the Hamiltonian that are simulated in the
stagger, the indices they act on, and whether they are hopping
operators (all in the same order).
Notes:
The "staggers" used here are the left (parity=False) and right
(parity=True) staggers detailed in Kivlichan et al., "Quantum
Simulation of Electronic Structure with Linear Depth and
Connectivity", arxiv:1711.04789. As such, the Hamiltonian must be
in the form discussed in that paper. This constrains it to have
only hopping terms (i^ j + j^ i) and potential terms which are
products of at most two number operators (n_i or n_i n_j).
"""
terms_in_layer = []
indices_in_layer = []
is_hopping_operator_in_layer = []
n_qubits = count_qubits(hamiltonian)
# A single round of odd-even transposition sort.
for i in range(parity, n_qubits - 1, 2):
# Always keep the max on the left to avoid having to normal order.
left = max(input_ordering[i], input_ordering[i + 1])
right = min(input_ordering[i], input_ordering[i + 1])
# Calculate the hopping operators in the Hamiltonian.
left_hopping_operator = FermionOperator(
((left, 1), (right, 0)), hamiltonian.terms.get(
((left, 1), (right, 0)), 0.0))
right_hopping_operator = FermionOperator(
((right, 1), (left, 0)), hamiltonian.terms.get(
((right, 1), (left, 0)), 0.0))
# Calculate the two-number operator l^ r^ l r in the Hamiltonian.
two_number_operator = FermionOperator(
((left, 1), (right, 1), (left, 0), (right, 0)),
hamiltonian.terms.get(
((left, 1), (right, 1), (left, 0), (right, 0)), 0.0))
if not external_potential_at_end:
# Calculate the left number operator, left^ left.
left_number_operator = FermionOperator(
((left, 1), (left, 0)), hamiltonian.terms.get(
((left, 1), (left, 0)), 0.0))
# Calculate the right number operator, right^ right.
right_number_operator = FermionOperator(
((right, 1), (right, 0)), hamiltonian.terms.get(
((right, 1), (right, 0)), 0.0))
# Divide single-number terms by n_qubits-1 to avoid over-accounting
# for the interspersed rotations. Each qubit is swapped n_qubits-1
# times total.
left_number_operator /= (n_qubits - 1)
right_number_operator /= (n_qubits - 1)
else:
left_number_operator = FermionOperator.zero()
right_number_operator = FermionOperator.zero()
# If the overall hopping operator isn't close to zero, append it.
# Include the indices it acts on and that it's a hopping operator.
if not (left_hopping_operator +
right_hopping_operator) == FermionOperator.zero():
terms_in_layer.append(left_hopping_operator +
right_hopping_operator)
indices_in_layer.append(set((left, right)))
is_hopping_operator_in_layer.append(True)
# If the overall number operator isn't close to zero, append it.
# Include the indices it acts on and that it's a number operator.
if not (two_number_operator + left_number_operator +
right_number_operator) == FermionOperator.zero():
terms_in_layer.append(two_number_operator +
left_number_operator +
right_number_operator)
terms_in_layer[-1].compress()
indices_in_layer.append(set((left, right)))
is_hopping_operator_in_layer.append(False)
# Modify the current Jordan-Wigner canonical ordering in-place.
input_ordering[i], input_ordering[i + 1] = (input_ordering[i + 1],
input_ordering[i])
return terms_in_layer, indices_in_layer, is_hopping_operator_in_layer
def ordered_low_depth_terms_no_info(hamiltonian):
"""Give terms from Hamiltonian in dictionary output order.
Args:
hamiltonian (FermionOperator): The Hamiltonian.
Returns:
A list of terms from the Hamiltonian in simulated order.
Notes:
Assumes the Hamiltonian is in the form discussed in Kivlichan
et al., "Quantum Simulation of Electronic Structure with Linear
Depth and Connectivity", arxiv:1711.04789. This constrains the
Hamiltonian to have only hopping terms (i^ j + j^ i) and potential
terms which are products of at most two number operators (n_i or
n_i n_j).
"""
n_qubits = count_qubits(hamiltonian)
hamiltonian = normal_ordered(hamiltonian)
terms = []
for operators, coefficient in iteritems(hamiltonian.terms):
terms += [FermionOperator(operators, coefficient)]
return terms
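# --- Editorial end-to-end sketch (not part of the original module) -----------
# The functions above are typically chained as below. The import locations of
# Grid and jellium_model are assumptions about the surrounding OpenFermion
# package, not verified from this file, so this is kept as a comment sketch:
#
#     from openfermion.hamiltonians import jellium_model
#     from openfermion.utils import Grid
#
#     hamiltonian = jellium_model(Grid(dimensions=1, length=4, scale=1.0),
#                                 spinless=True, plane_wave=False)
#     terms, indices, is_hopping = (
#         simulation_ordered_grouped_low_depth_terms_with_info(hamiltonian))
#     bound = low_depth_second_order_trotter_error_bound(
#         terms, indices, is_hopping, jellium_only=True)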
| jarrodmcc/OpenFermion | src/openfermion/utils/_low_depth_trotter_error.py | Python | apache-2.0 | 16,047 |
# -*- coding: utf-8 -*-
import datetime
from south.db import db
from south.v2 import SchemaMigration
from django.db import models
class Migration(SchemaMigration):
def forwards(self, orm):
# Changing field 'Monster.immunities'
db.alter_column('srd20_monster', 'immunities', self.gf('django.db.models.fields.CharField')(max_length=1024))
def backwards(self, orm):
# Changing field 'Monster.immunities'
db.alter_column('srd20_monster', 'immunities', self.gf('django.db.models.fields.CharField')(max_length=512))
models = {
'srd20.characterclass': {
'Meta': {'ordering': "('name',)", 'object_name': 'CharacterClass', 'db_table': "'class'"},
'alignment': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'class_skills': ('django.db.models.fields.TextField', [], {}),
'epic_feat_base_level': ('django.db.models.fields.CharField', [], {'max_length': '4'}),
'epic_feat_interval': ('django.db.models.fields.CharField', [], {'max_length': '4'}),
'epic_feat_list': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'epic_full_text': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'full_text': ('django.db.models.fields.TextField', [], {}),
'hit_die': ('django.db.models.fields.CharField', [], {'max_length': '4'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '32'}),
'proficiencies': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'reference': ('django.db.models.fields.CharField', [], {'max_length': '32'}),
'req_base_attack_bonus': ('django.db.models.fields.CharField', [], {'max_length': '4', 'blank': 'True'}),
'req_epic_feat': ('django.db.models.fields.CharField', [], {'max_length': '64', 'blank': 'True'}),
'req_feat': ('django.db.models.fields.CharField', [], {'max_length': '128', 'blank': 'True'}),
'req_languages': ('django.db.models.fields.CharField', [], {'max_length': '16', 'blank': 'True'}),
'req_psionics': ('django.db.models.fields.CharField', [], {'max_length': '64', 'blank': 'True'}),
'req_race': ('django.db.models.fields.CharField', [], {'max_length': '64', 'blank': 'True'}),
'req_skill': ('django.db.models.fields.CharField', [], {'max_length': '128', 'blank': 'True'}),
'req_special': ('django.db.models.fields.CharField', [], {'max_length': '256', 'blank': 'True'}),
'req_spells': ('django.db.models.fields.CharField', [], {'max_length': '128', 'blank': 'True'}),
'req_weapon_proficiency': ('django.db.models.fields.CharField', [], {'max_length': '64', 'blank': 'True'}),
'skill_points': ('django.db.models.fields.CharField', [], {'max_length': '1'}),
'skill_points_ability': ('django.db.models.fields.CharField', [], {'max_length': '3'}),
'spell_list_1': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'spell_list_2': ('django.db.models.fields.CharField', [], {'max_length': '256', 'blank': 'True'}),
'spell_list_3': ('django.db.models.fields.CharField', [], {'max_length': '256', 'blank': 'True'}),
'spell_list_4': ('django.db.models.fields.CharField', [], {'max_length': '256', 'blank': 'True'}),
'spell_list_5': ('django.db.models.fields.CharField', [], {'max_length': '256', 'blank': 'True'}),
'spell_stat': ('django.db.models.fields.CharField', [], {'max_length': '4', 'blank': 'True'}),
'spell_type': ('django.db.models.fields.CharField', [], {'max_length': '16'}),
'type': ('django.db.models.fields.CharField', [], {'max_length': '32'})
},
'srd20.feat': {
'Meta': {'ordering': "('name',)", 'object_name': 'Feat', 'db_table': "'feat'"},
'altname': ('django.db.models.fields.SlugField', [], {'max_length': '64'}),
'benefit': ('django.db.models.fields.TextField', [], {}),
'choice': ('django.db.models.fields.CharField', [], {'max_length': '256', 'blank': 'True'}),
'description': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'multiple': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '64'}),
'normal': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'prerequisite': ('django.db.models.fields.CharField', [], {'max_length': '1024', 'blank': 'True'}),
'reference': ('django.db.models.fields.CharField', [], {'max_length': '32'}),
'special': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'stack': ('django.db.models.fields.BooleanField', [], {'default': 'False'}),
'type': ('django.db.models.fields.CharField', [], {'max_length': '32'})
},
'srd20.monster': {
'Meta': {'ordering': "['name']", 'object_name': 'Monster'},
'abilities': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'alignment': ('django.db.models.fields.CharField', [], {'max_length': '4'}),
'altname': ('django.db.models.fields.CharField', [], {'max_length': '64'}),
'armor_class': ('django.db.models.fields.CharField', [], {'max_length': '256'}),
'aura': ('django.db.models.fields.CharField', [], {'max_length': '256', 'blank': 'True'}),
'base_attack_bonus': ('django.db.models.fields.IntegerField', [], {}),
'charisma': ('django.db.models.fields.IntegerField', [], {}),
'class_level': ('django.db.models.fields.CharField', [], {'max_length': '16', 'blank': 'True'}),
'combat_maneuver_bonus': ('django.db.models.fields.CharField', [], {'max_length': '64'}),
'combat_maneuver_defense': ('django.db.models.fields.CharField', [], {'max_length': '64'}),
'constitution': ('django.db.models.fields.IntegerField', [], {}),
'cr': ('django.db.models.fields.IntegerField', [], {}),
'damage_reduction_amount': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0'}),
'damage_reduction_condition': ('django.db.models.fields.CharField', [], {'max_length': '128', 'blank': 'True'}),
'defensive_abilities': ('django.db.models.fields.CharField', [], {'max_length': '256', 'blank': 'True'}),
'description': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'dexterity': ('django.db.models.fields.IntegerField', [], {}),
'environment': ('django.db.models.fields.CharField', [], {'max_length': '256'}),
'feats': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'flavor_text': ('django.db.models.fields.TextField', [], {}),
'fortitude_save': ('django.db.models.fields.CharField', [], {'max_length': '256'}),
'gear': ('django.db.models.fields.CharField', [], {'max_length': '128', 'blank': 'True'}),
'hit_points': ('django.db.models.fields.CharField', [], {'max_length': '256'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'immunities': ('django.db.models.fields.CharField', [], {'max_length': '1024', 'blank': 'True'}),
'initiative': ('django.db.models.fields.IntegerField', [], {}),
'intelligence': ('django.db.models.fields.IntegerField', [], {}),
'languages': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'melee': ('django.db.models.fields.CharField', [], {'max_length': '512', 'blank': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '64'}),
'opposition_schools': ('django.db.models.fields.CharField', [], {'max_length': '64', 'blank': 'True'}),
'organization': ('django.db.models.fields.TextField', [], {}),
'other_type': ('django.db.models.fields.CharField', [], {'max_length': '64', 'blank': 'True'}),
'racial_modifiers': ('django.db.models.fields.CharField', [], {'max_length': '512', 'blank': 'True'}),
'ranged': ('django.db.models.fields.CharField', [], {'max_length': '256', 'blank': 'True'}),
'reach': ('django.db.models.fields.CharField', [], {'max_length': '128', 'blank': 'True'}),
'reference': ('django.db.models.fields.CharField', [], {'max_length': '64'}),
'reflex_save': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'resistance': ('django.db.models.fields.CharField', [], {'max_length': '256', 'blank': 'True'}),
'senses': ('django.db.models.fields.CharField', [], {'max_length': '512', 'blank': 'True'}),
'size': ('django.db.models.fields.IntegerField', [], {}),
'skills': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'sorcerer_spells_known': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'space': ('django.db.models.fields.DecimalField', [], {'null': 'True', 'max_digits': '6', 'decimal_places': '2'}),
'special_attacks': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'special_qualities': ('django.db.models.fields.CharField', [], {'max_length': '512', 'blank': 'True'}),
'speed': ('django.db.models.fields.CharField', [], {'max_length': '128', 'blank': 'True'}),
'spell_like_abilities': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'spell_resistance': ('django.db.models.fields.PositiveIntegerField', [], {'default': '0'}),
'spells_known': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'spells_prepared': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'strength': ('django.db.models.fields.IntegerField', [], {}),
'subtypes': ('django.db.models.fields.CharField', [], {'max_length': '64', 'blank': 'True'}),
'treasure': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'type': ('django.db.models.fields.CharField', [], {'max_length': '32'}),
'weaknesses': ('django.db.models.fields.CharField', [], {'max_length': '256', 'blank': 'True'}),
'will_save': ('django.db.models.fields.CharField', [], {'max_length': '256'}),
'wisdom': ('django.db.models.fields.IntegerField', [], {}),
'xp': ('django.db.models.fields.IntegerField', [], {})
},
'srd20.monsterability': {
'Meta': {'object_name': 'MonsterAbility'},
'description': ('django.db.models.fields.TextField', [], {}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'kind': ('django.db.models.fields.CharField', [], {'max_length': '2'}),
'monster': ('django.db.models.fields.related.ForeignKey', [], {'to': "orm['srd20.Monster']"}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '128'})
},
'srd20.spell': {
'Meta': {'ordering': "('name',)", 'object_name': 'Spell', 'db_table': "'spell'"},
'altname': ('django.db.models.fields.SlugField', [], {'max_length': '64'}),
'arcane_focus': ('django.db.models.fields.CharField', [], {'max_length': '256', 'blank': 'True'}),
'arcane_material_components': ('django.db.models.fields.CharField', [], {'max_length': '256', 'blank': 'True'}),
'area': ('django.db.models.fields.CharField', [], {'max_length': '256', 'blank': 'True'}),
'casting_time': ('django.db.models.fields.CharField', [], {'max_length': '64'}),
'cleric_focus': ('django.db.models.fields.CharField', [], {'max_length': '256', 'blank': 'True'}),
'components': ('django.db.models.fields.CharField', [], {'max_length': '256'}),
'description': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'descriptor': ('django.db.models.fields.CharField', [], {'max_length': '256', 'blank': 'True'}),
'druid_focus': ('django.db.models.fields.CharField', [], {'max_length': '256', 'blank': 'True'}),
'duration': ('django.db.models.fields.CharField', [], {'max_length': '128'}),
'effect': ('django.db.models.fields.CharField', [], {'max_length': '256', 'blank': 'True'}),
'focus': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'id': ('django.db.models.fields.AutoField', [], {'primary_key': 'True'}),
'level': ('django.db.models.fields.CharField', [], {'max_length': '128', 'blank': 'True'}),
'material_components': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'name': ('django.db.models.fields.CharField', [], {'max_length': '64'}),
'range': ('django.db.models.fields.CharField', [], {'max_length': '64'}),
'reference': ('django.db.models.fields.CharField', [], {'max_length': '30'}),
'saving_throw': ('django.db.models.fields.CharField', [], {'max_length': '256'}),
'school': ('django.db.models.fields.CharField', [], {'max_length': '32'}),
'short_description': ('django.db.models.fields.CharField', [], {'max_length': '256'}),
'spell_resistance': ('django.db.models.fields.CharField', [], {'max_length': '64'}),
'spellcraft_dc': ('django.db.models.fields.CharField', [], {'max_length': '64', 'blank': 'True'}),
'subschool': ('django.db.models.fields.CharField', [], {'max_length': '128', 'blank': 'True'}),
'target': ('django.db.models.fields.CharField', [], {'max_length': '256', 'blank': 'True'}),
'to_develop': ('django.db.models.fields.TextField', [], {'blank': 'True'}),
'verbal_components': ('django.db.models.fields.CharField', [], {'max_length': '256', 'blank': 'True'}),
'xp_cost': ('django.db.models.fields.TextField', [], {'blank': 'True'})
}
}
complete_apps = ['srd20'] | machinalis/django-srd20 | srd20/migrations/0028_auto__chg_field_monster_immunities.py | Python | bsd-3-clause | 14,391 |
import socket
import struct
#create and connect to the socket
def getSocket(host, port):
s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
s.connect((host, int(port)))
return s
def recvMessage(s):
l = s.recv(4)
L = struct.unpack('<I',l)[0]
#print L
data = ''
while (len(data)<L):
data += s.recv(L-len(data))
return data
def sendMessage(s,mess):
l = len(mess)
byteLength = struct.pack('<I', l)
s.sendall(byteLength)
s.sendall(mess)
def closeSocket(s):
s.close()
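# --- Editorial usage sketch (not part of the original service code) ----------
# The helpers above implement a length-prefixed framing: each message is a
# 4-byte little-endian unsigned length followed by that many payload bytes.
# A typical client exchange (host and port below are hypothetical values):
#
#     s = getSocket('127.0.0.1', 20004)
#     sendMessage(s, 'hello')
#     reply = recvMessage(s)
#     closeSocket(s)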
| idkwim/ictf-framework | services/poipoi/test/service.py | Python | gpl-2.0 | 537 |
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import migrations, models
import django.contrib.postgres.fields
class Migration(migrations.Migration):
dependencies = [
('api', '0004_remove_client_war_participated'),
]
operations = [
migrations.AddField(
model_name='client',
name='war_participated',
field=django.contrib.postgres.fields.ArrayField(default=[], base_field=models.IntegerField(), size=None),
),
]
| dborstelmann/Penguins-GH6 | api/migrations/0005_client_war_participated.py | Python | mit | 523 |
#!/usr/bin/env python3
#
# Copyright (c) 2019 Roberto Riggio
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
"""Exposes a RESTful interface ."""
import uuid
import empower.managers.apimanager.apimanager as apimanager
from empower.core.acl import ACL
from empower.core.etheraddress import EtherAddress
from empower.managers.ranmanager.lvapp.resourcepool import ResourcePool
# pylint: disable=W0223
class ProjectsHandler(apimanager.EmpowerAPIHandler):
"""Projects handler"""
URLS = [r"/api/v1/projects/?",
r"/api/v1/projects/([a-zA-Z0-9-]*)/?"]
@apimanager.validate(min_args=0, max_args=1)
def get(self, *args, **kwargs):
"""Lists all the projects.
Args:
[0], the project id (optional)
Example URLs:
GET /api/v1/projects
[
{
"bootstrap": {
"7069c865-8849-4840-9d96-e028663a5dcf": {
"name": "empower.apps.wifimobilitymanager.
wifimobilitymanager",
"params": {
"every": 2000,
"project_id": "52313ecb-9d00-4b7d-b873-
b55d3d9ada26",
"service_id": "7069c865-8849-4840-9d96-
e028663a5dcf"
}
}
},
"desc": "5G-EmPOWER Wi-Fi Network",
"lte_props": null,
"lte_slices": {
"0": {
"devices": {},
"properties": {
"rbgs": 2,
"ue_scheduler": 0
},
"slice_id": 0
}
},
"owner": "foo",
"project_id": "52313ecb-9d00-4b7d-b873-b55d3d9ada26",
"wifi_props": {
"allowed": {
"04:46:65:49:e0:1f": {
"addr": "04:46:65:49:e0:1f",
"desc": "Some laptop"
},
"04:46:65:49:e0:11": {
"addr": "04:46:65:49:e0:1f",
"desc": "Some other laptop"
},
"04:46:65:49:e0:12": {
"addr": "04:46:65:49:e0:1f",
"desc": "Yet another laptop"
}
},
"bssid_type": "unique",
"ssid": "EmPOWER"
},
"wifi_slices": {
"0": {
"devices": {
"00:0D:B9:30:3E:18": {
"amsdu_aggregation": false,
"quantum": 10000,
"sta_scheduler": 1
}
},
"properties": {
"amsdu_aggregation": false,
"quantum": 10000,
"sta_scheduler": 1
},
"slice_id": 0
},
"80": {
"devices": {
"00:0D:B9:30:3E:18": {
"amsdu_aggregation": false,
"quantum": 10000,
"sta_scheduler": 1
}
},
"properties": {
"amsdu_aggregation": false,
"quantum": 10000,
"sta_scheduler": 1
},
"slice_id": 80
}
}
}
]
GET /api/v1/projects/52313ecb-9d00-4b7d-b873-b55d3d9ada26
{
"bootstrap": {
"7069c865-8849-4840-9d96-e028663a5dcf": {
"name": "empower.apps.wifimobilitymanager.
wifimobilitymanager",
"params": {
"every": 2000,
"project_id": "52313ecb-9d00-4b7d-b873-
b55d3d9ada26",
"service_id": "7069c865-8849-4840-9d96-
e028663a5dcf"
}
}
},
"desc": "5G-EmPOWER Wi-Fi Network",
"lte_props": null,
"lte_slices": {
"0": {
"devices": {},
"properties": {
"rbgs": 2,
"ue_scheduler": 0
},
"slice_id": 0
}
},
"owner": "foo",
"project_id": "52313ecb-9d00-4b7d-b873-b55d3d9ada26",
"wifi_props": {
"allowed": {
"04:46:65:49:e0:1f": {
"addr": "04:46:65:49:e0:1f",
"desc": "Some laptop"
},
"04:46:65:49:e0:11": {
"addr": "04:46:65:49:e0:1f",
"desc": "Some other laptop"
},
"04:46:65:49:e0:12": {
"addr": "04:46:65:49:e0:1f",
"desc": "Yet another laptop"
}
},
"bssid_type": "unique",
"ssid": "EmPOWER"
},
"wifi_slices": {
"0": {
"devices": {
"00:0D:B9:30:3E:18": {
"amsdu_aggregation": false,
"quantum": 10000,
"sta_scheduler": 1
}
},
"properties": {
"amsdu_aggregation": false,
"quantum": 10000,
"sta_scheduler": 1
},
"slice_id": 0
},
"80": {
"devices": {
"00:0D:B9:30:3E:18": {
"amsdu_aggregation": false,
"quantum": 10000,
"sta_scheduler": 1
}
},
"properties": {
"amsdu_aggregation": false,
"quantum": 10000,
"sta_scheduler": 1
},
"slice_id": 80
}
}
}
"""
return self.service.projects \
if not args else self.service.projects[uuid.UUID(args[0])]
@apimanager.validate(returncode=201, min_args=0, max_args=1)
def post(self, *args, **kwargs):
"""Create a new project.
Args:
[0], the project id (optional)
Request:
version: protocol version (1.0)
desc: a human-readable description of the project
owner: the username of the requester
wifi_props: the Wi-Fi properties
lte_props: the LTE properties
"""
project_id = uuid.UUID(args[0]) if args else uuid.uuid4()
wifi_props = kwargs['wifi_props'] if 'wifi_props' in kwargs else None
lte_props = kwargs['lte_props'] if 'lte_props' in kwargs else None
wifi_slcs = kwargs['wifi_slices'] if 'wifi_slices' in kwargs else None
lte_slcs = kwargs['lte_slices'] if 'lte_slices' in kwargs else None
project = self.service.create(project_id=project_id,
desc=kwargs['desc'],
owner=kwargs['owner'],
wifi_props=wifi_props,
lte_props=lte_props)
if wifi_slcs:
for wifi_slice in wifi_slcs:
project.upsert_wifi_slice(**wifi_slice)
if lte_slcs:
for lte_slice in lte_slcs:
project.upsert_lte_slice(**lte_slice)
self.set_header("Location", "/api/v1/projects/%s" % project.project_id)
@apimanager.validate(returncode=204, min_args=1, max_args=1)
def put(self, *args, **kwargs):
"""Update a project.
Args:
[0], the project id (mandatory)
Request:
version: protocol version (1.0)
desc: a human-readable description of the project
"""
project_id = uuid.UUID(args[0])
self.service.update(project_id=project_id, desc=kwargs['desc'])
@apimanager.validate(returncode=204, min_args=0, max_args=1)
def delete(self, *args, **kwargs):
"""Delete one or all projects.
Args:
[0], the project id (optional)
Example URLs:
DELETE /api/v1/projects
DELETE /api/v1/projects/52313ecb-9d00-4b7d-b873-b55d3d9ada26
"""
if args:
self.service.remove(uuid.UUID(args[0]))
else:
self.service.remove_all()
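# --- Editorial usage sketch (not part of the original handler) ---------------
# Seen from a client, creating a project through the handler above is a plain
# JSON POST; the field values below are illustrative and the wifi_props shown
# are a minimal subset of what the docstrings above describe:
#
#     POST /api/v1/projects
#     {
#         "version": "1.0",
#         "desc": "5G-EmPOWER Wi-Fi Network",
#         "owner": "foo",
#         "wifi_props": {"ssid": "EmPOWER", "bssid_type": "unique"}
#     }
#
# The URL of the new project is returned in the Location response header (see
# the set_header call in post above).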
# pylint: disable=W0223
class ProjectsWiFiACLHandler(apimanager.EmpowerAPIHandler):
"""Wi-Fi ACL handler"""
URLS = [r"/api/v1/projects/([a-zA-Z0-9-]*)/wifi_acl/?",
r"/api/v1/projects/([a-zA-Z0-9-]*)/wifi_acl/([a-zA-Z0-9:]*)/?"]
@apimanager.validate(min_args=1, max_args=2)
def get(self, *args, **kwargs):
"""Lists all clients in the ACL.
Args:
[0], the project id (mandatory)
[1], the device address (optional)
Example URLs:
GET /api/v1/projects/52313ecb-9d00-4b7d-b873-b55d3d9ada26/
wifi_acl/
{
"60:57:18:B1:A4:B8": {
"addr": "60:57:18:B1:A4:B8",
"desc": "Dell Laptop"
},
"18:5E:0F:E3:B8:68": {
"addr": "18:5E:0F:E3:B8:68",
"desc": "Dell Laptop"
},
"60:F4:45:D0:3B:FC": {
"addr": "60:F4:45:D0:3B:FC",
"desc": "Roberto's iPhone"
}
}
GET /api/v1/projects/52313ecb-9d00-4b7d-b873-b55d3d9ada26/
wifi_acl/60:57:18:B1:A4:B8
{
"addr": "60:57:18:B1:A4:B8",
"desc": "Dell Laptop"
}
"""
project_id = uuid.UUID(args[0])
project = self.service.projects[project_id]
allowed = project.wifi_props.allowed
return allowed if len(args) == 1 else allowed[str(EtherAddress(args[1]))]
@apimanager.validate(returncode=204, min_args=2, max_args=2)
def put(self, *args, **kwargs):
"""Update entry in ACL.
Args:
[0], the project id (mandatory)
[1]: the device address (mandatory)
Example URLs:
PUT /api/v1/projects/52313ecb-9d00-4b7d-b873-b55d3d9ada26/
wifi_acl/60:57:18:B1:A4:B8
{
"desc": "Dell Laptop"
}
"""
project_id = uuid.UUID(args[0])
project = self.service.projects[project_id]
desc = "Generic Station" if 'desc' not in kwargs else kwargs['desc']
addr = EtherAddress(args[1])
project.upsert_acl(addr, desc)
@apimanager.validate(returncode=201, min_args=1, max_args=1)
def post(self, *args, **kwargs):
"""Add entry in ACL.
Args:
[0], the project id (mandatory)
Example URLs:
POST /api/v1/projects/52313ecb-9d00-4b7d-b873-b55d3d9ada26/
wifi_acl
{
"addr": "60:57:18:B1:A4:B8",
"desc": "Dell Laptop"
}
"""
project_id = uuid.UUID(args[0])
project = self.service.projects[project_id]
desc = "Generic Station" if 'desc' not in kwargs else kwargs['desc']
addr = EtherAddress(kwargs['addr'])
acl = project.upsert_acl(addr, desc)
url = "/api/v1/projects/%s/wifi_acl/%s" % (project_id, acl.addr)
self.set_header("Location", url)
@apimanager.validate(returncode=204, min_args=1, max_args=2)
def delete(self, *args, **kwargs):
"""Delete an entry in ACL.
Args:
[0], the project id (mandatory)
[1], the device address (mandatory)
Example URLs:
DELETE /api/v1/projects/52313ecb-9d00-4b7d-b873-b55d3d9ada26/
wifi_acl/60:57:18:B1:A4:B8
"""
project_id = uuid.UUID(args[0])
project = self.service.projects[project_id]
if len(args) == 2:
project.remove_acl(EtherAddress(args[1]))
else:
project.remove_acl()
# pylint: disable=W0223
class ProjectsWiFiSlicesHandler(apimanager.EmpowerAPIHandler):
"""Wi-Fi slices handler"""
URLS = [r"/api/v1/projects/([a-zA-Z0-9-]*)/wifi_slices/?",
r"/api/v1/projects/([a-zA-Z0-9-]*)/wifi_slices/([0-9]*)/?"]
@apimanager.validate(min_args=1, max_args=2)
def get(self, *args, **kwargs):
"""Lists all slices in a project.
Args:
[0], the project id (mandatory)
[1], the slice id (optional)
Example URLs:
GET /api/v1/projects/52313ecb-9d00-4b7d-b873-b55d3d9ada26/
wifi_slices
[
{
"devices": {
"00:0D:B9:30:3E:18": {
"amsdu_aggregation": false,
"quantum": 10000,
"sta_scheduler": 1
}
},
"properties": {
"amsdu_aggregation": false,
"quantum": 10000,
"sta_scheduler": 1
},
"slice_id": 0
},
{
"devices": {
"00:0D:B9:30:3E:18": {
"amsdu_aggregation": false,
"quantum": 10000,
"sta_scheduler": 1
}
},
"properties": {
"amsdu_aggregation": false,
"quantum": 10000,
"sta_scheduler": 1
},
"slice_id": 80
}
]
GET /api/v1/projects/52313ecb-9d00-4b7d-b873-b55d3d9ada26/
wifi_slices/0
{
"devices": {
"00:0D:B9:30:3E:18": {
"amsdu_aggregation": false,
"quantum": 10000,
"sta_scheduler": 1
}
},
"properties": {
"amsdu_aggregation": false,
"quantum": 10000,
"sta_scheduler": 1
},
"slice_id": 0
}
"""
project_id = uuid.UUID(args[0])
project = self.service.projects[project_id]
return project.wifi_slices \
if len(args) == 1 else project.wifi_slices[str(args[1])]
@apimanager.validate(returncode=201, min_args=0, max_args=1)
def post(self, *args, **kwargs):
"""Create a new slice.
Args:
[0], the project id (mandatory)
Request:
version: protocol version (1.0)
slice_id: the slice id
properties: the properties for this slice
devices: the properties for the devices
"""
project_id = uuid.UUID(args[0])
project = self.service.projects[project_id]
slice_id = project.upsert_wifi_slice(**kwargs)
project.save()
project.refresh_from_db()
self.set_header("Location", "/api/v1/projects/%s/wifi_slices/%s" %
(project_id, slice_id))
@apimanager.validate(returncode=204, min_args=2, max_args=2)
def put(self, *args, **kwargs):
"""Update a slice.
Args:
[0], the project id (mandatory)
[1], the slice id (mandatory)
Request:
version: protocol version (1.0)
slice_id: the slice id
properties: the properties for this slice
devices: the properties for the devices
"""
project_id = uuid.UUID(args[0])
slice_id = str(args[1])
kwargs['slice_id'] = slice_id
project = self.service.projects[project_id]
project.upsert_wifi_slice(**kwargs)
@apimanager.validate(returncode=204, min_args=2, max_args=2)
def delete(self, *args, **kwargs):
"""Delete a slice.
Args:
[0], the project id
[1], the slice id
Example URLs:
DELETE /api/v1/projects/52313ecb-9d00-4b7d-b873-b55d3d9ada26/
wifi_slices/80
"""
project_id = uuid.UUID(args[0])
slice_id = str(args[1])
project = self.service.projects[project_id]
project.delete_wifi_slice(slice_id)
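# Minimal sketch of a request body accepted by the slice POST/PUT handlers
# above. The WTP address and the per-device override are illustrative values,
# not requirements of the API.
EXAMPLE_WIFI_SLICE_REQUEST = {
    "version": "1.0",
    "slice_id": 80,
    "properties": {
        "amsdu_aggregation": False,
        "quantum": 10000,
        "sta_scheduler": 1
    },
    "devices": {
        "00:0D:B9:30:3E:18": {
            "quantum": 5000
        }
    }
}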
# pylint: disable=W0223
class ProjectsLTESlicesHandler(apimanager.EmpowerAPIHandler):
"""LTE Slices handler"""
URLS = [r"/api/v1/projects/([a-zA-Z0-9-]*)/lte_slices/?",
r"/api/v1/projects/([a-zA-Z0-9-]*)/lte_slices/([0-9]*)/?"]
@apimanager.validate(min_args=1, max_args=2)
def get(self, *args, **kwargs):
"""Lists all slices in a project.
Args:
[0], the project id (mandatory)
[1], the slice id (optional)
"""
project_id = uuid.UUID(args[0])
project = self.service.projects[project_id]
return project.lte_slices \
if len(args) == 1 else project.lte_slices[str(args[1])]
@apimanager.validate(returncode=201, min_args=0, max_args=1)
def post(self, *args, **kwargs):
"""Create a new slice.
Args:
[0], the project id (mandatory)
Request:
version: protocol version (1.0)
slice_id: the slice id
properties: the properties for this slice
devices: the properties for the devices
"""
project_id = uuid.UUID(args[0])
project = self.service.projects[project_id]
slice_id = project.upsert_lte_slice(**kwargs)
project.save()
project.refresh_from_db()
self.set_header("Location", "/api/v1/projects/%s/lte_slices/%s" %
(project_id, slice_id))
@apimanager.validate(returncode=204, min_args=2, max_args=2)
def put(self, *args, **kwargs):
"""Update slice.
Args:
[0], the project id (mandatory)
[1], the slice id (mandatory)
Request:
version: protocol version (1.0)
slice_id: the slice id
properties: the properties for this slice
devices: the properties for the devices
"""
project_id = uuid.UUID(args[0])
slice_id = str(args[1])
kwargs['slice_id'] = slice_id
project = self.service.projects[project_id]
project.upsert_lte_slice(**kwargs)
@apimanager.validate(returncode=204, min_args=2, max_args=2)
def delete(self, *args, **kwargs):
"""Delete a slice.
Args:
[0], the project id (mandatory)
[1], the slice id (mandatory)
Example URLs:
DELETE /api/v1/projects/52313ecb-9d00-4b7d-b873-b55d3d9ada26/
lte_slices/80
"""
project_id = uuid.UUID(args[0])
slice_id = str(args[1])
project = self.service.projects[project_id]
project.delete_lte_slice(slice_id)
# pylint: disable=W0223
class ProjectLVAPsHandler(apimanager.EmpowerAPIHandler):
"""Handler for accessing LVAPs. in a project"""
URLS = [r"/api/v1/projects/([a-zA-Z0-9-]*)/lvaps/?",
r"/api/v1/projects/([a-zA-Z0-9-]*)/lvaps/([a-zA-Z0-9:]*)/?"]
@apimanager.validate(min_args=1, max_args=2)
def get(self, *args, **kwargs):
"""List the LVAPs.
Args:
[0], the project id (mandatory)
[1]: the lvap address (optional)
Example URLs:
GET /api/v1/projects/52313ecb-9d00-4b7d-b873-b55d3d9ada26/lvaps
[
{
"addr": "60:F4:45:D0:3B:FC",
"assoc_id": 732,
"association_state": true,
"authentication_state": true,
"blocks": [
...
],
"bssid": "52:31:3E:D0:3B:FC",
"encap": "00:00:00:00:00:00",
"ht_caps": true,
"ht_caps_info": {
"DSSS_CCK_Mode_in_40_MHz": false,
"Forty_MHz_Intolerant": false,
"HT_Delayed_Block_Ack": false,
"HT_Greenfield": false,
"LDPC_Coding_Capability": true,
"L_SIG_TXOP_Protection_Support": false,
"Maximum_AMSDU_Length": false,
"Reserved": false,
"Rx_STBC": 0,
"SM_Power_Save": 3,
"Short_GI_for_20_MHz": true,
"Short_GI_for_40_MHz": true,
"Supported_Channel_Width_Set": true,
"Tx_STBC": false
},
"networks": [
[
"52:31:3E:D0:3B:FC",
"EmPOWER"
]
],
"pending": [],
"ssid": "EmPOWER",
"state": "running",
"wtp": {
...
}
}
]
GET /api/v1/projects/52313ecb-9d00-4b7d-b873-b55d3d9ada26/lvaps/
60:F4:45:D0:3B:FC
{
"addr": "60:F4:45:D0:3B:FC",
"assoc_id": 732,
"association_state": true,
"authentication_state": true,
"blocks": [
...
],
"bssid": "52:31:3E:D0:3B:FC",
"encap": "00:00:00:00:00:00",
"ht_caps": true,
"ht_caps_info": {
"DSSS_CCK_Mode_in_40_MHz": false,
"Forty_MHz_Intolerant": false,
"HT_Delayed_Block_Ack": false,
"HT_Greenfield": false,
"LDPC_Coding_Capability": true,
"L_SIG_TXOP_Protection_Support": false,
"Maximum_AMSDU_Length": false,
"Reserved": false,
"Rx_STBC": 0,
"SM_Power_Save": 3,
"Short_GI_for_20_MHz": true,
"Short_GI_for_40_MHz": true,
"Supported_Channel_Width_Set": true,
"Tx_STBC": false
},
"networks": [
[
"52:31:3E:D0:3B:FC",
"EmPOWER"
]
],
"pending": [],
"ssid": "EmPOWER",
"state": "running",
"wtp": {
...
}
}
"""
project_id = uuid.UUID(args[0])
project = self.service.projects[project_id]
return project.lvaps \
if len(args) == 1 else project.lvaps[EtherAddress(args[1])]
@apimanager.validate(returncode=204, min_args=2, max_args=2)
def put(self, *args, **kwargs):
"""Modify the LVAP
Args:
[0], the project id (mandatory)
[1]: the lvap address (mandatory)
Example URLs:
PUT /api/v1/projects/52313ecb-9d00-4b7d-b873-b55d3d9ada26/lvaps/
60:F4:45:D0:3B:FC
{
"version": "1.0",
"wtp": "04:F0:21:09:F9:AA"
}
"""
project_id = uuid.UUID(args[0])
project = self.service.projects[project_id]
lvap = project.lvaps[EtherAddress(args[1])]
if "blocks" in kwargs:
wtp = project.wtps[EtherAddress(kwargs['wtp'])]
pool = ResourcePool()
for block_id in kwargs["blocks"]:
pool.append(wtp.blocks[block_id])
lvap.blocks = pool
elif "wtp" in kwargs:
lvap.wtp = project.wtps[EtherAddress(kwargs['wtp'])]
if "encap" in kwargs:
encap = EtherAddress(kwargs["encap"])
lvap.encap = encap
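# Sketch of the two request bodies understood by the LVAP PUT handler above:
# a plain handover to another WTP, or a handover to specific blocks on that
# WTP. The addresses and block id are examples, not values from a real setup.
EXAMPLE_LVAP_HANDOVER = {
    "version": "1.0",
    "wtp": "04:F0:21:09:F9:AA"
}
EXAMPLE_LVAP_BLOCK_HANDOVER = {
    "version": "1.0",
    "wtp": "04:F0:21:09:F9:AA",
    "blocks": [0]
}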
| rriggio/empower-runtime | empower/managers/projectsmanager/projectshandler.py | Python | apache-2.0 | 26,352 |
__author__ = 'jbowman'
# https://pythonspot.com/inner-classes/
class Human:
def __init__(self, name):
self.name = name
self.head = self.Head()
def addhead(self):
self.head2 = self.Head()
class Head:
def __init__(self):
self.brain = self.Brain()
def talk(self):
return 'talking...'
class Brain:
def think(self):
return 'thinking...'
if __name__ == '__main__': # execute only if run as a script directly
joey = Human('Joey')
print(joey.name)
print(joey.head.talk())
print(joey.head.brain.think()) | deo1/deo1 | Legacy/PythonTutorial/18InnerClasses.py | Python | mit | 630 |
# -*- coding: utf-8 -*-
# (c) 2012-2014, Michael DeHaan <[email protected]>
# (c) 2016 Toshio Kuratomi <[email protected]>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
# Make coding more python3-ish
from __future__ import (absolute_import, division)
__metaclass__ = type
import errno
import json
import os
import sys
from io import BytesIO, StringIO
from units.mock.procenv import ModuleTestCase, swap_stdin_and_argv
from ansible.compat.tests import unittest
from ansible.compat.tests.mock import patch, MagicMock, mock_open, Mock, call
from ansible.module_utils.six.moves import builtins
realimport = builtins.__import__
class TestModuleUtilsBasic(ModuleTestCase):
def clear_modules(self, mods):
for mod in mods:
if mod in sys.modules:
del sys.modules[mod]
@patch.object(builtins, '__import__')
def test_module_utils_basic_import_syslog(self, mock_import):
def _mock_import(name, *args, **kwargs):
if name == 'syslog':
raise ImportError
return realimport(name, *args, **kwargs)
self.clear_modules(['syslog', 'ansible.module_utils.basic'])
mod = builtins.__import__('ansible.module_utils.basic')
self.assertTrue(mod.module_utils.basic.HAS_SYSLOG)
self.clear_modules(['syslog', 'ansible.module_utils.basic'])
mock_import.side_effect = _mock_import
mod = builtins.__import__('ansible.module_utils.basic')
self.assertFalse(mod.module_utils.basic.HAS_SYSLOG)
@patch.object(builtins, '__import__')
def test_module_utils_basic_import_selinux(self, mock_import):
def _mock_import(name, *args, **kwargs):
if name == 'selinux':
raise ImportError
return realimport(name, *args, **kwargs)
try:
self.clear_modules(['selinux', 'ansible.module_utils.basic'])
mod = builtins.__import__('ansible.module_utils.basic')
self.assertTrue(mod.module_utils.basic.HAVE_SELINUX)
except ImportError:
# no selinux on test system, so skip
pass
self.clear_modules(['selinux', 'ansible.module_utils.basic'])
mock_import.side_effect = _mock_import
mod = builtins.__import__('ansible.module_utils.basic')
self.assertFalse(mod.module_utils.basic.HAVE_SELINUX)
@patch.object(builtins, '__import__')
def test_module_utils_basic_import_json(self, mock_import):
def _mock_import(name, *args, **kwargs):
if name == 'json':
raise ImportError
elif name == 'simplejson':
return MagicMock()
return realimport(name, *args, **kwargs)
self.clear_modules(['json', 'ansible.module_utils.basic'])
mod = builtins.__import__('ansible.module_utils.basic')
self.clear_modules(['json', 'ansible.module_utils.basic'])
mock_import.side_effect = _mock_import
mod = builtins.__import__('ansible.module_utils.basic')
# FIXME: doesn't work yet
#@patch.object(builtins, 'bytes')
#def test_module_utils_basic_bytes(self, mock_bytes):
# mock_bytes.side_effect = NameError()
# from ansible.module_utils import basic
@patch.object(builtins, '__import__')
@unittest.skipIf(sys.version_info[0] >= 3, "literal_eval is available in every version of Python3")
def test_module_utils_basic_import_literal_eval(self, mock_import):
def _mock_import(name, *args, **kwargs):
try:
fromlist = kwargs.get('fromlist', args[2])
except IndexError:
fromlist = []
if name == 'ast' and 'literal_eval' in fromlist:
raise ImportError
return realimport(name, *args, **kwargs)
mock_import.side_effect = _mock_import
self.clear_modules(['ast', 'ansible.module_utils.basic'])
mod = builtins.__import__('ansible.module_utils.basic')
self.assertEqual(mod.module_utils.basic.literal_eval("'1'"), "1")
self.assertEqual(mod.module_utils.basic.literal_eval("1"), 1)
self.assertEqual(mod.module_utils.basic.literal_eval("-1"), -1)
self.assertEqual(mod.module_utils.basic.literal_eval("(1,2,3)"), (1,2,3))
self.assertEqual(mod.module_utils.basic.literal_eval("[1]"), [1])
self.assertEqual(mod.module_utils.basic.literal_eval("True"), True)
self.assertEqual(mod.module_utils.basic.literal_eval("False"), False)
self.assertEqual(mod.module_utils.basic.literal_eval("None"), None)
#self.assertEqual(mod.module_utils.basic.literal_eval('{"a": 1}'), dict(a=1))
self.assertRaises(ValueError, mod.module_utils.basic.literal_eval, "asdfasdfasdf")
@patch.object(builtins, '__import__')
def test_module_utils_basic_import_systemd_journal(self, mock_import):
def _mock_import(name, *args, **kwargs):
try:
fromlist = kwargs.get('fromlist', args[2])
except IndexError:
fromlist = []
if name == 'systemd' and 'journal' in fromlist:
raise ImportError
return realimport(name, *args, **kwargs)
self.clear_modules(['systemd', 'ansible.module_utils.basic'])
mod = builtins.__import__('ansible.module_utils.basic')
self.assertTrue(mod.module_utils.basic.has_journal)
self.clear_modules(['systemd', 'ansible.module_utils.basic'])
mock_import.side_effect = _mock_import
mod = builtins.__import__('ansible.module_utils.basic')
self.assertFalse(mod.module_utils.basic.has_journal)
def test_module_utils_basic_get_platform(self):
with patch('platform.system', return_value='foo'):
from ansible.module_utils.basic import get_platform
self.assertEqual(get_platform(), 'foo')
def test_module_utils_basic_get_distribution(self):
from ansible.module_utils.basic import get_distribution
with patch('platform.system', return_value='Foo'):
self.assertEqual(get_distribution(), None)
with patch('platform.system', return_value='Linux'):
with patch('platform.linux_distribution', return_value=["foo"]):
self.assertEqual(get_distribution(), "Foo")
with patch('os.path.isfile', return_value=True):
with patch('platform.linux_distribution', side_effect=[("AmazonFooBar",)]):
self.assertEqual(get_distribution(), "Amazonfoobar")
with patch('platform.linux_distribution', side_effect=(("",), ("AmazonFooBam",))):
self.assertEqual(get_distribution(), "Amazon")
with patch('platform.linux_distribution', side_effect=[("",),("",)]):
self.assertEqual(get_distribution(), "OtherLinux")
def _dist(distname='', version='', id='', supported_dists=(), full_distribution_name=1):
if supported_dists != ():
return ("Bar", "2", "Two")
else:
return ("", "", "")
with patch('platform.linux_distribution', side_effect=_dist):
self.assertEqual(get_distribution(), "Bar")
with patch('platform.linux_distribution', side_effect=Exception("boo")):
with patch('platform.dist', return_value=("bar", "2", "Two")):
self.assertEqual(get_distribution(), "Bar")
def test_module_utils_basic_get_distribution_version(self):
from ansible.module_utils.basic import get_distribution_version
with patch('platform.system', return_value='Foo'):
self.assertEqual(get_distribution_version(), None)
with patch('platform.system', return_value='Linux'):
with patch('platform.linux_distribution', return_value=("foo", "1", "One")):
self.assertEqual(get_distribution_version(), "1")
with patch('os.path.isfile', return_value=True):
def _dist(distname='', version='', id='', supported_dists=(), full_distribution_name=1):
if supported_dists != ():
return ("AmazonFooBar", "2", "")
else:
return ("", "", "")
with patch('platform.linux_distribution', side_effect=_dist):
self.assertEqual(get_distribution_version(), "2")
with patch('platform.linux_distribution', side_effect=Exception("boo")):
with patch('platform.dist', return_value=("bar", "3", "Three")):
self.assertEqual(get_distribution_version(), "3")
def test_module_utils_basic_load_platform_subclass(self):
class LinuxTest:
pass
class Foo(LinuxTest):
platform = "Linux"
distribution = None
class Bar(LinuxTest):
platform = "Linux"
distribution = "Bar"
from ansible.module_utils.basic import load_platform_subclass
# match just the platform class, not a specific distribution
with patch('ansible.module_utils.basic.get_platform', return_value="Linux"):
with patch('ansible.module_utils.basic.get_distribution', return_value=None):
self.assertIs(type(load_platform_subclass(LinuxTest)), Foo)
# match both the distribution and platform class
with patch('ansible.module_utils.basic.get_platform', return_value="Linux"):
with patch('ansible.module_utils.basic.get_distribution', return_value="Bar"):
self.assertIs(type(load_platform_subclass(LinuxTest)), Bar)
# if neither match, the fallback should be the top-level class
with patch('ansible.module_utils.basic.get_platform', return_value="Foo"):
with patch('ansible.module_utils.basic.get_distribution', return_value=None):
self.assertIs(type(load_platform_subclass(LinuxTest)), LinuxTest)
def test_module_utils_basic_json_dict_converters(self):
from ansible.module_utils.basic import json_dict_unicode_to_bytes, json_dict_bytes_to_unicode
test_data = dict(
item1 = u"Fóo",
item2 = [u"Bár", u"Bam"],
item3 = dict(sub1=u"Súb"),
item4 = (u"föo", u"bär", u"©"),
item5 = 42,
)
res = json_dict_unicode_to_bytes(test_data)
res2 = json_dict_bytes_to_unicode(res)
self.assertEqual(test_data, res2)
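        # For reference, the round trip works because the first helper
        # recursively encodes every text value (u"Fóo" becomes UTF-8 bytes)
        # and the second decodes them back, so the comparison above holds.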
def test_module_utils_basic_get_module_path(self):
from ansible.module_utils.basic import get_module_path
with patch('os.path.realpath', return_value='/path/to/foo/'):
self.assertEqual(get_module_path(), '/path/to/foo')
def test_module_utils_basic_ansible_module_creation(self):
from ansible.module_utils import basic
am = basic.AnsibleModule(
argument_spec=dict(),
)
arg_spec = dict(
foo = dict(required=True),
bar = dict(),
bam = dict(),
baz = dict(),
)
mut_ex = (('bar', 'bam'),)
req_to = (('bam', 'baz'),)
# should test ok
args = json.dumps(dict(ANSIBLE_MODULE_ARGS={"foo": "hello"}))
with swap_stdin_and_argv(stdin_data=args):
basic._ANSIBLE_ARGS = None
am = basic.AnsibleModule(
argument_spec = arg_spec,
mutually_exclusive = mut_ex,
required_together = req_to,
no_log=True,
check_invalid_arguments=False,
add_file_common_args=True,
supports_check_mode=True,
)
# FIXME: add asserts here to verify the basic config
# fail, because a required param was not specified
args = json.dumps(dict(ANSIBLE_MODULE_ARGS={}))
with swap_stdin_and_argv(stdin_data=args):
basic._ANSIBLE_ARGS = None
self.assertRaises(
SystemExit,
basic.AnsibleModule,
argument_spec = arg_spec,
mutually_exclusive = mut_ex,
required_together = req_to,
no_log=True,
check_invalid_arguments=False,
add_file_common_args=True,
supports_check_mode=True,
)
# fail because of mutually exclusive parameters
args = json.dumps(dict(ANSIBLE_MODULE_ARGS={"foo":"hello", "bar": "bad", "bam": "bad"}))
with swap_stdin_and_argv(stdin_data=args):
basic._ANSIBLE_ARGS = None
self.assertRaises(
SystemExit,
basic.AnsibleModule,
argument_spec = arg_spec,
mutually_exclusive = mut_ex,
required_together = req_to,
no_log=True,
check_invalid_arguments=False,
add_file_common_args=True,
supports_check_mode=True,
)
# fail because a param required due to another param was not specified
args = json.dumps(dict(ANSIBLE_MODULE_ARGS={"bam": "bad"}))
with swap_stdin_and_argv(stdin_data=args):
basic._ANSIBLE_ARGS = None
self.assertRaises(
SystemExit,
basic.AnsibleModule,
argument_spec = arg_spec,
mutually_exclusive = mut_ex,
required_together = req_to,
no_log=True,
check_invalid_arguments=False,
add_file_common_args=True,
supports_check_mode=True,
)
def test_module_utils_basic_ansible_module_type_check(self):
from ansible.module_utils import basic
arg_spec = dict(
foo = dict(type='float'),
foo2 = dict(type='float'),
foo3 = dict(type='float'),
bar = dict(type='int'),
bar2 = dict(type='int'),
)
# should test ok
args = json.dumps(dict(ANSIBLE_MODULE_ARGS={
"foo": 123.0, # float
"foo2": 123, # int
"foo3": "123", # string
"bar": 123, # int
"bar2": "123", # string
}))
with swap_stdin_and_argv(stdin_data=args):
basic._ANSIBLE_ARGS = None
am = basic.AnsibleModule(
argument_spec = arg_spec,
no_log=True,
check_invalid_arguments=False,
add_file_common_args=True,
supports_check_mode=True,
)
# fail, because bar does not accept floating point numbers
args = json.dumps(dict(ANSIBLE_MODULE_ARGS={"bar": 123.0}))
with swap_stdin_and_argv(stdin_data=args):
basic._ANSIBLE_ARGS = None
self.assertRaises(
SystemExit,
basic.AnsibleModule,
argument_spec = arg_spec,
no_log=True,
check_invalid_arguments=False,
add_file_common_args=True,
supports_check_mode=True,
)
def test_module_utils_basic_ansible_module_load_file_common_arguments(self):
from ansible.module_utils import basic
basic._ANSIBLE_ARGS = None
am = basic.AnsibleModule(
argument_spec = dict(),
)
am.selinux_mls_enabled = MagicMock()
am.selinux_mls_enabled.return_value = True
am.selinux_default_context = MagicMock()
am.selinux_default_context.return_value = 'unconfined_u:object_r:default_t:s0'.split(':', 3)
# with no params, the result should be an empty dict
res = am.load_file_common_arguments(params=dict())
self.assertEqual(res, dict())
base_params = dict(
path = '/path/to/file',
mode = 0o600,
owner = 'root',
group = 'root',
seuser = '_default',
serole = '_default',
setype = '_default',
selevel = '_default',
)
extended_params = base_params.copy()
extended_params.update(dict(
follow = True,
foo = 'bar',
))
final_params = base_params.copy()
final_params.update(dict(
path = '/path/to/real_file',
secontext=['unconfined_u', 'object_r', 'default_t', 's0'],
attributes=None,
))
# with the proper params specified, the returned dictionary should represent
# only those params which have something to do with the file arguments, excluding
# other params and updated as required with proper values which may have been
# massaged by the method
with patch('os.path.islink', return_value=True):
with patch('os.path.realpath', return_value='/path/to/real_file'):
res = am.load_file_common_arguments(params=extended_params)
self.assertEqual(res, final_params)
def test_module_utils_basic_ansible_module_selinux_mls_enabled(self):
from ansible.module_utils import basic
basic._ANSIBLE_ARGS = None
am = basic.AnsibleModule(
argument_spec = dict(),
)
basic.HAVE_SELINUX = False
self.assertEqual(am.selinux_mls_enabled(), False)
basic.HAVE_SELINUX = True
basic.selinux = Mock()
with patch.dict('sys.modules', {'selinux': basic.selinux}):
with patch('selinux.is_selinux_mls_enabled', return_value=0):
self.assertEqual(am.selinux_mls_enabled(), False)
with patch('selinux.is_selinux_mls_enabled', return_value=1):
self.assertEqual(am.selinux_mls_enabled(), True)
delattr(basic, 'selinux')
def test_module_utils_basic_ansible_module_selinux_initial_context(self):
from ansible.module_utils import basic
basic._ANSIBLE_ARGS = None
am = basic.AnsibleModule(
argument_spec = dict(),
)
am.selinux_mls_enabled = MagicMock()
am.selinux_mls_enabled.return_value = False
self.assertEqual(am.selinux_initial_context(), [None, None, None])
am.selinux_mls_enabled.return_value = True
self.assertEqual(am.selinux_initial_context(), [None, None, None, None])
def test_module_utils_basic_ansible_module_selinux_enabled(self):
from ansible.module_utils import basic
basic._ANSIBLE_ARGS = None
am = basic.AnsibleModule(
argument_spec = dict(),
)
# we first test the cases where the python selinux lib is
# not installed, which has two paths: one in which the system
# does have selinux installed (and the selinuxenabled command
# is present and returns 0 when run), or selinux is not installed
basic.HAVE_SELINUX = False
am.get_bin_path = MagicMock()
am.get_bin_path.return_value = '/path/to/selinuxenabled'
am.run_command = MagicMock()
am.run_command.return_value=(0, '', '')
self.assertRaises(SystemExit, am.selinux_enabled)
am.get_bin_path.return_value = None
self.assertEqual(am.selinux_enabled(), False)
# finally we test the case where the python selinux lib is installed,
# and both possibilities there (enabled vs. disabled)
basic.HAVE_SELINUX = True
basic.selinux = Mock()
with patch.dict('sys.modules', {'selinux': basic.selinux}):
with patch('selinux.is_selinux_enabled', return_value=0):
self.assertEqual(am.selinux_enabled(), False)
with patch('selinux.is_selinux_enabled', return_value=1):
self.assertEqual(am.selinux_enabled(), True)
delattr(basic, 'selinux')
def test_module_utils_basic_ansible_module_selinux_default_context(self):
from ansible.module_utils import basic
basic._ANSIBLE_ARGS = None
am = basic.AnsibleModule(
argument_spec = dict(),
)
am.selinux_initial_context = MagicMock(return_value=[None, None, None, None])
am.selinux_enabled = MagicMock(return_value=True)
# we first test the cases where the python selinux lib is not installed
basic.HAVE_SELINUX = False
self.assertEqual(am.selinux_default_context(path='/foo/bar'), [None, None, None, None])
# all following tests assume the python selinux bindings are installed
basic.HAVE_SELINUX = True
basic.selinux = Mock()
with patch.dict('sys.modules', {'selinux': basic.selinux}):
# next, we test with a mocked implementation of selinux.matchpathcon to simulate
# an actual context being found
with patch('selinux.matchpathcon', return_value=[0, 'unconfined_u:object_r:default_t:s0']):
self.assertEqual(am.selinux_default_context(path='/foo/bar'), ['unconfined_u', 'object_r', 'default_t', 's0'])
# we also test the case where matchpathcon returned a failure
with patch('selinux.matchpathcon', return_value=[-1, '']):
self.assertEqual(am.selinux_default_context(path='/foo/bar'), [None, None, None, None])
# finally, we test where an OSError occurred during matchpathcon's call
with patch('selinux.matchpathcon', side_effect=OSError):
self.assertEqual(am.selinux_default_context(path='/foo/bar'), [None, None, None, None])
delattr(basic, 'selinux')
def test_module_utils_basic_ansible_module_selinux_context(self):
from ansible.module_utils import basic
basic._ANSIBLE_ARGS = None
am = basic.AnsibleModule(
argument_spec = dict(),
)
am.selinux_initial_context = MagicMock(return_value=[None, None, None, None])
am.selinux_enabled = MagicMock(return_value=True)
# we first test the cases where the python selinux lib is not installed
basic.HAVE_SELINUX = False
self.assertEqual(am.selinux_context(path='/foo/bar'), [None, None, None, None])
# all following tests assume the python selinux bindings are installed
basic.HAVE_SELINUX = True
basic.selinux = Mock()
with patch.dict('sys.modules', {'selinux': basic.selinux}):
# next, we test with a mocked implementation of selinux.lgetfilecon_raw to simulate
# an actual context being found
with patch('selinux.lgetfilecon_raw', return_value=[0, 'unconfined_u:object_r:default_t:s0']):
self.assertEqual(am.selinux_context(path='/foo/bar'), ['unconfined_u', 'object_r', 'default_t', 's0'])
# we also test the case where matchpathcon returned a failure
with patch('selinux.lgetfilecon_raw', return_value=[-1, '']):
self.assertEqual(am.selinux_context(path='/foo/bar'), [None, None, None, None])
# finally, we test where an OSError occurred during matchpathcon's call
e = OSError()
e.errno = errno.ENOENT
with patch('selinux.lgetfilecon_raw', side_effect=e):
self.assertRaises(SystemExit, am.selinux_context, path='/foo/bar')
e = OSError()
with patch('selinux.lgetfilecon_raw', side_effect=e):
self.assertRaises(SystemExit, am.selinux_context, path='/foo/bar')
delattr(basic, 'selinux')
def test_module_utils_basic_ansible_module_is_special_selinux_path(self):
from ansible.module_utils import basic
args = json.dumps(dict(ANSIBLE_MODULE_ARGS={'_ansible_selinux_special_fs': "nfs,nfsd,foos"}))
with swap_stdin_and_argv(stdin_data=args):
basic._ANSIBLE_ARGS = None
am = basic.AnsibleModule(
argument_spec = dict(),
)
def _mock_find_mount_point(path):
if path.startswith('/some/path'):
return '/some/path'
elif path.startswith('/weird/random/fstype'):
return '/weird/random/fstype'
return '/'
am.find_mount_point = MagicMock(side_effect=_mock_find_mount_point)
am.selinux_context = MagicMock(return_value=['foo_u', 'foo_r', 'foo_t', 's0'])
m = mock_open()
m.side_effect = OSError
with patch.object(builtins, 'open', m, create=True):
self.assertEqual(am.is_special_selinux_path('/some/path/that/should/be/nfs'), (False, None))
mount_data = [
'/dev/disk1 / ext4 rw,seclabel,relatime,data=ordered 0 0\n',
'1.1.1.1:/path/to/nfs /some/path nfs ro 0 0\n',
'whatever /weird/random/fstype foos rw 0 0\n',
]
# mock_open has a broken readlines() implementation apparently...
# this should work by default but doesn't, so we fix it
m = mock_open(read_data=''.join(mount_data))
m.return_value.readlines.return_value = mount_data
with patch.object(builtins, 'open', m, create=True):
self.assertEqual(am.is_special_selinux_path('/some/random/path'), (False, None))
self.assertEqual(am.is_special_selinux_path('/some/path/that/should/be/nfs'), (True, ['foo_u', 'foo_r', 'foo_t', 's0']))
self.assertEqual(am.is_special_selinux_path('/weird/random/fstype/path'), (True, ['foo_u', 'foo_r', 'foo_t', 's0']))
def test_module_utils_basic_ansible_module_user_and_group(self):
from ansible.module_utils import basic
basic._ANSIBLE_ARGS = None
am = basic.AnsibleModule(
argument_spec = dict(),
)
mock_stat = MagicMock()
mock_stat.st_uid = 0
mock_stat.st_gid = 0
with patch('os.lstat', return_value=mock_stat):
self.assertEqual(am.user_and_group('/path/to/file'), (0, 0))
def test_module_utils_basic_ansible_module_find_mount_point(self):
from ansible.module_utils import basic
basic._ANSIBLE_ARGS = None
am = basic.AnsibleModule(
argument_spec = dict(),
)
def _mock_ismount(path):
if path == '/':
return True
return False
with patch('os.path.ismount', side_effect=_mock_ismount):
self.assertEqual(am.find_mount_point('/root/fs/../mounted/path/to/whatever'), '/')
def _mock_ismount(path):
if path == '/subdir/mount':
return True
return False
with patch('os.path.ismount', side_effect=_mock_ismount):
self.assertEqual(am.find_mount_point('/subdir/mount/path/to/whatever'), '/subdir/mount')
def test_module_utils_basic_ansible_module_set_context_if_different(self):
from ansible.module_utils import basic
basic._ANSIBLE_ARGS = None
am = basic.AnsibleModule(
argument_spec = dict(),
)
basic.HAVE_SELINUX = False
am.selinux_enabled = MagicMock(return_value=False)
self.assertEqual(am.set_context_if_different('/path/to/file', ['foo_u', 'foo_r', 'foo_t', 's0'], True), True)
self.assertEqual(am.set_context_if_different('/path/to/file', ['foo_u', 'foo_r', 'foo_t', 's0'], False), False)
basic.HAVE_SELINUX = True
am.selinux_enabled = MagicMock(return_value=True)
am.selinux_context = MagicMock(return_value=['bar_u', 'bar_r', None, None])
am.is_special_selinux_path = MagicMock(return_value=(False, None))
basic.selinux = Mock()
with patch.dict('sys.modules', {'selinux': basic.selinux}):
with patch('selinux.lsetfilecon', return_value=0) as m:
self.assertEqual(am.set_context_if_different('/path/to/file', ['foo_u', 'foo_r', 'foo_t', 's0'], False), True)
m.assert_called_with('/path/to/file', 'foo_u:foo_r:foo_t:s0')
m.reset_mock()
am.check_mode = True
self.assertEqual(am.set_context_if_different('/path/to/file', ['foo_u', 'foo_r', 'foo_t', 's0'], False), True)
self.assertEqual(m.called, False)
am.check_mode = False
with patch('selinux.lsetfilecon', return_value=1) as m:
self.assertRaises(SystemExit, am.set_context_if_different, '/path/to/file', ['foo_u', 'foo_r', 'foo_t', 's0'], True)
with patch('selinux.lsetfilecon', side_effect=OSError) as m:
self.assertRaises(SystemExit, am.set_context_if_different, '/path/to/file', ['foo_u', 'foo_r', 'foo_t', 's0'], True)
am.is_special_selinux_path = MagicMock(return_value=(True, ['sp_u', 'sp_r', 'sp_t', 's0']))
with patch('selinux.lsetfilecon', return_value=0) as m:
self.assertEqual(am.set_context_if_different('/path/to/file', ['foo_u', 'foo_r', 'foo_t', 's0'], False), True)
m.assert_called_with('/path/to/file', 'sp_u:sp_r:sp_t:s0')
delattr(basic, 'selinux')
def test_module_utils_basic_ansible_module_set_owner_if_different(self):
from ansible.module_utils import basic
basic._ANSIBLE_ARGS = None
am = basic.AnsibleModule(
argument_spec = dict(),
)
self.assertEqual(am.set_owner_if_different('/path/to/file', None, True), True)
self.assertEqual(am.set_owner_if_different('/path/to/file', None, False), False)
am.user_and_group = MagicMock(return_value=(500, 500))
with patch('os.lchown', return_value=None) as m:
self.assertEqual(am.set_owner_if_different('/path/to/file', 0, False), True)
m.assert_called_with(b'/path/to/file', 0, -1)
def _mock_getpwnam(*args, **kwargs):
mock_pw = MagicMock()
mock_pw.pw_uid = 0
return mock_pw
m.reset_mock()
with patch('pwd.getpwnam', side_effect=_mock_getpwnam):
self.assertEqual(am.set_owner_if_different('/path/to/file', 'root', False), True)
m.assert_called_with(b'/path/to/file', 0, -1)
with patch('pwd.getpwnam', side_effect=KeyError):
self.assertRaises(SystemExit, am.set_owner_if_different, '/path/to/file', 'root', False)
m.reset_mock()
am.check_mode = True
self.assertEqual(am.set_owner_if_different('/path/to/file', 0, False), True)
self.assertEqual(m.called, False)
am.check_mode = False
with patch('os.lchown', side_effect=OSError) as m:
self.assertRaises(SystemExit, am.set_owner_if_different, '/path/to/file', 'root', False)
def test_module_utils_basic_ansible_module_set_group_if_different(self):
from ansible.module_utils import basic
basic._ANSIBLE_ARGS = None
am = basic.AnsibleModule(
argument_spec = dict(),
)
self.assertEqual(am.set_group_if_different('/path/to/file', None, True), True)
self.assertEqual(am.set_group_if_different('/path/to/file', None, False), False)
am.user_and_group = MagicMock(return_value=(500, 500))
with patch('os.lchown', return_value=None) as m:
self.assertEqual(am.set_group_if_different('/path/to/file', 0, False), True)
m.assert_called_with(b'/path/to/file', -1, 0)
def _mock_getgrnam(*args, **kwargs):
mock_gr = MagicMock()
mock_gr.gr_gid = 0
return mock_gr
m.reset_mock()
with patch('grp.getgrnam', side_effect=_mock_getgrnam):
self.assertEqual(am.set_group_if_different('/path/to/file', 'root', False), True)
m.assert_called_with(b'/path/to/file', -1, 0)
with patch('grp.getgrnam', side_effect=KeyError):
self.assertRaises(SystemExit, am.set_group_if_different, '/path/to/file', 'root', False)
m.reset_mock()
am.check_mode = True
self.assertEqual(am.set_group_if_different('/path/to/file', 0, False), True)
self.assertEqual(m.called, False)
am.check_mode = False
with patch('os.lchown', side_effect=OSError) as m:
self.assertRaises(SystemExit, am.set_group_if_different, '/path/to/file', 'root', False)
@patch('tempfile.mkstemp')
@patch('os.umask')
@patch('shutil.copyfileobj')
@patch('shutil.move')
@patch('shutil.copy2')
@patch('os.rename')
@patch('pwd.getpwuid')
@patch('os.getuid')
@patch('os.environ')
@patch('os.getlogin')
@patch('os.chown')
@patch('os.chmod')
@patch('os.stat')
@patch('os.path.exists')
@patch('os.close')
def test_module_utils_basic_ansible_module_atomic_move(
self,
_os_close,
_os_path_exists,
_os_stat,
_os_chmod,
_os_chown,
_os_getlogin,
_os_environ,
_os_getuid,
_pwd_getpwuid,
_os_rename,
_shutil_copy2,
_shutil_move,
_shutil_copyfileobj,
_os_umask,
_tempfile_mkstemp):
from ansible.module_utils import basic
basic._ANSIBLE_ARGS = None
am = basic.AnsibleModule(
argument_spec = dict(),
)
environ = dict()
_os_environ.__getitem__ = environ.__getitem__
_os_environ.__setitem__ = environ.__setitem__
am.selinux_enabled = MagicMock()
am.selinux_context = MagicMock()
am.selinux_default_context = MagicMock()
am.set_context_if_different = MagicMock()
# test destination does not exist, no selinux, login name = 'root',
# no environment, os.rename() succeeds
_os_path_exists.side_effect = [False, False]
_os_getlogin.return_value = 'root'
_os_getuid.return_value = 0
_pwd_getpwuid.return_value = ('root', '', 0, 0, '', '', '')
_os_rename.return_value = None
_os_umask.side_effect = [18, 0]
am.selinux_enabled.return_value = False
_os_chmod.reset_mock()
_os_chown.reset_mock()
am.set_context_if_different.reset_mock()
am.atomic_move('/path/to/src', '/path/to/dest')
_os_rename.assert_called_with(b'/path/to/src', b'/path/to/dest')
self.assertEqual(_os_chmod.call_args_list, [call(b'/path/to/dest', basic.DEFAULT_PERM & ~18)])
# same as above, except selinux_enabled
_os_path_exists.side_effect = [False, False]
_os_getlogin.return_value = 'root'
_os_getuid.return_value = 0
_pwd_getpwuid.return_value = ('root', '', 0, 0, '', '', '')
_os_rename.return_value = None
_os_umask.side_effect = [18, 0]
mock_context = MagicMock()
am.selinux_default_context.return_value = mock_context
am.selinux_enabled.return_value = True
_os_chmod.reset_mock()
_os_chown.reset_mock()
am.set_context_if_different.reset_mock()
am.selinux_default_context.reset_mock()
am.atomic_move('/path/to/src', '/path/to/dest')
_os_rename.assert_called_with(b'/path/to/src', b'/path/to/dest')
self.assertEqual(_os_chmod.call_args_list, [call(b'/path/to/dest', basic.DEFAULT_PERM & ~18)])
self.assertEqual(am.selinux_default_context.call_args_list, [call('/path/to/dest')])
self.assertEqual(am.set_context_if_different.call_args_list, [call('/path/to/dest', mock_context, False)])
# now with dest present, no selinux, also raise OSError when using
# os.getlogin() to test corner case with no tty
_os_path_exists.side_effect = [True, True]
_os_getlogin.side_effect = OSError()
_os_getuid.return_value = 0
_pwd_getpwuid.return_value = ('root', '', 0, 0, '', '', '')
_os_rename.return_value = None
_os_umask.side_effect = [18, 0]
environ['LOGNAME'] = 'root'
stat1 = MagicMock()
stat1.st_mode = 0o0644
stat1.st_uid = 0
stat1.st_gid = 0
_os_stat.side_effect = [stat1,]
am.selinux_enabled.return_value = False
_os_chmod.reset_mock()
_os_chown.reset_mock()
am.set_context_if_different.reset_mock()
am.atomic_move('/path/to/src', '/path/to/dest')
_os_rename.assert_called_with(b'/path/to/src', b'/path/to/dest')
        # dest present, selinux enabled
_os_path_exists.side_effect = [True, True]
_os_getlogin.return_value = 'root'
_os_getuid.return_value = 0
_pwd_getpwuid.return_value = ('root', '', 0, 0, '', '', '')
_os_rename.return_value = None
_os_umask.side_effect = [18, 0]
stat1 = MagicMock()
stat1.st_mode = 0o0644
stat1.st_uid = 0
stat1.st_gid = 0
_os_stat.side_effect = [stat1,]
mock_context = MagicMock()
am.selinux_context.return_value = mock_context
am.selinux_enabled.return_value = True
_os_chmod.reset_mock()
_os_chown.reset_mock()
am.set_context_if_different.reset_mock()
am.selinux_default_context.reset_mock()
am.atomic_move('/path/to/src', '/path/to/dest')
_os_rename.assert_called_with(b'/path/to/src', b'/path/to/dest')
self.assertEqual(am.selinux_context.call_args_list, [call('/path/to/dest')])
self.assertEqual(am.set_context_if_different.call_args_list, [call('/path/to/dest', mock_context, False)])
# now testing with exceptions raised
# have os.stat raise OSError which is not EPERM
_os_stat.side_effect = OSError()
_os_path_exists.side_effect = [True, True]
_os_getlogin.return_value = 'root'
_os_getuid.return_value = 0
_pwd_getpwuid.return_value = ('root', '', 0, 0, '', '', '')
_os_rename.return_value = None
_os_umask.side_effect = [18, 0]
self.assertRaises(OSError, am.atomic_move, '/path/to/src', '/path/to/dest')
# and now have os.stat return EPERM, which should not fail
_os_stat.side_effect = OSError(errno.EPERM, 'testing os stat with EPERM')
_os_path_exists.side_effect = [True, True]
_os_getlogin.return_value = 'root'
_os_getuid.return_value = 0
_pwd_getpwuid.return_value = ('root', '', 0, 0, '', '', '')
_os_rename.return_value = None
_os_umask.side_effect = [18, 0]
# FIXME: we don't assert anything here yet
am.atomic_move('/path/to/src', '/path/to/dest')
# now we test os.rename() raising errors...
# first we test with a bad errno to verify it bombs out
_os_path_exists.side_effect = [False, False]
_os_getlogin.return_value = 'root'
_os_getuid.return_value = 0
_pwd_getpwuid.return_value = ('root', '', 0, 0, '', '', '')
_os_umask.side_effect = [18, 0]
_os_rename.side_effect = OSError(errno.EIO, 'failing with EIO')
self.assertRaises(SystemExit, am.atomic_move, '/path/to/src', '/path/to/dest')
# next we test with EPERM so it continues to the alternate code for moving
# test with mkstemp raising an error first
_os_path_exists.side_effect = [False, False]
_os_getlogin.return_value = 'root'
_os_getuid.return_value = 0
_os_close.return_value = None
_pwd_getpwuid.return_value = ('root', '', 0, 0, '', '', '')
_os_umask.side_effect = [18, 0]
_os_rename.side_effect = [OSError(errno.EPERM, 'failing with EPERM'), None]
_tempfile_mkstemp.return_value = None
_tempfile_mkstemp.side_effect = OSError()
am.selinux_enabled.return_value = False
self.assertRaises(SystemExit, am.atomic_move, '/path/to/src', '/path/to/dest')
# then test with it creating a temp file
_os_path_exists.side_effect = [False, False, False]
_os_getlogin.return_value = 'root'
_os_getuid.return_value = 0
_pwd_getpwuid.return_value = ('root', '', 0, 0, '', '', '')
_os_umask.side_effect = [18, 0]
_os_rename.side_effect = [OSError(errno.EPERM, 'failing with EPERM'), None]
mock_stat1 = MagicMock()
mock_stat2 = MagicMock()
mock_stat3 = MagicMock()
_os_stat.return_value = [mock_stat1, mock_stat2, mock_stat3]
_os_stat.side_effect = None
_tempfile_mkstemp.return_value = (None, '/path/to/tempfile')
_tempfile_mkstemp.side_effect = None
am.selinux_enabled.return_value = False
# FIXME: we don't assert anything here yet
am.atomic_move('/path/to/src', '/path/to/dest')
# same as above, but with selinux enabled
_os_path_exists.side_effect = [False, False, False]
_os_getlogin.return_value = 'root'
_os_getuid.return_value = 0
_pwd_getpwuid.return_value = ('root', '', 0, 0, '', '', '')
_os_umask.side_effect = [18, 0]
_os_rename.side_effect = [OSError(errno.EPERM, 'failing with EPERM'), None]
_tempfile_mkstemp.return_value = (None, None)
mock_context = MagicMock()
am.selinux_default_context.return_value = mock_context
am.selinux_enabled.return_value = True
am.atomic_move('/path/to/src', '/path/to/dest')
def test_module_utils_basic_ansible_module__symbolic_mode_to_octal(self):
from ansible.module_utils import basic
basic._ANSIBLE_ARGS = None
am = basic.AnsibleModule(
argument_spec = dict(),
)
mock_stat = MagicMock()
# FIXME: trying many more combinations here would be good
# directory, give full perms to all, then one group at a time
mock_stat.st_mode = 0o040000
self.assertEqual(am._symbolic_mode_to_octal(mock_stat, 'a+rwx'), 0o0777)
self.assertEqual(am._symbolic_mode_to_octal(mock_stat, 'u+rwx,g+rwx,o+rwx'), 0o0777)
self.assertEqual(am._symbolic_mode_to_octal(mock_stat, 'o+rwx'), 0o0007)
self.assertEqual(am._symbolic_mode_to_octal(mock_stat, 'g+rwx'), 0o0070)
self.assertEqual(am._symbolic_mode_to_octal(mock_stat, 'u+rwx'), 0o0700)
# same as above, but in reverse so removing permissions
mock_stat.st_mode = 0o040777
self.assertEqual(am._symbolic_mode_to_octal(mock_stat, 'a-rwx'), 0o0000)
self.assertEqual(am._symbolic_mode_to_octal(mock_stat, 'u-rwx,g-rwx,o-rwx'), 0o0000)
self.assertEqual(am._symbolic_mode_to_octal(mock_stat, 'o-rwx'), 0o0770)
self.assertEqual(am._symbolic_mode_to_octal(mock_stat, 'g-rwx'), 0o0707)
self.assertEqual(am._symbolic_mode_to_octal(mock_stat, 'u-rwx'), 0o0077)
# now using absolute assignment
mock_stat.st_mode = 0o040000
self.assertEqual(am._symbolic_mode_to_octal(mock_stat, 'a=rwx'), 0o0777)
self.assertEqual(am._symbolic_mode_to_octal(mock_stat, 'u=rwx,g=rwx,o=rwx'), 0o0777)
self.assertEqual(am._symbolic_mode_to_octal(mock_stat, 'o=rwx'), 0o0007)
self.assertEqual(am._symbolic_mode_to_octal(mock_stat, 'g=rwx'), 0o0070)
self.assertEqual(am._symbolic_mode_to_octal(mock_stat, 'u=rwx'), 0o0700)
# invalid modes
mock_stat.st_mode = 0o040000
self.assertRaises(ValueError, am._symbolic_mode_to_octal, mock_stat, 'a=foo')
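        # Worked example of the arithmetic asserted above: 'g+rwx' sets read
        # (4), write (2) and execute (1) in the group triplet, i.e. 0o070,
        # while 'u+rwx' places the same bits in the owner triplet, i.e. 0o700.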
| bjolivot/ansible | test/units/module_utils/test_basic.py | Python | gpl-3.0 | 44,007 |
# Copyright (C) 2017 ABRT Team
# Copyright (C) 2017 Red Hat, Inc.
#
# This file is part of faf.
#
# faf is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# faf is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with faf. If not, see <http://www.gnu.org/licenses/>.
import os
import urllib
from http.client import RemoteDisconnected
from pyfaf.actions import Action
from pyfaf.storage.opsys import Repo
class CheckRepo(Action):
name = "check-repo"
def check_repo(self, repo) -> int:
# Test available mirror
for mirror in repo.url_list:
if mirror.url.startswith("http:") or \
mirror.url.startswith("https:") or \
mirror.url.startswith("ftp:"):
repodata = urllib.parse.urljoin(mirror.url, "repodata")
try:
with urllib.request.urlopen(repodata):
# Ignore the result. We only want to know if the URL is live.
pass
break
except urllib.error.URLError:
pass
except RemoteDisconnected:
break
else:
if mirror.url.startswith("file://"):
mirror.url = mirror.url[7:]
self.log_error(mirror.url)
if os.path.exists(os.path.join(mirror.url, "repodata")):
break
else:
print("'{0}' does not have a valid url.".format(repo.name))
return 1
# Test if assigned
if not repo.opsysrelease_list:
print("'{0}' is not assigned with OpSys release.".format(repo.name))
return 1
if not repo.arch_list:
print("'{0}' is not assigned with architecture.".format(repo.name))
return 1
return 0
def run(self, cmdline, db) -> None:
repos = []
for reponame in cmdline.REPO:
repo = (db.session.query(Repo)
.filter(Repo.name == reponame)
.first())
if repo:
repos.append(repo)
else:
print("Repository '{0}' does not exists".format(reponame))
if not cmdline.REPO:
for repo in db.session.query(Repo):
repos.append(repo)
problems = 0
for repo in repos:
problems += self.check_repo(repo)
if problems:
print("There are some problems, please resolve them")
else:
print("Everything is OK!")
def tweak_cmdline_parser(self, parser) -> None:
parser.add_repo(multiple=True, helpstr="Name of the repo to be checked")
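# Illustrative invocations, assuming the standard faf command line entry
# point; the repository names are examples only:
#
#   faf check-repo
#   faf check-repo fedora-updates fedora-base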
| abrt/faf | src/pyfaf/actions/check_repo.py | Python | gpl-3.0 | 3,131 |
"""
Copyright 2014
This file is part of Phase.
Phase is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
Phase is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with Phase. If not, see <http://www.gnu.org/licenses/>.
"""
from libphase import utilities,http_client
from threading import Thread
import Queue
from gi.repository import Gtk
from gi.repository import GObject
import os
import glob
import sys
class DialogBruteForce(Gtk.Dialog):
def __init__(self, parent,url):
Gtk.Dialog.__init__(self, "Brute Force", parent, 0, ())
self.add_button("Cancel",Gtk.ResponseType.CANCEL)
self.generate_button=self.add_button("Start",Gtk.ResponseType.OK)
box = self.get_content_area()
self.word_list=Gtk.ComboBox()
self.wordlist_store=Gtk.ListStore(str)
wordlist_dir=os.path.abspath(os.path.dirname(sys.argv[0]))+os.sep+"resources"+os.sep+"dirbuster-lists"+os.sep
self.wordlist_files=[]
for filename in glob.glob(wordlist_dir+"*.txt"):
self.wordlist_store.append([filename.replace(wordlist_dir,"")])
self.wordlist_files.append(filename)
self.word_list.set_model(self.wordlist_store)
wordlist_cell=Gtk.CellRendererText()
self.word_list.pack_start(wordlist_cell,True)
self.word_list.add_attribute(wordlist_cell,'text',0)
self.word_list.set_active(0)
word_list_box=Gtk.Box()
word_list_box.add(Gtk.Label("Word List:"))
word_list_box.add(self.word_list)
box.add(word_list_box)
self.recursive=Gtk.CheckButton("Recursive")
box.add(self.recursive)
self.show_all()
class DialogBruteForceView(Gtk.Dialog):
def __init__(self):
Gtk.Dialog.__init__(self, "Brute Force Status", None, 0, ())
self.ok_button=self.add_button("OK",Gtk.ResponseType.OK)
self.ok_button.connect("clicked",self.handler_delete,None)
box = self.get_content_area()
self.set_size_request(600,500)
vbox=Gtk.Box(orientation=Gtk.Orientation.VERTICAL)
progress_label=Gtk.Label("Progress:")
progress_label.set_alignment(0,0)
vbox.add(progress_label)
self.progress_bar=Gtk.ProgressBar()
self.progress_bar.set_size_request(-1,30)
self.progress_bar.set_show_text(True)
vbox.add(self.progress_bar)
output_label=Gtk.Label("Output:")
output_label.set_alignment(0,0)
vbox.add(output_label)
scrolled_window=Gtk.ScrolledWindow(expand=True)
self.textview=Gtk.TextView()
self.textview.set_editable(False)
scrolled_window.add(self.textview)
vbox.add(scrolled_window)
box.add(vbox)
self.connect("delete-event",self.handler_delete)
def handler_delete(self,widget,event):
self.hide()
return True
def progress_function(self,current_total,total,log):
self.progress_bar.set_fraction(float(current_total)/float(total))
self.progress_bar.set_text(str(current_total)+"/"+str(total))
self.textview.get_buffer().set_text(log)
class BruteForce(Thread):
iter=None
def __init__(self,config,base_url,wordlist,recursive,add_function,progress_function,finished_function):
Thread.__init__(self)
self.config=config
self.recursive=recursive
self.wordlist_length=0
self.wordlist=[]
self.total=0
self.current_total=0
self.finished=False
self.base_queue=Queue.Queue(0)
self.base_queue.put(base_url)
self.add_function=add_function
self.progress_function=progress_function
self.finished_function=finished_function
self._log_dict={}
self.view=DialogBruteForceView()
f=open(wordlist,"r")
for word in f:
word=word.strip()
if len(word) > 0:
if word[0] != "#":
if word[-1:] != "/":
word+="/"
self.wordlist_length+=1
self.wordlist.append(word)
f.close()
@property
def log(self):
log_view=str()
for key in sorted(self._log_dict.keys()):
log_view+=key+"\n=====================\n"
for url in self._log_dict[key]:
log_view+=url.strip()+"\n"
log_view+="\n\n"
return log_view
def run(self):
self.total+=self.wordlist_length
work_queue=Queue.Queue(0)
while not self.finished:
try:
base_url=self.base_queue.get_nowait()
for word in self.wordlist:
work_queue.put(base_url+word)
self.worker=http_client.HTTPMultiClient(self.config,self.response_callback,self.finished_callback)
self.worker.run("HEAD",work_queue)
except Queue.Empty:
self.finished=True
GObject.idle_add(self.finished_function,self.iter)
GObject.idle_add(self.view.progress_function,self.current_total,self.total,self.log)
def response_callback(self,flow):
url=flow.request.get_url()
if flow.response.code != 404:
if str(flow.response.code) in self._log_dict.keys():
self._log_dict[str(flow.response.code)].append(url)
else:
self._log_dict[str(flow.response.code)]=[url]
if flow.response.code == 200:
if self.recursive:
self.total+=self.wordlist_length
self.base_queue.put(url)
GObject.idle_add(self.add_function,url)
self.current_total+=1
if divmod(self.current_total,100)[1] == 0:
GObject.idle_add(self.progress_function,self.iter,self.current_total,self.total)
GObject.idle_add(self.view.progress_function,self.current_total,self.total,self.log)
def finished_callback(self):
pass
def stop(self):
self.worker.stop()
self.finished=True
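# Minimal wiring sketch (not used by the module): how an embedding UI might
# construct and start the BruteForce thread defined above. The config object,
# wordlist path and GTK callbacks are placeholders.
def _example_start_brute_force(config, url, wordlist_path,
                               add_cb, progress_cb, finished_cb):
    worker = BruteForce(config, url, wordlist_path, False,
                        add_cb, progress_cb, finished_cb)
    worker.start()          # run() drains the work queue on a background thread
    worker.view.show_all()  # status dialog with progress bar and output log
    return worker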
| phase-dev/phase | libphase/brute_forcer.py | Python | gpl-3.0 | 5,575 |
# Copyright (c) 2016 EMC Corporation
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""Contains some commonly used utility methods."""
try:
import cookielib as cookie_lib
except ImportError:
import http.cookiejar as cookie_lib
import json
import re
import socket
import oslo_serialization
from oslo_utils import timeutils
from oslo_utils import units
import requests
from requests import exceptions
import six
from cinder import exception
from cinder.i18n import _
from cinder.volume.drivers.coprhd.helpers import urihelper
PROD_NAME = 'storageos'
TIMEOUT_SEC = 20 # 20 SECONDS
global AUTH_TOKEN
AUTH_TOKEN = None
TASK_TIMEOUT = 300
URI_TASKS_BY_OPID = '/vdc/tasks/{0}'
def _decode_list(data):
rv = []
for item in data:
        if isinstance(item, six.text_type):
item = item.encode('utf-8')
elif isinstance(item, list):
item = _decode_list(item)
elif isinstance(item, dict):
item = _decode_dict(item)
rv.append(item)
return rv
def _decode_dict(data):
rv = {}
for key, value in data.items():
        if isinstance(key, six.text_type):
key = key.encode('utf-8')
        if isinstance(value, six.text_type):
value = value.encode('utf-8')
elif isinstance(value, list):
value = _decode_list(value)
elif isinstance(value, dict):
value = _decode_dict(value)
rv[key] = value
return rv
def json_decode(rsp):
"""Used to decode the JSON encoded response."""
try:
o = json.loads(rsp, object_hook=_decode_dict)
except ValueError:
raise CoprHdError(CoprHdError.VALUE_ERR,
(_("Failed to recognize JSON payload:\n[%s]") % rsp))
return o
def service_json_request(ip_addr, port, http_method, uri, body,
contenttype='application/json', customheaders=None):
"""Used to make an HTTP request and get the response.
The message body is encoded in JSON format
:param ip_addr: IP address or host name of the server
:param port: port number of the server on which it
is listening to HTTP requests
:param http_method: one of GET, POST, PUT, DELETE
:param uri: the request URI
:param body: the request payload
:returns: a tuple of two elements: (response body, response headers)
:raises: CoprHdError in case of HTTP errors with err_code 3
"""
SEC_AUTHTOKEN_HEADER = 'X-SDS-AUTH-TOKEN'
headers = {'Content-Type': contenttype,
'ACCEPT': 'application/json, application/octet-stream',
'X-EMC-REST-CLIENT': 'TRUE'}
if customheaders:
headers.update(customheaders)
try:
protocol = "https://"
if port == 8080:
protocol = "http://"
url = protocol + ip_addr + ":" + six.text_type(port) + uri
cookiejar = cookie_lib.LWPCookieJar()
headers[SEC_AUTHTOKEN_HEADER] = AUTH_TOKEN
if http_method == 'GET':
response = requests.get(url, headers=headers, verify=False,
cookies=cookiejar)
elif http_method == 'POST':
response = requests.post(url, data=body, headers=headers,
verify=False, cookies=cookiejar)
elif http_method == 'PUT':
response = requests.put(url, data=body, headers=headers,
verify=False, cookies=cookiejar)
elif http_method == 'DELETE':
response = requests.delete(url, headers=headers, verify=False,
cookies=cookiejar)
else:
raise CoprHdError(CoprHdError.HTTP_ERR,
(_("Unknown/Unsupported HTTP method: %s") %
http_method))
if (response.status_code == requests.codes['ok'] or
response.status_code == 202):
return (response.text, response.headers)
error_msg = None
if response.status_code == 500:
response_text = json_decode(response.text)
error_details = ""
if 'details' in response_text:
error_details = response_text['details']
error_msg = (_("CoprHD internal server error. Error details: %s"),
error_details)
elif response.status_code == 401:
error_msg = _("Access forbidden: Authentication required")
elif response.status_code == 403:
error_msg = ""
error_details = ""
error_description = ""
response_text = json_decode(response.text)
if 'details' in response_text:
error_details = response_text['details']
error_msg = (_("%(error_msg)s Error details:"
" %(error_details)s"),
{'error_msg': error_msg,
'error_details': error_details
})
elif 'description' in response_text:
error_description = response_text['description']
error_msg = (_("%(error_msg)s Error description:"
" %(error_description)s"),
{'error_msg': error_msg,
'error_description': error_description
})
else:
error_msg = _("Access forbidden: You don't have"
" sufficient privileges to perform this"
" operation")
elif response.status_code == 404:
error_msg = "Requested resource not found"
elif response.status_code == 405:
error_msg = six.text_type(response.text)
elif response.status_code == 503:
error_msg = ""
error_details = ""
error_description = ""
response_text = json_decode(response.text)
if 'code' in response_text:
errorCode = response_text['code']
error_msg = "Error " + six.text_type(errorCode)
if 'details' in response_text:
error_details = response_text['details']
error_msg = error_msg + ": " + error_details
elif 'description' in response_text:
error_description = response_text['description']
error_msg = error_msg + ": " + error_description
else:
error_msg = _("Service temporarily unavailable:"
" The server is temporarily unable to"
" service your request")
else:
error_msg = response.text
            if isinstance(error_msg, six.text_type):
error_msg = error_msg.encode('utf-8')
raise CoprHdError(CoprHdError.HTTP_ERR,
(_("HTTP code: %(status_code)s"
", %(reason)s"
" [%(error_msg)s]") % {
'status_code': six.text_type(
response.status_code),
'reason': six.text_type(
response.reason),
'error_msg': six.text_type(
error_msg)
}))
except (CoprHdError, socket.error, exceptions.SSLError,
exceptions.ConnectionError, exceptions.TooManyRedirects,
exceptions.Timeout) as e:
raise CoprHdError(CoprHdError.HTTP_ERR, six.text_type(e))
# TODO(Ravi) : Either following exception should have proper message or
# IOError should just be combined with the above statement
except IOError as e:
raise CoprHdError(CoprHdError.HTTP_ERR, six.text_type(e))
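# Usage sketch for service_json_request(): fetch a resource and decode the
# JSON payload. The port and task id are placeholders; real callers build the
# URI with the uri helpers and authenticate first so AUTH_TOKEN is populated.
def _example_get_task(ip_addr, op_id):
    uri = URI_TASKS_BY_OPID.format(op_id)
    (body, _headers) = service_json_request(ip_addr, 4443, "GET", uri, None)
    return json_decode(body)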
def is_uri(name):
"""Checks whether the name is a URI or not.
:param name: Name of the resource
:returns: True if name is URI, False otherwise
"""
try:
(urn, prod, trailer) = name.split(':', 2)
return (urn == 'urn' and prod == PROD_NAME)
except Exception:
return False
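# A hedged usage sketch: CoprHD resource identifiers follow the
# 'urn:<PROD_NAME>:<type>:<uuid>:...' pattern, so assuming PROD_NAME is the
# product string defined at the top of this module, is_uri() behaves like:
#   is_uri('urn:storageos:Volume:1234abcd-56ef:vdc1')  # True (illustrative)
#   is_uri('my_volume_name')                           # False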
def format_json_object(obj):
"""Formats JSON object to make it readable by proper indentation.
:param obj: JSON object
:returns: a string of formatted JSON object
"""
return oslo_serialization.jsonutils.dumps(obj, sort_keys=True, indent=3)
def get_parent_child_from_xpath(name):
"""Returns the parent and child elements from XPath."""
if '/' in name:
(pname, label) = name.rsplit('/', 1)
else:
pname = None
label = name
return (pname, label)
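# Illustrative behaviour of the helper above:
#   get_parent_child_from_xpath('myproject/myvolume') -> ('myproject', 'myvolume')
#   get_parent_child_from_xpath('myvolume')           -> (None, 'myvolume')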
def to_bytes(in_str):
"""Converts a size to bytes.
:param in_str: a number suffixed with a unit: {number}{unit}
units supported:
K, KB, k or kb - kilobytes
M, MB, m or mb - megabytes
G, GB, g or gb - gigabytes
T, TB, t or tb - terabytes
:returns: number of bytes
None; if input is incorrect
"""
match = re.search('^([0-9]+)([a-zA-Z]{0,2})$', in_str)
if not match:
return None
unit = match.group(2).upper()
value = match.group(1)
size_count = int(value)
if unit in ['K', 'KB']:
multiplier = int(units.Ki)
elif unit in ['M', 'MB']:
multiplier = int(units.Mi)
elif unit in ['G', 'GB']:
multiplier = int(units.Gi)
elif unit in ['T', 'TB']:
multiplier = int(units.Ti)
elif unit == "":
return size_count
else:
return None
size_in_bytes = int(size_count * multiplier)
return size_in_bytes
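# Illustrative conversions (multipliers come from oslo.utils units, i.e.
# powers of 1024, so these values follow directly from the code above):
#   to_bytes('4GB')  -> 4 * 1024 ** 3 = 4294967296
#   to_bytes('512')  -> 512 (no unit: the value is already taken as bytes)
#   to_bytes('4PB')  -> None (petabytes are not a supported unit here)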
def get_list(json_object, parent_node_name, child_node_name=None):
"""Returns a list of values from child_node_name.
If child_node is not given, then it will retrieve list from parent node
"""
if not json_object:
return []
return_list = []
if isinstance(json_object[parent_node_name], list):
for detail in json_object[parent_node_name]:
if child_node_name:
return_list.append(detail[child_node_name])
else:
return_list.append(detail)
else:
if child_node_name:
return_list.append(json_object[parent_node_name][child_node_name])
else:
return_list.append(json_object[parent_node_name])
return return_list
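# Hedged example of how a decoded JSON payload is walked; the field names are
# illustrative only:
#   obj = {'resource': [{'id': 'urn:...:1'}, {'id': 'urn:...:2'}]}
#   get_list(obj, 'resource', 'id') -> ['urn:...:1', 'urn:...:2']
#   get_list(obj, 'resource')       -> the two resource dicts themselves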
def get_node_value(json_object, parent_node_name, child_node_name=None):
"""Returns value of given child_node.
If child_node is not given, then value of parent node is returned
returns None: If json_object or parent_node is not given,
If child_node is not found under parent_node
"""
if not json_object:
return None
if not parent_node_name:
return None
detail = json_object[parent_node_name]
if not child_node_name:
return detail
return_value = None
if child_node_name in detail:
return_value = detail[child_node_name]
else:
return_value = None
return return_value
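# Illustrative: get_node_value({'task': {'state': 'ready'}}, 'task', 'state')
# returns 'ready'; a child that is missing under the parent yields None.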
def format_err_msg_and_raise(operation_type, component,
error_message, error_code):
"""Method to format error message.
:param operation_type: create, update, add, etc
:param component: storagesystem, vpool, etc
:param error_code: Error code from the API call
:param error_message: Detailed error message
"""
formated_err_msg = (_("Error: Failed to %(operation_type)s"
" %(component)s") %
{'operation_type': operation_type,
'component': component
})
if error_message.startswith("\"\'") and error_message.endswith("\'\""):
# stripping the first 2 and last 2 characters, which are quotes.
error_message = error_message[2:len(error_message) - 2]
formated_err_msg = formated_err_msg + "\nReason:" + error_message
raise CoprHdError(error_code, formated_err_msg)
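# Illustrative call with made-up values: format_err_msg_and_raise('create',
# 'volume', '"\'quota exceeded\'"', CoprHdError.SOS_FAILURE_ERR) strips the
# wrapping quotes and raises CoprHdError whose message reads
# "Error: Failed to create volume" followed by "Reason:quota exceeded".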
def search_by_tag(resource_search_uri, ipaddr, port):
"""Fetches the list of resources with a given tag.
:param resource_search_uri: The tag based search uri
Example: '/block/volumes/search?tag=tagexample1'
:param ipaddr: IP address of CoprHD host
:param port: Port number
"""
# check if the URI passed has both project and name parameters
str_uri = six.text_type(resource_search_uri)
if 'search' in str_uri and '?tag=' in str_uri:
# Get the project URI
(s, h) = service_json_request(
ipaddr, port, "GET",
resource_search_uri, None)
o = json_decode(s)
if not o:
return None
resources = get_node_value(o, "resource")
resource_uris = []
for resource in resources:
resource_uris.append(resource["id"])
return resource_uris
else:
raise CoprHdError(CoprHdError.VALUE_ERR, (_("Search URI %s"
" is not in the expected"
" format, it should end"
" with ?tag={0}")
% str_uri))
# Blocks the operation until the task completes, errors out, or times out.
def block_until_complete(component_type,
resource_uri,
task_id,
ipaddr,
port,
synctimeout=0):
if not synctimeout:
synctimeout = TASK_TIMEOUT
t = timeutils.StopWatch(duration=synctimeout)
t.start()
while not t.expired():
if component_type == 'block':
out = show_task_opid(task_id, ipaddr, port)
else:
out = get_task_by_resourceuri_and_taskId(
component_type, resource_uri, task_id, ipaddr, port)
if out:
if out["state"] == "ready":
# stop the timer and return
t.stop()
break
# if the status of the task is 'error' then stop the timer
# and raise exception
if out["state"] == "error":
# stop the timer
t.stop()
error_message = "Please see logs for more details"
if ("service_error" in out and
"details" in out["service_error"]):
error_message = out["service_error"]["details"]
raise CoprHdError(CoprHdError.VALUE_ERR,
(_("Task: %(task_id)s"
" is failed with"
" error: %(error_message)s") %
{'task_id': task_id,
'error_message': error_message
}))
else:
raise CoprHdError(CoprHdError.TIME_OUT,
(_("Task did not complete in %d secs."
" Operation timed out. Task in CoprHD"
" will continue") % synctimeout))
return
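# A hedged sketch of how callers typically use the poller above; the component
# type and task fields are illustrative:
#   block_until_complete('volume', volume_uri, task['op_id'], ipaddr, port)
# polls until the task state is 'ready', raises CoprHdError on 'error', and
# raises a TIME_OUT error once synctimeout (TASK_TIMEOUT by default) expires.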
def show_task_opid(taskid, ipaddr, port):
(s, h) = service_json_request(
ipaddr, port,
"GET",
URI_TASKS_BY_OPID.format(taskid),
None)
    if not s:
return None
o = json_decode(s)
return o
def get_task_by_resourceuri_and_taskId(component_type, resource_uri,
task_id, ipaddr, port):
"""Returns the single task details."""
task_uri_constant = urihelper.singletonURIHelperInstance.getUri(
component_type, "task")
(s, h) = service_json_request(
ipaddr, port, "GET",
task_uri_constant.format(resource_uri, task_id), None)
if not s:
return None
o = json_decode(s)
return o
class CoprHdError(exception.VolumeBackendAPIException):
"""Custom exception class used to report logical errors.
Attributes:
err_code - String error code
msg - String text
"""
SOS_FAILURE_ERR = 1
CMD_LINE_ERR = 2
HTTP_ERR = 3
VALUE_ERR = 4
NOT_FOUND_ERR = 1
ENTRY_ALREADY_EXISTS_ERR = 5
MAX_COUNT_REACHED = 6
TIME_OUT = 7
def __init__(self, err_code, msg):
self.err_code = err_code
self.msg = msg
def __str__(self):
return repr(self.msg)
class CoprHDResource(object):
def __init__(self, ipaddr, port):
"""Constructor: takes IP address and port of the CoprHD instance.
        These are needed to make HTTP requests to the REST API.
"""
self.ipaddr = ipaddr
self.port = port
| Nexenta/cinder | cinder/volume/drivers/coprhd/helpers/commoncoprhdapi.py | Python | apache-2.0 | 17,348 |
#! /usr/bin/env python
# -*- coding: utf-8 -*-
# Copyright (C) 2011 Deepin, Inc.
# 2011 Hou Shaohui
#
# Author: Hou Shaohui <[email protected]>
# Maintainer: Hou ShaoHui <[email protected]>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
import gtk
from dtk.ui.menu import Menu
from player import Player
from widget.skin import app_theme
from helper import Dispatcher
from config import config
from nls import _
class BaseTrayIcon(object):
'''Trayicon base, needs to be derived from.'''
def __init__(self, instance):
self.update_icon()
self.connect_events()
self.instance = instance
self.menu = None
def update_icon(self):
self.set_from_pixbuf(app_theme.get_pixbuf("skin/logo.ico").get_pixbuf())
def get_volume_menu(self):
menu_items = [
(None, _("Increase volume"), Player.increase_volume),
(None, _("Decrease volume"), Player.decrease_volume),
(None, _("Mute"), Player.mute_volume),
]
return Menu(menu_items)
def update_menu(self):
menu_items = []
if Player.is_paused():
pixbuf_group = self.get_pixbuf_group("play")
status_label = _("Play")
else:
pixbuf_group = self.get_pixbuf_group("pause")
status_label = _("Pause")
menu_items.append((pixbuf_group, status_label, Player.playpause))
menu_items.append((self.get_pixbuf_group("previous"), _("Previous"), Player.previous))
menu_items.append((self.get_pixbuf_group("next"), _("Next"), Player.next))
menu_items.append(None)
menu_items.append((self.get_pixbuf_group("volume"), _("Volume"), self.get_volume_menu()))
menu_items.append(None)
if config.getboolean("lyrics", "locked"):
menu_items.append((self.get_pixbuf_group("unlock"), _("Unlock lyrics"), lambda : Dispatcher.unlock_lyrics()))
else:
menu_items.append((self.get_pixbuf_group("lock"), _("Lock lyrics"), lambda : Dispatcher.lock_lyrics()))
if config.getboolean("lyrics", "status"):
menu_items.append((None, _("Lyrics on"), lambda : Dispatcher.close_lyrics()))
else:
menu_items.append((None, _("Lyrics off"), lambda : Dispatcher.show_lyrics()))
menu_items.append(None)
menu_items.append((self.get_pixbuf_group("setting"), _("Preference"), lambda : Dispatcher.show_setting()))
menu_items.append((self.get_pixbuf_group("close"), _("Quit"), lambda : Dispatcher.quit()))
if self.menu is not None:
del self.menu
self.menu = Menu(menu_items, True)
def get_pixbuf_group(self, name):
return (app_theme.get_pixbuf("tray/%s_normal.png" % name), app_theme.get_pixbuf("tray/%s_hover.png" % name))
def connect_events(self):
self.connect("button-press-event", self.on_button_press_event)
def on_button_press_event(self, widget, event):
if event.button == 1:
if event.state == gtk.gdk.CONTROL_MASK:
Player.previous()
else:
self.instance.toggle_visible()
elif event.button == 2:
Player.playpause()
elif event.button == 3:
if event.state == gtk.gdk.CONTROL_MASK:
Player.next()
else:
menu = gtk.Menu()
(x, y, z) = self.get_menu_position(menu, self)
self.update_menu()
self.menu.show((int(x), int(y)), (0, -32))
def destroy(self):
self.instance.toggle_visible(True)
self.set_visible(False)
class TrayIcon(gtk.StatusIcon, BaseTrayIcon):
def __init__(self, instance):
gtk.StatusIcon.__init__(self)
BaseTrayIcon.__init__(self, instance)
def get_menu_position(self, menu, icon):
return gtk.status_icon_position_menu(menu, icon)
| hillwoodroc/deepin-music-player | src/widget/tray.py | Python | gpl-3.0 | 4,642 |
# -*- coding: utf-8 -*-
from __future__ import absolute_import
from __future__ import print_function
from zerver.lib.alert_words import (
add_user_alert_words,
alert_words_in_realm,
remove_user_alert_words,
user_alert_words,
)
from zerver.lib.test_helpers import (
get_user_profile_by_email,
most_recent_message,
most_recent_usermessage,
)
from zerver.lib.test_classes import (
ZulipTestCase,
)
from zerver.models import (
Recipient,
UserProfile,
)
from typing import Text
import ujson
class AlertWordTests(ZulipTestCase):
interesting_alert_word_list = ['alert', 'multi-word word', u'☃']
def test_internal_endpoint(self):
# type: () -> None
email = "[email protected]"
self.login(email)
params = {
'alert_words': ujson.dumps(['milk', 'cookies'])
}
result = self.client_post('/json/users/me/alert_words', params)
self.assert_json_success(result)
user = get_user_profile_by_email(email)
words = user_alert_words(user)
self.assertEqual(words, ['milk', 'cookies'])
def test_default_no_words(self):
# type: () -> None
"""
Users start out with no alert words.
"""
user = self.example_user('cordelia')
words = user_alert_words(user)
self.assertEqual(words, [])
def test_add_word(self):
# type: () -> None
"""
add_user_alert_words can add multiple alert words at once.
"""
user = self.example_user('cordelia')
# Add several words, including multi-word and non-ascii words.
add_user_alert_words(user, self.interesting_alert_word_list)
words = user_alert_words(user)
self.assertEqual(words, self.interesting_alert_word_list)
def test_remove_word(self):
# type: () -> None
"""
Removing alert words works via remove_user_alert_words, even
for multi-word and non-ascii words.
"""
user = self.example_user('cordelia')
add_user_alert_words(user, self.interesting_alert_word_list)
theoretical_remaining_alerts = self.interesting_alert_word_list[:]
for alert_word in self.interesting_alert_word_list:
remove_user_alert_words(user, alert_word)
theoretical_remaining_alerts.remove(alert_word)
actual_remaining_alerts = user_alert_words(user)
self.assertEqual(actual_remaining_alerts,
theoretical_remaining_alerts)
def test_realm_words(self):
# type: () -> None
"""
We can gather alert words for an entire realm via
alert_words_in_realm. Alerts added for one user do not impact other
users.
"""
user1 = self.example_user('cordelia')
add_user_alert_words(user1, self.interesting_alert_word_list)
user2 = self.example_user('othello')
add_user_alert_words(user2, ['another'])
realm_words = alert_words_in_realm(user2.realm)
self.assertEqual(len(realm_words), 2)
self.assertEqual(list(realm_words.keys()), [user1.id, user2.id])
self.assertEqual(realm_words[user1.id],
self.interesting_alert_word_list)
self.assertEqual(realm_words[user2.id], ['another'])
def test_json_list_default(self):
# type: () -> None
self.login("[email protected]")
result = self.client_get('/json/users/me/alert_words')
self.assert_json_success(result)
data = ujson.loads(result.content)
self.assertEqual(data['alert_words'], [])
def test_json_list_add(self):
# type: () -> None
self.login("[email protected]")
result = self.client_put('/json/users/me/alert_words', {'alert_words': ujson.dumps(['one ', '\n two', 'three'])})
self.assert_json_success(result)
result = self.client_get('/json/users/me/alert_words')
self.assert_json_success(result)
data = ujson.loads(result.content)
self.assertEqual(data['alert_words'], ['one', 'two', 'three'])
def test_json_list_remove(self):
# type: () -> None
self.login("[email protected]")
result = self.client_put('/json/users/me/alert_words', {'alert_words': ujson.dumps(['one', 'two', 'three'])})
self.assert_json_success(result)
result = self.client_delete('/json/users/me/alert_words', {'alert_words': ujson.dumps(['one'])})
self.assert_json_success(result)
result = self.client_get('/json/users/me/alert_words')
self.assert_json_success(result)
data = ujson.loads(result.content)
self.assertEqual(data['alert_words'], ['two', 'three'])
def test_json_list_set(self):
# type: () -> None
self.login("[email protected]")
result = self.client_put('/json/users/me/alert_words', {'alert_words': ujson.dumps(['one', 'two', 'three'])})
self.assert_json_success(result)
result = self.client_post('/json/users/me/alert_words', {'alert_words': ujson.dumps(['a', 'b', 'c'])})
self.assert_json_success(result)
result = self.client_get('/json/users/me/alert_words')
self.assert_json_success(result)
data = ujson.loads(result.content)
self.assertEqual(data['alert_words'], ['a', 'b', 'c'])
def message_does_alert(self, user_profile, message):
# type: (UserProfile, Text) -> bool
"""Send a bunch of messages as othello, so Hamlet is notified"""
self.send_message("[email protected]", "Denmark", Recipient.STREAM, message)
user_message = most_recent_usermessage(user_profile)
return 'has_alert_word' in user_message.flags_list()
def test_alert_flags(self):
# type: () -> None
self.login("[email protected]")
user_profile_hamlet = self.example_user('hamlet')
result = self.client_put('/json/users/me/alert_words', {'alert_words': ujson.dumps(['one', 'two', 'three'])})
self.assert_json_success(result)
result = self.client_get('/json/users/me/alert_words')
self.assert_json_success(result)
data = ujson.loads(result.content)
self.assertEqual(data['alert_words'], ['one', 'two', 'three'])
# Alerts in the middle of messages work.
self.assertTrue(self.message_does_alert(user_profile_hamlet, "Normal alert one time"))
# Alerts at the end of messages work.
self.assertTrue(self.message_does_alert(user_profile_hamlet, "Normal alert one"))
# Alerts at the beginning of messages work.
self.assertTrue(self.message_does_alert(user_profile_hamlet, "two normal alerts"))
# Alerts with surrounding punctuation work.
self.assertTrue(self.message_does_alert(user_profile_hamlet, "This one? should alert"))
self.assertTrue(self.message_does_alert(user_profile_hamlet, "Definitely time for three."))
# Multiple alerts in a message work.
self.assertTrue(self.message_does_alert(user_profile_hamlet, "One two three o'clock"))
# Alerts are case-insensitive.
self.assertTrue(self.message_does_alert(user_profile_hamlet, "One o'clock"))
self.assertTrue(self.message_does_alert(user_profile_hamlet, "Case of ONE, won't stop me"))
# We don't cause alerts for matches in URLs.
self.assertFalse(self.message_does_alert(user_profile_hamlet, "Don't alert on http://t.co/one/ urls"))
self.assertFalse(self.message_does_alert(user_profile_hamlet, "Don't alert on http://t.co/one urls"))
def test_update_alert_words(self):
# type: () -> None
user_profile = self.example_user('hamlet')
me_email = user_profile.email
self.login(me_email)
result = self.client_put('/json/users/me/alert_words', {'alert_words': ujson.dumps(['ALERT'])})
content = 'this is an ALERT for you'
self.send_message(me_email, "Denmark", Recipient.STREAM, content)
self.assert_json_success(result)
original_message = most_recent_message(user_profile)
user_message = most_recent_usermessage(user_profile)
self.assertIn('has_alert_word', user_message.flags_list())
result = self.client_patch("/json/messages/" + str(original_message.id), {
'message_id': original_message.id,
'content': 'new ALERT for you',
})
self.assert_json_success(result)
user_message = most_recent_usermessage(user_profile)
self.assertEqual(user_message.message.content, 'new ALERT for you')
self.assertIn('has_alert_word', user_message.flags_list())
result = self.client_patch("/json/messages/" + str(original_message.id), {
'message_id': original_message.id,
'content': 'sorry false alarm',
})
self.assert_json_success(result)
user_message = most_recent_usermessage(user_profile)
self.assertEqual(user_message.message.content, 'sorry false alarm')
self.assertNotIn('has_alert_word', user_message.flags_list())
| jphilipsen05/zulip | zerver/tests/test_alert_words.py | Python | apache-2.0 | 9,107 |
# ***************************************************************************
# * *
# * Copyright (c) 2015 - FreeCAD Developers *
# * Author (c) 2015 - Przemo Fiszt < [email protected]> *
# * *
# * This program is free software; you can redistribute it and/or modify *
# * it under the terms of the GNU Lesser General Public License (LGPL) *
# * as published by the Free Software Foundation; either version 2 of *
# * the License, or (at your option) any later version. *
# * for detail see the LICENCE text file. *
# * *
# * This program is distributed in the hope that it will be useful, *
# * but WITHOUT ANY WARRANTY; without even the implied warranty of *
# * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the *
# * GNU Library General Public License for more details. *
# * *
# * You should have received a copy of the GNU Library General Public *
# * License along with this program; if not, write to the Free Software *
# * Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 *
# * USA *
# * *
# ***************************************************************************
__title__ = "Fem Commands"
__author__ = "Przemo Firszt"
__url__ = "http://www.freecadweb.org"
import FreeCAD
if FreeCAD.GuiUp:
import FreeCADGui
import FemGui
from PySide import QtCore
class FemCommands(object):
def __init__(self):
self.resources = {'Pixmap': 'fem-frequency-analysis',
'MenuText': QtCore.QT_TRANSLATE_NOOP("Fem_Command", "Default Fem Command MenuText"),
'Accel': "",
'ToolTip': QtCore.QT_TRANSLATE_NOOP("Fem_Command", "Default Fem Command ToolTip")}
# FIXME add option description
self.is_active = None
def GetResources(self):
return self.resources
def IsActive(self):
if not self.is_active:
active = False
elif self.is_active == 'with_document':
active = FreeCADGui.ActiveDocument is not None
elif self.is_active == 'with_analysis':
active = FemGui.getActiveAnalysis() is not None and self.active_analysis_in_active_doc()
elif self.is_active == 'with_results':
active = FemGui.getActiveAnalysis() is not None and self.active_analysis_in_active_doc() and self.results_present()
elif self.is_active == 'with_part_feature':
active = FreeCADGui.ActiveDocument is not None and self.part_feature_selected()
elif self.is_active == 'with_material':
active = FemGui.getActiveAnalysis() is not None and self.active_analysis_in_active_doc() and self.material_selected()
elif self.is_active == 'with_solver':
active = FemGui.getActiveAnalysis() is not None and self.active_analysis_in_active_doc() and self.solver_selected()
elif self.is_active == 'with_analysis_without_solver':
active = FemGui.getActiveAnalysis() is not None and self.active_analysis_in_active_doc() and not self.analysis_has_solver()
return active
def results_present(self):
results = False
analysis_members = FemGui.getActiveAnalysis().Member
for o in analysis_members:
if o.isDerivedFrom('Fem::FemResultObject'):
results = True
return results
def part_feature_selected(self):
sel = FreeCADGui.Selection.getSelection()
if len(sel) == 1 and sel[0].isDerivedFrom("Part::Feature"):
return True
else:
return False
def material_selected(self):
sel = FreeCADGui.Selection.getSelection()
if len(sel) == 1 and sel[0].isDerivedFrom("App::MaterialObjectPython"):
return True
else:
return False
def active_analysis_in_active_doc(self):
return FemGui.getActiveAnalysis().Document is FreeCAD.ActiveDocument
def solver_selected(self):
sel = FreeCADGui.Selection.getSelection()
if len(sel) == 1 and sel[0].isDerivedFrom("Fem::FemSolverObjectPython"):
return True
else:
return False
def analysis_has_solver(self):
solver = False
analysis_members = FemGui.getActiveAnalysis().Member
for o in analysis_members:
if o.isDerivedFrom("Fem::FemSolverObjectPython"):
solver = True
if solver is True:
return True
else:
return False
def hide_parts_constraints_show_meshes(self):
if FreeCAD.GuiUp:
for acnstrmesh in FemGui.getActiveAnalysis().Member:
# if "Constraint" in acnstrmesh.TypeId:
# acnstrmesh.ViewObject.Visibility = False
fem_prefs = FreeCAD.ParamGet("User parameter:BaseApp/Preferences/Mod/Fem/General")
hide_constraints = fem_prefs.GetBool("HideConstraint", False)
if hide_constraints:
if "Constraint" in acnstrmesh.TypeId:
acnstrmesh.ViewObject.Visibility = False
if "Mesh" in acnstrmesh.TypeId:
aparttoshow = acnstrmesh.Name.replace("_Mesh", "")
for apart in FreeCAD.activeDocument().Objects:
if aparttoshow == apart.Name:
apart.ViewObject.Visibility = False
acnstrmesh.ViewObject.Visibility = True # OvG: Hide constraints and parts and show meshes
def hide_meshes_show_parts_constraints(self):
if FreeCAD.GuiUp:
for acnstrmesh in FemGui.getActiveAnalysis().Member:
if "Constraint" in acnstrmesh.TypeId:
acnstrmesh.ViewObject.Visibility = True
if "Mesh" in acnstrmesh.TypeId:
aparttoshow = acnstrmesh.Name.replace("_Mesh", "")
for apart in FreeCAD.activeDocument().Objects:
if aparttoshow == apart.Name:
apart.ViewObject.Visibility = True
acnstrmesh.ViewObject.Visibility = False # OvG: Hide meshes and show constraints and meshed part e.g. on purging results
| wood-galaxy/FreeCAD | src/Mod/Fem/FemCommands.py | Python | lgpl-2.1 | 7,121 |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
SleekXMPP: The Sleek XMPP Library
Copyright (C) 2010 Nathanael C. Fritz
This file is part of SleekXMPP.
See the file LICENSE for copying permission.
"""
import sys
import logging
import getpass
from optparse import OptionParser
import sleekxmpp
from sleekxmpp.componentxmpp import ComponentXMPP
# Python versions before 3.0 do not use UTF-8 encoding
# by default. To ensure that Unicode is handled properly
# throughout SleekXMPP, we will set the default encoding
# ourselves to UTF-8.
if sys.version_info < (3, 0):
reload(sys)
sys.setdefaultencoding('utf8')
else:
raw_input = input
class EchoComponent(ComponentXMPP):
"""
A simple SleekXMPP component that echoes messages.
"""
def __init__(self, jid, secret, server, port):
ComponentXMPP.__init__(self, jid, secret, server, port)
# You don't need a session_start handler, but that is
# where you would broadcast initial presence.
# The message event is triggered whenever a message
# stanza is received. Be aware that that includes
# MUC messages and error messages.
self.add_event_handler("message", self.message)
def message(self, msg):
"""
Process incoming message stanzas. Be aware that this also
includes MUC messages and error messages. It is usually
        a good idea to check the message's type before processing
or sending replies.
Since a component may send messages from any number of JIDs,
it is best to always include a from JID.
Arguments:
msg -- The received message stanza. See the documentation
for stanza objects and the Message stanza to see
how it may be used.
"""
# The reply method will use the messages 'to' JID as the
# outgoing reply's 'from' JID.
msg.reply("Thanks for sending\n%(body)s" % msg).send()
if __name__ == '__main__':
# Setup the command line arguments.
optp = OptionParser()
# Output verbosity options.
optp.add_option('-q', '--quiet', help='set logging to ERROR',
action='store_const', dest='loglevel',
const=logging.ERROR, default=logging.INFO)
optp.add_option('-d', '--debug', help='set logging to DEBUG',
action='store_const', dest='loglevel',
const=logging.DEBUG, default=logging.INFO)
optp.add_option('-v', '--verbose', help='set logging to COMM',
action='store_const', dest='loglevel',
const=5, default=logging.INFO)
# JID and password options.
optp.add_option("-j", "--jid", dest="jid",
help="JID to use")
optp.add_option("-p", "--password", dest="password",
help="password to use")
optp.add_option("-s", "--server", dest="server",
help="server to connect to")
optp.add_option("-P", "--port", dest="port",
help="port to connect to")
opts, args = optp.parse_args()
if opts.jid is None:
opts.jid = raw_input("Component JID: ")
if opts.password is None:
opts.password = getpass.getpass("Password: ")
if opts.server is None:
opts.server = raw_input("Server: ")
if opts.port is None:
opts.port = int(raw_input("Port: "))
# Setup logging.
logging.basicConfig(level=opts.loglevel,
format='%(levelname)-8s %(message)s')
# Setup the EchoComponent and register plugins. Note that while plugins
# may have interdependencies, the order in which you register them does
# not matter.
xmpp = EchoComponent(opts.jid, opts.password, opts.server, opts.port)
xmpp.registerPlugin('xep_0030') # Service Discovery
xmpp.registerPlugin('xep_0004') # Data Forms
xmpp.registerPlugin('xep_0060') # PubSub
xmpp.registerPlugin('xep_0199') # XMPP Ping
# Connect to the XMPP server and start processing XMPP stanzas.
if xmpp.connect():
xmpp.process(block=True)
print("Done")
else:
print("Unable to connect.")
| tiancj/emesene | emesene/e3/xmpp/SleekXMPP/examples/echo_component.py | Python | gpl-3.0 | 4,190 |
#!/usr/bin/env python
"""Compatibility check of synchronous interface automata"""
__author__ = "Simon Maurer"
__version__ = "0.9.0"
__maintainer__ = "Simon Maurer"
__email__ = "[email protected]"
__status__ = "Prototype"
import igraph, sia
import sys, argparse
import igraph.vendor.texttable
sys.settrace
parser = argparse.ArgumentParser(description='This script performs the folding operation on interface automata passed as graphml files')
parser.add_argument( '-p', '--plot', action='store_true', help='plot the graph of the folded system' )
parser.add_argument( '-f', metavar="FORMAT", dest='format', choices=['graphml', 'gml'], default='graphml', help='set the format of the input graph (default: graphml)' )
parser.add_argument( '-o', metavar="OUTFILE", dest='output', default='out', help='set the output path of the result (default: out.[FORMAT])' )
parser.add_argument( 'net', metavar="NET", nargs=1, help='the dependency graph of the PNSC' )
parser.add_argument( 'infiles', nargs='+', metavar="INFILE", help="the graph files to be folded" )
args = parser.parse_args()
def main():
"""main program entry point"""
g_arr = []
for gf in args.infiles:
g_arr.append( igraph.load( gf, format=args.format ) )
net = igraph.load( args.net[0], format=args.format )
pnsc = sia.Pnsc( net, g_arr )
pnsc.fold()
pnsc.print_error()
if args.plot: pnsc.sia.save()
if args.output == parser.get_default( 'output' ):
args.output = args.output + "." + args.format
pnsc.sia.g.save( args.output )
if __name__ == "__main__":
main()
| moiri/streamix-sia | check_sia.py | Python | gpl-2.0 | 1,575 |
#!/usr/bin/env python2
import sys
from pwn import log
from pycparser import parse_file
import pycparser
import json
import os.path
FAKE_LIBC = "c_code/utils/fake_libc_include"
class information:
def __init__(self):
self.isptr = False
self.name = ""
self.type = ""
self.isfunct = False
self.params = []
def get_type(x):
f = information()
while(not isinstance(x, pycparser.c_ast.IdentifierType)):
if(isinstance(x, pycparser.c_ast.PtrDecl)):
f.isptr = True
x = x.type
f.type = " ".join(x.names)
return f
def print_function(f):
tolog = []
for p in f.params:
con = " "
if(p.isptr):
con = "* "
if(p.name):
tolog.append(p.type + con + p.name)
params = ", ".join(tolog)
con = " "
if(f.isptr):
con = "* "
log.info(f.type + con + f.name + "(" + params + ")")
def export_function(f):
fdesc = {}
fdesc["name"] = f.name
fdesc["parameters"] = []
fdesc["monitor"] = True
for p in f.params:
entry = {}
entry["name"] = p.name
if(p.isptr):
entry["type"] = "addr"
if("char" in p.type):
entry["type"] = "string"
else:
entry["type"] = "num"
entry["monitor"] = False
if(p.name):
fdesc["parameters"].append(entry)
filename = "functions/" + f.name + ".json"
if(os.path.isfile(filename)):
log.warn("File " + filename + " already exists!")
else:
with open(filename, 'w') as outfile:
json.dump(fdesc, outfile,
sort_keys=True, indent=4, separators=(',', ': '))
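# Illustrative output for a hypothetical prototype `int send(char *buf, int len)`;
# keys appear in alphabetical order in the written file because of sort_keys=True:
#   {
#       "monitor": true,
#       "name": "send",
#       "parameters": [
#           {"monitor": false, "name": "buf", "type": "string"},
#           {"monitor": false, "name": "len", "type": "num"}
#       ]
#   }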
def get_functions(tree):
functions = []
for e in tree:
if isinstance(e.type, pycparser.c_ast.FuncDecl):
f = get_type(e)
f.isfunct = True
f.name = e.name
for ee in e.type.args.params:
p = get_type(ee)
p.name = ee.name
f.params.append(p)
functions.append(f)
return functions
def main(filename):
ast = parse_file(filename, use_cpp=True, cpp_args="-I" + FAKE_LIBC)
functions = get_functions(ast.ext)
for f in functions:
# print_function(f)
export_function(f)
if __name__ == '__main__':
if len(sys.argv) != 2:
try:
log.error("Usage: %s <path_to_file>" % __file__)
except:
sys.exit(-1)
main(sys.argv[1])
| sigttou/analyzecrypt.py | get_functions.py | Python | mit | 2,513 |
import swiftclient
def connect(creds):
"""
Construct a connection value from a container
"""
return swiftclient.Connection(
authurl=creds.authurl,
user=creds.user,
key=creds.password,
auth_version=creds.auth_version,
tenant_name=creds.tenant_name,
os_options={
"region_name": creds.region,
"endpoint_type": creds.endpoint_type
}
)
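# A hedged note on usage: `creds` is expected to expose authurl, user,
# password, auth_version, tenant_name, region and endpoint_type (the
# attributes read above). In practice these are typically assembled from
# SWIFT_* environment variables; the exact variable names are an assumption
# here, not something this module defines.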
| x86Labs/wal-e | wal_e/blobstore/swift/calling_format.py | Python | bsd-3-clause | 433 |
# Copyright 2018 Verily Life Sciences LLC.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Tests for query characterize_genotypes.sql.
See https://github.com/verilylifesciences/analysis-py-utils for more details
about the testing framework.
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from ddt import data
from ddt import ddt
from jinja2 import Template
import os
import unittest
from verily.bigquery_wrapper import bq_test_case
from test_constants import TestConstants
@ddt
class QueryTest(bq_test_case.BQTestCase):
@classmethod
def setUpClass(cls):
"""Set up class."""
super(QueryTest, cls).setUpClass(use_mocks=False)
cls.longMessage = True
cls.sql_to_test = open(
os.path.join(
os.path.dirname(os.path.realpath(__file__)),
"characterize_genotypes.sql"),
"r").read()
@classmethod
def create_mock_tables(cls):
"""Create mock tables."""
cls.basic_tables = {}
for tbl_name, tbl_input in (TestConstants
.GENOME_CALL_OR_MULTISAMPLE_TABLES.iteritems()):
cls.basic_tables[tbl_name] = cls.client.path(tbl_name)
cls.client.create_table_from_query(tbl_input, tbl_name)
# Table created specifically for this test.
cls.src_table_name = cls.client.path("genome_call_table")
cls.client.create_table_from_query("""
SELECT * FROM UNNEST([
STRUCT<reference_name STRING,
start_position INT64,
end_position INT64,
reference_bases STRING,
alternate_bases ARRAY<STRUCT<alt STRING>>,
call ARRAY<STRUCT<name STRING,
genotype ARRAY<INT64>>> >
-- SNP
('chr1', 123, 124, 'A', ARRAY[STRUCT('T')], ARRAY[STRUCT('sample1', ARRAY[0, 0]),
STRUCT('sample2', ARRAY[0, 1]),
STRUCT('sample3', ARRAY[1, 0]),
STRUCT('sample4', ARRAY[1, 1]),
STRUCT('sample5', ARRAY[-1, 1]),
STRUCT('sample6', ARRAY[1, -1]),
STRUCT('sample7', ARRAY[-1, -1])]),
-- Multiallelic SNP
('chr1', 456, 457, 'C', ARRAY[STRUCT('A'),
STRUCT('G'),
STRUCT('T')], ARRAY[STRUCT('sample1', ARRAY[2, 2]),
STRUCT('sample2', ARRAY[3, 3]),
STRUCT('sample3', ARRAY[1, 4]),
STRUCT('sample4', ARRAY[0, 0]),
STRUCT('sample5', ARRAY[1, 1]),
STRUCT('sample6', ARRAY[4, 4]),
STRUCT('sample7', ARRAY[-1, 3])])
])""", cls.src_table_name)
def test(self):
sql = Template(self.sql_to_test).render(
{"GENOME_CALL_OR_MULTISAMPLE_VARIANT_TABLE": self.src_table_name})
expected = [
# genotype, genotype_count
("-1,-1", 1),
("-1,1", 1),
("1,-1", 1),
("0,0", 2),
("0,1", 1),
("1,0", 1),
("1,1", 2),
("2,2", 1),
("3,3", 1),
("1,4", 1),
("-1,3", 1),
("4,4", 1)
]
self.expect_query_result(
query=sql, expected=expected, enforce_ordering=False)
@data(TestConstants.GENOME_CALL_TABLE)
def test_basic_input_genome_call_table(self, table):
sql = Template(self.sql_to_test).render(
{"GENOME_CALL_OR_MULTISAMPLE_VARIANT_TABLE": self.basic_tables[table]})
expected = [
# genotype, genotype_count
("-1,-1", 1),
("-1,1", 1),
("0,0", 3),
("0,1", 2),
("1,1", 2),
("1,2", 1)
]
self.expect_query_result(
query=sql, expected=expected, enforce_ordering=False)
@data(TestConstants.MULTISAMPLE_VARIANTS_TABLE)
def test_basic_input_multisample_variants_table(self, table):
sql = Template(self.sql_to_test).render(
{"GENOME_CALL_OR_MULTISAMPLE_VARIANT_TABLE": self.basic_tables[table]})
expected = [
# genotype, genotype_count
("-1,-1", 18),
("-1,1", 1),
("0,0", 1),
("0,1", 2),
("1,1", 2),
("1,2", 1)
]
self.expect_query_result(
query=sql, expected=expected, enforce_ordering=False)
if __name__ == "__main__":
unittest.main()
| verilylifesciences/variant-qc | sql/characterize_genotypes_test.py | Python | apache-2.0 | 5,161 |
# views.py
from flask import render_template
from app import app
@app.route('/')
def index():
return render_template("index.html")
@app.route('/about')
def about():
return render_template("about.html")
| rayidghani/CharBucks | app/views.py | Python | mit | 214 |
import _plotly_utils.basevalidators
class FontValidator(_plotly_utils.basevalidators.CompoundValidator):
def __init__(self, plotly_name="font", parent_name="box.hoverlabel", **kwargs):
super(FontValidator, self).__init__(
plotly_name=plotly_name,
parent_name=parent_name,
data_class_str=kwargs.pop("data_class_str", "Font"),
data_docs=kwargs.pop(
"data_docs",
"""
color
colorsrc
Sets the source reference on Chart Studio Cloud
for color .
family
HTML font family - the typeface that will be
applied by the web browser. The web browser
will only be able to apply a font if it is
available on the system which it operates.
Provide multiple font families, separated by
commas, to indicate the preference in which to
apply fonts if they aren't available on the
system. The Chart Studio Cloud (at
https://chart-studio.plotly.com or on-premise)
generates images on a server, where only a
select number of fonts are installed and
supported. These include "Arial", "Balto",
"Courier New", "Droid Sans",, "Droid Serif",
"Droid Sans Mono", "Gravitas One", "Old
Standard TT", "Open Sans", "Overpass", "PT Sans
Narrow", "Raleway", "Times New Roman".
familysrc
Sets the source reference on Chart Studio Cloud
for family .
size
sizesrc
Sets the source reference on Chart Studio Cloud
for size .
""",
),
**kwargs
)
| plotly/python-api | packages/python/plotly/plotly/validators/box/hoverlabel/_font.py | Python | mit | 1,855 |
# Purely included to allow for app install.
| shaunsephton/django-snippetscream | snippetscream/models.py | Python | bsd-3-clause | 44 |
# -*- coding: utf-8 -*-
# Copyright(C) 2014 Bezleputh
#
# This file is part of weboob.
#
# weboob is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# weboob is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with weboob. If not, see <http://www.gnu.org/licenses/>.
from weboob.browser import PagesBrowser, URL
from .pages import EventListPage, EventPage
from datetime import timedelta, date
class AgendadulibreBrowser(PagesBrowser):
event_list_page = URL('events\?start_date=(?P<date_from>.*)(?P<region>.*)', EventListPage)
event_page = URL('events/(?P<_id>.*)', EventPage)
def __init__(self, website, region, *args, **kwargs):
self.BASEURL = u'%s' % website
self.region = '®ion=%s' % region if region else ''
PagesBrowser.__init__(self, *args, **kwargs)
def list_events(self, date_from, date_to, city=None, categories=None, max_date=None):
_max_date = date_from + timedelta(days=365)
max_date = date(year=_max_date.year, month=_max_date.month, day=_max_date.day)
return self.event_list_page.go(date_from=date_from.strftime("%Y-%m-%d"),
region=self.region)\
.list_events(date_from=date_from,
date_to=date_to,
city=city,
categories=categories,
max_date=max_date)
def get_event(self, event_id, event=None):
_id = event_id.split('#')[-1]
return self.event_page.go(_id=_id).get_event(obj=event)
| Konubinix/weboob | modules/agendadulibre/browser.py | Python | agpl-3.0 | 2,003 |
# Licensed under a 3-clause BSD style license - see LICENSE.rst
import functools
import numpy as np
import pytest
from astropy import units as u
from astropy.utils import iers
from astropy.time import Time
from astropy.table import Table
try:
import h5py # pylint: disable=W0611 # noqa
except ImportError:
HAS_H5PY = False
else:
HAS_H5PY = True
try:
import yaml # pylint: disable=W0611 # noqa
HAS_YAML = True
except ImportError:
HAS_YAML = False
allclose_sec = functools.partial(np.allclose, rtol=2. ** -52,
atol=2. ** -52 * 24 * 3600) # 20 ps atol
is_masked = np.ma.is_masked
def test_simple():
t = Time([1, 2, 3], format='cxcsec')
assert t.masked is False
assert np.all(t.mask == [False, False, False])
# Before masking, format output is not a masked array (it is an ndarray
# like always)
assert not isinstance(t.value, np.ma.MaskedArray)
assert not isinstance(t.unix, np.ma.MaskedArray)
t[2] = np.ma.masked
assert t.masked is True
assert np.all(t.mask == [False, False, True])
assert allclose_sec(t.value[:2], [1, 2])
assert is_masked(t.value[2])
assert is_masked(t[2].value)
# After masking format output is a masked array
assert isinstance(t.value, np.ma.MaskedArray)
assert isinstance(t.unix, np.ma.MaskedArray)
# Todo : test all formats
def test_scalar_init():
t = Time('2000:001')
assert t.masked is False
assert t.mask == np.array(False)
def test_mask_not_writeable():
t = Time('2000:001')
with pytest.raises(AttributeError) as err:
t.mask = True
assert "can't set attribute" in str(err.value)
t = Time(['2000:001'])
with pytest.raises(ValueError) as err:
t.mask[0] = True
assert "assignment destination is read-only" in str(err.value)
def test_str():
t = Time(['2000:001', '2000:002'])
t[1] = np.ma.masked
assert str(t) == "['2000:001:00:00:00.000' --]"
assert repr(t) == "<Time object: scale='utc' format='yday' value=['2000:001:00:00:00.000' --]>"
expected = ["masked_array(data=['2000-01-01 00:00:00.000', --],",
' mask=[False, True],',
" fill_value='N/A',",
" dtype='<U23')"]
# Note that we need to take care to allow for big-endian platforms,
# for which the dtype will be >U23 instead of <U23, which we do with
# the call to replace().
assert repr(t.iso).replace('>U23', '<U23').splitlines() == expected
# Assign value to unmask
t[1] = '2000:111'
assert str(t) == "['2000:001:00:00:00.000' '2000:111:00:00:00.000']"
assert t.masked is False
def test_transform():
with iers.conf.set_temp('auto_download', False):
t = Time(['2000:001', '2000:002'])
t[1] = np.ma.masked
# Change scale (this tests the ERFA machinery with masking as well)
t_ut1 = t.ut1
assert is_masked(t_ut1.value[1])
assert not is_masked(t_ut1.value[0])
assert np.all(t_ut1.mask == [False, True])
# Change format
t_unix = t.unix
assert is_masked(t_unix[1])
assert not is_masked(t_unix[0])
assert np.all(t_unix.mask == [False, True])
def test_masked_input():
v0 = np.ma.MaskedArray([[1, 2], [3, 4]]) # No masked elements
v1 = np.ma.MaskedArray([[1, 2], [3, 4]],
mask=[[True, False], [False, False]])
v2 = np.ma.MaskedArray([[10, 20], [30, 40]],
mask=[[False, False], [False, True]])
# Init from various combinations of masked arrays
t = Time(v0, format='cxcsec')
assert np.ma.allclose(t.value, v0)
assert np.all(t.mask == [[False, False], [False, False]])
assert t.masked is False
t = Time(v1, format='cxcsec')
assert np.ma.allclose(t.value, v1)
assert np.all(t.mask == v1.mask)
assert np.all(t.value.mask == v1.mask)
assert t.masked is True
t = Time(v1, v2, format='cxcsec')
assert np.ma.allclose(t.value, v1 + v2)
assert np.all(t.mask == (v1 + v2).mask)
assert t.masked is True
t = Time(v0, v1, format='cxcsec')
assert np.ma.allclose(t.value, v0 + v1)
assert np.all(t.mask == (v0 + v1).mask)
assert t.masked is True
t = Time(0, v2, format='cxcsec')
assert np.ma.allclose(t.value, v2)
assert np.all(t.mask == v2.mask)
assert t.masked is True
# Init from a string masked array
t_iso = t.iso
t2 = Time(t_iso)
assert np.all(t2.value == t_iso)
assert np.all(t2.mask == v2.mask)
assert t2.masked is True
def test_all_masked_input():
"""Fix for #9612"""
# Test with jd=0 and jd=np.nan. Both triggered an exception prior to #9624
# due to astropy.utils.exceptions.ErfaError.
for val in (0, np.nan):
t = Time(np.ma.masked_array([val], mask=[True]), format='jd')
assert str(t.iso) == '[--]'
def test_serialize_fits_masked(tmpdir):
tm = Time([1, 2, 3], format='cxcsec')
tm[1] = np.ma.masked
fn = str(tmpdir.join('tempfile.fits'))
t = Table([tm])
t.write(fn)
t2 = Table.read(fn, astropy_native=True)
# Time FITS handling does not current round-trip format in FITS
t2['col0'].format = tm.format
assert t2['col0'].masked
assert np.all(t2['col0'].mask == [False, True, False])
assert np.all(t2['col0'].value == t['col0'].value)
@pytest.mark.skipif(not HAS_YAML or not HAS_H5PY,
reason='Need both h5py and yaml')
def test_serialize_hdf5_masked(tmpdir):
tm = Time([1, 2, 3], format='cxcsec')
tm[1] = np.ma.masked
fn = str(tmpdir.join('tempfile.hdf5'))
t = Table([tm])
t.write(fn, path='root', serialize_meta=True)
t2 = Table.read(fn)
assert t2['col0'].masked
assert np.all(t2['col0'].mask == [False, True, False])
assert np.all(t2['col0'].value == t['col0'].value)
# Ignore warning in MIPS https://github.com/astropy/astropy/issues/9750
@pytest.mark.skipif('not HAS_YAML')
@pytest.mark.filterwarnings('ignore:invalid value encountered')
@pytest.mark.parametrize('serialize_method', ['jd1_jd2', 'formatted_value'])
def test_serialize_ecsv_masked(serialize_method, tmpdir):
tm = Time([1, 2, 3], format='cxcsec')
tm[1] = np.ma.masked
tm.info.serialize_method['ecsv'] = serialize_method
fn = str(tmpdir.join('tempfile.ecsv'))
t = Table([tm])
t.write(fn)
t2 = Table.read(fn)
assert t2['col0'].masked
assert np.all(t2['col0'].mask == [False, True, False])
# Serializing formatted_value loses some precision.
atol = 0.1*u.us if serialize_method == 'formatted_value' else 1*u.ps
assert np.all(abs(t2['col0'] - t['col0']) <= atol)
| dhomeier/astropy | astropy/time/tests/test_mask.py | Python | bsd-3-clause | 6,709 |
# coding=utf-8
#
# <BestRussianTV plugin for XBMC>
# Copyright (C) <2011> <BestRussianTV>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
# encoding:utf-8
import httplib
import Config
from BeautifulSoup import BeautifulSoup
class Obj:
def __init__(self, id, descr, name, epcount, length, startTime, epnum, cont, imgcount):
self.id = id
self.descr = descr
self.name = name
self.epcount = epcount
self.length = length
self.startTime = startTime
self.epnum = epnum
self.cont = cont
self.imgcount = imgcount
class GObj:
def __init__(self, TPage, Page, Items):
self.tpage = int(TPage)
self.page = int(Page)
self.items = Items
def GetItem(sID, type, page=1, keyword="", date="0001-01-01", id="0"):
Items = []
soup = BeautifulSoup(Request(sID, type, page, keyword, date, id))
try:
tpage = soup("b:totalpages")[0].text
except:
tpage = "1"
try:
page = soup("b:pagenumber")[0].text
except:
page = "1"
d = soup("a:programguideitem")
for i in range(len(d)):
c = d[i]
sup = BeautifulSoup(c.prettify())
name = sup("a:name")[0].text.encode('utf-8')
descr = sup("a:description")[0].text.encode('utf-8')
        descr = descr.replace('\n', '')
epcount = sup("a:episodescount")[0].text
length = sup("a:length")[0].text
id = sup("a:id")[0].text
startTime = sup("a:starttime")[0].text
epnum = sup("a:episodenum")[0].text
cont = sup("a:iscontainer")[0].text
imgcount = sup("a:imagecount")[0].text
Items.append(Obj(id, descr, name, epcount, length, startTime, epnum, cont, imgcount))
return GObj(tpage, page, Items)
def Request(sID, type, page, keyword, date, id ):
ctype = ['ArcPlus','VOD','DVR']
t = int(type)
#keyword = "Маша"
#keyword = keyword.decode('utf-8')
#keyword = keyword.encode('cp1251')
#req = '<?xml version="1.0" encoding="utf-8"?>' \
#'<s:Envelope '
k = [keyword,""]
req = \
'<?xml version="1.0" encoding="utf-8"?>' \
'<s:Envelope xmlns:s="http://schemas.xmlsoap.org/soap/envelope/"><s:Body><GetClientProgramGuide xmlns="http://ivsmedia.iptv-distribution.net"><sessionID>' + sID + '</sessionID><type>'+ctype[t] +'</type>' \
'<request xmlns:d4p1="http://schemas.datacontract.org/2004/07/IVS.Content.Data" xmlns:i="http://www.w3.org/2001/XMLSchema-instance">' \
'<d4p1:filter><d4p1:availableOnly>true</d4p1:availableOnly><d4p1:contentGenre i:nil="true" />' \
'<d4p1:contentType>'+ctype[t]+'</d4p1:contentType><d4p1:date>' + date + 'T23:59:59</d4p1:date><d4p1:dateTill>' + date +'T00:00:00</d4p1:dateTill><d4p1:favoritesOnly>false</d4p1:favoritesOnly><d4p1:keyWord>' + keyword.decode('utf-8') + '</d4p1:keyWord>' \
'<d4p1:orderBy i:nil="true" /><d4p1:showItems>true</d4p1:showItems><d4p1:studioID>' + id + '</d4p1:studioID><d4p1:visibleOnly>false</d4p1:visibleOnly></d4p1:filter>' \
'<d4p1:paging xmlns:d5p1="http://schemas.datacontract.org/2004/07/IVS.Common"><d5p1:itemsOnPage>40</d5p1:itemsOnPage><d5p1:pageNumber>'+str(page)+'</d5p1:pageNumber><d5p1:totalItems>0</d5p1:totalItems><d5p1:totalPages>0</d5p1:totalPages>' \
'</d4p1:paging><d4p1:sort i:nil="true" />' \
'<d4p1:type>'+ ctype[t] +'</d4p1:type><d4p1:requestType>SearchByName</d4p1:requestType><d4p1:channelID>0</d4p1:channelID><d4p1:fromTime>0001-01-01T00:00:00</d4p1:fromTime><d4p1:itemCountAfter>0</d4p1:itemCountAfter>' \
'<d4p1:itemCountBefore>0</d4p1:itemCountBefore><d4p1:streamZone i:nil="true" /><d4p1:streamZoneUTCOffset>0</d4p1:streamZoneUTCOffset><d4p1:tillTime>0001-01-01T00:00:00</d4p1:tillTime><d4p1:watchingZone i:nil="true" />' \
'<d4p1:watchingZoneUTCOffset>0</d4p1:watchingZoneUTCOffset></request></GetClientProgramGuide></s:Body></s:Envelope>'
#req = unicode(req, 'utf8')
#req = req % k[0]
req = req.encode('utf-8')
#req = req.replace('{sessionID}', sID).replace('{ID}', id).replace('{CTYPE}', ctype[t]).replace('{PageNum}',str(page)).replace('{DATE}',date)
conn = httplib.HTTPConnection('ivsmedia.iptv-distribution.net')
conn.request('POST', Config.ContentService, req, {
'Host': 'ivsmedia.iptv-distribution.net',
'SOAPAction': 'http://ivsmedia.iptv-distribution.net/ContentService/GetClientProgramGuide',
'Content-Type': 'text/xml; charset=utf-8'
})
response = conn.getresponse()
return response.read() | tatigo/XBMC-BestRussianTVPlugin | ArcSearch.py | Python | gpl-3.0 | 5,298 |
# Copyright 2018 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Tests boosted_trees estimators and model_fn."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import numpy as np
from tensorflow.core.kernels.boosted_trees import boosted_trees_pb2
from tensorflow.python.data.ops import dataset_ops
from tensorflow.python.estimator import model_fn
from tensorflow.python.estimator import run_config
from tensorflow.python.estimator.canned import boosted_trees
from tensorflow.python.estimator.inputs import numpy_io
from tensorflow.python.feature_column import feature_column
from tensorflow.python.framework import constant_op
from tensorflow.python.framework import dtypes
from tensorflow.python.framework import ops
from tensorflow.python.framework import test_util
from tensorflow.python.ops import gen_boosted_trees_ops
from tensorflow.python.ops import resources
from tensorflow.python.ops import variables
from tensorflow.python.platform import googletest
from tensorflow.python.training import checkpoint_utils
from tensorflow.python.training import session_run_hook
NUM_FEATURES = 3
BUCKET_BOUNDARIES = [-2., .5, 12.] # Boundaries for all the features.
INPUT_FEATURES = np.array(
[
[12.5, 1.0, -2.001, -2.0001, -1.999], # feature_0 quantized:[3,2,0,0,1]
[2.0, -3.0, 0.5, 0.0, 0.4995], # feature_1 quantized:[2,0,2,1,1]
[3.0, 20.0, 50.0, -100.0, 102.75], # feature_2 quantized:[2,3,3,0,3]
],
dtype=np.float32)
CLASSIFICATION_LABELS = [[0.], [1.], [1.], [0.], [0.]]
REGRESSION_LABELS = [[1.5], [0.3], [0.2], [2.], [5.]]
FEATURES_DICT = {'f_%d' % i: INPUT_FEATURES[i] for i in range(NUM_FEATURES)}
# EXAMPLE_ID is not exposed to Estimator yet, but supported at model_fn level.
EXAMPLE_IDS = np.array([0, 1, 2, 3, 4], dtype=np.int64)
EXAMPLE_ID_COLUMN = '__example_id__'
def _make_train_input_fn(is_classification):
"""Makes train input_fn for classification/regression."""
def _input_fn():
features_dict = dict(FEATURES_DICT) # copies the dict to add an entry.
features_dict[EXAMPLE_ID_COLUMN] = constant_op.constant(EXAMPLE_IDS)
labels = CLASSIFICATION_LABELS if is_classification else REGRESSION_LABELS
return features_dict, labels
return _input_fn
def _make_train_input_fn_dataset(is_classification, batch=None, repeat=None):
"""Makes input_fn using Dataset."""
def _input_fn():
features_dict = dict(FEATURES_DICT) # copies the dict to add an entry.
features_dict[EXAMPLE_ID_COLUMN] = constant_op.constant(EXAMPLE_IDS)
labels = CLASSIFICATION_LABELS if is_classification else REGRESSION_LABELS
if batch:
ds = dataset_ops.Dataset.zip(
(dataset_ops.Dataset.from_tensor_slices(features_dict),
dataset_ops.Dataset.from_tensor_slices(labels))).batch(batch)
else:
ds = dataset_ops.Dataset.zip(
(dataset_ops.Dataset.from_tensors(features_dict),
dataset_ops.Dataset.from_tensors(labels)))
# repeat indefinitely by default, or stop at the given step.
ds = ds.repeat(repeat)
return ds
return _input_fn
class BoostedTreesEstimatorTest(test_util.TensorFlowTestCase):
def setUp(self):
self._feature_columns = {
feature_column.bucketized_column(
feature_column.numeric_column('f_%d' % i, dtype=dtypes.float32),
BUCKET_BOUNDARIES)
for i in range(NUM_FEATURES)
}
def _assert_checkpoint(self, model_dir, global_step, finalized_trees,
attempted_layers):
self._assert_checkpoint_and_return_model(model_dir, global_step,
finalized_trees, attempted_layers)
def _assert_checkpoint_and_return_model(self, model_dir, global_step,
finalized_trees, attempted_layers):
reader = checkpoint_utils.load_checkpoint(model_dir)
self.assertEqual(global_step, reader.get_tensor(ops.GraphKeys.GLOBAL_STEP))
serialized = reader.get_tensor('boosted_trees:0_serialized')
ensemble_proto = boosted_trees_pb2.TreeEnsemble()
ensemble_proto.ParseFromString(serialized)
self.assertEqual(
finalized_trees,
sum([1 for t in ensemble_proto.tree_metadata if t.is_finalized]))
self.assertEqual(attempted_layers,
ensemble_proto.growing_metadata.num_layers_attempted)
return ensemble_proto
def testFirstCheckpointWorksFine(self):
"""Tests that eval/pred doesn't crash with the very first checkpoint.
The step-0 checkpoint will have only an empty ensemble, and a separate eval
job might read from it and crash.
This test ensures that prediction/evaluation works fine with it.
"""
input_fn = _make_train_input_fn(is_classification=True)
predict_input_fn = numpy_io.numpy_input_fn(
x=FEATURES_DICT, y=None, batch_size=1, num_epochs=1, shuffle=False)
est = boosted_trees.BoostedTreesClassifier(
feature_columns=self._feature_columns,
n_batches_per_layer=1,
n_trees=1,
max_depth=5)
class BailOutWithoutTraining(session_run_hook.SessionRunHook):
def before_run(self, run_context):
raise StopIteration('to bail out.')
est.train(input_fn, steps=100, # must stop at 0 anyway.
hooks=[BailOutWithoutTraining()])
self._assert_checkpoint(
est.model_dir, global_step=0, finalized_trees=0, attempted_layers=0)
# Empty ensemble returns 0 logits, so that all output labels are 0.
eval_res = est.evaluate(input_fn=input_fn, steps=1)
self.assertAllClose(eval_res['accuracy'], 0.6)
predictions = list(est.predict(input_fn=predict_input_fn))
self.assertAllClose([[0], [0], [0], [0], [0]],
[pred['class_ids'] for pred in predictions])
def testTrainAndEvaluateBinaryClassifier(self):
input_fn = _make_train_input_fn(is_classification=True)
est = boosted_trees.BoostedTreesClassifier(
feature_columns=self._feature_columns,
n_batches_per_layer=1,
n_trees=1,
max_depth=5)
# It will stop after 5 steps because of the max depth and num trees.
num_steps = 100
# Train for a few steps, and validate final checkpoint.
est.train(input_fn, steps=num_steps)
self._assert_checkpoint(
est.model_dir, global_step=5, finalized_trees=1, attempted_layers=5)
eval_res = est.evaluate(input_fn=input_fn, steps=1)
self.assertAllClose(eval_res['accuracy'], 1.0)
def testInferBinaryClassifier(self):
train_input_fn = _make_train_input_fn(is_classification=True)
predict_input_fn = numpy_io.numpy_input_fn(
x=FEATURES_DICT, y=None, batch_size=1, num_epochs=1, shuffle=False)
est = boosted_trees.BoostedTreesClassifier(
feature_columns=self._feature_columns,
n_batches_per_layer=1,
n_trees=1,
max_depth=5)
# It will stop after 5 steps because of the max depth and num trees.
num_steps = 100
# Train for a few steps, and validate final checkpoint.
est.train(train_input_fn, steps=num_steps)
self._assert_checkpoint(
est.model_dir, global_step=5, finalized_trees=1, attempted_layers=5)
predictions = list(est.predict(input_fn=predict_input_fn))
self.assertAllClose([[0], [1], [1], [0], [0]],
[pred['class_ids'] for pred in predictions])
def testTrainClassifierWithRankOneLabel(self):
"""Tests that label with rank-1 tensor is also accepted by classifier."""
def _input_fn_with_rank_one_label():
return FEATURES_DICT, [0., 1., 1., 0., 0.]
est = boosted_trees.BoostedTreesClassifier(
feature_columns=self._feature_columns,
n_batches_per_layer=1,
n_trees=1,
max_depth=5)
# It will stop after 5 steps because of the max depth and num trees.
num_steps = 100
# Train for a few steps, and validate final checkpoint.
est.train(_input_fn_with_rank_one_label, steps=num_steps)
self._assert_checkpoint(
est.model_dir, global_step=5, finalized_trees=1, attempted_layers=5)
eval_res = est.evaluate(input_fn=_input_fn_with_rank_one_label, steps=1)
self.assertAllClose(eval_res['accuracy'], 1.0)
def testTrainClassifierWithLabelVocabulary(self):
apple, banana = 'apple', 'banana'
def _input_fn_with_label_vocab():
return FEATURES_DICT, [[apple], [banana], [banana], [apple], [apple]]
predict_input_fn = numpy_io.numpy_input_fn(
x=FEATURES_DICT, y=None, batch_size=1, num_epochs=1, shuffle=False)
est = boosted_trees.BoostedTreesClassifier(
feature_columns=self._feature_columns,
n_batches_per_layer=1,
n_trees=1,
max_depth=5,
label_vocabulary=[apple, banana])
est.train(input_fn=_input_fn_with_label_vocab, steps=5)
self._assert_checkpoint(
est.model_dir, global_step=5, finalized_trees=1, attempted_layers=5)
eval_res = est.evaluate(input_fn=_input_fn_with_label_vocab, steps=1)
self.assertAllClose(eval_res['accuracy'], 1.0)
predictions = list(est.predict(input_fn=predict_input_fn))
self.assertAllClose([[0], [1], [1], [0], [0]],
[pred['class_ids'] for pred in predictions])
def testTrainClassifierWithIntegerLabel(self):
def _input_fn_with_integer_label():
return (FEATURES_DICT,
constant_op.constant([[0], [1], [1], [0], [0]], dtypes.int32))
predict_input_fn = numpy_io.numpy_input_fn(
x=FEATURES_DICT, y=None, batch_size=1, num_epochs=1, shuffle=False)
est = boosted_trees.BoostedTreesClassifier(
feature_columns=self._feature_columns,
n_batches_per_layer=1,
n_trees=1,
max_depth=5)
est.train(input_fn=_input_fn_with_integer_label, steps=5)
self._assert_checkpoint(
est.model_dir, global_step=5, finalized_trees=1, attempted_layers=5)
eval_res = est.evaluate(input_fn=_input_fn_with_integer_label, steps=1)
self.assertAllClose(eval_res['accuracy'], 1.0)
predictions = list(est.predict(input_fn=predict_input_fn))
self.assertAllClose([[0], [1], [1], [0], [0]],
[pred['class_ids'] for pred in predictions])
def testTrainClassifierWithDataset(self):
train_input_fn = _make_train_input_fn_dataset(is_classification=True)
predict_input_fn = numpy_io.numpy_input_fn(
x=FEATURES_DICT, y=None, batch_size=1, num_epochs=1, shuffle=False)
est = boosted_trees.BoostedTreesClassifier(
feature_columns=self._feature_columns,
n_batches_per_layer=1,
n_trees=1,
max_depth=5)
est.train(train_input_fn, steps=100) # will stop after 5 steps anyway.
self._assert_checkpoint(
est.model_dir, global_step=5, finalized_trees=1, attempted_layers=5)
eval_res = est.evaluate(input_fn=train_input_fn, steps=1)
self.assertAllClose(eval_res['accuracy'], 1.0)
predictions = list(est.predict(input_fn=predict_input_fn))
self.assertAllClose([[0], [1], [1], [0], [0]],
[pred['class_ids'] for pred in predictions])
def testTrainAndEvaluateRegressor(self):
input_fn = _make_train_input_fn(is_classification=False)
est = boosted_trees.BoostedTreesRegressor(
feature_columns=self._feature_columns,
n_batches_per_layer=1,
n_trees=2,
max_depth=5)
# It will stop after 10 steps because of the max depth and num trees.
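    # (2 trees * 5 layers each = 10 layer-growing steps.)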
num_steps = 100
# Train for a few steps, and validate final checkpoint.
est.train(input_fn, steps=num_steps)
self._assert_checkpoint(
est.model_dir, global_step=10, finalized_trees=2, attempted_layers=10)
eval_res = est.evaluate(input_fn=input_fn, steps=1)
self.assertAllClose(eval_res['average_loss'], 1.008551)
def testInferRegressor(self):
train_input_fn = _make_train_input_fn(is_classification=False)
predict_input_fn = numpy_io.numpy_input_fn(
x=FEATURES_DICT, y=None, batch_size=1, num_epochs=1, shuffle=False)
est = boosted_trees.BoostedTreesRegressor(
feature_columns=self._feature_columns,
n_batches_per_layer=1,
n_trees=1,
max_depth=5)
# It will stop after 5 steps because of the max depth and num trees.
num_steps = 100
# Train for a few steps, and validate final checkpoint.
est.train(train_input_fn, steps=num_steps)
self._assert_checkpoint(
est.model_dir, global_step=5, finalized_trees=1, attempted_layers=5)
predictions = list(est.predict(input_fn=predict_input_fn))
self.assertAllClose(
[[0.571619], [0.262821], [0.124549], [0.956801], [1.769801]],
[pred['predictions'] for pred in predictions])
def testTrainRegressorWithRankOneLabel(self):
"""Tests that label with rank-1 tensor is also accepted by regressor."""
def _input_fn_with_rank_one_label():
return FEATURES_DICT, [1.5, 0.3, 0.2, 2., 5.]
est = boosted_trees.BoostedTreesRegressor(
feature_columns=self._feature_columns,
n_batches_per_layer=1,
n_trees=1,
max_depth=5)
# It will stop after 5 steps because of the max depth and num trees.
num_steps = 100
# Train for a few steps, and validate final checkpoint.
est.train(_input_fn_with_rank_one_label, steps=num_steps)
self._assert_checkpoint(
est.model_dir, global_step=5, finalized_trees=1, attempted_layers=5)
eval_res = est.evaluate(input_fn=_input_fn_with_rank_one_label, steps=1)
self.assertAllClose(eval_res['average_loss'], 2.478283)
def testTrainRegressorWithDataset(self):
train_input_fn = _make_train_input_fn_dataset(is_classification=False)
predict_input_fn = numpy_io.numpy_input_fn(
x=FEATURES_DICT, y=None, batch_size=1, num_epochs=1, shuffle=False)
est = boosted_trees.BoostedTreesRegressor(
feature_columns=self._feature_columns,
n_batches_per_layer=1,
n_trees=1,
max_depth=5)
est.train(train_input_fn, steps=100) # will stop after 5 steps anyway.
self._assert_checkpoint(
est.model_dir, global_step=5, finalized_trees=1, attempted_layers=5)
eval_res = est.evaluate(input_fn=train_input_fn, steps=1)
self.assertAllClose(eval_res['average_loss'], 2.478283)
predictions = list(est.predict(input_fn=predict_input_fn))
self.assertAllClose(
[[0.571619], [0.262821], [0.124549], [0.956801], [1.769801]],
[pred['predictions'] for pred in predictions])
def testTrainRegressorWithDatasetBatch(self):
    # Using a batch_size equal to the entire data size should yield the same
    # result as a dataset without batching.
train_input_fn = _make_train_input_fn_dataset(
is_classification=False, batch=5)
predict_input_fn = numpy_io.numpy_input_fn(
x=FEATURES_DICT, y=None, batch_size=1, num_epochs=1, shuffle=False)
est = boosted_trees.BoostedTreesRegressor(
feature_columns=self._feature_columns,
n_batches_per_layer=1,
n_trees=1,
max_depth=5)
est.train(train_input_fn, steps=100) # will stop after 5 steps anyway.
self._assert_checkpoint(
est.model_dir, global_step=5, finalized_trees=1, attempted_layers=5)
eval_res = est.evaluate(input_fn=train_input_fn, steps=1)
self.assertAllClose(eval_res['average_loss'], 2.478283)
predictions = list(est.predict(input_fn=predict_input_fn))
self.assertAllClose(
[[0.571619], [0.262821], [0.124549], [0.956801], [1.769801]],
[pred['predictions'] for pred in predictions])
def testTrainRegressorWithDatasetLargerBatch(self):
    # A batch_size that is a multiple of the entire data size should still
    # yield the same result.
train_input_fn = _make_train_input_fn_dataset(
is_classification=False, batch=15)
predict_input_fn = numpy_io.numpy_input_fn(
x=FEATURES_DICT, y=None, batch_size=1, num_epochs=1, shuffle=False)
est = boosted_trees.BoostedTreesRegressor(
feature_columns=self._feature_columns,
n_batches_per_layer=1,
n_trees=1,
max_depth=5)
est.train(train_input_fn, steps=100) # will stop after 5 steps anyway.
self._assert_checkpoint(
est.model_dir, global_step=5, finalized_trees=1, attempted_layers=5)
eval_res = est.evaluate(input_fn=train_input_fn, steps=1)
self.assertAllClose(eval_res['average_loss'], 2.478283)
predictions = list(est.predict(input_fn=predict_input_fn))
self.assertAllClose(
[[0.571619], [0.262821], [0.124549], [0.956801], [1.769801]],
[pred['predictions'] for pred in predictions])
def testTrainRegressorWithDatasetSmallerBatch(self):
    # Even when using small batches, if (n_batches_per_layer * batch_size)
    # covers the entire data size, the result should be the same.
train_input_fn = _make_train_input_fn_dataset(
is_classification=False, batch=1)
predict_input_fn = numpy_io.numpy_input_fn(
x=FEATURES_DICT, y=None, batch_size=1, num_epochs=1, shuffle=False)
est = boosted_trees.BoostedTreesRegressor(
feature_columns=self._feature_columns,
n_batches_per_layer=5,
n_trees=1,
max_depth=5)
# Train stops after (n_batches_per_layer * n_trees * max_depth) steps.
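    # Here that is 5 * 1 * 5 = 25 steps, matching the global_step assertion
    # below.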
est.train(train_input_fn, steps=100)
self._assert_checkpoint(
est.model_dir, global_step=25, finalized_trees=1, attempted_layers=5)
# 5 batches = one epoch.
eval_res = est.evaluate(input_fn=train_input_fn, steps=5)
self.assertAllClose(eval_res['average_loss'], 2.478283)
predictions = list(est.predict(input_fn=predict_input_fn))
self.assertAllClose(
[[0.571619], [0.262821], [0.124549], [0.956801], [1.769801]],
[pred['predictions'] for pred in predictions])
def testTrainRegressorWithDatasetWhenInputIsOverEarlier(self):
train_input_fn = _make_train_input_fn_dataset(
is_classification=False, repeat=3) # to stop input after 3 steps.
predict_input_fn = numpy_io.numpy_input_fn(
x=FEATURES_DICT, y=None, batch_size=1, num_epochs=1, shuffle=False)
est = boosted_trees.BoostedTreesRegressor(
feature_columns=self._feature_columns,
n_batches_per_layer=1,
n_trees=1,
max_depth=5)
    # Note that training stops when the input is exhausted.
    # This might not be a typical pattern, but dataset.repeat(3) causes
    # the input stream to end after 3 steps.
est.train(train_input_fn, steps=100)
self._assert_checkpoint(
est.model_dir, global_step=3, finalized_trees=0, attempted_layers=3)
eval_res = est.evaluate(input_fn=train_input_fn, steps=1)
self.assertAllClose(eval_res['average_loss'], 3.777295)
predictions = list(est.predict(input_fn=predict_input_fn))
self.assertAllClose(
[[0.353850], [0.254100], [0.106850], [0.712100], [1.012100]],
[pred['predictions'] for pred in predictions])
def testTrainEvaluateAndPredictWithIndicatorColumn(self):
categorical = feature_column.categorical_column_with_vocabulary_list(
key='categorical', vocabulary_list=('bad', 'good', 'ok'))
feature_indicator = feature_column.indicator_column(categorical)
bucketized_col = feature_column.bucketized_column(
feature_column.numeric_column(
'an_uninformative_feature', dtype=dtypes.float32),
BUCKET_BOUNDARIES)
labels = np.array([[0.], [5.7], [5.7], [0.], [0.]], dtype=np.float32)
# Our categorical feature defines the labels perfectly
input_fn = numpy_io.numpy_input_fn(
x={
'an_uninformative_feature': np.array([1, 1, 1, 1, 1]),
'categorical': np.array(['bad', 'good', 'good', 'ok', 'bad']),
},
y=labels,
batch_size=5,
shuffle=False)
# Train depth 1 tree.
est = boosted_trees.BoostedTreesRegressor(
feature_columns=[bucketized_col, feature_indicator],
n_batches_per_layer=1,
n_trees=1,
learning_rate=1.0,
max_depth=1)
num_steps = 1
est.train(input_fn, steps=num_steps)
ensemble = self._assert_checkpoint_and_return_model(
est.model_dir, global_step=1, finalized_trees=1, attempted_layers=1)
# We learnt perfectly.
eval_res = est.evaluate(input_fn=input_fn, steps=1)
self.assertAllClose(eval_res['loss'], 0)
predictions = list(est.predict(input_fn))
self.assertAllClose(
labels,
[pred['predictions'] for pred in predictions])
self.assertEqual(3, len(ensemble.trees[0].nodes))
    # Check that the split happened on the 'good' value, which is encoded as
    # the feature with index 2 (index 0 is the numeric feature, index 1 is 'bad').
self.assertEqual(2, ensemble.trees[0].nodes[0].bucketized_split.feature_id)
self.assertEqual(0, ensemble.trees[0].nodes[0].bucketized_split.threshold)
def testTrainEvaluateAndPredictWithOnlyIndicatorColumn(self):
categorical = feature_column.categorical_column_with_vocabulary_list(
key='categorical', vocabulary_list=('bad', 'good', 'ok'))
feature_indicator = feature_column.indicator_column(categorical)
labels = np.array([[0.], [5.7], [5.7], [0.], [0.]], dtype=np.float32)
# Our categorical feature defines the labels perfectly
input_fn = numpy_io.numpy_input_fn(
x={
'categorical': np.array(['bad', 'good', 'good', 'ok', 'bad']),
},
y=labels,
batch_size=5,
shuffle=False)
# Train depth 1 tree.
est = boosted_trees.BoostedTreesRegressor(
feature_columns=[feature_indicator],
n_batches_per_layer=1,
n_trees=1,
learning_rate=1.0,
max_depth=1)
num_steps = 1
est.train(input_fn, steps=num_steps)
ensemble = self._assert_checkpoint_and_return_model(
est.model_dir, global_step=1, finalized_trees=1, attempted_layers=1)
# We learnt perfectly.
eval_res = est.evaluate(input_fn=input_fn, steps=1)
self.assertAllClose(eval_res['loss'], 0)
predictions = list(est.predict(input_fn))
self.assertAllClose(
labels,
[pred['predictions'] for pred in predictions])
self.assertEqual(3, len(ensemble.trees[0].nodes))
    # Check that the split happened on the 'good' value, which is encoded as
    # the feature with index 1 (index 0 is 'bad', index 2 is 'ok').
self.assertEqual(1, ensemble.trees[0].nodes[0].bucketized_split.feature_id)
self.assertEqual(0, ensemble.trees[0].nodes[0].bucketized_split.threshold)
class ModelFnTests(test_util.TensorFlowTestCase):
"""Tests bt_model_fn including unexposed internal functionalities."""
def setUp(self):
self._feature_columns = {
feature_column.bucketized_column(
feature_column.numeric_column('f_%d' % i, dtype=dtypes.float32),
BUCKET_BOUNDARIES) for i in range(NUM_FEATURES)
}
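  # The helpers below return the ensembles expected after successive training
  # iterations, as text-format boosted_trees_pb2.TreeEnsemble protos that the
  # tests compare against the serialized ensemble.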
def _get_expected_ensembles_for_classification(self):
first_round = """
trees {
nodes {
bucketized_split {
feature_id: 2
threshold: 2
left_id: 1
right_id: 2
}
metadata {
gain: 0.387675
}
}
nodes {
leaf {
scalar: -0.181818
}
}
nodes {
leaf {
scalar: 0.0625
}
}
}
tree_weights: 1.0
tree_metadata {
num_layers_grown: 1
is_finalized: false
}
growing_metadata {
num_trees_attempted: 1
num_layers_attempted: 1
last_layer_node_start: 1
last_layer_node_end: 3
}
"""
second_round = """
trees {
nodes {
bucketized_split {
feature_id: 2
threshold: 2
left_id: 1
right_id: 2
}
metadata {
gain: 0.387675
}
}
nodes {
bucketized_split {
feature_id: 0
threshold: 3
left_id: 3
right_id: 4
}
metadata {
gain: 0.0
original_leaf {
scalar: -0.181818
}
}
}
nodes {
bucketized_split {
feature_id: 0
threshold: 0
left_id: 5
right_id: 6
}
metadata {
gain: 0.105518
original_leaf {
scalar: 0.0625
}
}
}
nodes {
leaf {
scalar: -0.348397
}
}
nodes {
leaf {
scalar: -0.181818
}
}
nodes {
leaf {
scalar: 0.224091
}
}
nodes {
leaf {
scalar: 0.056815
}
}
}
trees {
nodes {
leaf {
scalar: 0.0
}
}
}
tree_weights: 1.0
tree_weights: 1.0
tree_metadata {
num_layers_grown: 2
is_finalized: true
}
tree_metadata {
num_layers_grown: 0
is_finalized: false
}
growing_metadata {
num_trees_attempted: 1
num_layers_attempted: 2
last_layer_node_start: 0
last_layer_node_end: 1
}
"""
third_round = """
trees {
nodes {
bucketized_split {
feature_id: 2
threshold: 2
left_id: 1
right_id: 2
}
metadata {
gain: 0.387675
}
}
nodes {
bucketized_split {
feature_id: 0
threshold: 3
left_id: 3
right_id: 4
}
metadata {
gain: 0.0
original_leaf {
scalar: -0.181818
}
}
}
nodes {
bucketized_split {
feature_id: 0
threshold: 0
left_id: 5
right_id: 6
}
metadata {
gain: 0.105518
original_leaf {
scalar: 0.0625
}
}
}
nodes {
leaf {
scalar: -0.348397
}
}
nodes {
leaf {
scalar: -0.181818
}
}
nodes {
leaf {
scalar: 0.224091
}
}
nodes {
leaf {
scalar: 0.056815
}
}
}
trees {
nodes {
bucketized_split {
feature_id: 1
threshold: 0
left_id: 1
right_id: 2
}
metadata {
gain: 0.287131
}
}
nodes {
leaf {
scalar: 0.162042
}
}
nodes {
leaf {
scalar: -0.086986
}
}
}
tree_weights: 1.0
tree_weights: 1.0
tree_metadata {
num_layers_grown: 2
is_finalized: true
}
tree_metadata {
num_layers_grown: 1
is_finalized: false
}
growing_metadata {
num_trees_attempted: 2
num_layers_attempted: 3
last_layer_node_start: 1
last_layer_node_end: 3
}
"""
return (first_round, second_round, third_round)
def _get_expected_ensembles_for_classification_with_bias(self):
first_round = """
trees {
nodes {
leaf {
scalar: -0.405086
}
}
}
tree_weights: 1.0
tree_metadata {
}
"""
second_round = """
trees {
nodes {
bucketized_split {
feature_id: 2
threshold: 2
left_id: 1
right_id: 2
}
metadata {
gain: 0.407711
original_leaf {
scalar: -0.405086
}
}
}
nodes {
leaf {
scalar: -0.556054
}
}
nodes {
leaf {
scalar: -0.301233
}
}
}
tree_weights: 1.0
tree_metadata {
num_layers_grown: 1
is_finalized: false
}
growing_metadata {
num_trees_attempted: 1
num_layers_attempted: 1
last_layer_node_start: 1
last_layer_node_end: 3
}
"""
third_round = """
trees {
nodes {
bucketized_split {
feature_id: 2
threshold: 2
left_id: 1
right_id: 2
}
metadata {
gain: 0.407711
original_leaf {
scalar: -0.405086
}
}
}
nodes {
bucketized_split {
feature_id: 0
threshold: 3
left_id: 3
right_id: 4
}
metadata {
original_leaf {
scalar: -0.556054
}
}
}
nodes {
bucketized_split {
feature_id: 0
threshold: 0
left_id: 5
right_id: 6
}
metadata {
gain: 0.09876
original_leaf {
scalar: -0.301233
}
}
}
nodes {
leaf {
scalar: -0.698072
}
}
nodes {
leaf {
scalar: -0.556054
}
}
nodes {
leaf {
scalar: -0.106016
}
}
nodes {
leaf {
scalar: -0.27349
}
}
}
trees {
nodes {
leaf {
}
}
}
tree_weights: 1.0
tree_weights: 1.0
tree_metadata {
num_layers_grown: 2
is_finalized: true
}
tree_metadata {
}
growing_metadata {
num_trees_attempted: 1
num_layers_attempted: 2
last_layer_node_end: 1
}
"""
forth_round = """
trees {
nodes {
bucketized_split {
feature_id: 2
threshold: 2
left_id: 1
right_id: 2
}
metadata {
gain: 0.4077113
original_leaf {
scalar: -0.405086
}
}
}
nodes {
bucketized_split {
threshold: 3
left_id: 3
right_id: 4
}
metadata {
original_leaf {
scalar: -0.556054
}
}
}
nodes {
bucketized_split {
threshold: 0
left_id: 5
right_id: 6
}
metadata {
gain: 0.09876
original_leaf {
scalar: -0.301233
}
}
}
nodes {
leaf {
scalar: -0.698072
}
}
nodes {
leaf {
scalar: -0.556054
}
}
nodes {
leaf {
scalar: -0.106016
}
}
nodes {
leaf {
scalar: -0.27349
}
}
}
trees {
nodes {
bucketized_split {
feature_id: 2
threshold: 2
left_id: 1
right_id: 2
}
metadata {
gain: 0.289927
}
}
nodes {
leaf {
scalar: -0.134588
}
}
nodes {
leaf {
scalar: 0.083838
}
}
}
tree_weights: 1.0
tree_weights: 1.0
tree_metadata {
num_layers_grown: 2
is_finalized: true
}
tree_metadata {
num_layers_grown: 1
}
growing_metadata {
num_trees_attempted: 2
num_layers_attempted: 3
last_layer_node_start: 1
last_layer_node_end: 3
}
"""
return (first_round, second_round, third_round, forth_round)
def _get_expected_ensembles_for_regression(self):
first_round = """
trees {
nodes {
bucketized_split {
feature_id: 1
threshold: 1
left_id: 1
right_id: 2
}
metadata {
gain: 1.169714
}
}
nodes {
leaf {
scalar: 0.241322
}
}
nodes {
leaf {
scalar: 0.083951
}
}
}
tree_weights: 1.0
tree_metadata {
num_layers_grown: 1
is_finalized: false
}
growing_metadata {
num_trees_attempted: 1
num_layers_attempted: 1
last_layer_node_start: 1
last_layer_node_end: 3
}
"""
second_round = """
trees {
nodes {
bucketized_split {
feature_id: 1
threshold: 1
left_id: 1
right_id: 2
}
metadata {
gain: 1.169714
}
}
nodes {
bucketized_split {
feature_id: 0
threshold: 1
left_id: 3
right_id: 4
}
metadata {
gain: 2.673407
original_leaf {
scalar: 0.241322
}
}
}
nodes {
bucketized_split {
feature_id: 0
threshold: 0
left_id: 5
right_id: 6
}
metadata {
gain: 0.324102
original_leaf {
scalar: 0.083951
}
}
}
nodes {
leaf {
scalar: 0.563167
}
}
nodes {
leaf {
scalar: 0.247047
}
}
nodes {
leaf {
scalar: 0.095273
}
}
nodes {
leaf {
scalar: 0.222102
}
}
}
trees {
nodes {
leaf {
scalar: 0.0
}
}
}
tree_weights: 1.0
tree_weights: 1.0
tree_metadata {
num_layers_grown: 2
is_finalized: true
}
tree_metadata {
num_layers_grown: 0
is_finalized: false
}
growing_metadata {
num_trees_attempted: 1
num_layers_attempted: 2
last_layer_node_start: 0
last_layer_node_end: 1
}
"""
third_round = """
trees {
nodes {
bucketized_split {
feature_id: 1
threshold: 1
left_id: 1
right_id: 2
}
metadata {
gain: 1.169714
}
}
nodes {
bucketized_split {
feature_id: 0
threshold: 1
left_id: 3
right_id: 4
}
metadata {
gain: 2.673407
original_leaf {
scalar: 0.241322
}
}
}
nodes {
bucketized_split {
feature_id: 0
threshold: 0
left_id: 5
right_id: 6
}
metadata {
gain: 0.324102
original_leaf {
scalar: 0.083951
}
}
}
nodes {
leaf {
scalar: 0.563167
}
}
nodes {
leaf {
scalar: 0.247047
}
}
nodes {
leaf {
scalar: 0.095273
}
}
nodes {
leaf {
scalar: 0.222102
}
}
}
trees {
nodes {
bucketized_split {
feature_id: 1
threshold: 0
left_id: 1
right_id: 2
}
metadata {
gain: 0.981026
}
}
nodes {
leaf {
scalar: 0.005166
}
}
nodes {
leaf {
scalar: 0.180281
}
}
}
tree_weights: 1.0
tree_weights: 1.0
tree_metadata {
num_layers_grown: 2
is_finalized: true
}
tree_metadata {
num_layers_grown: 1
is_finalized: false
}
growing_metadata {
num_trees_attempted: 2
num_layers_attempted: 3
last_layer_node_start: 1
last_layer_node_end: 3
}
"""
return (first_round, second_round, third_round)
def _get_expected_ensembles_for_regression_with_bias(self):
first_round = """
trees {
nodes {
leaf {
scalar: 1.799974
}
}
}
tree_weights: 1.0
tree_metadata {
}
"""
second_round = """
trees {
nodes {
bucketized_split {
feature_id: 1
threshold: 1
left_id: 1
right_id: 2
}
metadata {
gain: 1.190442
original_leaf {
scalar: 1.799974
}
}
}
nodes {
leaf {
scalar: 1.862786
}
}
nodes {
leaf {
scalar: 1.706149
}
}
}
tree_weights: 1.0
tree_metadata {
num_layers_grown: 1
is_finalized: false
}
growing_metadata {
num_trees_attempted: 1
num_layers_attempted: 1
last_layer_node_start: 1
last_layer_node_end: 3
}
"""
third_round = """
trees {
nodes {
bucketized_split {
feature_id: 1
threshold: 1
left_id: 1
right_id: 2
}
metadata {
gain: 1.190442
original_leaf {
scalar: 1.799974
}
}
}
nodes {
bucketized_split {
feature_id: 0
threshold: 1
left_id: 3
right_id: 4
}
metadata {
gain: 2.683594
original_leaf {
scalar: 1.862786
}
}
}
nodes {
bucketized_split {
feature_id: 0
threshold: 0
left_id: 5
right_id: 6
}
metadata {
gain: 0.322693
original_leaf {
scalar: 1.706149
}
}
}
nodes {
leaf {
scalar: 2.024487
}
}
nodes {
leaf {
scalar: 1.710319
}
}
nodes {
leaf {
scalar: 1.559208
}
}
nodes {
leaf {
scalar: 1.686037
}
}
}
trees {
nodes {
leaf {
scalar: 0.0
}
}
}
tree_weights: 1.0
tree_weights: 1.0
tree_metadata {
num_layers_grown: 2
is_finalized: true
}
tree_metadata {
num_layers_grown: 0
is_finalized: false
}
growing_metadata {
num_trees_attempted: 1
num_layers_attempted: 2
last_layer_node_start: 0
last_layer_node_end: 1
}
"""
forth_round = """
trees {
nodes {
bucketized_split {
feature_id: 1
threshold: 1
left_id: 1
right_id: 2
}
metadata {
gain: 1.190442
original_leaf {
scalar: 1.799974
}
}
}
nodes {
bucketized_split {
threshold: 1
left_id: 3
right_id: 4
}
metadata {
gain: 2.683594
original_leaf {
scalar: 1.8627863
}
}
}
nodes {
bucketized_split {
left_id: 5
right_id: 6
}
metadata {
gain: 0.322693
original_leaf {
scalar: 1.706149
}
}
}
nodes {
leaf {
scalar: 2.024487
}
}
nodes {
leaf {
scalar: 1.710319
}
}
nodes {
leaf {
scalar: 1.5592078
}
}
nodes {
leaf {
scalar: 1.686037
}
}
}
trees {
nodes {
bucketized_split {
feature_id: 1
left_id: 1
right_id: 2
}
metadata {
gain: 0.972589
}
}
nodes {
leaf {
scalar: -0.137592
}
}
nodes {
leaf {
scalar: 0.034926
}
}
}
tree_weights: 1.0
tree_weights: 1.0
tree_metadata {
num_layers_grown: 2
is_finalized: true
}
tree_metadata {
num_layers_grown: 1
}
growing_metadata {
num_trees_attempted: 2
num_layers_attempted: 3
last_layer_node_start: 1
last_layer_node_end: 3
}
"""
return (first_round, second_round, third_round, forth_round)
def _get_train_op_and_ensemble(self,
head,
config,
is_classification,
train_in_memory,
center_bias=False):
"""Calls bt_model_fn() and returns the train_op and ensemble_serialzed."""
features, labels = _make_train_input_fn(is_classification)()
tree_hparams = boosted_trees._TreeHParams( # pylint:disable=protected-access
n_trees=2,
max_depth=2,
learning_rate=0.1,
l1=0.,
l2=0.01,
tree_complexity=0.,
min_node_weight=0.,
center_bias=center_bias,
pruning_mode='none')
estimator_spec = boosted_trees._bt_model_fn( # pylint:disable=protected-access
features=features,
labels=labels,
mode=model_fn.ModeKeys.TRAIN,
head=head,
feature_columns=self._feature_columns,
tree_hparams=tree_hparams,
example_id_column_name=EXAMPLE_ID_COLUMN,
n_batches_per_layer=1,
config=config,
train_in_memory=train_in_memory)
resources.initialize_resources(resources.shared_resources()).run()
variables.global_variables_initializer().run()
variables.local_variables_initializer().run()
# Gets the train_op and serialized proto of the ensemble.
shared_resources = resources.shared_resources()
self.assertEqual(1, len(shared_resources))
train_op = estimator_spec.train_op
with ops.control_dependencies([train_op]):
_, ensemble_serialized = (
gen_boosted_trees_ops.boosted_trees_serialize_ensemble(
shared_resources[0].handle))
return train_op, ensemble_serialized
def testTrainClassifierInMemory(self):
ops.reset_default_graph()
expected_first, expected_second, expected_third = (
self._get_expected_ensembles_for_classification())
with self.test_session() as sess:
# Train with train_in_memory mode.
with sess.graph.as_default():
train_op, ensemble_serialized = self._get_train_op_and_ensemble(
boosted_trees._create_classification_head(n_classes=2),
run_config.RunConfig(),
is_classification=True,
train_in_memory=True)
_, serialized = sess.run([train_op, ensemble_serialized])
# Validate the trained ensemble.
ensemble_proto = boosted_trees_pb2.TreeEnsemble()
ensemble_proto.ParseFromString(serialized)
self.assertProtoEquals(expected_first, ensemble_proto)
# Run one more time and validate the trained ensemble.
_, serialized = sess.run([train_op, ensemble_serialized])
ensemble_proto = boosted_trees_pb2.TreeEnsemble()
ensemble_proto.ParseFromString(serialized)
self.assertProtoEquals(expected_second, ensemble_proto)
# Third round training and validation.
_, serialized = sess.run([train_op, ensemble_serialized])
ensemble_proto = boosted_trees_pb2.TreeEnsemble()
ensemble_proto.ParseFromString(serialized)
self.assertProtoEquals(expected_third, ensemble_proto)
def testTrainClassifierWithCenterBiasInMemory(self):
ops.reset_default_graph()
    # When bias centering is on, we expect the very first round to produce a
    # single-leaf tree holding the centered bias value.
expected_first, expected_second, expected_third, expected_forth = (
self._get_expected_ensembles_for_classification_with_bias())
with self.test_session() as sess:
with sess.graph.as_default():
train_op, ensemble_serialized = self._get_train_op_and_ensemble(
boosted_trees._create_classification_head(n_classes=2),
run_config.RunConfig(),
is_classification=True,
train_in_memory=True,
center_bias=True)
# 4 iterations to center bias.
for _ in range(4):
_, serialized = sess.run([train_op, ensemble_serialized])
# Validate the trained ensemble.
ensemble_proto = boosted_trees_pb2.TreeEnsemble()
ensemble_proto.ParseFromString(serialized)
self.assertProtoEquals(expected_first, ensemble_proto)
_, serialized = sess.run([train_op, ensemble_serialized])
ensemble_proto = boosted_trees_pb2.TreeEnsemble()
ensemble_proto.ParseFromString(serialized)
self.assertProtoEquals(expected_second, ensemble_proto)
# Third round training and validation.
_, serialized = sess.run([train_op, ensemble_serialized])
ensemble_proto = boosted_trees_pb2.TreeEnsemble()
ensemble_proto.ParseFromString(serialized)
self.assertProtoEquals(expected_third, ensemble_proto)
        # Fourth round training and validation.
_, serialized = sess.run([train_op, ensemble_serialized])
ensemble_proto = boosted_trees_pb2.TreeEnsemble()
ensemble_proto.ParseFromString(serialized)
self.assertProtoEquals(expected_forth, ensemble_proto)
def testTrainClassifierNonInMemory(self):
ops.reset_default_graph()
expected_first, expected_second, expected_third = (
self._get_expected_ensembles_for_classification())
with self.test_session() as sess:
# Train without train_in_memory mode.
with sess.graph.as_default():
train_op, ensemble_serialized = self._get_train_op_and_ensemble(
boosted_trees._create_classification_head(n_classes=2),
run_config.RunConfig(),
is_classification=True,
train_in_memory=False)
_, serialized = sess.run([train_op, ensemble_serialized])
# Validate the trained ensemble.
ensemble_proto = boosted_trees_pb2.TreeEnsemble()
ensemble_proto.ParseFromString(serialized)
self.assertProtoEquals(expected_first, ensemble_proto)
# Run one more time and validate the trained ensemble.
_, serialized = sess.run([train_op, ensemble_serialized])
ensemble_proto = boosted_trees_pb2.TreeEnsemble()
ensemble_proto.ParseFromString(serialized)
self.assertProtoEquals(expected_second, ensemble_proto)
# Third round training and validation.
_, serialized = sess.run([train_op, ensemble_serialized])
ensemble_proto = boosted_trees_pb2.TreeEnsemble()
ensemble_proto.ParseFromString(serialized)
self.assertProtoEquals(expected_third, ensemble_proto)
def testTrainClassifierWithCenterBiasNonInMemory(self):
ops.reset_default_graph()
    # When bias centering is on, we expect the very first round to produce a
    # single-leaf tree holding the centered bias value.
expected_first, expected_second, expected_third, expected_forth = (
self._get_expected_ensembles_for_classification_with_bias())
with self.test_session() as sess:
with sess.graph.as_default():
train_op, ensemble_serialized = self._get_train_op_and_ensemble(
boosted_trees._create_classification_head(n_classes=2),
run_config.RunConfig(),
is_classification=True,
train_in_memory=False,
center_bias=True)
# 4 iterations to center bias.
for _ in range(4):
_, serialized = sess.run([train_op, ensemble_serialized])
# Validate the trained ensemble.
ensemble_proto = boosted_trees_pb2.TreeEnsemble()
ensemble_proto.ParseFromString(serialized)
self.assertProtoEquals(expected_first, ensemble_proto)
# Run one more time and validate the trained ensemble.
_, serialized = sess.run([train_op, ensemble_serialized])
ensemble_proto = boosted_trees_pb2.TreeEnsemble()
ensemble_proto.ParseFromString(serialized)
self.assertProtoEquals(expected_second, ensemble_proto)
# Third round training and validation.
_, serialized = sess.run([train_op, ensemble_serialized])
ensemble_proto = boosted_trees_pb2.TreeEnsemble()
ensemble_proto.ParseFromString(serialized)
self.assertProtoEquals(expected_third, ensemble_proto)
        # Fourth round training and validation.
_, serialized = sess.run([train_op, ensemble_serialized])
ensemble_proto = boosted_trees_pb2.TreeEnsemble()
ensemble_proto.ParseFromString(serialized)
self.assertProtoEquals(expected_forth, ensemble_proto)
def testTrainRegressorInMemory(self):
ops.reset_default_graph()
expected_first, expected_second, expected_third = (
self._get_expected_ensembles_for_regression())
with self.test_session() as sess:
# Train with train_in_memory mode.
with sess.graph.as_default():
train_op, ensemble_serialized = self._get_train_op_and_ensemble(
boosted_trees._create_regression_head(label_dimension=1),
run_config.RunConfig(),
is_classification=False,
train_in_memory=True)
_, serialized = sess.run([train_op, ensemble_serialized])
# Validate the trained ensemble.
ensemble_proto = boosted_trees_pb2.TreeEnsemble()
ensemble_proto.ParseFromString(serialized)
self.assertProtoEquals(expected_first, ensemble_proto)
# Run one more time and validate the trained ensemble.
_, serialized = sess.run([train_op, ensemble_serialized])
ensemble_proto = boosted_trees_pb2.TreeEnsemble()
ensemble_proto.ParseFromString(serialized)
self.assertProtoEquals(expected_second, ensemble_proto)
# Third round training and validation.
_, serialized = sess.run([train_op, ensemble_serialized])
ensemble_proto = boosted_trees_pb2.TreeEnsemble()
ensemble_proto.ParseFromString(serialized)
self.assertProtoEquals(expected_third, ensemble_proto)
def testTrainRegressorInMemoryWithCenterBias(self):
ops.reset_default_graph()
expected_first, expected_second, expected_third, expected_forth = (
self._get_expected_ensembles_for_regression_with_bias())
with self.test_session() as sess:
# Train with train_in_memory mode.
with sess.graph.as_default():
train_op, ensemble_serialized = self._get_train_op_and_ensemble(
boosted_trees._create_regression_head(label_dimension=1),
run_config.RunConfig(),
is_classification=False,
train_in_memory=True,
center_bias=True)
# 3 iterations to center bias.
for _ in range(3):
_, serialized = sess.run([train_op, ensemble_serialized])
# Validate the trained ensemble.
ensemble_proto = boosted_trees_pb2.TreeEnsemble()
ensemble_proto.ParseFromString(serialized)
self.assertProtoEquals(expected_first, ensemble_proto)
# Run one more time and validate the trained ensemble.
_, serialized = sess.run([train_op, ensemble_serialized])
ensemble_proto = boosted_trees_pb2.TreeEnsemble()
ensemble_proto.ParseFromString(serialized)
self.assertProtoEquals(expected_second, ensemble_proto)
# Third round training and validation.
_, serialized = sess.run([train_op, ensemble_serialized])
ensemble_proto = boosted_trees_pb2.TreeEnsemble()
ensemble_proto.ParseFromString(serialized)
self.assertProtoEquals(expected_third, ensemble_proto)
        # Fourth round training and validation.
_, serialized = sess.run([train_op, ensemble_serialized])
ensemble_proto = boosted_trees_pb2.TreeEnsemble()
ensemble_proto.ParseFromString(serialized)
self.assertProtoEquals(expected_forth, ensemble_proto)
def testTrainRegressorNonInMemory(self):
ops.reset_default_graph()
expected_first, expected_second, expected_third = (
self._get_expected_ensembles_for_regression())
with self.test_session() as sess:
# Train without train_in_memory mode.
with sess.graph.as_default():
train_op, ensemble_serialized = self._get_train_op_and_ensemble(
boosted_trees._create_regression_head(label_dimension=1),
run_config.RunConfig(),
is_classification=False,
train_in_memory=False)
_, serialized = sess.run([train_op, ensemble_serialized])
# Validate the trained ensemble.
ensemble_proto = boosted_trees_pb2.TreeEnsemble()
ensemble_proto.ParseFromString(serialized)
self.assertProtoEquals(expected_first, ensemble_proto)
# Run one more time and validate the trained ensemble.
_, serialized = sess.run([train_op, ensemble_serialized])
ensemble_proto = boosted_trees_pb2.TreeEnsemble()
ensemble_proto.ParseFromString(serialized)
self.assertProtoEquals(expected_second, ensemble_proto)
# Third round training and validation.
_, serialized = sess.run([train_op, ensemble_serialized])
ensemble_proto = boosted_trees_pb2.TreeEnsemble()
ensemble_proto.ParseFromString(serialized)
self.assertProtoEquals(expected_third, ensemble_proto)
def testTrainRegressorNotInMemoryWithCenterBias(self):
ops.reset_default_graph()
expected_first, expected_second, expected_third, expected_forth = (
self._get_expected_ensembles_for_regression_with_bias())
with self.test_session() as sess:
# Train with train_in_memory mode.
with sess.graph.as_default():
train_op, ensemble_serialized = self._get_train_op_and_ensemble(
boosted_trees._create_regression_head(label_dimension=1),
run_config.RunConfig(),
is_classification=False,
train_in_memory=False,
center_bias=True)
# 3 iterations to center the bias (because we are using regularization).
for _ in range(3):
_, serialized = sess.run([train_op, ensemble_serialized])
# Validate the trained ensemble.
ensemble_proto = boosted_trees_pb2.TreeEnsemble()
ensemble_proto.ParseFromString(serialized)
self.assertProtoEquals(expected_first, ensemble_proto)
# Run one more time and validate the trained ensemble.
_, serialized = sess.run([train_op, ensemble_serialized])
ensemble_proto = boosted_trees_pb2.TreeEnsemble()
ensemble_proto.ParseFromString(serialized)
self.assertProtoEquals(expected_second, ensemble_proto)
# Third round training and validation.
_, serialized = sess.run([train_op, ensemble_serialized])
ensemble_proto = boosted_trees_pb2.TreeEnsemble()
ensemble_proto.ParseFromString(serialized)
self.assertProtoEquals(expected_third, ensemble_proto)
      # Fourth round training and validation.
_, serialized = sess.run([train_op, ensemble_serialized])
ensemble_proto = boosted_trees_pb2.TreeEnsemble()
ensemble_proto.ParseFromString(serialized)
self.assertProtoEquals(expected_forth, ensemble_proto)
if __name__ == '__main__':
googletest.main()
| aselle/tensorflow | tensorflow/python/estimator/canned/boosted_trees_test.py | Python | apache-2.0 | 58,435 |
# Copyright 2018 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""This API defines FeatureColumn for sequential input.
NOTE: This API is a work in progress and will likely be changing frequently.
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import collections
from tensorflow.python.feature_column import feature_column_v2 as fc
from tensorflow.python.feature_column import utils as fc_utils
from tensorflow.python.framework import dtypes
from tensorflow.python.framework import ops
from tensorflow.python.framework import tensor_shape
from tensorflow.python.keras import utils
from tensorflow.python.ops import array_ops
from tensorflow.python.ops import check_ops
from tensorflow.python.ops import parsing_ops
from tensorflow.python.ops import sparse_ops
from tensorflow.python.util.tf_export import keras_export
from tensorflow.python.util.tf_export import tf_export
# pylint: disable=protected-access
@keras_export('keras.experimental.SequenceFeatures')
class SequenceFeatures(fc._BaseFeaturesLayer):
"""A layer for sequence input.
All `feature_columns` must be sequence dense columns with the same
`sequence_length`. The output of this method can be fed into sequence
networks, such as RNN.
The output of this method is a 3D `Tensor` of shape `[batch_size, T, D]`.
`T` is the maximum sequence length for this batch, which could differ from
batch to batch.
If multiple `feature_columns` are given with `Di` `num_elements` each, their
outputs are concatenated. So, the final `Tensor` has shape
`[batch_size, T, D0 + D1 + ... + Dn]`.
Example:
```python
rating = sequence_numeric_column('rating')
watches = sequence_categorical_column_with_identity(
'watches', num_buckets=1000)
watches_embedding = embedding_column(watches, dimension=10)
columns = [rating, watches_embedding]
sequence_input_layer = SequenceFeatures(columns)
features = tf.io.parse_example(...,
features=make_parse_example_spec(columns))
sequence_input, sequence_length = sequence_input_layer(features)
sequence_length_mask = tf.sequence_mask(sequence_length)
rnn_cell = tf.keras.layers.SimpleRNNCell(hidden_size)
rnn_layer = tf.keras.layers.RNN(rnn_cell)
outputs, state = rnn_layer(sequence_input, mask=sequence_length_mask)
```
"""
def __init__(
self,
feature_columns,
trainable=True,
name=None,
**kwargs):
""""Constructs a SequenceFeatures layer.
Args:
feature_columns: An iterable of dense sequence columns. Valid columns are
- `embedding_column` that wraps a `sequence_categorical_column_with_*`
- `sequence_numeric_column`.
trainable: Boolean, whether the layer's variables will be updated via
gradient descent during training.
name: Name to give to the SequenceFeatures.
**kwargs: Keyword arguments to construct a layer.
Raises:
ValueError: If any of the `feature_columns` is not a
`SequenceDenseColumn`.
"""
super(SequenceFeatures, self).__init__(
feature_columns=feature_columns,
trainable=trainable,
name=name,
expected_column_type=fc.SequenceDenseColumn,
**kwargs)
def _target_shape(self, input_shape, total_elements):
return (input_shape[0], input_shape[1], total_elements)
def call(self, features):
"""Returns sequence input corresponding to the `feature_columns`.
Args:
features: A dict mapping keys to tensors.
Returns:
An `(input_layer, sequence_length)` tuple where:
- input_layer: A float `Tensor` of shape `[batch_size, T, D]`.
`T` is the maximum sequence length for this batch, which could differ
from batch to batch. `D` is the sum of `num_elements` for all
`feature_columns`.
- sequence_length: An int `Tensor` of shape `[batch_size]`. The sequence
length for each example.
Raises:
ValueError: If features are not a dictionary.
"""
if not isinstance(features, dict):
raise ValueError('We expected a dictionary here. Instead we got: ',
features)
transformation_cache = fc.FeatureTransformationCache(features)
output_tensors = []
sequence_lengths = []
for column in self._feature_columns:
with ops.name_scope(column.name):
dense_tensor, sequence_length = column.get_sequence_dense_tensor(
transformation_cache, self._state_manager)
# Flattens the final dimension to produce a 3D Tensor.
output_tensors.append(self._process_dense_tensor(column, dense_tensor))
sequence_lengths.append(sequence_length)
# Check and process sequence lengths.
fc._verify_static_batch_size_equality(sequence_lengths,
self._feature_columns)
sequence_length = _assert_all_equal_and_return(sequence_lengths)
return self._verify_and_concat_tensors(output_tensors), sequence_length
def concatenate_context_input(context_input, sequence_input):
"""Replicates `context_input` across all timesteps of `sequence_input`.
Expands dimension 1 of `context_input` then tiles it `sequence_length` times.
This value is appended to `sequence_input` on dimension 2 and the result is
returned.
Args:
context_input: A `Tensor` of dtype `float32` and shape `[batch_size, d1]`.
sequence_input: A `Tensor` of dtype `float32` and shape `[batch_size,
padded_length, d0]`.
Returns:
A `Tensor` of dtype `float32` and shape `[batch_size, padded_length,
d0 + d1]`.
Raises:
ValueError: If `sequence_input` does not have rank 3 or `context_input` does
not have rank 2.
"""
seq_rank_check = check_ops.assert_rank(
sequence_input,
3,
message='sequence_input must have rank 3',
data=[array_ops.shape(sequence_input)])
seq_type_check = check_ops.assert_type(
sequence_input,
dtypes.float32,
message='sequence_input must have dtype float32; got {}.'.format(
sequence_input.dtype))
ctx_rank_check = check_ops.assert_rank(
context_input,
2,
message='context_input must have rank 2',
data=[array_ops.shape(context_input)])
ctx_type_check = check_ops.assert_type(
context_input,
dtypes.float32,
message='context_input must have dtype float32; got {}.'.format(
context_input.dtype))
with ops.control_dependencies(
[seq_rank_check, seq_type_check, ctx_rank_check, ctx_type_check]):
padded_length = array_ops.shape(sequence_input)[1]
tiled_context_input = array_ops.tile(
array_ops.expand_dims(context_input, 1),
array_ops.concat([[1], [padded_length], [1]], 0))
return array_ops.concat([sequence_input, tiled_context_input], 2)
@tf_export('feature_column.sequence_categorical_column_with_identity')
def sequence_categorical_column_with_identity(
key, num_buckets, default_value=None):
"""Returns a feature column that represents sequences of integers.
Pass this to `embedding_column` or `indicator_column` to convert sequence
categorical data into dense representation for input to sequence NN, such as
RNN.
Example:
```python
watches = sequence_categorical_column_with_identity(
'watches', num_buckets=1000)
watches_embedding = embedding_column(watches, dimension=10)
columns = [watches_embedding]
features = tf.io.parse_example(..., features=make_parse_example_spec(columns))
sequence_feature_layer = SequenceFeatures(columns)
sequence_input, sequence_length = sequence_feature_layer(features)
sequence_length_mask = tf.sequence_mask(sequence_length)
rnn_cell = tf.keras.layers.SimpleRNNCell(hidden_size)
rnn_layer = tf.keras.layers.RNN(rnn_cell)
outputs, state = rnn_layer(sequence_input, mask=sequence_length_mask)
```
Args:
key: A unique string identifying the input feature.
num_buckets: Range of inputs. Namely, inputs are expected to be in the
range `[0, num_buckets)`.
default_value: If `None`, this column's graph operations will fail for
out-of-range inputs. Otherwise, this value must be in the range
`[0, num_buckets)`, and will replace out-of-range inputs.
Returns:
A `SequenceCategoricalColumn`.
Raises:
ValueError: if `num_buckets` is less than one.
ValueError: if `default_value` is not in range `[0, num_buckets)`.
"""
return fc.SequenceCategoricalColumn(
fc.categorical_column_with_identity(
key=key,
num_buckets=num_buckets,
default_value=default_value))
@tf_export('feature_column.sequence_categorical_column_with_hash_bucket')
def sequence_categorical_column_with_hash_bucket(
key, hash_bucket_size, dtype=dtypes.string):
"""A sequence of categorical terms where ids are set by hashing.
Pass this to `embedding_column` or `indicator_column` to convert sequence
categorical data into dense representation for input to sequence NN, such as
RNN.
Example:
```python
tokens = sequence_categorical_column_with_hash_bucket(
'tokens', hash_bucket_size=1000)
tokens_embedding = embedding_column(tokens, dimension=10)
columns = [tokens_embedding]
features = tf.io.parse_example(..., features=make_parse_example_spec(columns))
sequence_feature_layer = SequenceFeatures(columns)
sequence_input, sequence_length = sequence_feature_layer(features)
sequence_length_mask = tf.sequence_mask(sequence_length)
rnn_cell = tf.keras.layers.SimpleRNNCell(hidden_size)
rnn_layer = tf.keras.layers.RNN(rnn_cell)
outputs, state = rnn_layer(sequence_input, mask=sequence_length_mask)
```
Args:
key: A unique string identifying the input feature.
hash_bucket_size: An int > 1. The number of buckets.
dtype: The type of features. Only string and integer types are supported.
Returns:
A `SequenceCategoricalColumn`.
Raises:
ValueError: `hash_bucket_size` is not greater than 1.
ValueError: `dtype` is neither string nor integer.
"""
return fc.SequenceCategoricalColumn(
fc.categorical_column_with_hash_bucket(
key=key,
hash_bucket_size=hash_bucket_size,
dtype=dtype))
@tf_export('feature_column.sequence_categorical_column_with_vocabulary_file')
def sequence_categorical_column_with_vocabulary_file(
key, vocabulary_file, vocabulary_size=None, num_oov_buckets=0,
default_value=None, dtype=dtypes.string):
"""A sequence of categorical terms where ids use a vocabulary file.
Pass this to `embedding_column` or `indicator_column` to convert sequence
categorical data into dense representation for input to sequence NN, such as
RNN.
Example:
```python
states = sequence_categorical_column_with_vocabulary_file(
key='states', vocabulary_file='/us/states.txt', vocabulary_size=50,
num_oov_buckets=5)
states_embedding = embedding_column(states, dimension=10)
columns = [states_embedding]
features = tf.io.parse_example(..., features=make_parse_example_spec(columns))
sequence_feature_layer = SequenceFeatures(columns)
sequence_input, sequence_length = sequence_feature_layer(features)
sequence_length_mask = tf.sequence_mask(sequence_length)
rnn_cell = tf.keras.layers.SimpleRNNCell(hidden_size)
rnn_layer = tf.keras.layers.RNN(rnn_cell)
outputs, state = rnn_layer(sequence_input, mask=sequence_length_mask)
```
Args:
key: A unique string identifying the input feature.
vocabulary_file: The vocabulary file name.
    vocabulary_size: Number of elements in the vocabulary. This must be no
      greater than the length of `vocabulary_file`; if it is less, later
      values are ignored. If `None`, it is set to the length of
      `vocabulary_file`.
num_oov_buckets: Non-negative integer, the number of out-of-vocabulary
buckets. All out-of-vocabulary inputs will be assigned IDs in the range
`[vocabulary_size, vocabulary_size+num_oov_buckets)` based on a hash of
the input value. A positive `num_oov_buckets` can not be specified with
`default_value`.
default_value: The integer ID value to return for out-of-vocabulary feature
values, defaults to `-1`. This can not be specified with a positive
`num_oov_buckets`.
dtype: The type of features. Only string and integer types are supported.
Returns:
A `SequenceCategoricalColumn`.
Raises:
ValueError: `vocabulary_file` is missing or cannot be opened.
ValueError: `vocabulary_size` is missing or < 1.
ValueError: `num_oov_buckets` is a negative integer.
ValueError: `num_oov_buckets` and `default_value` are both specified.
ValueError: `dtype` is neither string nor integer.
"""
return fc.SequenceCategoricalColumn(
fc.categorical_column_with_vocabulary_file(
key=key,
vocabulary_file=vocabulary_file,
vocabulary_size=vocabulary_size,
num_oov_buckets=num_oov_buckets,
default_value=default_value,
dtype=dtype))
@tf_export('feature_column.sequence_categorical_column_with_vocabulary_list')
def sequence_categorical_column_with_vocabulary_list(
key, vocabulary_list, dtype=None, default_value=-1, num_oov_buckets=0):
"""A sequence of categorical terms where ids use an in-memory list.
Pass this to `embedding_column` or `indicator_column` to convert sequence
categorical data into dense representation for input to sequence NN, such as
RNN.
Example:
```python
colors = sequence_categorical_column_with_vocabulary_list(
key='colors', vocabulary_list=('R', 'G', 'B', 'Y'),
num_oov_buckets=2)
colors_embedding = embedding_column(colors, dimension=3)
columns = [colors_embedding]
features = tf.io.parse_example(..., features=make_parse_example_spec(columns))
sequence_feature_layer = SequenceFeatures(columns)
sequence_input, sequence_length = sequence_feature_layer(features)
sequence_length_mask = tf.sequence_mask(sequence_length)
rnn_cell = tf.keras.layers.SimpleRNNCell(hidden_size)
rnn_layer = tf.keras.layers.RNN(rnn_cell)
outputs, state = rnn_layer(sequence_input, mask=sequence_length_mask)
```
Args:
key: A unique string identifying the input feature.
vocabulary_list: An ordered iterable defining the vocabulary. Each feature
is mapped to the index of its value (if present) in `vocabulary_list`.
Must be castable to `dtype`.
dtype: The type of features. Only string and integer types are supported.
If `None`, it will be inferred from `vocabulary_list`.
default_value: The integer ID value to return for out-of-vocabulary feature
values, defaults to `-1`. This can not be specified with a positive
`num_oov_buckets`.
num_oov_buckets: Non-negative integer, the number of out-of-vocabulary
buckets. All out-of-vocabulary inputs will be assigned IDs in the range
`[len(vocabulary_list), len(vocabulary_list)+num_oov_buckets)` based on a
hash of the input value. A positive `num_oov_buckets` can not be specified
with `default_value`.
Returns:
A `SequenceCategoricalColumn`.
Raises:
ValueError: if `vocabulary_list` is empty, or contains duplicate keys.
ValueError: `num_oov_buckets` is a negative integer.
ValueError: `num_oov_buckets` and `default_value` are both specified.
ValueError: if `dtype` is not integer or string.
"""
return fc.SequenceCategoricalColumn(
fc.categorical_column_with_vocabulary_list(
key=key,
vocabulary_list=vocabulary_list,
dtype=dtype,
default_value=default_value,
num_oov_buckets=num_oov_buckets))
@tf_export('feature_column.sequence_numeric_column')
def sequence_numeric_column(
key,
shape=(1,),
default_value=0.,
dtype=dtypes.float32,
normalizer_fn=None):
"""Returns a feature column that represents sequences of numeric data.
Example:
```python
temperature = sequence_numeric_column('temperature')
columns = [temperature]
features = tf.io.parse_example(..., features=make_parse_example_spec(columns))
sequence_feature_layer = SequenceFeatures(columns)
sequence_input, sequence_length = sequence_feature_layer(features)
sequence_length_mask = tf.sequence_mask(sequence_length)
rnn_cell = tf.keras.layers.SimpleRNNCell(hidden_size)
rnn_layer = tf.keras.layers.RNN(rnn_cell)
outputs, state = rnn_layer(sequence_input, mask=sequence_length_mask)
```
Args:
key: A unique string identifying the input features.
shape: The shape of the input data per sequence id. E.g. if `shape=(2,)`,
each example must contain `2 * sequence_length` values.
default_value: A single value compatible with `dtype` that is used for
padding the sparse data into a dense `Tensor`.
dtype: The type of values.
normalizer_fn: If not `None`, a function that can be used to normalize the
value of the tensor after `default_value` is applied for parsing.
Normalizer function takes the input `Tensor` as its argument, and returns
      the output `Tensor` (e.g. lambda x: (x - 3.0) / 4.2). Please note that
      even though the most common use case of this function is normalization,
      it can be used for any kind of TensorFlow transformation.
Returns:
A `SequenceNumericColumn`.
Raises:
TypeError: if any dimension in shape is not an int.
ValueError: if any dimension in shape is not a positive integer.
ValueError: if `dtype` is not convertible to `tf.float32`.
"""
shape = fc._check_shape(shape=shape, key=key)
if not (dtype.is_integer or dtype.is_floating):
raise ValueError('dtype must be convertible to float. '
'dtype: {}, key: {}'.format(dtype, key))
if normalizer_fn is not None and not callable(normalizer_fn):
raise TypeError(
'normalizer_fn must be a callable. Given: {}'.format(normalizer_fn))
return SequenceNumericColumn(
key,
shape=shape,
default_value=default_value,
dtype=dtype,
normalizer_fn=normalizer_fn)
def _assert_all_equal_and_return(tensors, name=None):
"""Asserts that all tensors are equal and returns the first one."""
with ops.name_scope(name, 'assert_all_equal', values=tensors):
if len(tensors) == 1:
return tensors[0]
assert_equal_ops = []
for t in tensors[1:]:
assert_equal_ops.append(check_ops.assert_equal(tensors[0], t))
with ops.control_dependencies(assert_equal_ops):
return array_ops.identity(tensors[0])
class SequenceNumericColumn(
fc.SequenceDenseColumn,
collections.namedtuple(
'SequenceNumericColumn',
('key', 'shape', 'default_value', 'dtype', 'normalizer_fn'))):
"""Represents sequences of numeric data."""
@property
def _is_v2_column(self):
return True
@property
def name(self):
"""See `FeatureColumn` base class."""
return self.key
@property
def parse_example_spec(self):
"""See `FeatureColumn` base class."""
return {self.key: parsing_ops.VarLenFeature(self.dtype)}
def transform_feature(self, transformation_cache, state_manager):
"""See `FeatureColumn` base class.
In this case, we apply the `normalizer_fn` to the input tensor.
Args:
transformation_cache: A `FeatureTransformationCache` object to access
features.
state_manager: A `StateManager` to create / access resources such as
lookup tables.
Returns:
Normalized input tensor.
"""
input_tensor = transformation_cache.get(self.key, state_manager)
if self.normalizer_fn is not None:
input_tensor = self.normalizer_fn(input_tensor)
return input_tensor
@property
def variable_shape(self):
"""Returns a `TensorShape` representing the shape of sequence input."""
return tensor_shape.TensorShape(self.shape)
def get_sequence_dense_tensor(self, transformation_cache, state_manager):
"""Returns a `TensorSequenceLengthPair`.
Args:
transformation_cache: A `FeatureTransformationCache` object to access
features.
state_manager: A `StateManager` to create / access resources such as
lookup tables.
"""
sp_tensor = transformation_cache.get(self, state_manager)
dense_tensor = sparse_ops.sparse_tensor_to_dense(
sp_tensor, default_value=self.default_value)
# Reshape into [batch_size, T, variable_shape].
dense_shape = array_ops.concat(
[array_ops.shape(dense_tensor)[:1], [-1], self.variable_shape],
axis=0)
dense_tensor = array_ops.reshape(dense_tensor, shape=dense_shape)
# Get the number of timesteps per example
# For the 2D case, the raw values are grouped according to num_elements;
# for the 3D case, the grouping happens in the third dimension, and
# sequence length is not affected.
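    # For example, with shape=(2,), six values for one example in a rank-2
    # sparse input are grouped into pairs, i.e. a sequence length of 3.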
if sp_tensor.shape.ndims == 2:
num_elements = self.variable_shape.num_elements()
else:
num_elements = 1
seq_length = fc_utils.sequence_length_from_sparse_tensor(
sp_tensor, num_elements=num_elements)
return fc.SequenceDenseColumn.TensorSequenceLengthPair(
dense_tensor=dense_tensor, sequence_length=seq_length)
@property
def parents(self):
"""See 'FeatureColumn` base class."""
return [self.key]
def _get_config(self):
"""See 'FeatureColumn` base class."""
config = dict(zip(self._fields, self))
config['normalizer_fn'] = utils.serialize_keras_object(self.normalizer_fn)
config['dtype'] = self.dtype.name
return config
@classmethod
def _from_config(cls, config, custom_objects=None, columns_by_name=None):
"""See 'FeatureColumn` base class."""
fc._check_config_keys(config, cls._fields)
kwargs = config.copy()
kwargs['normalizer_fn'] = utils.deserialize_keras_object(
config['normalizer_fn'], custom_objects=custom_objects)
kwargs['dtype'] = dtypes.as_dtype(config['dtype'])
return cls(**kwargs)
# pylint: enable=protected-access
| ghchinoy/tensorflow | tensorflow/python/feature_column/sequence_feature_column.py | Python | apache-2.0 | 22,673 |
from __future__ import unicode_literals
import logging
import struct
import boto3
import gzip
import io
import msgpack
import numpy as np
logger = logging.getLogger(__name__)
def get_inputs_from_s3():
s3 = boto3.resource('s3', region_name='eu-west-1')
bucket = s3.Bucket('chessbot')
logger.info("Fetching input object summary items from S3")
object_summary_items = list(bucket.objects.filter(Prefix='input'))
logger.info('Inputs found: %d', len(object_summary_items))
for index, object_summary in enumerate(object_summary_items):
logger.info("Processing item %d: %s", index + 1, object_summary.key)
object = object_summary.get()
with io.BytesIO(object['Body'].read()) as bytestream:
with gzip.GzipFile(fileobj=bytestream, mode='rb') as f:
unpacker = msgpack.Unpacker(f)
                # avoid shadowing the built-in input() when unpacking each record
                inputs = [struct.unpack("<769B", packed) for packed in unpacker]
yield np.array(inputs, dtype=np.int32)
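# Illustrative usage sketch (added for demonstration; assumes AWS credentials
# for the bucket referenced above are available in the environment):
if __name__ == '__main__':
    logging.basicConfig(level=logging.INFO)
    for batch in get_inputs_from_s3():
        logger.info('Loaded batch with shape %s', batch.shape)
        break  # only inspect the first batch in this sketch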
| srom/chessbot | classifier/train/load.py | Python | mit | 990 |
# -*- coding: utf-8 -*-
# Generated by Django 1.10 on 2016-11-28 15:54
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [
('movies', '0004_auto_20161128_1544'),
]
operations = [
migrations.AddField(
model_name='movie',
name='titlehash',
field=models.CharField(default=0, max_length=200),
preserve_default=False,
),
]
| pdevetto/super-duper-disco | movies/migrations/0005_movie_titlehash.py | Python | gpl-3.0 | 496 |
# Copyright (c) David Wilson 2015
# Icarus is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
# Icarus is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
# You should have received a copy of the GNU General Public License
# along with Icarus. If not, see <http://www.gnu.org/licenses/>.
import unittest
from Platform import Platform
class TestPlatform(unittest.TestCase):
def test_from_dict(self):
Platform.from_dict({"":""})
def test_from_dict_returns_platform(self):
result = Platform.from_dict({"":""})
self.assertIsInstance(result, Platform)
def test_from_dict_performs_mappings(self):
d = {"name": "name",
"description": "description"}
result = Platform.from_dict(d)
self.assertEqual(d["name"], result.name)
self.assertEqual(d["description"], result.description)
def test_from_mongo_result_performs_mapping(self):
"""Initialise the mapper
:param mongo_result: A MongoDB result. The following fields
can currently be mapped:
* _id
* _Platform__name
* _Platform__description
"""
d = {"_id": "id",
"_Platform__name": "name",
"_Platform__description": "description"}
p = Platform.from_mongo_result(d)
self.assertEqual(d["_id"], p.id)
self.assertEqual(d["_Platform__name"], p.name)
self.assertEqual(d["_Platform__description"], p.description)
| jeroanan/GameCollection | Tests/TestPlatform.py | Python | gpl-3.0 | 1,844 |
#!/usr/bin/python
# -*- coding: utf-8 -*-
#
# Copyright (C) 2017 Google
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
# ----------------------------------------------------------------------------
#
# *** AUTO GENERATED CODE *** AUTO GENERATED CODE ***
#
# ----------------------------------------------------------------------------
#
# This file is automatically generated by Magic Modules and manual
# changes will be clobbered when the file is regenerated.
#
# Please read more about how to change this file at
# https://www.github.com/GoogleCloudPlatform/magic-modules
#
# ----------------------------------------------------------------------------
from __future__ import absolute_import, division, print_function
__metaclass__ = type
################################################################################
# Documentation
################################################################################
ANSIBLE_METADATA = {'metadata_version': '1.1', 'status': ["preview"], 'supported_by': 'community'}
DOCUMENTATION = '''
---
module: gcp_compute_instance_template_facts
description:
- Gather facts for GCP InstanceTemplate
short_description: Gather facts for GCP InstanceTemplate
version_added: 2.7
author: Google Inc. (@googlecloudplatform)
requirements:
- python >= 2.6
- requests >= 2.18.4
- google-auth >= 1.3.0
options:
filters:
description:
- A list of filter value pairs. Available filters are listed here U(https://cloud.google.com/sdk/gcloud/reference/topic/filters.)
  - Each additional filter in the list will be added as an AND condition (filter1
    and filter2).
extends_documentation_fragment: gcp
'''
EXAMPLES = '''
- name: " a instance template facts"
gcp_compute_instance_template_facts:
filters:
- name = test_object
project: test_project
auth_kind: serviceaccount
service_account_file: "/tmp/auth.pem"
state: facts
'''
RETURN = '''
items:
description: List of items
returned: always
type: complex
contains:
creationTimestamp:
description:
- Creation timestamp in RFC3339 text format.
returned: success
type: str
description:
description:
- An optional description of this resource. Provide this property when you create
the resource.
returned: success
type: str
id:
description:
- The unique identifier for the resource. This identifier is defined by the
server.
returned: success
type: int
name:
description:
- Name of the resource. The name is 1-63 characters long and complies with RFC1035.
returned: success
type: str
properties:
description:
- The instance properties for this instance template.
returned: success
type: complex
contains:
canIpForward:
description:
- Enables instances created based on this template to send packets with
source IP addresses other than their own and receive packets with destination
IP addresses other than their own. If these instances will be used as
an IP gateway or it will be set as the next-hop in a Route resource, specify
true. If unsure, leave this set to false.
returned: success
type: bool
description:
description:
- An optional text description for the instances that are created from this
instance template.
returned: success
type: str
disks:
description:
- An array of disks that are associated with the instances that are created
from this template.
returned: success
type: complex
contains:
autoDelete:
description:
- Specifies whether the disk will be auto-deleted when the instance
is deleted (but not when the disk is detached from the instance).
- 'Tip: Disks should be set to autoDelete=true so that leftover disks
are not left behind on machine deletion.'
returned: success
type: bool
boot:
description:
- Indicates that this is a boot disk. The virtual machine will use the
first partition of the disk for its root filesystem.
returned: success
type: bool
deviceName:
description:
- Specifies a unique device name of your choice that is reflected into
the /dev/disk/by-id/google-* tree of a Linux operating system running
within the instance. This name can be used to reference the device
for mounting, resizing, and so on, from within the instance.
returned: success
type: str
diskEncryptionKey:
description:
- Encrypts or decrypts a disk using a customer-supplied encryption key.
returned: success
type: complex
contains:
rawKey:
description:
- Specifies a 256-bit customer-supplied encryption key, encoded
in RFC 4648 base64 to either encrypt or decrypt this resource.
returned: success
type: str
rsaEncryptedKey:
description:
- Specifies an RFC 4648 base64 encoded, RSA-wrapped 2048-bit customer-supplied
encryption key to either encrypt or decrypt this resource.
returned: success
type: str
sha256:
description:
- The RFC 4648 base64 encoded SHA-256 hash of the customer-supplied
encryption key that protects this resource.
returned: success
type: str
index:
description:
- Assigns a zero-based index to this disk, where 0 is reserved for the
boot disk. For example, if you have many disks attached to an instance,
each disk would have a unique index number. If not specified, the
server will choose an appropriate value.
returned: success
type: int
initializeParams:
description:
- Specifies the parameters for a new disk that will be created alongside
the new instance. Use initialization parameters to create boot disks
or local SSDs attached to the new instance.
returned: success
type: complex
contains:
diskName:
description:
- Specifies the disk name. If not specified, the default is to use
the name of the instance.
returned: success
type: str
diskSizeGb:
description:
- Specifies the size of the disk in base-2 GB.
returned: success
type: int
diskType:
description:
- Reference to a disk type.
- Specifies the disk type to use to create the instance.
- If not specified, the default is pd-standard.
returned: success
type: str
sourceImage:
description:
- The source image to create this disk. When creating a new instance,
one of initializeParams.sourceImage or disks.source is required.
To create a disk with one of the public operating system images,
specify the image by its family name.
returned: success
type: str
sourceImageEncryptionKey:
description:
- The customer-supplied encryption key of the source image. Required
if the source image is protected by a customer-supplied encryption
key.
- Instance templates do not store customer-supplied encryption keys,
so you cannot create disks for instances in a managed instance
group if the source images are encrypted with your own keys.
returned: success
type: complex
contains:
rawKey:
description:
- Specifies a 256-bit customer-supplied encryption key, encoded
in RFC 4648 base64 to either encrypt or decrypt this resource.
returned: success
type: str
sha256:
description:
- The RFC 4648 base64 encoded SHA-256 hash of the customer-supplied
encryption key that protects this resource.
returned: success
type: str
interface:
description:
- Specifies the disk interface to use for attaching this disk, which
is either SCSI or NVME. The default is SCSI.
- Persistent disks must always use SCSI and the request will fail if
you attempt to attach a persistent disk in any other format than SCSI.
returned: success
type: str
mode:
description:
- The mode in which to attach this disk, either READ_WRITE or READ_ONLY.
If not specified, the default is to attach the disk in READ_WRITE
mode.
returned: success
type: str
source:
description:
- Reference to a disk. When creating a new instance, one of initializeParams.sourceImage
or disks.source is required.
- If desired, you can also attach existing non-root persistent disks
using this property. This field is only applicable for persistent
disks.
- Note that for InstanceTemplate, specify the disk name, not the URL
for the disk.
returned: success
type: dict
type:
description:
- Specifies the type of the disk, either SCRATCH or PERSISTENT. If not
specified, the default is PERSISTENT.
returned: success
type: str
machineType:
description:
- The machine type to use in the VM instance template.
returned: success
type: str
minCpuPlatform:
description:
- Specifies a minimum CPU platform for the VM instance. Applicable values
are the friendly names of CPU platforms .
returned: success
type: str
metadata:
description:
- The metadata key/value pairs to assign to instances that are created from
this template. These pairs can consist of custom metadata or predefined
keys.
returned: success
type: dict
guestAccelerators:
description:
- List of the type and count of accelerator cards attached to the instance
.
returned: success
type: complex
contains:
acceleratorCount:
description:
- The number of the guest accelerator cards exposed to this instance.
returned: success
type: int
acceleratorType:
description:
- Full or partial URL of the accelerator type resource to expose to
this instance.
returned: success
type: str
networkInterfaces:
description:
- An array of configurations for this interface. This specifies how this
interface is configured to interact with other network services, such
as connecting to the internet. Only one network interface is supported
per instance.
returned: success
type: complex
contains:
accessConfigs:
description:
- An array of configurations for this interface. Currently, only one
access config, ONE_TO_ONE_NAT, is supported. If there are no accessConfigs
specified, then this instance will have no external internet access.
returned: success
type: complex
contains:
name:
description:
- The name of this access configuration. The default and recommended
name is External NAT but you can use any arbitrary string you
would like. For example, My external IP or Network Access.
returned: success
type: str
natIP:
description:
- Reference to an address.
- An external IP address associated with this instance.
- Specify an unused static external IP address available to the
project or leave this field undefined to use an IP from a shared
ephemeral IP address pool. If you specify a static external IP
address, it must live in the same region as the zone of the instance.
returned: success
type: dict
type:
description:
- The type of configuration. The default and only option is ONE_TO_ONE_NAT.
returned: success
type: str
aliasIpRanges:
description:
- An array of alias IP ranges for this network interface. Can only be
specified for network interfaces on subnet-mode networks.
returned: success
type: complex
contains:
ipCidrRange:
description:
- The IP CIDR range represented by this alias IP range.
- This IP CIDR range must belong to the specified subnetwork and
cannot contain IP addresses reserved by system or used by other
network interfaces. This range may be a single IP address (e.g.
10.2.3.4), a netmask (e.g. /24) or a CIDR format string (e.g.
10.1.2.0/24).
returned: success
type: str
subnetworkRangeName:
description:
- Optional subnetwork secondary range name specifying the secondary
range from which to allocate the IP CIDR range for this alias
IP range. If left unspecified, the primary range of the subnetwork
will be used.
returned: success
type: str
name:
description:
- The name of the network interface, generated by the server. For network
devices, these are eth0, eth1, etc .
returned: success
type: str
network:
description:
- Specifies the title of an existing network. When creating an instance,
if neither the network nor the subnetwork is specified, the default
network global/networks/default is used; if the network is not specified
but the subnetwork is specified, the network is inferred.
returned: success
type: dict
networkIP:
description:
- An IPv4 internal network address to assign to the instance for this
network interface. If not specified by the user, an unused internal
IP is assigned by the system.
returned: success
type: str
subnetwork:
description:
- Reference to a VPC network.
- If the network resource is in legacy mode, do not provide this property.
If the network is in auto subnet mode, providing the subnetwork is
optional. If the network is in custom subnet mode, then this field
should be specified.
returned: success
type: dict
scheduling:
description:
- Sets the scheduling options for this instance.
returned: success
type: complex
contains:
automaticRestart:
description:
- Specifies whether the instance should be automatically restarted if
it is terminated by Compute Engine (not terminated by a user).
- You can only set the automatic restart option for standard instances.
Preemptible instances cannot be automatically restarted.
returned: success
type: bool
onHostMaintenance:
description:
- Defines the maintenance behavior for this instance. For standard instances,
the default behavior is MIGRATE. For preemptible instances, the default
and only possible behavior is TERMINATE.
- For more information, see Setting Instance Scheduling Options.
returned: success
type: str
preemptible:
description:
- Defines whether the instance is preemptible. This can only be set
during instance creation, it cannot be set or changed after the instance
has been created.
returned: success
type: bool
serviceAccounts:
description:
- A list of service accounts, with their specified scopes, authorized for
this instance. Only one service account per VM instance is supported.
returned: success
type: complex
contains:
email:
description:
- Email address of the service account.
returned: success
type: str
scopes:
description:
- The list of scopes to be made available for this service account.
returned: success
type: list
tags:
description:
- A list of tags to apply to this instance. Tags are used to identify valid
sources or targets for network firewalls and are specified by the client
during instance creation. The tags can be later modified by the setTags
method. Each tag within the list must comply with RFC1035.
returned: success
type: complex
contains:
fingerprint:
description:
- Specifies a fingerprint for this request, which is essentially a hash
of the metadata's contents and used for optimistic locking.
- The fingerprint is initially generated by Compute Engine and changes
after every request to modify or update metadata. You must always
provide an up-to-date fingerprint hash in order to update or change
metadata.
returned: success
type: str
items:
description:
- An array of tags. Each tag must be 1-63 characters long, and comply
with RFC1035.
returned: success
type: list
'''
################################################################################
# Imports
################################################################################
from ansible.module_utils.gcp_utils import navigate_hash, GcpSession, GcpModule, GcpRequest
import json
################################################################################
# Main
################################################################################
def main():
module = GcpModule(argument_spec=dict(filters=dict(type='list', elements='str')))
if not module.params['scopes']:
module.params['scopes'] = ['https://www.googleapis.com/auth/compute']
items = fetch_list(module, collection(module), query_options(module.params['filters']))
if items.get('items'):
items = items.get('items')
else:
items = []
return_value = {'items': items}
module.exit_json(**return_value)
def collection(module):
return "https://www.googleapis.com/compute/v1/projects/{project}/global/instanceTemplates".format(**module.params)
def fetch_list(module, link, query):
auth = GcpSession(module, 'compute')
response = auth.get(link, params={'filter': query})
return return_if_object(module, response)
def query_options(filters):
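    # Illustrative behaviour (comment added for clarity):
    #   query_options(['name = test'])                 -> 'name = test'
    #   query_options(['name = test', 'zone = us-*'])  -> '(name = test) (zone = us-*)'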
if not filters:
return ''
if len(filters) == 1:
return filters[0]
else:
queries = []
for f in filters:
# For multiple queries, all queries should have ()
if f[0] != '(' and f[-1] != ')':
queries.append("(%s)" % ''.join(f))
else:
queries.append(f)
return ' '.join(queries)
def return_if_object(module, response):
# If not found, return nothing.
if response.status_code == 404:
return None
# If no content, return nothing.
if response.status_code == 204:
return None
try:
module.raise_for_status(response)
result = response.json()
except getattr(json.decoder, 'JSONDecodeError', ValueError) as inst:
module.fail_json(msg="Invalid JSON response with error: %s" % inst)
if navigate_hash(result, ['error', 'errors']):
module.fail_json(msg=navigate_hash(result, ['error', 'errors']))
return result
if __name__ == "__main__":
main()
| tersmitten/ansible | lib/ansible/modules/cloud/google/gcp_compute_instance_template_facts.py | Python | gpl-3.0 | 22,189 |
import sys
def get_most_popular_destination(n):
destinations = {}
for i in range(n):
destination = input()
if destination in destinations:
destinations[destination] += 1
else:
destinations[destination] = 1
return max(destinations, key=destinations.get)
n = int(input())
print(get_most_popular_destination(n))
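# Equivalent approach (illustrative only, not part of the original solution):
# read the n destinations into a list and use
#   collections.Counter(destinations_list).most_common(1)[0][0]
# Note that max(dict, key=dict.get) above breaks ties arbitrarily, as does
# Counter.most_common.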
| MightyPixel/algorithms | Interviews/Company_1/problem_01.py | Python | gpl-2.0 | 373 |
from expyriment import stimuli
from expyriment.misc import constants
from config import templatePicture, cardColor, picturesFolder
class LdCard(object):
def __init__(self, size, color=cardColor):
self._size = size
self._stimuli = (stimuli.Picture(templatePicture, position=None),
stimuli.Rectangle(size, colour=color, line_width=None, position=None))
@property
def stimuli(self):
return self._stimuli
@property
def size(self):
return self._size
@property
def position(self):
return self._position
@property
def color(self):
return self._stimuli[1].colour
@position.setter
def position(self, value):
self._position = value
self._stimuli[0].replace(value)
self._stimuli[1].replace(value)
@color.setter
def color(self, value):
self._stimuli = (
self.stimuli[0], stimuli.Rectangle(self.size, colour=value, line_width=None, position=self.position))
def setPicture(self, value, scale=True):
self._stimuli = (stimuli.Picture(value, position=self.position),
stimuli.Rectangle(self.size, colour=constants.C_WHITE, line_width=None,
position=self.position))
if scale:
self._stimuli[0].scale(self.size[0]/float(300))
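# Illustrative usage sketch (added for demonstration; the size, position and
# picture file name below are hypothetical):
#   card = LdCard(size=(100, 100))
#   card.position = (0, 0)
#   card.setPicture(picturesFolder + 'example.png')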
| arnaudbore/declarativeTask | src/ld_card.py | Python | mit | 1,380 |
from django.urls import reverse_lazy
from django.views.generic import ListView
from django.views.generic.edit import UpdateView
from .models import Toy
class ToyEditView(UpdateView):
model = Toy
fields = '__all__'
template_name_suffix = '_edit'
success_url = reverse_lazy('toy:list')
class ToyListView(ListView):
def get_queryset(self):
return Toy.objects.all()
| toystori/v2 | app/toy/views.py | Python | mit | 396 |
#############################################################################
##
## Copyright (c) 2014 Riverbank Computing Limited <[email protected]>
##
## This file is part of PyQt5.
##
## This file may be used under the terms of the GNU General Public License
## version 3.0 as published by the Free Software Foundation and appearing in
## the file LICENSE included in the packaging of this file. Please review the
## following information to ensure the GNU General Public License version 3.0
## requirements will be met: http://www.gnu.org/copyleft/gpl.html.
##
## If you do not wish to use this file under the terms of the GPL version 3.0
## then you may purchase a commercial license. For more information contact
## [email protected].
##
## This file is provided AS IS with NO WARRANTY OF ANY KIND, INCLUDING THE
## WARRANTY OF DESIGN, MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE.
##
#############################################################################
import re
def as_string(obj):
if isinstance(obj, basestring):
return '"' + _escape(obj.encode('UTF-8')) + '"'
return str(obj)
_esc_regex = re.compile(r"(\"|\'|\\)")
def _escape(text):
# This escapes any escaped single or double quote or backslash.
x = _esc_regex.sub(r"\\\1", text)
# This replaces any '\n' with an escaped version and a real line break.
return re.sub(r'\n', r'\\n"\n"', x)
| dragondjf/PyQt5 | python2.7/PyQt5/uic/port_v2/as_string.py | Python | gpl-2.0 | 1,435 |
import _plotly_utils.basevalidators
class LatValidator(_plotly_utils.basevalidators.NumberValidator):
def __init__(self, plotly_name="lat", parent_name="layout.geo.center", **kwargs):
super(LatValidator, self).__init__(
plotly_name=plotly_name,
parent_name=parent_name,
edit_type=kwargs.pop("edit_type", "plot"),
role=kwargs.pop("role", "info"),
**kwargs
)
| plotly/python-api | packages/python/plotly/plotly/validators/layout/geo/center/_lat.py | Python | mit | 439 |
from django.contrib import admin
from django import forms
from django.utils.translation import ugettext_lazy as _
from ovp_core.models import Cause
class CauseInline(admin.TabularInline):
model = Cause
class CauseAdmin(admin.ModelAdmin):
fields = ['id', 'name', 'image']
list_display = ['id', 'name']
list_filter = []
list_editable = ['name']
search_fields = ['id', 'name']
readonly_fields = ['id']
raw_id_fields = []
admin.site.register(Cause, CauseAdmin)
| OpenVolunteeringPlatform/django-ovp-core | ovp_core/admin/cause.py | Python | agpl-3.0 | 480 |
"""Problem 78
10 September 2004
Let p(n) represent the number of different ways in which n coins can
be separated into piles. For example, five coins can separated into
piles in exactly seven different ways, so p(5)=7.
OOOOO
OOOO O
OOO OO
OOO O O
OO OO O
OO O O O
O O O O O
Find the least value of n for which p(n) is divisible by one million.
"""
# I successfully found a way to calculate partitions in problem 76, but the
# function is too slow for this case.
# Then I tried to implement the generating function based on this:
# http://en.wikipedia.org/wiki/Partition_%28number_theory%29#Generating_function
# ...with no success.
# Then I "cheated", and got the function p() below at:
# http://stackoverflow.com/questions/3164305/optimizing-a-partition-function
from eulerlib import pentagonalNum
def generalizedPentagonalNum(n):
    # use integer division so indices stay ints under Python 3
    if n%2:
        i = (n + 1) // 2
    else:
        i = -n // 2
return pentagonalNum(i)
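# The p() function below implements Euler's pentagonal number theorem recurrence:
#   p(n) = sum_{k >= 1} (-1)^(k+1) * p(n - g_k),
# where g_k runs over the generalized pentagonal numbers 1, 2, 5, 7, 12, 15, ...
# produced by generalizedPentagonalNum above; terms with n - g_k < 0 are dropped
# and p(0) = 1 (the seed stored in `partitions`).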
partitions = [1]
def p(n):
try:
return partitions[int(n)]
except IndexError:
total = 0
sign = 1
i = 1
k = generalizedPentagonalNum(i)
while n - k >= 0:
total += sign * p(n - k)
i += 1
if i%2: sign *= -1
k = generalizedPentagonalNum(i)
partitions.insert(n, total)
return total
n = 1
while True:
x = p(n)
if x % 10000 == 0:
print(n, x, sep="\t")
if x % 1000000 == 0:
break
n += 1
#10 9
#100 74
#1000 449
#10000 599
#100000 11224
#1000000 55374
| feliposz/project-euler-solutions | python/euler78.py | Python | mit | 1,566 |
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from io import BytesIO
import os
import gzip
import tempfile
import unittest
import zlib
from django.core.exceptions import ImproperlyConfigured
from django.core.files import File
from django.core.files.move import file_move_safe
from django.core.files.base import ContentFile
from django.core.files.uploadedfile import SimpleUploadedFile, UploadedFile
from django.core.files.temp import NamedTemporaryFile
from django.utils._os import upath
from django.utils import six
try:
from django.utils.image import Image
from django.core.files import images
except ImproperlyConfigured:
Image = None
class FileTests(unittest.TestCase):
def test_unicode_uploadedfile_name(self):
uf = UploadedFile(name='¿Cómo?', content_type='text')
self.assertIs(type(repr(uf)), str)
def test_unicode_file_name(self):
f = File(None, 'djángö')
self.assertIs(type(repr(f)), str)
def test_context_manager(self):
orig_file = tempfile.TemporaryFile()
base_file = File(orig_file)
with base_file as f:
self.assertIs(base_file, f)
self.assertFalse(f.closed)
self.assertTrue(f.closed)
self.assertTrue(orig_file.closed)
def test_namedtemporaryfile_closes(self):
"""
The symbol django.core.files.NamedTemporaryFile is assigned as
a different class on different operating systems. In
any case, the result should minimally mock some of the API of
tempfile.NamedTemporaryFile from the Python standard library.
"""
tempfile = NamedTemporaryFile()
self.assertTrue(hasattr(tempfile, "closed"))
self.assertFalse(tempfile.closed)
tempfile.close()
self.assertTrue(tempfile.closed)
def test_file_mode(self):
# Should not set mode to None if it is not present.
# See #14681, stdlib gzip module crashes if mode is set to None
file = SimpleUploadedFile("mode_test.txt", b"content")
self.assertFalse(hasattr(file, 'mode'))
gzip.GzipFile(fileobj=file)
def test_file_iteration(self):
"""
File objects should yield lines when iterated over.
Refs #22107.
"""
file = File(BytesIO(b'one\ntwo\nthree'))
self.assertEqual(list(file), [b'one\n', b'two\n', b'three'])
class NoNameFileTestCase(unittest.TestCase):
"""
Other examples of unnamed files may be tempfile.SpooledTemporaryFile or
urllib.urlopen()
"""
def test_noname_file_default_name(self):
self.assertEqual(File(BytesIO(b'A file with no name')).name, None)
def test_noname_file_get_size(self):
self.assertEqual(File(BytesIO(b'A file with no name')).size, 19)
class ContentFileTestCase(unittest.TestCase):
def test_content_file_default_name(self):
self.assertEqual(ContentFile(b"content").name, None)
def test_content_file_custom_name(self):
"""
Test that the constructor of ContentFile accepts 'name' (#16590).
"""
name = "I can have a name too!"
self.assertEqual(ContentFile(b"content", name=name).name, name)
def test_content_file_input_type(self):
"""
Test that ContentFile can accept both bytes and unicode and that the
retrieved content is of the same type.
"""
self.assertIsInstance(ContentFile(b"content").read(), bytes)
if six.PY3:
self.assertIsInstance(ContentFile("español").read(), six.text_type)
else:
self.assertIsInstance(ContentFile("español").read(), bytes)
class DimensionClosingBug(unittest.TestCase):
"""
Test that get_image_dimensions() properly closes files (#8817)
"""
@unittest.skipUnless(Image, "Pillow/PIL not installed")
def test_not_closing_of_files(self):
"""
Open files passed into get_image_dimensions() should stay opened.
"""
empty_io = BytesIO()
try:
images.get_image_dimensions(empty_io)
finally:
self.assertTrue(not empty_io.closed)
@unittest.skipUnless(Image, "Pillow/PIL not installed")
def test_closing_of_filenames(self):
"""
        get_image_dimensions() called with a filename should close the file.
"""
# We need to inject a modified open() builtin into the images module
# that checks if the file was closed properly if the function is
# called with a filename instead of an file object.
# get_image_dimensions will call our catching_open instead of the
# regular builtin one.
class FileWrapper(object):
_closed = []
def __init__(self, f):
self.f = f
def __getattr__(self, name):
return getattr(self.f, name)
def close(self):
self._closed.append(True)
self.f.close()
def catching_open(*args):
return FileWrapper(open(*args))
images.open = catching_open
try:
images.get_image_dimensions(os.path.join(os.path.dirname(upath(__file__)), "test1.png"))
finally:
del images.open
self.assertTrue(FileWrapper._closed)
class InconsistentGetImageDimensionsBug(unittest.TestCase):
"""
Test that get_image_dimensions() works properly after various calls
using a file handler (#11158)
"""
@unittest.skipUnless(Image, "Pillow/PIL not installed")
def test_multiple_calls(self):
"""
Multiple calls of get_image_dimensions() should return the same size.
"""
img_path = os.path.join(os.path.dirname(upath(__file__)), "test.png")
with open(img_path, 'rb') as fh:
image = images.ImageFile(fh)
image_pil = Image.open(fh)
size_1 = images.get_image_dimensions(image)
size_2 = images.get_image_dimensions(image)
self.assertEqual(image_pil.size, size_1)
self.assertEqual(size_1, size_2)
@unittest.skipUnless(Image, "Pillow/PIL not installed")
def test_bug_19457(self):
"""
Regression test for #19457
        get_image_dimensions fails on some PNGs, while Image.size works fine on them.
"""
img_path = os.path.join(os.path.dirname(upath(__file__)), "magic.png")
try:
size = images.get_image_dimensions(img_path)
except zlib.error:
self.fail("Exception raised from get_image_dimensions().")
with open(img_path, 'rb') as fh:
self.assertEqual(size, Image.open(fh).size)
class FileMoveSafeTests(unittest.TestCase):
def test_file_move_overwrite(self):
handle_a, self.file_a = tempfile.mkstemp(dir=os.environ['DJANGO_TEST_TEMP_DIR'])
handle_b, self.file_b = tempfile.mkstemp(dir=os.environ['DJANGO_TEST_TEMP_DIR'])
# file_move_safe should raise an IOError exception if destination file exists and allow_overwrite is False
self.assertRaises(IOError, lambda: file_move_safe(self.file_a, self.file_b, allow_overwrite=False))
# should allow it and continue on if allow_overwrite is True
self.assertIsNone(file_move_safe(self.file_a, self.file_b, allow_overwrite=True))
os.close(handle_a)
os.close(handle_b)
| wfxiang08/django178 | tests/files/tests.py | Python | bsd-3-clause | 7,360 |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
cookiecutter.prompt
---------------------
Functions for prompting the user for project info.
"""
from __future__ import unicode_literals
import sys
from .compat import iteritems, read_response, is_string
from jinja2.environment import Environment
def prompt_for_config(context, no_input=False):
"""
Prompts the user to enter new config, using context as a source for the
field names and sample values.
    :param no_input: If True, do not prompt the user and keep the default
        values from the context.
"""
cookiecutter_dict = {}
env = Environment()
for key, raw in iteritems(context['cookiecutter']):
if key.startswith('_'):
cookiecutter_dict[key] = raw
continue
raw = raw if is_string(raw) else str(raw)
val = env.from_string(raw).render(cookiecutter=cookiecutter_dict)
if not no_input:
prompt = '{0} (default is "{1}")? '.format(key, val)
new_val = read_response(prompt).strip()
if new_val != '':
val = new_val
cookiecutter_dict[key] = val
return cookiecutter_dict
def query_yes_no(question, default='yes'):
"""
Ask a yes/no question via `read_response()` and return their answer.
:param question: A string that is presented to the user.
:param default: The presumed answer if the user just hits <Enter>.
It must be "yes" (the default), "no" or None (meaning
an answer is required of the user).
The "answer" return value is one of "yes" or "no".
Adapted from
http://stackoverflow.com/questions/3041986/python-command-line-yes-no-input
http://code.activestate.com/recipes/577058/
"""
valid = {'yes': True, 'y': True, 'ye': True, 'no': False, 'n': False}
if default is None:
prompt = ' [y/n] '
elif default == 'yes':
prompt = ' [Y/n] '
elif default == 'no':
prompt = ' [y/N] '
else:
raise ValueError('Invalid default answer: "{0}"'.format(default))
while True:
sys.stdout.write(question + prompt)
choice = read_response().lower()
if default is not None and choice == '':
return valid[default]
elif choice in valid:
return valid[choice]
else:
sys.stdout.write('Please respond with "yes" or "no" '
'(or "y" or "n").\n')
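# Illustrative usage sketch (added for demonstration; the context dict below is
# hypothetical):
#   if query_yes_no('Create the project in the current directory?', default='no'):
#       config = prompt_for_config(
#           {'cookiecutter': {'project_name': 'My Project'}}, no_input=True)
#       # config == {'project_name': 'My Project'}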
| vincentbernat/cookiecutter | cookiecutter/prompt.py | Python | bsd-3-clause | 2,429 |
import ssl
import imp
import os
import tornado.web
import tornado.httpserver
import tornado.autoreload
import tornado.process
from DIRAC import S_OK, S_ERROR, gLogger, gConfig
from WebAppDIRAC.Core.HandlerMgr import HandlerMgr
from WebAppDIRAC.Core.TemplateLoader import TemplateLoader
from WebAppDIRAC.Lib.SessionData import SessionData
from WebAppDIRAC.Lib import Conf
from DIRAC.Core.Utilities.CFG import CFG
from DIRAC.ConfigurationSystem.Client.ConfigurationData import gConfigurationData
from DIRAC.ConfigurationSystem.Client.Helpers import CSGlobals
import DIRAC
class App( object ):
def __init__( self ):
self.__handlerMgr = HandlerMgr( Conf.rootURL() )
self.__servers = {}
self.log = gLogger.getSubLogger( "Web" )
def _logRequest( self, handler ):
status = handler.get_status()
if status < 400:
logm = self.log.notice
elif status < 500:
logm = self.log.warn
else:
logm = self.log.error
request_time = 1000.0 * handler.request.request_time()
logm( "%d %s %.2fms" % ( status, handler._request_summary(), request_time ) )
def __reloadAppCB( self ):
gLogger.notice( "\n !!!!!! Reloading web app...\n" )
def _loadWebAppCFGFiles( self ):
"""
Load WebApp/web.cfg definitions
"""
exts = []
for ext in CSGlobals.getCSExtensions():
if ext == "DIRAC":
continue
if ext[-5:] != "DIRAC":
ext = "%sDIRAC" % ext
if ext != "WebAppDIRAC":
exts.append( ext )
exts.append( "DIRAC" )
exts.append( "WebAppDIRAC" )
webCFG = CFG()
for modName in reversed( exts ):
try:
modPath = imp.find_module( modName )[1]
except ImportError:
continue
gLogger.verbose( "Found module %s at %s" % ( modName, modPath ) )
cfgPath = os.path.join( modPath, "WebApp", "web.cfg" )
if not os.path.isfile( cfgPath ):
gLogger.verbose( "Inexistant %s" % cfgPath )
continue
try:
modCFG = CFG().loadFromFile( cfgPath )
except Exception, excp:
gLogger.error( "Could not load %s: %s" % ( cfgPath, excp ) )
continue
gLogger.verbose( "Loaded %s" % cfgPath )
expl = [ Conf.BASECS ]
while len( expl ):
current = expl.pop( 0 )
if not modCFG.isSection( current ):
continue
if modCFG.getOption( "%s/AbsoluteDefinition" % current, False ):
gLogger.verbose( "%s:%s is an absolute definition" % ( modName, current ) )
try:
webCFG.deleteKey( current )
except:
pass
modCFG.deleteKey( "%s/AbsoluteDefinition" % current )
else:
for sec in modCFG[ current ].listSections():
expl.append( "%s/%s" % ( current, sec ) )
# Add the modCFG
webCFG = webCFG.mergeWith( modCFG )
gConfig.loadCFG( webCFG )
def _loadDefaultWebCFG( self ):
""" This method reloads the web.cfg file from etc/web.cfg """
modCFG = None
cfgPath = os.path.join( DIRAC.rootPath, 'etc', 'web.cfg' )
isLoaded = True
if not os.path.isfile( cfgPath ):
isLoaded = False
else:
try:
modCFG = CFG().loadFromFile( cfgPath )
except Exception, excp:
isLoaded = False
gLogger.error( "Could not load %s: %s" % ( cfgPath, excp ) )
if modCFG:
if modCFG.isSection( "/Website" ):
gLogger.warn( "%s configuration file is not correct. It is used by the old portal!" % ( cfgPath ) )
isLoaded = False
else:
gConfig.loadCFG( modCFG )
else:
isLoaded = False
return isLoaded
def bootstrap( self ):
"""
Configure and create web app
"""
self.log.always( "\n ====== Starting DIRAC web app ====== \n" )
# Load required CFG files
if not self._loadDefaultWebCFG(): # if we have a web.cfg under etc directory we use it, otherwise we use the configuration file defined by the developer
self._loadWebAppCFGFiles()
# Calculating routes
result = self.__handlerMgr.getRoutes()
if not result[ 'OK' ]:
return result
routes = result[ 'Value' ]
# Initialize the session data
SessionData.setHandlers( self.__handlerMgr.getHandlers()[ 'Value' ] )
# Create the app
tLoader = TemplateLoader( self.__handlerMgr.getPaths( "template" ) )
kw = dict( debug = Conf.devMode(), template_loader = tLoader, cookie_secret = Conf.cookieSecret(),
log_function = self._logRequest )
    # Check processes if we're under a load balancer
if Conf.balancer() and Conf.numProcesses() not in ( 0, 1 ):
tornado.process.fork_processes( Conf.numProcesses(), max_restarts = 0 )
kw[ 'debug' ] = False
# Debug mode?
if kw[ 'debug' ]:
self.log.info( "Configuring in developer mode..." )
# Configure tornado app
self.__app = tornado.web.Application( routes, **kw )
self.log.notice( "Configuring HTTP on port %s" % ( Conf.HTTPPort() ) )
# Create the web servers
srv = tornado.httpserver.HTTPServer( self.__app )
port = Conf.HTTPPort()
srv.listen( port )
self.__servers[ ( 'http', port ) ] = srv
Conf.generateRevokedCertsFile() # it is used by nginx....
if Conf.HTTPS():
self.log.notice( "Configuring HTTPS on port %s" % Conf.HTTPSPort() )
sslops = dict( certfile = Conf.HTTPSCert(),
keyfile = Conf.HTTPSKey(),
cert_reqs = ssl.CERT_OPTIONAL,
ca_certs = Conf.generateCAFile() )
sslprotocol = str( Conf.SSLProrocol() )
      availableProtocols = [ i for i in dir( ssl ) if i.find( 'PROTOCOL' ) == 0]
      if sslprotocol and sslprotocol != "":
        if ( sslprotocol in availableProtocols ):
          sslops['ssl_version'] = getattr( ssl, sslprotocol )
        else:
          message = "%s is not a supported SSL protocol. Supported protocols are: %s" % ( sslprotocol, str( availableProtocols ) )
gLogger.warn( message )
self.log.debug( " - %s" % "\n - ".join( [ "%s = %s" % ( k, sslops[k] ) for k in sslops ] ) )
srv = tornado.httpserver.HTTPServer( self.__app, ssl_options = sslops )
port = Conf.HTTPSPort()
srv.listen( port )
self.__servers[ ( 'https', port ) ] = srv
else:
      Conf.generateCAFile() # if we use Nginx we have to generate the CAs as well...
return result
def run( self ):
"""
Start web servers
"""
bu = Conf.rootURL().strip( "/" )
urls = []
for proto, port in self.__servers:
urls.append( "%s://0.0.0.0:%s/%s/" % ( proto, port, bu ) )
self.log.always( "Listening on %s" % " and ".join( urls ) )
tornado.autoreload.add_reload_hook( self.__reloadAppCB )
tornado.ioloop.IOLoop.instance().start()
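# Illustrative startup sketch (added for demonstration; this mirrors how the
# class is typically driven by a launcher script, which is not shown here):
#   app = App()
#   result = app.bootstrap()
#   if result['OK']:
#       app.run()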
| atsareg/WebAppDIRAC | Core/App.py | Python | gpl-3.0 | 6,772 |
# Copyright 2017 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import functools
import numpy as np
from tensorflow.python import pywrap_tensorflow
from tensorflow.python.eager import backprop
from tensorflow.python.eager import context
from tensorflow.python.eager import tape
from tensorflow.python.eager import test
from tensorflow.python.framework import constant_op
from tensorflow.python.framework import dtypes
from tensorflow.python.framework import ops
from tensorflow.python.framework import tensor_shape
from tensorflow.python.framework import test_util
from tensorflow.python.ops import array_ops
from tensorflow.python.ops import custom_gradient
from tensorflow.python.ops import embedding_ops
from tensorflow.python.ops import gradients
from tensorflow.python.ops import math_ops
from tensorflow.python.ops import nn_grad # pylint: disable=unused-import
from tensorflow.python.ops import nn_ops
from tensorflow.python.ops import random_ops
from tensorflow.python.ops import resource_variable_ops
from tensorflow.python.ops import variables
from tensorflow.python.training import training
class BackpropTest(test.TestCase):
@test_util.run_in_graph_and_eager_modes()
def testAggregateGradients(self):
def fn(x):
ind1 = constant_op.constant(np.array([0, 1]))
ind2 = constant_op.constant(np.array([2, 3]))
ind3 = constant_op.constant(np.array([1, 3]))
# A mixture of IndexedSlices and dense tensor to aggregate.
g1 = embedding_ops.embedding_lookup(x, ind1)
g2 = embedding_ops.embedding_lookup(x, ind2)
g3 = embedding_ops.embedding_lookup(x, ind3)
g4 = math_ops.reduce_sum(x * constant_op.constant(2.0))
return g1 * g2 * g3 * g4
var_np = np.random.rand(4, 2).astype(np.float32)
var = constant_op.constant(var_np)
grad = backprop.gradients_function(fn, [0])(var)[0]
grad = self.evaluate(ops.convert_to_tensor(grad))
with context.graph_mode(), self.test_session():
tf_var = array_ops.constant(var_np, dtypes.float32)
tf_ind1 = array_ops.constant([0, 1])
tf_ind2 = array_ops.constant([2, 3])
tf_ind3 = array_ops.constant([1, 3])
tf_g1 = embedding_ops.embedding_lookup(tf_var, tf_ind1)
tf_g2 = embedding_ops.embedding_lookup(tf_var, tf_ind2)
tf_g3 = embedding_ops.embedding_lookup(tf_var, tf_ind3)
tf_g4 = math_ops.reduce_sum(tf_var * 2.0, reduction_indices=(0, 1))
tf_y = tf_g1 * tf_g2 * tf_g3 * tf_g4
tf_grad = gradients.gradients(tf_y, [tf_var])[0]
tf_dense_grad = math_ops.unsorted_segment_sum(
tf_grad.values, tf_grad.indices, tf_grad.dense_shape[0])
self.assertAllClose(grad, tf_dense_grad.eval())
def testImplicitGradWithResourceVariable(self):
x = resource_variable_ops.ResourceVariable(
initial_value=constant_op.constant(1.0), name='x')
def fn():
tape.watch_variable(x)
b = constant_op.constant(2.0)
c = math_ops.add(x.value(), b)
return math_ops.add(c, constant_op.constant(3.0))
grads_and_vars = backprop.implicit_grad(fn)()
self.assertAllEqual(grads_and_vars[0][0], 1.0)
self.assertAllEqual(id(grads_and_vars[0][1]), id(x))
def testDy(self):
def f(x):
return x
grad_fn = backprop.gradients_function(f)
self.assertAllEqual(2., grad_fn(1., dy=2.)[0])
def testErrors(self):
@custom_gradient.custom_gradient
def f(x):
def grad(_):
raise RuntimeError('x')
return x, grad
# TODO(apassos) raise the right error here
with self.assertRaises(RuntimeError):
backprop.gradients_function(f)(constant_op.constant(1.0))
def testGradientsFunctionInCustomGradient(self):
@custom_gradient.custom_gradient
def f(x):
(y,) = backprop.gradients_function(lambda x: x * x)(x)
def grad(dy):
return [2 * dy]
return y, grad
self.assertAllEqual(f(1.0), 2.0)
def testImplicitGradOverEmbeddingLookup(self):
batch_size = 8
embedding_size = 512
vocab_size = 1000
lrn_rate = 0.1
random_init = random_ops.random_uniform([vocab_size, embedding_size])
x = array_ops.ones((batch_size), dtypes.int64)
embedding = resource_variable_ops.ResourceVariable(
initial_value=random_init, dtype=dtypes.float32, name='embedding')
def f():
tape.watch_variable(embedding)
embedded_x = embedding_ops.embedding_lookup(embedding, x)
return constant_op.constant(1.0, dtypes.float32) - embedded_x
grad = backprop.implicit_grad(f)()[0][0]
opt = training.GradientDescentOptimizer(lrn_rate)
with context.graph_mode(), self.test_session():
tf_x = array_ops.ones((batch_size), dtypes.int64)
# TODO(ashankar,apassos): Change to ResourceVariable.
tf_embedding = variables.Variable(
random_init.numpy(), name='tf_embedding')
tf_embedded_x = embedding_ops.embedding_lookup(tf_embedding, tf_x)
tf_y = 1.0 - tf_embedded_x
tf_grad = gradients.gradients(tf_y, [tf_embedding])[0]
tf_opt = training.GradientDescentOptimizer(0.1)
tf_embedding.initializer.run()
self.assertAllClose(tf_grad.indices.eval(), grad.indices)
self.assertAllClose(tf_grad.values.eval(), grad.values)
tf_opt.apply_gradients([(tf_grad, tf_embedding)]).run()
expected = tf_embedding.eval()
opt.apply_gradients([(grad, embedding)])
self.assertAllClose(expected, embedding.read_value())
def testImplicitGradOrdering(self):
v0 = resource_variable_ops.ResourceVariable(1.0)
v1 = resource_variable_ops.ResourceVariable(2.0)
def f():
x = v1 * v1
y = v0 * v0
return x + y
grads = backprop.implicit_grad(f)()
ordered_variables = [x[1] for x in grads]
self.assertTrue(ordered_variables[0] is v0)
self.assertTrue(ordered_variables[1] is v1)
@test_util.assert_no_new_tensors
def testGradientNone(self):
def loss(x, l):
return math_ops.reduce_mean(
nn_ops.softmax_cross_entropy_with_logits(logits=x, labels=l),
constant_op.constant([0]))
logits = constant_op.constant([[0.0, 0.0]])
labels = constant_op.constant([[1.0, 0.0]])
# softmax_cross_entropy_with_logits returns two outputs and in this case the
# gradient wrt the second is None.
g, = backprop.gradients_function(loss, [0])(logits, labels)
self.assertAllEqual(g.numpy(), [[-0.5, 0.5]])
@test_util.run_in_graph_and_eager_modes()
def testGradientWithinTapeBlock(self):
v1 = resource_variable_ops.ResourceVariable(1.)
self.evaluate(v1.initializer)
with backprop.GradientTape() as t:
loss = 2 * v1
with self.assertRaises(RuntimeError):
t.gradient(loss, [v1])
with backprop.GradientTape(persistent=True) as t:
loss = 2 * v1
grad = t.gradient(loss, [v1])
self.assertAllEqual(self.evaluate(grad[0]), 2.0)
@test_util.assert_no_new_tensors
def testSecondGrad(self):
def first(x):
l = constant_op.constant([[0.0]])
x = nn_ops.softmax_cross_entropy_with_logits(labels=l, logits=x)
x = math_ops.reduce_sum(x, constant_op.constant([0]))
return x
def second(x):
grad = backprop.gradients_function(first, [0])(x)[0]
return math_ops.reduce_sum(grad, constant_op.constant([0]))
f = constant_op.constant([[0.1]])
grad = backprop.gradients_function(second, [0])(f)[0]
self.assertAllEqual([[0.0]], grad)
@test_util.assert_no_new_tensors
def testMakeVJP(self):
def f(x):
return x * x
wrapped_fn = backprop.make_vjp(f, persistent=False)
result, vjp = wrapped_fn(constant_op.constant(3.0))
self.assertAllEqual(result, 9.0)
self.assertAllEqual(vjp(2.0)[0], 12.0)
def testPersistentMakeVJP(self):
def f(x):
return x * x
wrapped_fn = backprop.make_vjp(f, persistent=True)
_, vjp = wrapped_fn(constant_op.constant(3.0))
vjp_result1 = vjp(2.0)[0]
vjp_result2 = vjp(2.0)[0]
self.assertAllEqual(vjp_result1, vjp_result2, 12.0)
@test_util.assert_no_new_tensors
def testGradGrad(self):
def sq(x):
return x * x
def grad(x):
value = backprop.gradients_function(sq, [0])(x)[0]
return value
gradgrad = backprop.gradients_function(grad, [0])
self.assertAllEqual(gradgrad(constant_op.constant(3.0))[0], 2.0)
@test_util.assert_no_new_tensors
def testGradGradExp(self):
def grad(x):
value = backprop.gradients_function(math_ops.exp, [0])(x)[0]
return value
gradgrad = backprop.gradients_function(grad, [0])
self.assertAllEqual(gradgrad(constant_op.constant(0.0))[0], 1.0)
@test_util.assert_no_new_tensors
def testStopGradient(self):
grad = backprop.gradients_function(
lambda x: array_ops.stop_gradient(math_ops.argmax(x)))
self.assertAllEqual(grad([0.0])[0], None)
@test_util.assert_no_new_tensors
def testArgmax(self):
def argmax(x):
i = math_ops.argmax(x)
return array_ops.stop_gradient(i)
grad = backprop.gradients_function(argmax)
self.assertAllEqual(grad([0.0])[0], None)
@test_util.assert_no_new_tensors
def testGPU(self):
if not context.context().num_gpus():
self.skipTest('No GPUs found')
def fn(x):
with context.device('/gpu:0'):
b = constant_op.constant(2.0)
c = math_ops.add(x.gpu(), b)
# TODO(apassos): remove cpu below by making TensorVSPace aware
# of devices.
return math_ops.add(c, constant_op.constant(3.0)).cpu()
grad = backprop.gradients_function(fn, [0])(constant_op.constant(1.0))[0]
self.assertAllEqual(grad, 1.0)
@test_util.assert_no_new_tensors
def testGPUImplicitGrad(self):
if not context.context().num_gpus():
self.skipTest('No GPU found')
with context.device('gpu:0'):
v = resource_variable_ops.ResourceVariable(
constant_op.constant(1.0), name='v')
def f():
with context.device('gpu:0'):
tape.watch_variable(v)
return v.read_value()
self.assertEqual(
backprop.implicit_grad(f)()[0][0].cpu().numpy(), 1.0)
@test_util.assert_no_new_tensors
def testCPU(self):
def fn(x):
b = constant_op.constant(2.0)
c = math_ops.add(x, b)
return math_ops.add(c, constant_op.constant(3.0))
grad = backprop.gradients_function(fn, [0])(constant_op.constant(1.0))[0]
self.assertAllEqual(grad, 1.0)
@test_util.assert_no_new_tensors
def testTensorCopyGPU2CPU2GPU(self):
if not context.context().num_gpus():
self.skipTest('No GPUs found')
def f(a, b):
return a.cpu() + b.cpu()
with context.device('/gpu:0'):
a = constant_op.constant(1.0)
b = constant_op.constant(2.0)
grad = backprop.gradients_function(f, [0])(a, b)[0]
self.assertAllEqual(grad, 1.0)
@test_util.assert_no_new_tensors
def testEmptyParams(self):
def fn(a, b):
return a * b
x = constant_op.constant(1.0)
y = constant_op.constant(2.0)
dx, dy = backprop.gradients_function(fn)(x, y)
self.assertAllEqual(dx, y.numpy())
self.assertAllEqual(dy, x.numpy())
@test_util.assert_no_new_tensors
def testUnconnectedNone(self):
v = resource_variable_ops.ResourceVariable(
1.0, name='testUnconnectedNone')
def f():
v.read_value()
return constant_op.constant(1.0)
self.assertEqual(backprop.implicit_grad(f)()[0][0], None)
@test_util.assert_no_new_tensors
@test_util.run_in_graph_and_eager_modes()
def testGradientTape(self):
with backprop.GradientTape() as g:
x = constant_op.constant(3.0)
g.watch(x)
y = x * x
with backprop.GradientTape() as gg:
gg.watch(y)
z = 2 * y
inner_grad = gg.gradient(z, [y])[0]
self.assertEqual(self.evaluate(inner_grad), 2.0)
y += inner_grad
grad = g.gradient(y, [x])[0]
self.assertEqual(self.evaluate(grad), 6.0)
@test_util.assert_no_new_tensors
def testGradientTapeGradientCalledMultipleTimes(self):
with backprop.GradientTape() as g:
x = constant_op.constant(3.0)
g.watch(x)
y = x * x
z = y * y
g.gradient(z, [x])
with self.assertRaisesRegexp(
RuntimeError, 'GradientTape.gradient can only be called once'):
g.gradient(y, [x])
@test_util.assert_no_new_tensors
@test_util.run_in_graph_and_eager_modes()
def testPersistentTape(self):
with backprop.GradientTape(persistent=True) as g:
x = constant_op.constant(3.0)
g.watch(x)
y = x * x
z = y * y
dz_dx = g.gradient(z, [x])[0]
self.assertEqual(self.evaluate(dz_dx), 4 * 3 * 3 * 3)
dy_dx = g.gradient(y, [x])[0]
self.assertEqual(self.evaluate(dy_dx), 2 * 3)
del g
@test_util.assert_no_new_tensors
@test_util.run_in_graph_and_eager_modes()
def testPersistentNestedTape(self):
with backprop.GradientTape(persistent=True) as g:
x = constant_op.constant(3.0)
g.watch(x)
y = x * x
with backprop.GradientTape(persistent=True) as gg:
gg.watch(y)
z = 2 * y
for _ in range(2):
inner_grad = gg.gradient(z, [y])[0]
self.assertEqual(self.evaluate(inner_grad), 2.0)
y += inner_grad
del gg
grad = g.gradient(y, [x])[0]
self.assertEqual(self.evaluate(grad), 6.0)
grad = g.gradient(z, [x])[0]
self.assertEqual(self.evaluate(grad), 12.0)
del g
@test_util.assert_no_new_tensors
@test_util.run_in_graph_and_eager_modes()
def testGradientTapeVariable(self):
v = resource_variable_ops.ResourceVariable(1.0, name='v')
self.evaluate(v.initializer)
with backprop.GradientTape() as g:
y = v * v
grad = g.gradient(y, [v])[0]
self.assertAllEqual(self.evaluate(grad), 2.0)
@test_util.assert_no_new_tensors
def testEmptyParamsForValueAndGradFunction(self):
def fn(a, b):
return a * b
val_and_grads_fn = backprop.val_and_grad_function(fn)
x = 2.0
y = 3.0
val, (dx, dy) = val_and_grads_fn(x, y)
self.assertAllClose(val, x * y)
self.assertAllEqual(dx, y)
self.assertAllEqual(dy, x)
@test_util.assert_no_new_tensors
def testNonEmptyParamsForValueAndGradFunction(self):
def fn(a, b):
return a * b
val_and_grad_fn = backprop.val_and_grad_function(fn, params=[1])
x = 2.0
y = 3.0
val, grads = val_and_grad_fn(x, y)
self.assertAllClose(val, x * y)
self.assertEqual(1, len(grads))
self.assertAllEqual(grads[0], x)
@test_util.assert_no_new_tensors
def testTensorCopyCPU2GPU2CPU(self):
if not context.context().num_gpus():
self.skipTest('No GPUs found')
# forward: a (cpu->gpu) -> add (gpu) -> c (gpu->cpu) -> add (cpu) -> e (cpu)
# back: e (cpu) -> add (cpu) -> c (cpu->gpu) -> add (gpu) -> grad (gpu->cpu)
def f(a, b):
with context.device('/gpu:0'):
c = math_ops.add(a.gpu(0), b.gpu(0))
return math_ops.add(c.cpu(), constant_op.constant(3.0))
with context.device('/cpu:0'):
a = constant_op.constant(1.0)
b = constant_op.constant(2.0)
grad = backprop.gradients_function(f, [0])(a, b)[0]
self.assertAllEqual(grad, 1.0)
def testGetAttrType(self):
typ = backprop.op_attr_type('Add', 'T')
self.assertEqual(typ, pywrap_tensorflow.TF_ATTR_TYPE)
def testGetAttrList(self):
typ = backprop.op_attr_type('MaxPool', 'ksize')
self.assertEqual(typ, [pywrap_tensorflow.TF_ATTR_INT])
def testMakeAttrType(self):
self.assertEqual(dtypes.float32,
backprop.make_attr(pywrap_tensorflow.TF_ATTR_TYPE, 1))
def testMakeAttrTypeList(self):
self.assertEqual([dtypes.float32],
backprop.make_attr([pywrap_tensorflow.TF_ATTR_TYPE], [1]))
def testMulType(self):
def mul(x):
return math_ops._mul_dispatch(x, x) # pylint: disable=protected-access
self.assertAllEqual(
backprop.gradients_function(mul)(3.0)[0].numpy(),
6.0)
def testMakeAttrShape(self):
for s in ([], None, [1, 2, 3], [None, None], [1, None, 3]):
expected = tensor_shape.TensorShape(s).as_proto()
actual = backprop.make_attr(pywrap_tensorflow.TF_ATTR_SHAPE, s)
self.assertEqual(
expected,
actual,
msg=('For shape %r, expected %r != %r actual' % (s, expected,
actual)))
def testMakeAttrShapeList(self):
shape_list = [[], None, [1, 2, 3], [None, None], [1, None, 3]]
self.assertEqual(
[tensor_shape.TensorShape(s).as_proto() for s in shape_list],
backprop.make_attr([pywrap_tensorflow.TF_ATTR_SHAPE], shape_list))
def testArgsGradientFunction(self):
def f(*args):
return args[0] * args[0]
grad = backprop.gradients_function(f)
self.assertAllEqual(grad(1.0)[0], 2.0)
def testPartial(self):
def f(x, y):
return x * y
part = functools.partial(f, constant_op.constant(2.0))
self.assertAllEqual(
backprop.gradients_function(part)(constant_op.constant(1.0))[0],
2.0)
def testReturnSameThing(self):
def f(x):
return x, 2 * x
self.assertAllEqual(backprop.gradients_function(f)(1.0)[0], 3.0)
@test_util.assert_no_new_tensors
def testExceptionSafety(self):
def f(unused_x):
raise ValueError()
try:
backprop.gradients_function(f)(1.0)
except ValueError:
pass
def real_f(x):
return x * x
self.assertAllEqual(backprop.gradients_function(real_f)(1.0)[0], 2.0)
@test_util.assert_no_new_tensors
def testMultiValueConvertToTensor(self):
x = resource_variable_ops.ResourceVariable(
initial_value=array_ops.constant([1.0]), name='x')
def fn():
tape.watch_variable(x)
a = math_ops.add(x.value(), 1.0)
# Make sure convert_to_tensor works correctly with list of TensorNodes.
b = array_ops.stack([a, a], axis=0)
return math_ops.reduce_mean(b)
grad = backprop.implicit_grad(fn)()[0][0]
self.assertAllEqual([1.0], grad)
def testOutput(self):
def multiout(x):
return x + 2, x * x
x = constant_op.constant([0.0, 1.0, 2.0])
grad = backprop.gradients_function(multiout)(x)[0]
self.assertAllEqual([1.0, 3.0, 5.0], grad)
def testMultiValuePreservesIfNotDiffedAgainst(self):
def tfe_conv2d(timage, tkernel, conv2dstrides):
return nn_ops.conv2d(timage, tkernel, conv2dstrides, 'SAME')
i = constant_op.constant([[[[1.0]]]])
k = constant_op.constant([[[[2.0]]]])
s = [1, 1, 1, 1]
grad = backprop.gradients_function(tfe_conv2d, params=(0,))(i, k, s)[0]
self.assertAllEqual([[[[2.0]]]], grad)
def testSameObjectForMultipleArguments(self):
def f(x, y):
return math_ops.multiply(x, y)
g = backprop.gradients_function(f)
def np_g(x, y):
dx, dy = g(x, y)
return [dx.numpy(), dy.numpy()]
x = constant_op.constant(1.)
self.assertAllEqual([1., 1.], np_g(x, x))
x = 1.
self.assertAllEqual([1., 1.], np_g(x, x))
x = constant_op.constant([[1.]])
self.assertAllEqual([[[1.]], [[1.]]], np_g(x, x))
x = [[1.]]
self.assertAllEqual([[[1.]], [[1.]]], np_g(x, x))
v = resource_variable_ops.ResourceVariable(
initial_value=1., name='testSameObjectForMultipleArguments.Variable')
self.assertAllEqual([1., 1.], np_g(v, v))
@test_util.assert_no_new_tensors
def testImplicitGradientsCustomGradientAndCachedVariableValue(self):
@custom_gradient.custom_gradient
def my_square(x):
result = math_ops.square(x)
def grad(dr):
return 2 * dr * x + 1
return result, grad
x = resource_variable_ops.ResourceVariable(
initial_value=3, name='X.' + self.id())
def f():
return my_square(x)
g = backprop.implicit_grad(f)
grads_and_vars = g()
self.assertEqual(1, len(grads_and_vars))
grad, var = grads_and_vars[0]
self.assertAllEqual(7, grad)
self.assertAllEqual(x, var)
@test_util.assert_no_new_tensors
def testCustomGradient(self):
@custom_gradient.custom_gradient
def my_mul(x, y):
result = x*y
def grad(dr):
return [dr*y, dr*x]
return result, grad
lr = 0.25
x = resource_variable_ops.ResourceVariable(2., name='x')
def loss(x):
return my_mul(2., x.read_value())
loss_grads_fn = backprop.implicit_val_and_grad(loss)
losses = []
for _ in range(5):
loss, grads_and_vars = loss_grads_fn(x)
losses.append(loss.numpy())
for (grad, var) in grads_and_vars:
var.assign_sub(lr*grad)
self.assertAllEqual(losses, [4.0, 3., 2., 1., 0.])
@test_util.assert_no_new_tensors
def testCustomGradientIdentity(self):
@custom_gradient.custom_gradient
def my_identity(x):
def grad(dresult):
return [2 * dresult]
return x, grad
self.assertAllEqual(backprop.gradients_function(my_identity)(1.0)[0], 2.0)
def testDifferentiatingFunctionThatReturnsNone(self):
def fn(x, y):
result = x*y # pylint: disable=unused-variable
x = constant_op.constant(1)
y = constant_op.constant(2)
loss_grads_fn = backprop.implicit_val_and_grad(fn)
with self.assertRaisesRegexp(
ValueError, 'Cannot differentiate a function that returns None; '
'did you forget to return a value from fn?'):
loss_grads_fn(x, y)
val_and_grads_fn = backprop.val_and_grad_function(fn)
with self.assertRaisesRegexp(
ValueError, 'Cannot differentiate a function that returns None; '
'did you forget to return a value from fn?'):
val_and_grads_fn(x, y)
if __name__ == '__main__':
test.main()
| Xeralux/tensorflow | tensorflow/python/eager/backprop_test.py | Python | apache-2.0 | 22,305 |
import queue
from threading import Thread
import requests
from .analyzer import Analyzer, init_start_url
import threading
from collections import namedtuple
from .structurers import ThreadSafeSet, FilterableList
__author__ = 'zz'
_sentinel = object()
Task = namedtuple('Task', ['url', 'response_gt', 'max_page'])
def singleton(cls):
_instance = None
def wrapper(*args, **kwargs):
nonlocal _instance
if not _instance:
_instance = cls(*args, **kwargs)
return _instance
return wrapper
@singleton
class Engine:
def __init__(self, tasks=None, max_thread=8):
        # tasks should be a list of dicts, each containing 'url', 'response_gt' and 'max_page'
if tasks:
self._init_tasks(tasks)
self.max_thread = max_thread
self._init()
def _init(self):
self._task_queue = queue.Queue()
self._result_cache_queue = queue.Queue()
self._busying = ThreadSafeSet()
self._results = FilterableList()
self._thread_tasks = []
self._shutdown = False
self._shutdown_lock = threading.Lock()
self._result_lock = threading.Lock()
self._queue_timeout = 1
self._pre_work_running = False
@property
def is_running(self):
return any(t.is_alive() for t in self._thread_tasks)
@property
def is_busy(self):
if self._busying:
return True
else:
return False
@property
def results(self):
"""
        :return: the results already collected via the get_one_result method.
        Once the engine has stopped, this returns the complete set of results.
"""
if not self.is_running:
while self.get_one_result():
pass
return self._results
def _init_tasks(self, tasks):
self.init_tasks = []
simple = tasks[0]
if isinstance(simple, Task):
for task in tasks:
self.init_tasks.append(task._asdict())
else:
for task in tasks:
self.init_tasks.append(task)
for task in self.init_tasks:
task['url'] = init_start_url(task['url'])
def set_init_tasks(self, tasks):
self._reset()
self._init_tasks(tasks)
def _reset(self):
"""
        Reset and re-initialize the engine.
"""
if self.is_running:
self.shutdown()
self._init()
def start(self):
self._pre_work_running = True
t = threading.Thread(target=self._generate_tasks)
t.start()
self._thread_tasks.append(t)
for i in range(self.max_thread):
t = Thread(target=self.worker)
t.daemon = True
t.start()
self._thread_tasks.append(t)
def _generate_tasks(self):
for task in self.init_tasks:
url = task['url']
response_gt = task['response_gt']
max_page = task['max_page']
            # add the initial task immediately so that the main workers do not stop early
self.add_task(url, response_gt, max_page)
r = self._fetch(url)
a = Analyzer(r, max_page)
while True:
if self._shutdown:
return
url = a.next_page(current_page_url=url)
if not url:
break
self.add_task(url, response_gt, max_page)
self._pre_work_running = False
def worker(self):
try:
while True:
                # data is a Task object
data = self._retrieve_task()
if data is _sentinel:
self._task_queue.put(data)
return
else:
url, response_gt, max_page = data
# shutdown immediately
if self._shutdown:
break
self._busying.add(url)
r = self._fetch(url)
a = Analyzer(r, max_page)
self._add_result(a.filter_divs(response_gt=response_gt))
self._busying.remove(url)
except BaseException as e:
# TODO: log error
print(type(e), e)
raise e
def shutdown(self, wait=True):
with self._shutdown_lock:
self._shutdown = True
self._task_queue.put(_sentinel)
if wait:
for t in self._thread_tasks:
t.join()
def _retrieve_task(self):
while True:
try:
t = self._task_queue.get(timeout=self._queue_timeout)
except queue.Empty:
self._detect_finish()
else:
return t
def _detect_finish(self):
if self._task_queue.empty() and not self.is_busy and not self._pre_work_running:
self._task_queue.put(_sentinel)
def _fetch(self, url):
r = requests.get(url)
return r
def _add_result(self, results):
for result in results:
self._result_cache_queue.put(result)
def add_task(self, url, response_gt, max_page):
if not url:
return
t = Task(url, response_gt, max_page)
self._task_queue.put(t)
def get_one_result(self):
"""
        :return: a result or None; a None value does not mean the engine has stopped.
"""
try:
result = self._result_cache_queue.get_nowait()
except queue.Empty:
result = None
if result:
with self._result_lock:
self._results.append(result)
return result
engine = Engine()
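# Illustrative usage sketch: the board URL and the response_gt/max_page
# thresholds below are hypothetical placeholders.  Tasks may be supplied
# either as Task namedtuples or as plain dicts with the same keys.
def _example_run():
    engine.set_init_tasks([
        {'url': 'http://example.com/board/1', 'response_gt': 10, 'max_page': 2},
    ])
    engine.start()
    while engine.is_running:
        result = engine.get_one_result()
        if result is not None:
            print(result)
    # engine.results now holds everything collected, including any remainder.
    print(len(engine.results))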
| littlezz/IslandCollection | core/engine.py | Python | mit | 5,666 |
# (c) 2014, Toshio Kuratomi <[email protected]>
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
# Make coding more python3-ish
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
'''
Compat module for Python2.7's unittest module
'''
import sys
# Python 2.6
if sys.version_info < (2, 7):
try:
# Need unittest2 on python2.6
from unittest2 import *
except ImportError:
print('You need unittest2 installed on python2.6.x to run tests')
else:
from unittest import *
| ravello/ansible | v2/ansible/compat/tests/unittest.py | Python | gpl-3.0 | 1,147 |
def f():
raise AssertionError('Silverlight test for throwing exception.')
| tempbottle/dlr | Src/Hosts/Silverlight/Tests/tests/regressions/fixtures/module_throw.py | Python | apache-2.0 | 79 |
#
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
"""Core PTransform subclasses, such as FlatMap, GroupByKey, and Map."""
from __future__ import absolute_import
import copy
import logging
import random
import re
import types
from builtins import map
from builtins import object
from builtins import range
from future.builtins import filter
from past.builtins import unicode
from apache_beam import coders
from apache_beam import pvalue
from apache_beam import typehints
from apache_beam.coders import typecoders
from apache_beam.internal import pickler
from apache_beam.internal import util
from apache_beam.options.pipeline_options import DebugOptions
from apache_beam.options.pipeline_options import TypeOptions
from apache_beam.portability import common_urns
from apache_beam.portability import python_urns
from apache_beam.portability.api import beam_runner_api_pb2
from apache_beam.transforms import ptransform
from apache_beam.transforms import userstate
from apache_beam.transforms.display import DisplayDataItem
from apache_beam.transforms.display import HasDisplayData
from apache_beam.transforms.ptransform import PTransform
from apache_beam.transforms.ptransform import PTransformWithSideInputs
from apache_beam.transforms.userstate import StateSpec
from apache_beam.transforms.userstate import TimerSpec
from apache_beam.transforms.window import GlobalWindows
from apache_beam.transforms.window import TimestampCombiner
from apache_beam.transforms.window import TimestampedValue
from apache_beam.transforms.window import WindowedValue
from apache_beam.transforms.window import WindowFn
from apache_beam.typehints import KV
from apache_beam.typehints import Any
from apache_beam.typehints import Iterable
from apache_beam.typehints import Union
from apache_beam.typehints import trivial_inference
from apache_beam.typehints.decorators import TypeCheckError
from apache_beam.typehints.decorators import WithTypeHints
from apache_beam.typehints.decorators import get_type_hints
from apache_beam.typehints.decorators import getfullargspec
from apache_beam.typehints.trivial_inference import element_type
from apache_beam.typehints.typehints import is_consistent_with
from apache_beam.utils import urns
__all__ = [
'DoFn',
'CombineFn',
'PartitionFn',
'ParDo',
'FlatMap',
'Map',
'Filter',
'CombineGlobally',
'CombinePerKey',
'CombineValues',
'GroupByKey',
'Partition',
'Windowing',
'WindowInto',
'Flatten',
'Create',
'Impulse',
]
# Type variables
T = typehints.TypeVariable('T')
K = typehints.TypeVariable('K')
V = typehints.TypeVariable('V')
class DoFnContext(object):
"""A context available to all methods of DoFn instance."""
pass
class DoFnProcessContext(DoFnContext):
"""A processing context passed to DoFn process() during execution.
Experimental; no backwards-compatibility guarantees.
Most importantly, a DoFn.process method will access context.element
to get the element it is supposed to process.
Attributes:
label: label of the ParDo whose element is being processed.
element: element being processed
(in process method only; always None in start_bundle and finish_bundle)
timestamp: timestamp of the element
(in process method only; always None in start_bundle and finish_bundle)
windows: windows of the element
(in process method only; always None in start_bundle and finish_bundle)
state: a DoFnState object, which holds the runner's internal state
for this element.
Not used by the pipeline code.
"""
def __init__(self, label, element=None, state=None):
"""Initialize a processing context object with an element and state.
The element represents one value from a PCollection that will be accessed
by a DoFn object during pipeline execution, and state is an arbitrary object
where counters and other pipeline state information can be passed in.
DoFnProcessContext objects are also used as inputs to PartitionFn instances.
Args:
label: label of the PCollection whose element is being processed.
element: element of a PCollection being processed using this context.
state: a DoFnState object with state to be passed in to the DoFn object.
"""
self.label = label
self.state = state
if element is not None:
self.set_element(element)
def set_element(self, windowed_value):
if windowed_value is None:
# Not currently processing an element.
if hasattr(self, 'element'):
del self.element
del self.timestamp
del self.windows
else:
self.element = windowed_value.value
self.timestamp = windowed_value.timestamp
self.windows = windowed_value.windows
class ProcessContinuation(object):
"""An object that may be produced as the last element of a process method
invocation.
Experimental; no backwards-compatibility guarantees.
If produced, indicates that there is more work to be done for the current
input element.
"""
def __init__(self, resume_delay=0):
"""Initializes a ProcessContinuation object.
Args:
resume_delay: indicates the minimum time, in seconds, that should elapse
before re-invoking process() method for resuming the invocation of the
current element.
"""
self.resume_delay = resume_delay
@staticmethod
def resume(resume_delay=0):
"""A convenient method that produces a ``ProcessContinuation``.
Args:
resume_delay: delay after which processing current element should be
resumed.
Returns: a ``ProcessContinuation`` for signalling the runner that current
input element has not been fully processed and should be resumed later.
"""
return ProcessContinuation(resume_delay=resume_delay)
class RestrictionProvider(object):
"""Provides methods for generating and manipulating restrictions.
This class should be implemented to support Splittable ``DoFn``s in Python
SDK. See https://s.apache.org/splittable-do-fn for more details about
Splittable ``DoFn``s.
To denote a ``DoFn`` class to be Splittable ``DoFn``, ``DoFn.process()``
method of that class should have exactly one parameter whose default value is
an instance of ``RestrictionProvider``.
The provided ``RestrictionProvider`` instance must provide suitable overrides
for the following methods.
* create_tracker()
* initial_restriction()
Optionally, ``RestrictionProvider`` may override default implementations of
following methods.
* restriction_coder()
* split()
** Pausing and resuming processing of an element **
As the last element produced by the iterator returned by the
``DoFn.process()`` method, a Splittable ``DoFn`` may return an object of type
``ProcessContinuation``.
  If provided, the ``ProcessContinuation`` object specifies that the runner
  should later re-invoke the ``DoFn.process()`` method to resume processing of
  the current element, and the manner in which that re-invocation should be
  performed. A ``ProcessContinuation`` object must only be specified as the
  last element of the iterator. If a ``ProcessContinuation`` object is not
  provided, the runner will assume that the current input element has been
  fully processed.
** Updating output watermark **
``DoFn.process()`` method of Splittable ``DoFn``s could contain a parameter
with default value ``DoFn.WatermarkReporterParam``. If specified this asks the
runner to provide a function that can be used to give the runner a
(best-effort) lower bound about the timestamps of future output associated
with the current element processed by the ``DoFn``. If the ``DoFn`` has
multiple outputs, the watermark applies to all of them. Provided function must
be invoked with a single parameter of type ``Timestamp`` or as an integer that
gives the watermark in number of seconds.
"""
def create_tracker(self, restriction):
"""Produces a new ``RestrictionTracker`` for the given restriction.
Args:
restriction: an object that defines a restriction as identified by a
Splittable ``DoFn`` that utilizes the current ``RestrictionProvider``.
For example, a tuple that gives a range of positions for a Splittable
``DoFn`` that reads files based on byte positions.
Returns: an object of type ``RestrictionTracker``.
"""
raise NotImplementedError
def initial_restriction(self, element):
"""Produces an initial restriction for the given element."""
raise NotImplementedError
def split(self, element, restriction):
"""Splits the given element and restriction.
Returns an iterator of restrictions. The total set of elements produced by
reading input element for each of the returned restrictions should be the
same as the total set of elements produced by reading the input element for
the input restriction.
TODO(chamikara): give suitable hints for performing splitting, for example
number of parts or size in bytes.
"""
yield restriction
def restriction_coder(self):
"""Returns a ``Coder`` for restrictions.
    The returned ``Coder`` will be used for the restrictions produced by the current
``RestrictionProvider``.
Returns:
an object of type ``Coder``.
"""
return coders.registry.get_coder(object)
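# Illustrative sketch of the protocol above: a toy RestrictionProvider over an
# integer offset range.  A real splittable DoFn would also override
# create_tracker() to return an ``iobase.RestrictionTracker``; that part is
# deliberately omitted here, and the (start, stop) element format is assumed.
class _ExampleRangeRestrictionProvider(RestrictionProvider):
  def initial_restriction(self, element):
    # The whole range is the initial restriction.
    start, stop = element
    return (start, stop)
  def split(self, element, restriction):
    # Halve the range when possible; otherwise keep it whole.  Reading the
    # element under the two halves covers the same offsets as the original.
    start, stop = restriction
    mid = (start + stop) // 2
    if mid > start:
      yield (start, mid)
      yield (mid, stop)
    else:
      yield restriction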
def get_function_arguments(obj, func):
"""Return the function arguments based on the name provided. If they have
a _inspect_function attached to the class then use that otherwise default
to the modified version of python inspect library.
"""
func_name = '_inspect_%s' % func
if hasattr(obj, func_name):
f = getattr(obj, func_name)
return f()
f = getattr(obj, func)
return getfullargspec(f)
class _DoFnParam(object):
"""DoFn parameter."""
def __init__(self, param_id):
self.param_id = param_id
def __eq__(self, other):
if type(self) == type(other):
return self.param_id == other.param_id
return False
def __ne__(self, other):
# TODO(BEAM-5949): Needed for Python 2 compatibility.
return not self == other
def __hash__(self):
return hash(self.param_id)
def __repr__(self):
return self.param_id
class _StateDoFnParam(_DoFnParam):
"""State DoFn parameter."""
def __init__(self, state_spec):
if not isinstance(state_spec, StateSpec):
raise ValueError("DoFn.StateParam expected StateSpec object.")
self.state_spec = state_spec
self.param_id = 'StateParam(%s)' % state_spec.name
class _TimerDoFnParam(_DoFnParam):
"""Timer DoFn parameter."""
def __init__(self, timer_spec):
if not isinstance(timer_spec, TimerSpec):
raise ValueError("DoFn.TimerParam expected TimerSpec object.")
self.timer_spec = timer_spec
self.param_id = 'TimerParam(%s)' % timer_spec.name
class DoFn(WithTypeHints, HasDisplayData, urns.RunnerApiFn):
"""A function object used by a transform with custom processing.
The ParDo transform is such a transform. The ParDo.apply
method will take an object of type DoFn and apply it to all elements of a
PCollection object.
In order to have concrete DoFn objects one has to subclass from DoFn and
define the desired behavior (start_bundle/finish_bundle and process) or wrap a
callable object using the CallableWrapperDoFn class.
"""
# Parameters that can be used in the .process() method.
ElementParam = _DoFnParam('ElementParam')
SideInputParam = _DoFnParam('SideInputParam')
TimestampParam = _DoFnParam('TimestampParam')
WindowParam = _DoFnParam('WindowParam')
WatermarkReporterParam = _DoFnParam('WatermarkReporterParam')
DoFnProcessParams = [ElementParam, SideInputParam, TimestampParam,
WindowParam, WatermarkReporterParam]
# Parameters to access state and timers. Not restricted to use only in the
# .process() method. Usage: DoFn.StateParam(state_spec),
# DoFn.TimerParam(timer_spec).
StateParam = _StateDoFnParam
TimerParam = _TimerDoFnParam
@staticmethod
def from_callable(fn):
return CallableWrapperDoFn(fn)
def default_label(self):
return self.__class__.__name__
def process(self, element, *args, **kwargs):
"""Method to use for processing elements.
This is invoked by ``DoFnRunner`` for each element of a input
``PCollection``.
If specified, following default arguments are used by the ``DoFnRunner`` to
be able to pass the parameters correctly.
``DoFn.ElementParam``: element to be processed.
``DoFn.SideInputParam``: a side input that may be used when processing.
``DoFn.TimestampParam``: timestamp of the input element.
``DoFn.WindowParam``: ``Window`` the input element belongs to.
A ``RestrictionProvider`` instance: an ``iobase.RestrictionTracker`` will be
    provided here to allow treatment as a Splittable ``DoFn``.
``DoFn.WatermarkReporterParam``: a function that can be used to report
output watermark of Splittable ``DoFn`` implementations.
Args:
element: The element to be processed
*args: side inputs
**kwargs: other keyword arguments.
"""
raise NotImplementedError
def start_bundle(self):
"""Called before a bundle of elements is processed on a worker.
Elements to be processed are split into bundles and distributed
to workers. Before a worker calls process() on the first element
of its bundle, it calls this method.
"""
pass
def finish_bundle(self):
"""Called after a bundle of elements is processed on a worker.
"""
pass
def get_function_arguments(self, func):
return get_function_arguments(self, func)
# TODO(sourabhbajaj): Do we want to remove the responsibility of these from
# the DoFn or maybe the runner
def infer_output_type(self, input_type):
# TODO(robertwb): Side inputs types.
# TODO(robertwb): Assert compatibility with input type hint?
return self._strip_output_annotations(
trivial_inference.infer_return_type(self.process, [input_type]))
def _strip_output_annotations(self, type_hint):
annotations = (TimestampedValue, WindowedValue, pvalue.TaggedOutput)
# TODO(robertwb): These should be parameterized types that the
# type inferencer understands.
if (type_hint in annotations
or trivial_inference.element_type(type_hint) in annotations):
return Any
return type_hint
def _process_argspec_fn(self):
"""Returns the Python callable that will eventually be invoked.
This should ideally be the user-level function that is called with
the main and (if any) side inputs, and is used to relate the type
hint parameters with the input parameters (e.g., by argument name).
"""
return self.process
def is_process_bounded(self):
"""Checks if an object is a bound method on an instance."""
if not isinstance(self.process, types.MethodType):
return False # Not a method
if self.process.__self__ is None:
return False # Method is not bound
if issubclass(self.process.__self__.__class__, type) or \
self.process.__self__.__class__ is type:
return False # Method is a classmethod
return True
urns.RunnerApiFn.register_pickle_urn(python_urns.PICKLED_DOFN)
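# Illustrative sketch of a concrete DoFn: it splits an (assumed) text-line
# element into words and shows the DoFn.TimestampParam default-value mechanism
# described above.  This is a hypothetical example, not an upstream class.
class _ExampleSplitWordsDoFn(DoFn):
  def process(self, element, timestamp=DoFn.TimestampParam):
    # The runner substitutes the element's timestamp for TimestampParam.
    for word in element.split():
      yield word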
def _fn_takes_side_inputs(fn):
try:
argspec = getfullargspec(fn)
except TypeError:
# We can't tell; maybe it does.
return True
is_bound = isinstance(fn, types.MethodType) and fn.__self__ is not None
try:
varkw = argspec.varkw
kwonlyargs = argspec.kwonlyargs
except AttributeError: # Python 2
varkw = argspec.keywords
kwonlyargs = []
return (len(argspec.args) + len(kwonlyargs) > 1 + is_bound or
argspec.varargs or varkw)
class CallableWrapperDoFn(DoFn):
"""For internal use only; no backwards-compatibility guarantees.
A DoFn (function) object wrapping a callable object.
The purpose of this class is to conveniently wrap simple functions and use
them in transforms.
"""
def __init__(self, fn):
"""Initializes a CallableWrapperDoFn object wrapping a callable.
Args:
fn: A callable object.
Raises:
TypeError: if fn parameter is not a callable type.
"""
if not callable(fn):
raise TypeError('Expected a callable object instead of: %r' % fn)
self._fn = fn
if isinstance(fn, (
types.BuiltinFunctionType, types.MethodType, types.FunctionType)):
self.process = fn
else:
# For cases such as set / list where fn is callable but not a function
self.process = lambda element: fn(element)
super(CallableWrapperDoFn, self).__init__()
def display_data(self):
# If the callable has a name, then it's likely a function, and
# we show its name.
# Otherwise, it might be an instance of a callable class. We
# show its class.
display_data_value = (self._fn.__name__ if hasattr(self._fn, '__name__')
else self._fn.__class__)
return {'fn': DisplayDataItem(display_data_value,
label='Transform Function')}
def __repr__(self):
return 'CallableWrapperDoFn(%s)' % self._fn
def default_type_hints(self):
type_hints = get_type_hints(self._fn)
# If the fn was a DoFn annotated with a type-hint that hinted a return
# type compatible with Iterable[Any], then we strip off the outer
# container type due to the 'flatten' portion of FlatMap.
# TODO(robertwb): Should we require an iterable specification for FlatMap?
if type_hints.output_types:
args, kwargs = type_hints.output_types
if len(args) == 1 and is_consistent_with(args[0], Iterable[Any]):
type_hints = type_hints.copy()
type_hints.set_output_types(element_type(args[0]), **kwargs)
return type_hints
def infer_output_type(self, input_type):
return self._strip_output_annotations(
trivial_inference.infer_return_type(self._fn, [input_type]))
def _process_argspec_fn(self):
return getattr(self._fn, '_argspec_fn', self._fn)
def _inspect_process(self):
return getfullargspec(self._process_argspec_fn())
class CombineFn(WithTypeHints, HasDisplayData, urns.RunnerApiFn):
"""A function object used by a Combine transform with custom processing.
A CombineFn specifies how multiple values in all or part of a PCollection can
be merged into a single value---essentially providing the same kind of
information as the arguments to the Python "reduce" builtin (except for the
input argument, which is an instance of CombineFnProcessContext). The
combining process proceeds as follows:
1. Input values are partitioned into one or more batches.
2. For each batch, the create_accumulator method is invoked to create a fresh
initial "accumulator" value representing the combination of zero values.
3. For each input value in the batch, the add_input method is invoked to
combine more values with the accumulator for that batch.
4. The merge_accumulators method is invoked to combine accumulators from
separate batches into a single combined output accumulator value, once all
of the accumulators have had all the input value in their batches added to
them. This operation is invoked repeatedly, until there is only one
accumulator value left.
5. The extract_output operation is invoked on the final accumulator to get
the output value.
Note: If this **CombineFn** is used with a transform that has defaults,
**apply** will be called with an empty list at expansion time to get the
default value.
"""
def default_label(self):
return self.__class__.__name__
def create_accumulator(self, *args, **kwargs):
"""Return a fresh, empty accumulator for the combine operation.
Args:
*args: Additional arguments and side inputs.
**kwargs: Additional arguments and side inputs.
"""
raise NotImplementedError(str(self))
def add_input(self, accumulator, element, *args, **kwargs):
"""Return result of folding element into accumulator.
CombineFn implementors must override add_input.
Args:
accumulator: the current accumulator
element: the element to add
*args: Additional arguments and side inputs.
**kwargs: Additional arguments and side inputs.
"""
raise NotImplementedError(str(self))
def add_inputs(self, accumulator, elements, *args, **kwargs):
"""Returns the result of folding each element in elements into accumulator.
This is provided in case the implementation affords more efficient
bulk addition of elements. The default implementation simply loops
over the inputs invoking add_input for each one.
Args:
accumulator: the current accumulator
elements: the elements to add
*args: Additional arguments and side inputs.
**kwargs: Additional arguments and side inputs.
"""
for element in elements:
accumulator = self.add_input(accumulator, element, *args, **kwargs)
return accumulator
def merge_accumulators(self, accumulators, *args, **kwargs):
"""Returns the result of merging several accumulators
to a single accumulator value.
Args:
accumulators: the accumulators to merge
*args: Additional arguments and side inputs.
**kwargs: Additional arguments and side inputs.
"""
raise NotImplementedError(str(self))
def extract_output(self, accumulator, *args, **kwargs):
"""Return result of converting accumulator into the output value.
Args:
accumulator: the final accumulator value computed by this CombineFn
for the entire input key or PCollection.
*args: Additional arguments and side inputs.
**kwargs: Additional arguments and side inputs.
"""
raise NotImplementedError(str(self))
def apply(self, elements, *args, **kwargs):
"""Returns result of applying this CombineFn to the input values.
Args:
elements: the set of values to combine.
*args: Additional arguments and side inputs.
**kwargs: Additional arguments and side inputs.
"""
return self.extract_output(
self.add_inputs(
self.create_accumulator(*args, **kwargs), elements,
*args, **kwargs),
*args, **kwargs)
def for_input_type(self, input_type):
"""Returns a specialized implementation of self, if it exists.
Otherwise, returns self.
Args:
input_type: the type of input elements.
"""
return self
@staticmethod
def from_callable(fn):
return CallableWrapperCombineFn(fn)
@staticmethod
def maybe_from_callable(fn):
if isinstance(fn, CombineFn):
return fn
elif callable(fn):
return CallableWrapperCombineFn(fn)
else:
raise TypeError('Expected a CombineFn or callable, got %r' % fn)
def get_accumulator_coder(self):
return coders.registry.get_coder(object)
urns.RunnerApiFn.register_pickle_urn(python_urns.PICKLED_COMBINE_FN)
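# Illustrative sketch of the accumulate/merge/extract protocol documented in
# CombineFn above: a hypothetical averaging combiner.
class _ExampleMeanCombineFn(CombineFn):
  def create_accumulator(self, *args, **kwargs):
    return (0.0, 0)  # (running sum, element count)
  def add_input(self, accumulator, element, *args, **kwargs):
    total, count = accumulator
    return total + element, count + 1
  def merge_accumulators(self, accumulators, *args, **kwargs):
    total, count = 0.0, 0
    for t, c in accumulators:
      total += t
      count += c
    return total, count
  def extract_output(self, accumulator, *args, **kwargs):
    total, count = accumulator
    return total / count if count else float('NaN')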
class CallableWrapperCombineFn(CombineFn):
"""For internal use only; no backwards-compatibility guarantees.
A CombineFn (function) object wrapping a callable object.
The purpose of this class is to conveniently wrap simple functions and use
them in Combine transforms.
"""
_EMPTY = object()
def __init__(self, fn):
"""Initializes a CallableFn object wrapping a callable.
Args:
fn: A callable object that reduces elements of an iterable to a single
value (like the builtins sum and max). This callable must be capable of
receiving the kind of values it generates as output in its input, and
for best results, its operation must be commutative and associative.
Raises:
TypeError: if fn parameter is not a callable type.
"""
if not callable(fn):
raise TypeError('Expected a callable object instead of: %r' % fn)
super(CallableWrapperCombineFn, self).__init__()
self._fn = fn
def display_data(self):
return {'fn_dd': self._fn}
def __repr__(self):
return "CallableWrapperCombineFn(%s)" % self._fn
def create_accumulator(self, *args, **kwargs):
return self._EMPTY
def add_input(self, accumulator, element, *args, **kwargs):
if accumulator is self._EMPTY:
return element
return self._fn([accumulator, element], *args, **kwargs)
def add_inputs(self, accumulator, elements, *args, **kwargs):
if accumulator is self._EMPTY:
return self._fn(elements, *args, **kwargs)
elif isinstance(elements, (list, tuple)):
return self._fn([accumulator] + list(elements), *args, **kwargs)
def union():
yield accumulator
for e in elements:
yield e
return self._fn(union(), *args, **kwargs)
def merge_accumulators(self, accumulators, *args, **kwargs):
filter_fn = lambda x: x is not self._EMPTY
class ReiterableNonEmptyAccumulators(object):
def __iter__(self):
return filter(filter_fn, accumulators)
# It's (weakly) assumed that self._fn is associative.
return self._fn(ReiterableNonEmptyAccumulators(), *args, **kwargs)
def extract_output(self, accumulator, *args, **kwargs):
return self._fn(()) if accumulator is self._EMPTY else accumulator
def default_type_hints(self):
fn_hints = get_type_hints(self._fn)
if fn_hints.input_types is None:
return fn_hints
else:
# fn(Iterable[V]) -> V becomes CombineFn(V) -> V
input_args, input_kwargs = fn_hints.input_types
if not input_args:
if len(input_kwargs) == 1:
input_args, input_kwargs = tuple(input_kwargs.values()), {}
else:
raise TypeError('Combiner input type must be specified positionally.')
if not is_consistent_with(input_args[0], Iterable[Any]):
raise TypeCheckError(
'All functions for a Combine PTransform must accept a '
'single argument compatible with: Iterable[Any]. '
'Instead a function with input type: %s was received.'
% input_args[0])
input_args = (element_type(input_args[0]),) + input_args[1:]
# TODO(robertwb): Assert output type is consistent with input type?
hints = fn_hints.copy()
hints.set_input_types(*input_args, **input_kwargs)
return hints
def for_input_type(self, input_type):
# Avoid circular imports.
from apache_beam.transforms import cy_combiners
if self._fn is any:
return cy_combiners.AnyCombineFn()
elif self._fn is all:
return cy_combiners.AllCombineFn()
else:
known_types = {
(sum, int): cy_combiners.SumInt64Fn(),
(min, int): cy_combiners.MinInt64Fn(),
(max, int): cy_combiners.MaxInt64Fn(),
(sum, float): cy_combiners.SumFloatFn(),
(min, float): cy_combiners.MinFloatFn(),
(max, float): cy_combiners.MaxFloatFn(),
}
return known_types.get((self._fn, input_type), self)
class PartitionFn(WithTypeHints):
"""A function object used by a Partition transform.
A PartitionFn specifies how individual values in a PCollection will be placed
into separate partitions, indexed by an integer.
"""
def default_label(self):
return self.__class__.__name__
def partition_for(self, element, num_partitions, *args, **kwargs):
"""Specify which partition will receive this element.
Args:
element: An element of the input PCollection.
num_partitions: Number of partitions, i.e., output PCollections.
*args: optional parameters and side inputs.
**kwargs: optional parameters and side inputs.
Returns:
An integer in [0, num_partitions).
"""
pass
class CallableWrapperPartitionFn(PartitionFn):
"""For internal use only; no backwards-compatibility guarantees.
A PartitionFn object wrapping a callable object.
Instances of this class wrap simple functions for use in Partition operations.
"""
def __init__(self, fn):
"""Initializes a PartitionFn object wrapping a callable.
Args:
fn: A callable object, which should accept the following arguments:
element - element to assign to a partition.
num_partitions - number of output partitions.
and may accept additional arguments and side inputs.
Raises:
TypeError: if fn is not a callable type.
"""
if not callable(fn):
raise TypeError('Expected a callable object instead of: %r' % fn)
self._fn = fn
def partition_for(self, element, num_partitions, *args, **kwargs):
return self._fn(element, num_partitions, *args, **kwargs)
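# Illustrative sketch of a PartitionFn: routes (key, value) pairs to a
# partition by hashing the key.  The element shape is an assumption.
class _ExampleByKeyPartitionFn(PartitionFn):
  def partition_for(self, element, num_partitions, *args, **kwargs):
    key, _ = element
    return hash(key) % num_partitions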
class ParDo(PTransformWithSideInputs):
"""A :class:`ParDo` transform.
Processes an input :class:`~apache_beam.pvalue.PCollection` by applying a
:class:`DoFn` to each element and returning the accumulated results into an
output :class:`~apache_beam.pvalue.PCollection`. The type of the elements is
not fixed as long as the :class:`DoFn` can deal with it. In reality the type
is restrained to some extent because the elements sometimes must be persisted
to external storage. See the :meth:`.expand()` method comments for a
detailed description of all possible arguments.
Note that the :class:`DoFn` must return an iterable for each element of the
input :class:`~apache_beam.pvalue.PCollection`. An easy way to do this is to
use the ``yield`` keyword in the process method.
Args:
pcoll (~apache_beam.pvalue.PCollection):
a :class:`~apache_beam.pvalue.PCollection` to be processed.
fn (DoFn): a :class:`DoFn` object to be applied to each element
of **pcoll** argument.
*args: positional arguments passed to the :class:`DoFn` object.
**kwargs: keyword arguments passed to the :class:`DoFn` object.
Note that the positional and keyword arguments will be processed in order
to detect :class:`~apache_beam.pvalue.PCollection` s that will be computed as
side inputs to the transform. During pipeline execution whenever the
:class:`DoFn` object gets executed (its :meth:`DoFn.process()` method gets
called) the :class:`~apache_beam.pvalue.PCollection` arguments will be
replaced by values from the :class:`~apache_beam.pvalue.PCollection` in the
exact positions where they appear in the argument lists.
"""
def __init__(self, fn, *args, **kwargs):
super(ParDo, self).__init__(fn, *args, **kwargs)
# TODO(robertwb): Change all uses of the dofn attribute to use fn instead.
self.dofn = self.fn
self.output_tags = set()
if not isinstance(self.fn, DoFn):
raise TypeError('ParDo must be called with a DoFn instance.')
# Validate the DoFn by creating a DoFnSignature
from apache_beam.runners.common import DoFnSignature
self._signature = DoFnSignature(self.fn)
def default_type_hints(self):
return self.fn.get_type_hints()
def infer_output_type(self, input_type):
return trivial_inference.element_type(
self.fn.infer_output_type(input_type))
def make_fn(self, fn):
if isinstance(fn, DoFn):
return fn
return CallableWrapperDoFn(fn)
def _process_argspec_fn(self):
return self.fn._process_argspec_fn()
def display_data(self):
return {'fn': DisplayDataItem(self.fn.__class__,
label='Transform Function'),
'fn_dd': self.fn}
def expand(self, pcoll):
# In the case of a stateful DoFn, warn if the key coder is not
# deterministic.
if self._signature.is_stateful_dofn():
kv_type_hint = pcoll.element_type
if kv_type_hint and kv_type_hint != typehints.Any:
coder = coders.registry.get_coder(kv_type_hint)
if not coder.is_kv_coder():
raise ValueError(
'Input elements to the transform %s with stateful DoFn must be '
'key-value pairs.' % self)
key_coder = coder.key_coder()
else:
key_coder = coders.registry.get_coder(typehints.Any)
if not key_coder.is_deterministic():
logging.warning(
'Key coder %s for transform %s with stateful DoFn may not '
'be deterministic. This may cause incorrect behavior for complex '
'key types. Consider adding an input type hint for this transform.',
key_coder, self)
return pvalue.PCollection(pcoll.pipeline)
def with_outputs(self, *tags, **main_kw):
"""Returns a tagged tuple allowing access to the outputs of a
:class:`ParDo`.
The resulting object supports access to the
:class:`~apache_beam.pvalue.PCollection` associated with a tag
(e.g. ``o.tag``, ``o[tag]``) and iterating over the available tags
(e.g. ``for tag in o: ...``).
Args:
*tags: if non-empty, list of valid tags. If a list of valid tags is given,
it will be an error to use an undeclared tag later in the pipeline.
**main_kw: dictionary empty or with one key ``'main'`` defining the tag to
be used for the main output (which will not have a tag associated with
it).
Returns:
~apache_beam.pvalue.DoOutputsTuple: An object of type
:class:`~apache_beam.pvalue.DoOutputsTuple` that bundles together all
the outputs of a :class:`ParDo` transform and allows accessing the
individual :class:`~apache_beam.pvalue.PCollection` s for each output
using an ``object.tag`` syntax.
Raises:
~exceptions.TypeError: if the **self** object is not a
:class:`~apache_beam.pvalue.PCollection` that is the result of a
:class:`ParDo` transform.
~exceptions.ValueError: if **main_kw** contains any key other than
``'main'``.
"""
main_tag = main_kw.pop('main', None)
if main_kw:
raise ValueError('Unexpected keyword arguments: %s' %
list(main_kw))
return _MultiParDo(self, tags, main_tag)
def _pardo_fn_data(self):
si_tags_and_types = None
windowing = None
return self.fn, self.args, self.kwargs, si_tags_and_types, windowing
def to_runner_api_parameter(self, context):
assert isinstance(self, ParDo), \
"expected instance of ParDo, but got %s" % self.__class__
picked_pardo_fn_data = pickler.dumps(self._pardo_fn_data())
state_specs, timer_specs = userstate.get_dofn_specs(self.fn)
return (
common_urns.primitives.PAR_DO.urn,
beam_runner_api_pb2.ParDoPayload(
do_fn=beam_runner_api_pb2.SdkFunctionSpec(
environment_id=context.default_environment_id(),
spec=beam_runner_api_pb2.FunctionSpec(
urn=python_urns.PICKLED_DOFN_INFO,
payload=picked_pardo_fn_data)),
state_specs={spec.name: spec.to_runner_api(context)
for spec in state_specs},
timer_specs={spec.name: spec.to_runner_api(context)
for spec in timer_specs},
# It'd be nice to name these according to their actual
            # names/positions in the original argument list, but such a
# transformation is currently irreversible given how
# remove_objects_from_args and insert_values_in_args
# are currently implemented.
side_inputs={
"side%s" % ix: si.to_runner_api(context)
for ix, si in enumerate(self.side_inputs)}))
@PTransform.register_urn(
common_urns.primitives.PAR_DO.urn, beam_runner_api_pb2.ParDoPayload)
def from_runner_api_parameter(pardo_payload, context):
assert pardo_payload.do_fn.spec.urn == python_urns.PICKLED_DOFN_INFO
fn, args, kwargs, si_tags_and_types, windowing = pickler.loads(
pardo_payload.do_fn.spec.payload)
if si_tags_and_types:
raise NotImplementedError('explicit side input data')
elif windowing:
raise NotImplementedError('explicit windowing')
result = ParDo(fn, *args, **kwargs)
# This is an ordered list stored as a dict (see the comments in
# to_runner_api_parameter above).
indexed_side_inputs = [
(int(re.match('side([0-9]+)(-.*)?$', tag).group(1)),
pvalue.AsSideInput.from_runner_api(si, context))
for tag, si in pardo_payload.side_inputs.items()]
result.side_inputs = [si for _, si in sorted(indexed_side_inputs)]
return result
def runner_api_requires_keyed_input(self):
return userstate.is_stateful_dofn(self.fn)
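# Illustrative sketch of the with_outputs() mechanism documented above: a
# hypothetical DoFn that sends odd numbers to a tagged output and evens to the
# main output.  The commented pipeline expression assumes a PCollection named
# `numbers`.
class _ExampleEvenOddDoFn(DoFn):
  def process(self, element):
    if element % 2 == 0:
      yield element
    else:
      yield pvalue.TaggedOutput('odd', element)
# results = numbers | ParDo(_ExampleEvenOddDoFn()).with_outputs('odd',
#                                                               main='even')
# evens, odds = results.even, results.odd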
class _MultiParDo(PTransform):
def __init__(self, do_transform, tags, main_tag):
super(_MultiParDo, self).__init__(do_transform.label)
self._do_transform = do_transform
self._tags = tags
self._main_tag = main_tag
def expand(self, pcoll):
_ = pcoll | self._do_transform
return pvalue.DoOutputsTuple(
pcoll.pipeline, self._do_transform, self._tags, self._main_tag)
def FlatMap(fn, *args, **kwargs): # pylint: disable=invalid-name
""":func:`FlatMap` is like :class:`ParDo` except it takes a callable to
specify the transformation.
The callable must return an iterable for each element of the input
:class:`~apache_beam.pvalue.PCollection`. The elements of these iterables will
be flattened into the output :class:`~apache_beam.pvalue.PCollection`.
Args:
fn (callable): a callable object.
*args: positional arguments passed to the transform callable.
**kwargs: keyword arguments passed to the transform callable.
Returns:
~apache_beam.pvalue.PCollection:
A :class:`~apache_beam.pvalue.PCollection` containing the
:func:`FlatMap` outputs.
Raises:
~exceptions.TypeError: If the **fn** passed as argument is not a callable.
Typical error is to pass a :class:`DoFn` instance which is supported only
for :class:`ParDo`.
"""
label = 'FlatMap(%s)' % ptransform.label_from_callable(fn)
if not callable(fn):
raise TypeError(
'FlatMap can be used only with callable objects. '
'Received %r instead.' % (fn))
pardo = ParDo(CallableWrapperDoFn(fn), *args, **kwargs)
pardo.label = label
return pardo
def Map(fn, *args, **kwargs): # pylint: disable=invalid-name
""":func:`Map` is like :func:`FlatMap` except its callable returns only a
single element.
Args:
fn (callable): a callable object.
*args: positional arguments passed to the transform callable.
**kwargs: keyword arguments passed to the transform callable.
Returns:
~apache_beam.pvalue.PCollection:
A :class:`~apache_beam.pvalue.PCollection` containing the
:func:`Map` outputs.
Raises:
~exceptions.TypeError: If the **fn** passed as argument is not a callable.
Typical error is to pass a :class:`DoFn` instance which is supported only
for :class:`ParDo`.
"""
if not callable(fn):
raise TypeError(
'Map can be used only with callable objects. '
'Received %r instead.' % (fn))
if _fn_takes_side_inputs(fn):
wrapper = lambda x, *args, **kwargs: [fn(x, *args, **kwargs)]
else:
wrapper = lambda x: [fn(x)]
label = 'Map(%s)' % ptransform.label_from_callable(fn)
# TODO. What about callable classes?
if hasattr(fn, '__name__'):
wrapper.__name__ = fn.__name__
# Proxy the type-hint information from the original function to this new
# wrapped function.
get_type_hints(wrapper).input_types = get_type_hints(fn).input_types
output_hint = get_type_hints(fn).simple_output_type(label)
if output_hint:
get_type_hints(wrapper).set_output_types(typehints.Iterable[output_hint])
# pylint: disable=protected-access
wrapper._argspec_fn = fn
# pylint: enable=protected-access
pardo = FlatMap(wrapper, *args, **kwargs)
pardo.label = label
return pardo
def Filter(fn, *args, **kwargs): # pylint: disable=invalid-name
""":func:`Filter` is a :func:`FlatMap` with its callable filtering out
elements.
Args:
fn (callable): a callable object.
*args: positional arguments passed to the transform callable.
**kwargs: keyword arguments passed to the transform callable.
Returns:
~apache_beam.pvalue.PCollection:
A :class:`~apache_beam.pvalue.PCollection` containing the
:func:`Filter` outputs.
Raises:
~exceptions.TypeError: If the **fn** passed as argument is not a callable.
Typical error is to pass a :class:`DoFn` instance which is supported only
for :class:`ParDo`.
"""
if not callable(fn):
raise TypeError(
'Filter can be used only with callable objects. '
'Received %r instead.' % (fn))
wrapper = lambda x, *args, **kwargs: [x] if fn(x, *args, **kwargs) else []
label = 'Filter(%s)' % ptransform.label_from_callable(fn)
# TODO: What about callable classes?
if hasattr(fn, '__name__'):
wrapper.__name__ = fn.__name__
# Proxy the type-hint information from the function being wrapped, setting the
# output type to be the same as the input type.
get_type_hints(wrapper).input_types = get_type_hints(fn).input_types
output_hint = get_type_hints(fn).simple_output_type(label)
if (output_hint is None
and get_type_hints(wrapper).input_types
and get_type_hints(wrapper).input_types[0]):
output_hint = get_type_hints(wrapper).input_types[0]
if output_hint:
get_type_hints(wrapper).set_output_types(typehints.Iterable[output_hint])
# pylint: disable=protected-access
wrapper._argspec_fn = fn
# pylint: enable=protected-access
pardo = FlatMap(wrapper, *args, **kwargs)
pardo.label = label
return pardo
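# Illustrative sketch contrasting the three callable-based transforms defined
# above.  The pipeline application line is commented out because it assumes an
# existing PCollection named `lines`.
def _example_element_wise_transforms():
  split_words = FlatMap(lambda line: line.split())  # one element -> many
  to_upper = Map(lambda word: word.upper())  # one element -> exactly one
  drop_short = Filter(lambda word: len(word) > 3)  # keep or drop elements
  # words = lines | split_words | to_upper | drop_short
  return split_words, to_upper, drop_short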
def _combine_payload(combine_fn, context):
return beam_runner_api_pb2.CombinePayload(
combine_fn=combine_fn.to_runner_api(context),
accumulator_coder_id=context.coders.get_id(
combine_fn.get_accumulator_coder()))
class CombineGlobally(PTransform):
"""A :class:`CombineGlobally` transform.
Reduces a :class:`~apache_beam.pvalue.PCollection` to a single value by
progressively applying a :class:`CombineFn` to portions of the
:class:`~apache_beam.pvalue.PCollection` (and to intermediate values created
thereby). See documentation in :class:`CombineFn` for details on the specifics
on how :class:`CombineFn` s are applied.
Args:
pcoll (~apache_beam.pvalue.PCollection):
a :class:`~apache_beam.pvalue.PCollection` to be reduced into a single
value.
fn (callable): a :class:`CombineFn` object that will be called to
progressively reduce the :class:`~apache_beam.pvalue.PCollection` into
single values, or a callable suitable for wrapping by
:class:`~apache_beam.transforms.core.CallableWrapperCombineFn`.
*args: positional arguments passed to the :class:`CombineFn` object.
**kwargs: keyword arguments passed to the :class:`CombineFn` object.
Raises:
~exceptions.TypeError: If the output type of the input
:class:`~apache_beam.pvalue.PCollection` is not compatible
with ``Iterable[A]``.
Returns:
~apache_beam.pvalue.PCollection: A single-element
:class:`~apache_beam.pvalue.PCollection` containing the main output of
the :class:`CombineGlobally` transform.
Note that the positional and keyword arguments will be processed in order
to detect :class:`~apache_beam.pvalue.PValue` s that will be computed as side
inputs to the transform.
During pipeline execution whenever the :class:`CombineFn` object gets executed
(i.e. any of the :class:`CombineFn` methods get called), the
:class:`~apache_beam.pvalue.PValue` arguments will be replaced by their
actual value in the exact position where they appear in the argument lists.
"""
has_defaults = True
as_view = False
fanout = None
def __init__(self, fn, *args, **kwargs):
if not (isinstance(fn, CombineFn) or callable(fn)):
raise TypeError(
'CombineGlobally can be used only with combineFn objects. '
'Received %r instead.' % (fn))
super(CombineGlobally, self).__init__()
self.fn = fn
self.args = args
self.kwargs = kwargs
def display_data(self):
return {'combine_fn':
DisplayDataItem(self.fn.__class__, label='Combine Function'),
'combine_fn_dd':
self.fn}
def default_label(self):
return 'CombineGlobally(%s)' % ptransform.label_from_callable(self.fn)
def _clone(self, **extra_attributes):
clone = copy.copy(self)
clone.__dict__.update(extra_attributes)
return clone
def with_fanout(self, fanout):
return self._clone(fanout=fanout)
def with_defaults(self, has_defaults=True):
return self._clone(has_defaults=has_defaults)
def without_defaults(self):
return self.with_defaults(False)
def as_singleton_view(self):
return self._clone(as_view=True)
def expand(self, pcoll):
def add_input_types(transform):
type_hints = self.get_type_hints()
if type_hints.input_types:
return transform.with_input_types(type_hints.input_types[0][0])
return transform
combine_per_key = CombinePerKey(self.fn, *self.args, **self.kwargs)
if self.fanout:
combine_per_key = combine_per_key.with_hot_key_fanout(self.fanout)
combined = (pcoll
| 'KeyWithVoid' >> add_input_types(
Map(lambda v: (None, v)).with_output_types(
KV[None, pcoll.element_type]))
| 'CombinePerKey' >> combine_per_key
| 'UnKey' >> Map(lambda k_v: k_v[1]))
if not self.has_defaults and not self.as_view:
return combined
if self.has_defaults:
combine_fn = (
self.fn if isinstance(self.fn, CombineFn)
else CombineFn.from_callable(self.fn))
default_value = combine_fn.apply([], *self.args, **self.kwargs)
else:
default_value = pvalue.AsSingleton._NO_DEFAULT # pylint: disable=protected-access
view = pvalue.AsSingleton(combined, default_value=default_value)
if self.as_view:
return view
else:
if pcoll.windowing.windowfn != GlobalWindows():
raise ValueError(
"Default values are not yet supported in CombineGlobally() if the "
"output PCollection is not windowed by GlobalWindows. "
"Instead, use CombineGlobally().without_defaults() to output "
"an empty PCollection if the input PCollection is empty, "
"or CombineGlobally().as_singleton_view() to get the default "
"output of the CombineFn if the input PCollection is empty.")
def typed(transform):
# TODO(robertwb): We should infer this.
if combined.element_type:
return transform.with_output_types(combined.element_type)
return transform
return (pcoll.pipeline
| 'DoOnce' >> Create([None])
| 'InjectDefault' >> typed(Map(lambda _, s: s, view)))
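# Illustrative sketch of CombineGlobally built from a plain callable, showing
# the defaults behaviour described above.  The pipeline application is
# commented out because it assumes an existing PCollection named `values`.
def _example_combine_globally():
  total = CombineGlobally(sum)  # yields 0 when the input is empty
  total_or_empty = CombineGlobally(sum).without_defaults()  # yields nothing
  # result = values | total
  return total, total_or_empty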
class CombinePerKey(PTransformWithSideInputs):
"""A per-key Combine transform.
Identifies sets of values associated with the same key in the input
PCollection, then applies a CombineFn to condense those sets to single
values. See documentation in CombineFn for details on the specifics on how
CombineFns are applied.
Args:
pcoll: input pcollection.
fn: instance of CombineFn to apply to all values under the same key in
pcoll, or a callable whose signature is ``f(iterable, *args, **kwargs)``
(e.g., sum, max).
*args: arguments and side inputs, passed directly to the CombineFn.
**kwargs: arguments and side inputs, passed directly to the CombineFn.
Returns:
A PObject holding the result of the combine operation.
"""
def with_hot_key_fanout(self, fanout):
"""A per-key combine operation like self but with two levels of aggregation.
If a given key is produced by too many upstream bundles, the final
reduction can become a bottleneck despite partial combining being lifted
pre-GroupByKey. In these cases it can be helpful to perform intermediate
partial aggregations in parallel and then re-group to peform a final
(per-key) combine. This is also useful for high-volume keys in streaming
where combiners are not generally lifted for latency reasons.
Note that a fanout greater than 1 requires the data to be sent through
two GroupByKeys, and a high fanout can also result in more shuffle data
due to less per-bundle combining. Setting the fanout for a key at 1 or less
places values on the "cold key" path that skip the intermediate level of
aggregation.
Args:
fanout: either an int, for a constant-degree fanout, or a callable
mapping keys to a key-specific degree of fanout
Returns:
A per-key combining PTransform with the specified fanout.
"""
from apache_beam.transforms.combiners import curry_combine_fn
return _CombinePerKeyWithHotKeyFanout(
curry_combine_fn(self.fn, self.args, self.kwargs),
fanout)
def display_data(self):
return {'combine_fn':
DisplayDataItem(self.fn.__class__, label='Combine Function'),
'combine_fn_dd':
self.fn}
def make_fn(self, fn):
self._fn_label = ptransform.label_from_callable(fn)
return fn if isinstance(fn, CombineFn) else CombineFn.from_callable(fn)
def default_label(self):
return '%s(%s)' % (self.__class__.__name__, self._fn_label)
def _process_argspec_fn(self):
return lambda element, *args, **kwargs: None
def expand(self, pcoll):
args, kwargs = util.insert_values_in_args(
self.args, self.kwargs, self.side_inputs)
return pcoll | GroupByKey() | 'Combine' >> CombineValues(
self.fn, *args, **kwargs)
def default_type_hints(self):
hints = self.fn.get_type_hints().copy()
if hints.input_types:
K = typehints.TypeVariable('K')
args, kwargs = hints.input_types
args = (typehints.Tuple[K, args[0]],) + args[1:]
hints.set_input_types(*args, **kwargs)
else:
K = typehints.Any
if hints.output_types:
main_output_type = hints.simple_output_type('')
hints.set_output_types(typehints.Tuple[K, main_output_type])
return hints
def to_runner_api_parameter(self, context):
if self.args or self.kwargs:
from apache_beam.transforms.combiners import curry_combine_fn
combine_fn = curry_combine_fn(self.fn, self.args, self.kwargs)
else:
combine_fn = self.fn
return (
common_urns.composites.COMBINE_PER_KEY.urn,
_combine_payload(combine_fn, context))
@PTransform.register_urn(
common_urns.composites.COMBINE_PER_KEY.urn,
beam_runner_api_pb2.CombinePayload)
def from_runner_api_parameter(combine_payload, context):
return CombinePerKey(
CombineFn.from_runner_api(combine_payload.combine_fn, context))
def runner_api_requires_keyed_input(self):
return True
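# Illustrative sketch of per-key combining with the hot-key fanout documented
# in with_hot_key_fanout() above.  The fanout callable and the 'popular' key
# are hypothetical placeholders.
def _example_combine_per_key_with_fanout():
  def fanout(key):
    return 16 if key == 'popular' else 1
  return CombinePerKey(sum).with_hot_key_fanout(fanout)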
# TODO(robertwb): Rename to CombineGroupedValues?
class CombineValues(PTransformWithSideInputs):
def make_fn(self, fn):
return fn if isinstance(fn, CombineFn) else CombineFn.from_callable(fn)
def expand(self, pcoll):
args, kwargs = util.insert_values_in_args(
self.args, self.kwargs, self.side_inputs)
input_type = pcoll.element_type
key_type = None
if input_type is not None:
key_type, _ = input_type.tuple_types
runtime_type_check = (
pcoll.pipeline._options.view_as(TypeOptions).runtime_type_check)
return pcoll | ParDo(
CombineValuesDoFn(key_type, self.fn, runtime_type_check),
*args, **kwargs)
def to_runner_api_parameter(self, context):
if self.args or self.kwargs:
from apache_beam.transforms.combiners import curry_combine_fn
combine_fn = curry_combine_fn(self.fn, self.args, self.kwargs)
else:
combine_fn = self.fn
return (
common_urns.combine_components.COMBINE_GROUPED_VALUES.urn,
_combine_payload(combine_fn, context))
@PTransform.register_urn(
common_urns.combine_components.COMBINE_GROUPED_VALUES.urn,
beam_runner_api_pb2.CombinePayload)
def from_runner_api_parameter(combine_payload, context):
return CombineValues(
CombineFn.from_runner_api(combine_payload.combine_fn, context))
class CombineValuesDoFn(DoFn):
"""DoFn for performing per-key Combine transforms."""
def __init__(self, input_pcoll_type, combinefn, runtime_type_check):
super(CombineValuesDoFn, self).__init__()
self.combinefn = combinefn
self.runtime_type_check = runtime_type_check
def process(self, element, *args, **kwargs):
# Expected elements input to this DoFn are 2-tuples of the form
# (key, iter), with iter an iterable of all the values associated with key
# in the input PCollection.
if self.runtime_type_check:
# Apply the combiner in a single operation rather than artificially
# breaking it up so that output type violations manifest as TypeCheck
# errors rather than type errors.
return [
(element[0],
self.combinefn.apply(element[1], *args, **kwargs))]
# Add the elements into three accumulators (for testing of merge).
elements = list(element[1])
accumulators = []
for k in range(3):
if len(elements) <= k:
break
accumulators.append(
self.combinefn.add_inputs(
self.combinefn.create_accumulator(*args, **kwargs),
elements[k::3],
*args, **kwargs))
# Merge the accumulators.
accumulator = self.combinefn.merge_accumulators(
accumulators, *args, **kwargs)
# Convert accumulator to the final result.
return [(element[0],
self.combinefn.extract_output(accumulator, *args, **kwargs))]
def default_type_hints(self):
hints = self.combinefn.get_type_hints().copy()
if hints.input_types:
K = typehints.TypeVariable('K')
args, kwargs = hints.input_types
args = (typehints.Tuple[K, typehints.Iterable[args[0]]],) + args[1:]
hints.set_input_types(*args, **kwargs)
else:
K = typehints.Any
if hints.output_types:
main_output_type = hints.simple_output_type('')
hints.set_output_types(typehints.Tuple[K, main_output_type])
return hints
class _CombinePerKeyWithHotKeyFanout(PTransform):
def __init__(self, combine_fn, fanout):
self._fanout_fn = (
(lambda key: fanout) if isinstance(fanout, int) else fanout)
self._combine_fn = combine_fn
def expand(self, pcoll):
from apache_beam.transforms.trigger import AccumulationMode
combine_fn = self._combine_fn
fanout_fn = self._fanout_fn
class SplitHotCold(DoFn):
def start_bundle(self):
# Spreading a hot key across all possible sub-keys for all bundles
# would defeat the goal of not overwhelming downstream reducers
# (as well as making less efficient use of PGBK combining tables).
# Instead, each bundle independently makes a consistent choice about
# which "shard" of a key to send its intermediate results.
self._nonce = int(random.getrandbits(31))
def process(self, element):
key, value = element
fanout = fanout_fn(key)
if fanout <= 1:
# Boolean indicates this is not an accumulator.
yield (key, (False, value)) # cold
else:
yield pvalue.TaggedOutput('hot', ((self._nonce % fanout, key), value))
class PreCombineFn(CombineFn):
@staticmethod
def extract_output(accumulator):
# Boolean indicates this is an accumulator.
return (True, accumulator)
create_accumulator = combine_fn.create_accumulator
add_input = combine_fn.add_input
merge_accumulators = combine_fn.merge_accumulators
class PostCombineFn(CombineFn):
@staticmethod
def add_input(accumulator, element):
is_accumulator, value = element
if is_accumulator:
return combine_fn.merge_accumulators([accumulator, value])
else:
return combine_fn.add_input(accumulator, value)
create_accumulator = combine_fn.create_accumulator
merge_accumulators = combine_fn.merge_accumulators
extract_output = combine_fn.extract_output
def StripNonce(nonce_key_value):
(_, key), value = nonce_key_value
return key, value
cold, hot = pcoll | ParDo(SplitHotCold()).with_outputs('hot', main='cold')
cold.element_type = typehints.Any # No multi-output type hints.
precombined_hot = (
hot
# Avoid double counting that may happen with stacked accumulating mode.
| 'WindowIntoDiscarding' >> WindowInto(
pcoll.windowing, accumulation_mode=AccumulationMode.DISCARDING)
| CombinePerKey(PreCombineFn())
| Map(StripNonce)
| 'WindowIntoOriginal' >> WindowInto(pcoll.windowing))
return (
(cold, precombined_hot)
| Flatten()
| CombinePerKey(PostCombineFn()))
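# A minimal usage sketch for the transform above, assuming a pipeline with a
# PCollection ``counts`` of (key, value) pairs; fanout is normally requested
# via CombinePerKey.with_hot_key_fanout rather than by instantiating this
# class directly:
#
#   totals = counts | CombinePerKey(sum).with_hot_key_fanout(16)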
@typehints.with_input_types(typehints.KV[K, V])
@typehints.with_output_types(typehints.KV[K, typehints.Iterable[V]])
class GroupByKey(PTransform):
"""A group by key transform.
Processes an input PCollection consisting of key/value pairs represented as a
tuple pair. The result is a PCollection where values having a common key are
  grouped together. For example (a, 1), (b, 2), (a, 3) will result in
(a, [1, 3]), (b, [2]).
The implementation here is used only when run on the local direct runner.
"""
class ReifyWindows(DoFn):
def process(self, element, window=DoFn.WindowParam,
timestamp=DoFn.TimestampParam):
try:
k, v = element
except TypeError:
raise TypeCheckError('Input to GroupByKey must be a PCollection with '
'elements compatible with KV[A, B]')
return [(k, WindowedValue(v, timestamp, [window]))]
def infer_output_type(self, input_type):
key_type, value_type = trivial_inference.key_value_types(input_type)
return Iterable[KV[key_type, typehints.WindowedValue[value_type]]]
def expand(self, pcoll):
# This code path is only used in the local direct runner. For Dataflow
# runner execution, the GroupByKey transform is expanded on the service.
input_type = pcoll.element_type
if input_type is not None:
# Initialize type-hints used below to enforce type-checking and to pass
# downstream to further PTransforms.
key_type, value_type = trivial_inference.key_value_types(input_type)
# Enforce the input to a GBK has a KV element type.
pcoll.element_type = KV[key_type, value_type]
typecoders.registry.verify_deterministic(
typecoders.registry.get_coder(key_type),
'GroupByKey operation "%s"' % self.label)
reify_output_type = KV[key_type, typehints.WindowedValue[value_type]]
gbk_input_type = (
KV[key_type, Iterable[typehints.WindowedValue[value_type]]])
gbk_output_type = KV[key_type, Iterable[value_type]]
# pylint: disable=bad-continuation
return (pcoll
| 'ReifyWindows' >> (ParDo(self.ReifyWindows())
.with_output_types(reify_output_type))
| 'GroupByKey' >> (_GroupByKeyOnly()
.with_input_types(reify_output_type)
.with_output_types(gbk_input_type))
| ('GroupByWindow' >> _GroupAlsoByWindow(pcoll.windowing)
.with_input_types(gbk_input_type)
.with_output_types(gbk_output_type)))
else:
# The input_type is None, run the default
return (pcoll
| 'ReifyWindows' >> ParDo(self.ReifyWindows())
| 'GroupByKey' >> _GroupByKeyOnly()
| 'GroupByWindow' >> _GroupAlsoByWindow(pcoll.windowing))
def infer_output_type(self, input_type):
key_type, value_type = trivial_inference.key_value_types(input_type)
return KV[key_type, Iterable[value_type]]
def to_runner_api_parameter(self, unused_context):
return common_urns.primitives.GROUP_BY_KEY.urn, None
@PTransform.register_urn(common_urns.primitives.GROUP_BY_KEY.urn, None)
def from_runner_api_parameter(unused_payload, unused_context):
return GroupByKey()
def runner_api_requires_keyed_input(self):
return True
@typehints.with_input_types(typehints.KV[K, V])
@typehints.with_output_types(typehints.KV[K, typehints.Iterable[V]])
class _GroupByKeyOnly(PTransform):
"""A group by key transform, ignoring windows."""
def infer_output_type(self, input_type):
key_type, value_type = trivial_inference.key_value_types(input_type)
return KV[key_type, Iterable[value_type]]
def expand(self, pcoll):
self._check_pcollection(pcoll)
return pvalue.PCollection(pcoll.pipeline)
@typehints.with_input_types(typehints.KV[K, typehints.Iterable[V]])
@typehints.with_output_types(typehints.KV[K, typehints.Iterable[V]])
class _GroupAlsoByWindow(ParDo):
"""The GroupAlsoByWindow transform."""
def __init__(self, windowing):
super(_GroupAlsoByWindow, self).__init__(
_GroupAlsoByWindowDoFn(windowing))
self.windowing = windowing
def expand(self, pcoll):
self._check_pcollection(pcoll)
return pvalue.PCollection(pcoll.pipeline)
class _GroupAlsoByWindowDoFn(DoFn):
# TODO(robertwb): Support combiner lifting.
def __init__(self, windowing):
super(_GroupAlsoByWindowDoFn, self).__init__()
self.windowing = windowing
def infer_output_type(self, input_type):
key_type, windowed_value_iter_type = trivial_inference.key_value_types(
input_type)
value_type = windowed_value_iter_type.inner_type.inner_type
return Iterable[KV[key_type, Iterable[value_type]]]
def start_bundle(self):
# pylint: disable=wrong-import-order, wrong-import-position
from apache_beam.transforms.trigger import create_trigger_driver
# pylint: enable=wrong-import-order, wrong-import-position
self.driver = create_trigger_driver(self.windowing, True)
def process(self, element):
k, vs = element
return self.driver.process_entire_key(k, vs)
class Partition(PTransformWithSideInputs):
"""Split a PCollection into several partitions.
Uses the specified PartitionFn to separate an input PCollection into the
specified number of sub-PCollections.
When apply()d, a Partition() PTransform requires the following:
Args:
partitionfn: a PartitionFn, or a callable with the signature described in
CallableWrapperPartitionFn.
n: number of output partitions.
The result of this PTransform is a simple list of the output PCollections
representing each of n partitions, in order.
"""
class ApplyPartitionFnFn(DoFn):
"""A DoFn that applies a PartitionFn."""
def process(self, element, partitionfn, n, *args, **kwargs):
partition = partitionfn.partition_for(element, n, *args, **kwargs)
if not 0 <= partition < n:
raise ValueError(
'PartitionFn specified out-of-bounds partition index: '
'%d not in [0, %d)' % (partition, n))
# Each input is directed into the output that corresponds to the
# selected partition.
yield pvalue.TaggedOutput(str(partition), element)
def make_fn(self, fn):
return fn if isinstance(fn, PartitionFn) else CallableWrapperPartitionFn(fn)
def expand(self, pcoll):
n = int(self.args[0])
return pcoll | ParDo(
self.ApplyPartitionFnFn(), self.fn, *self.args,
**self.kwargs).with_outputs(*[str(t) for t in range(n)])
class Windowing(object):
def __init__(self, windowfn, triggerfn=None, accumulation_mode=None,
timestamp_combiner=None):
global AccumulationMode, DefaultTrigger # pylint: disable=global-variable-not-assigned
# pylint: disable=wrong-import-order, wrong-import-position
from apache_beam.transforms.trigger import AccumulationMode, DefaultTrigger
# pylint: enable=wrong-import-order, wrong-import-position
if triggerfn is None:
triggerfn = DefaultTrigger()
if accumulation_mode is None:
if triggerfn == DefaultTrigger():
accumulation_mode = AccumulationMode.DISCARDING
else:
raise ValueError(
'accumulation_mode must be provided for non-trivial triggers')
if not windowfn.get_window_coder().is_deterministic():
raise ValueError(
          'window fn (%s) does not have a deterministic coder (%s)' % (
windowfn, windowfn.get_window_coder()))
self.windowfn = windowfn
self.triggerfn = triggerfn
self.accumulation_mode = accumulation_mode
self.timestamp_combiner = (
timestamp_combiner or TimestampCombiner.OUTPUT_AT_EOW)
self._is_default = (
self.windowfn == GlobalWindows() and
self.triggerfn == DefaultTrigger() and
self.accumulation_mode == AccumulationMode.DISCARDING and
self.timestamp_combiner == TimestampCombiner.OUTPUT_AT_EOW)
def __repr__(self):
return "Windowing(%s, %s, %s, %s)" % (self.windowfn, self.triggerfn,
self.accumulation_mode,
self.timestamp_combiner)
def __eq__(self, other):
if type(self) == type(other):
if self._is_default and other._is_default:
return True
return (
self.windowfn == other.windowfn
and self.triggerfn == other.triggerfn
and self.accumulation_mode == other.accumulation_mode
and self.timestamp_combiner == other.timestamp_combiner)
return False
def __ne__(self, other):
# TODO(BEAM-5949): Needed for Python 2 compatibility.
return not self == other
def __hash__(self):
return hash((self.windowfn, self.accumulation_mode,
self.timestamp_combiner))
def is_default(self):
return self._is_default
def to_runner_api(self, context):
return beam_runner_api_pb2.WindowingStrategy(
window_fn=self.windowfn.to_runner_api(context),
# TODO(robertwb): Prohibit implicit multi-level merging.
merge_status=(beam_runner_api_pb2.MergeStatus.NEEDS_MERGE
if self.windowfn.is_merging()
else beam_runner_api_pb2.MergeStatus.NON_MERGING),
window_coder_id=context.coders.get_id(
self.windowfn.get_window_coder()),
trigger=self.triggerfn.to_runner_api(context),
accumulation_mode=self.accumulation_mode,
output_time=self.timestamp_combiner,
# TODO(robertwb): Support EMIT_IF_NONEMPTY
closing_behavior=beam_runner_api_pb2.ClosingBehavior.EMIT_ALWAYS,
OnTimeBehavior=beam_runner_api_pb2.OnTimeBehavior.FIRE_ALWAYS,
allowed_lateness=0)
@staticmethod
def from_runner_api(proto, context):
# pylint: disable=wrong-import-order, wrong-import-position
from apache_beam.transforms.trigger import TriggerFn
return Windowing(
windowfn=WindowFn.from_runner_api(proto.window_fn, context),
triggerfn=TriggerFn.from_runner_api(proto.trigger, context),
accumulation_mode=proto.accumulation_mode,
timestamp_combiner=proto.output_time)
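# A minimal sketch of constructing a Windowing directly, assuming the window
# and trigger classes from apache_beam.transforms.window / .trigger; a
# non-default trigger requires an explicit accumulation mode, otherwise
# __init__ above raises ValueError:
#
#   Windowing(FixedWindows(60))  # default trigger and accumulation mode
#   Windowing(FixedWindows(60), AfterCount(10),
#             accumulation_mode=AccumulationMode.DISCARDING)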
@typehints.with_input_types(T)
@typehints.with_output_types(T)
class WindowInto(ParDo):
"""A window transform assigning windows to each element of a PCollection.
Transforms an input PCollection by applying a windowing function to each
element. Each transformed element in the result will be a WindowedValue
element with the same input value and timestamp, with its new set of windows
determined by the windowing function.
"""
class WindowIntoFn(DoFn):
"""A DoFn that applies a WindowInto operation."""
def __init__(self, windowing):
self.windowing = windowing
def process(self, element, timestamp=DoFn.TimestampParam,
window=DoFn.WindowParam):
context = WindowFn.AssignContext(timestamp, element=element,
window=window)
new_windows = self.windowing.windowfn.assign(context)
yield WindowedValue(element, context.timestamp, new_windows)
def __init__(self, windowfn, **kwargs):
"""Initializes a WindowInto transform.
Args:
windowfn: Function to be used for windowing
trigger: (optional) Trigger used for windowing, or None for default.
accumulation_mode: (optional) Accumulation mode used for windowing,
required for non-trivial triggers.
      timestamp_combiner: (optional) Timestamp combiner used for windowing,
or None for default.
"""
if isinstance(windowfn, Windowing):
# Overlay windowing with kwargs.
windowing = windowfn
windowfn = windowing.windowfn
# Use windowing to fill in defaults for kwargs.
kwargs = dict(dict(
trigger=windowing.triggerfn,
accumulation_mode=windowing.accumulation_mode,
timestamp_combiner=windowing.timestamp_combiner), **kwargs)
# Use kwargs to simulate keyword-only arguments.
triggerfn = kwargs.pop('trigger', None)
accumulation_mode = kwargs.pop('accumulation_mode', None)
timestamp_combiner = kwargs.pop('timestamp_combiner', None)
if kwargs:
raise ValueError('Unexpected keyword arguments: %s' % list(kwargs))
self.windowing = Windowing(
windowfn, triggerfn, accumulation_mode, timestamp_combiner)
super(WindowInto, self).__init__(self.WindowIntoFn(self.windowing))
def get_windowing(self, unused_inputs):
return self.windowing
def infer_output_type(self, input_type):
return input_type
def expand(self, pcoll):
input_type = pcoll.element_type
if input_type is not None:
output_type = input_type
self.with_input_types(input_type)
self.with_output_types(output_type)
return super(WindowInto, self).expand(pcoll)
def to_runner_api_parameter(self, context):
return (
common_urns.primitives.ASSIGN_WINDOWS.urn,
self.windowing.to_runner_api(context))
@staticmethod
def from_runner_api_parameter(proto, context):
windowing = Windowing.from_runner_api(proto, context)
return WindowInto(
windowing.windowfn,
trigger=windowing.triggerfn,
accumulation_mode=windowing.accumulation_mode,
timestamp_combiner=windowing.timestamp_combiner)
PTransform.register_urn(
common_urns.primitives.ASSIGN_WINDOWS.urn,
# TODO(robertwb): Update WindowIntoPayload to include the full strategy.
# (Right now only WindowFn is used, but we need this to reconstitute the
# WindowInto transform, and in the future will need it at runtime to
# support meta-data driven triggers.)
# TODO(robertwb): Use a reference rather than embedding?
beam_runner_api_pb2.WindowingStrategy,
WindowInto.from_runner_api_parameter)
# Python's pickling is broken for nested classes.
WindowIntoFn = WindowInto.WindowIntoFn
class Flatten(PTransform):
"""Merges several PCollections into a single PCollection.
Copies all elements in 0 or more PCollections into a single output
PCollection. If there are no input PCollections, the resulting PCollection
will be empty (but see also kwargs below).
Args:
**kwargs: Accepts a single named argument "pipeline", which specifies the
pipeline that "owns" this PTransform. Ordinarily Flatten can obtain this
information from one of the input PCollections, but if there are none (or
if there's a chance there may be none), this argument is the only way to
provide pipeline information and should be considered mandatory.
"""
def __init__(self, **kwargs):
super(Flatten, self).__init__()
self.pipeline = kwargs.pop('pipeline', None)
if kwargs:
raise ValueError('Unexpected keyword arguments: %s' % list(kwargs))
def _extract_input_pvalues(self, pvalueish):
try:
pvalueish = tuple(pvalueish)
except TypeError:
raise ValueError('Input to Flatten must be an iterable. '
'Got a value of type %s instead.' % type(pvalueish))
return pvalueish, pvalueish
def expand(self, pcolls):
for pcoll in pcolls:
self._check_pcollection(pcoll)
result = pvalue.PCollection(self.pipeline)
result.element_type = typehints.Union[
tuple(pcoll.element_type for pcoll in pcolls)]
return result
def get_windowing(self, inputs):
if not inputs:
# TODO(robertwb): Return something compatible with every windowing?
return Windowing(GlobalWindows())
return super(Flatten, self).get_windowing(inputs)
def to_runner_api_parameter(self, context):
return common_urns.primitives.FLATTEN.urn, None
@staticmethod
def from_runner_api_parameter(unused_parameter, unused_context):
return Flatten()
PTransform.register_urn(
common_urns.primitives.FLATTEN.urn, None, Flatten.from_runner_api_parameter)
class Create(PTransform):
"""A transform that creates a PCollection from an iterable."""
def __init__(self, values):
"""Initializes a Create transform.
Args:
      values: An iterable of values (or a dict of key/value pairs) for the
        PCollection
"""
super(Create, self).__init__()
if isinstance(values, (unicode, str, bytes)):
raise TypeError('PTransform Create: Refusing to treat string as '
'an iterable. (string=%r)' % values)
elif isinstance(values, dict):
values = values.items()
self.values = tuple(values)
def to_runner_api_parameter(self, context):
# Required as this is identified by type in PTransformOverrides.
# TODO(BEAM-3812): Use an actual URN here.
return self.to_runner_api_pickled(context)
def infer_output_type(self, unused_input_type):
if not self.values:
return Any
return Union[[trivial_inference.instance_to_type(v) for v in self.values]]
def get_output_type(self):
return (self.get_type_hints().simple_output_type(self.label) or
self.infer_output_type(None))
def expand(self, pbegin):
assert isinstance(pbegin, pvalue.PBegin)
# Must guard against this as some legacy runners don't implement impulse.
debug_options = pbegin.pipeline._options.view_as(DebugOptions)
fn_api = (debug_options.experiments
and 'beam_fn_api' in debug_options.experiments)
if fn_api:
coder = typecoders.registry.get_coder(self.get_output_type())
serialized_values = [coder.encode(v) for v in self.values]
# Avoid the "redistributing" reshuffle for 0 and 1 element Creates.
# These special cases are often used in building up more complex
# transforms (e.g. Write).
class MaybeReshuffle(PTransform):
def expand(self, pcoll):
if len(serialized_values) > 1:
from apache_beam.transforms.util import Reshuffle
return pcoll | Reshuffle()
else:
return pcoll
return (
pbegin
| Impulse()
| FlatMap(lambda _: serialized_values)
| MaybeReshuffle()
| Map(coder.decode).with_output_types(self.get_output_type()))
else:
self.pipeline = pbegin.pipeline
from apache_beam.io import iobase
coder = typecoders.registry.get_coder(self.get_output_type())
source = self._create_source_from_iterable(self.values, coder)
return (pbegin.pipeline
| iobase.Read(source).with_output_types(self.get_output_type()))
def get_windowing(self, unused_inputs):
return Windowing(GlobalWindows())
@staticmethod
def _create_source_from_iterable(values, coder):
return Create._create_source(list(map(coder.encode, values)), coder)
@staticmethod
def _create_source(serialized_values, coder):
from apache_beam.transforms.create_source import _CreateSource
return _CreateSource(serialized_values, coder)
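# A minimal usage sketch, assuming a pipeline ``p``:
#
#   numbers = p | Create([1, 2, 3])
#   pairs = p | 'FromDict' >> Create({'a': 1, 'b': 2})  # dict becomes (k, v) pairs
#
# Plain strings are rejected by __init__ above; wrap a single string in a
# list, e.g. Create(['one string']).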
class Impulse(PTransform):
"""Impulse primitive."""
def expand(self, pbegin):
if not isinstance(pbegin, pvalue.PBegin):
raise TypeError(
'Input to Impulse transform must be a PBegin but found %s' % pbegin)
return pvalue.PCollection(pbegin.pipeline)
def get_windowing(self, inputs):
return Windowing(GlobalWindows())
def infer_output_type(self, unused_input_type):
return bytes
def to_runner_api_parameter(self, unused_context):
return common_urns.primitives.IMPULSE.urn, None
@PTransform.register_urn(common_urns.primitives.IMPULSE.urn, None)
def from_runner_api_parameter(unused_parameter, unused_context):
return Impulse()
| mxm/incubator-beam | sdks/python/apache_beam/transforms/core.py | Python | apache-2.0 | 76,746 |
#!/usr/bin/python
import sqlite3
import accounts
import json
import time
#-----------------------------------------------------------------------------
class DB:
def __init__(self, config):
self.conn = sqlite3.connect(
config['DATABASE_FILE_ABS'],
isolation_level = "IMMEDIATE",
)
self.create_tables()
#-------------------------------------------------------
# creating missing tables {{{
def has_table(self, name):
with self.conn as cursor:
result = cursor.execute(
'''
SELECT count(*)
FROM sqlite_master
WHERE type = 'table' AND name = ?
''',
(name,)
).fetchone()
return result[0] > 0
def create_tables(self):
with self.conn as cursor:
if not self.has_table("pending"):
cursor.execute("""
CREATE TABLE pending (
token TEXT PRIMARY KEY,
nick TEXT,
email TEXT,
firstname TEXT,
lastname TEXT,
crypt_password TEXT
)
""")
if not self.has_table("password_reset"):
cursor.execute("""
CREATE TABLE password_reset (
nick TEXT PRIMARY KEY,
token TEXT
)
""")
if not self.has_table("email_change"):
cursor.execute("""
CREATE TABLE email_change (
nick TEXT PRIMARY KEY,
email TEXT,
token TEXT
)
""")
if not self.has_table("session"):
cursor.execute("""
CREATE TABLE session (
session_id TEXT PRIMARY KEY,
variables TEXT,
last_access INTEGER
)
""")
# }}}
#-------------------------------------------------------
# register forms {{{
def save_form(self, token, nick, email, firstname, lastname, password):
with self.conn as cursor:
cursor.execute(
"""
INSERT INTO pending
(token, nick, email, firstname, lastname, crypt_password)
VALUES
(?, ?, ?, ?, ?, ?)
""",
(token, nick, email, firstname, lastname, password)
)
def load_form(self, token):
with self.conn as cursor:
result = cursor.execute(
"""
SELECT nick, email, firstname, lastname, crypt_password
FROM pending
WHERE token = ?
""",
(token,)
).fetchone()
# None | (nick, email, firstname, lastname, crypt_password)
return result
def delete_all_forms_for(self, nick):
# XXX: deleting all attempts for `nick' registration
with self.conn as cursor:
cursor.execute("DELETE FROM pending WHERE nick = ?", (nick,))
# }}}
#-------------------------------------------------------
# password reset confirmations {{{
def save_reset_password_token(self, token, nick):
with self.conn as cursor:
cursor.execute(
"""
DELETE FROM password_reset WHERE nick = ?
""",
(nick,)
)
cursor.execute(
"""
INSERT INTO password_reset (nick, token) VALUES (?, ?)
""",
(nick, token)
)
def load_reset_password_token(self, token):
with self.conn as cursor:
result = cursor.execute(
"""
SELECT nick
FROM password_reset
WHERE token = ?
""",
(token,)
).fetchone()
if result is None:
return None
return result[0] # return just nick
def delete_reset_password_token(self, token):
with self.conn as cursor:
cursor.execute(
"""
DELETE FROM password_reset WHERE token = ?
""",
(token,)
)
# }}}
#-------------------------------------------------------
# e-mail change confirmations {{{
def save_email_change_token(self, token, nick, email):
with self.conn as cursor:
cursor.execute(
"""
DELETE FROM email_change WHERE nick = ?
""",
(nick,)
)
cursor.execute(
"""
INSERT INTO email_change (nick, email, token) VALUES (?, ?, ?)
""",
(nick, email, token)
)
def load_email_change_token(self, token):
with self.conn as cursor:
result = cursor.execute(
"""
SELECT nick, email
FROM email_change
WHERE token = ?
""",
(token,)
).fetchone()
if result is None:
return None
return result # (nick, email)
def delete_email_change_token(self, token):
with self.conn as cursor:
cursor.execute(
"""
DELETE FROM email_change WHERE token = ?
""",
(token,)
)
# }}}
#-------------------------------------------------------
# HTTP sessions {{{
def save_session(self, session_id, variables):
access_time = time.time()
session_data = json.dumps(variables, sort_keys = True)
with self.conn as cursor:
result = cursor.execute(
"""
INSERT OR IGNORE INTO session VALUES (?, ?, ?)
""",
(session_id, session_data, access_time)
)
if result.rowcount == 0:
cursor.execute(
"""
UPDATE session
SET variables = ?, last_access = ?
WHERE session_id = ?
""",
(session_data, access_time, session_id)
)
def delete_session(self, session_id):
with self.conn as cursor:
cursor.execute("DELETE FROM session WHERE session_id = ?",
(session_id,))
def load_session(self, session_id):
with self.conn as cursor:
result = cursor.execute(
"""
SELECT variables FROM session WHERE session_id = ?
""",
(session_id,)
).fetchone()
if result is None:
return {}
else:
return json.loads(result[0])
# }}}
#-------------------------------------------------------
#-----------------------------------------------------------------------------
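# A minimal usage sketch, assuming a config dict carrying DATABASE_FILE_ABS
# and a hypothetical path:
#
#   db = DB({'DATABASE_FILE_ABS': '/var/lib/rejestruj/rejestruj.db'})
#   db.save_session('abc123', {'nick': 'alice'})
#   db.load_session('abc123')    # -> {'nick': 'alice'}
#   db.delete_session('abc123')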
# vim:ft=python:foldmethod=marker
| HackerspaceWroclaw/rejestruj | rejestruj/fdb.py | Python | gpl-3.0 | 7,382 |
from wedo2.bluetooth.bluetooth_io import BluetoothIO
from wedo2.input_output.data_format import DataFormat
from wedo2.input_output.input_format import InputFormat, InputFormatUnit
from wedo2.bluetooth.connect_info import ConnectInfo
from wedo2.utils import byte_utils
class LegoService(object):
def __init__(self, connect_info, io):
assert connect_info is not None, "Cannot instantiate service with null ConnectInfo"
assert io is not None, "Cannot instantiate service with null IO"
self.connect_info = connect_info
self.io = io
self.valid_data_formats = set()
self.input_format = self.get_default_input_format()
self.value_data = None
def create_service(connect_info, io):
return LegoService(connect_info, io)
def verify_data(self, *args):
if len(args) == 1:
data = args[0]
if data is not None and len(self.valid_data_formats) != 0:
d_format = self.data_format_for_input_format(self.input_format)
if d_format is None:
raise Exception("Did not find a valid input data format")
self.verify_data(data, d_format)
elif len(args) == 2:
data = args[0]
d_format = args[1]
if len(data) != (d_format.dataset_size * d_format.dataset_count):
raise Exception("Package sizes don't add up. Something is wrong")
def data_format_for_input_format(self, i_format):
for d_format in self.valid_data_formats:
if d_format.mode == i_format.mode and d_format.unit == i_format.unit:
return d_format
return None
def get_service_name(self):
return "Undefined"
def get_default_input_format(self):
return None
def update_input_format(self, new_format):
self.io.write_input_format(new_format, self.connect_info.connect_id)
self.input_format = new_format
def get_input_format_mode(self):
if self.input_format is not None:
return self.input_format.mode
elif self.get_default_input_format() is not None:
return self.get_default_input_format().mode
return 0
def update_current_input_format_with_new_mode(self, new_mode):
if self.input_format is not None:
self.update_input_format(self.input_format.input_format_by_setting_mode(new_mode))
elif self.get_default_input_format() is not None:
self.update_input_format(self.get_default_input_format().input_format_by_setting_mode(new_mode))
else:
print("Couldn't update input format")
def add_valid_data_format(self, d_format):
assert d_format is not None, "DataFormat cannot be None"
self.valid_data_formats.add(d_format)
def remove_valid_data_format(self, d_format):
assert d_format is not None, "DataFormat cannot be None"
        if len(self.valid_data_formats) == 0:
return
self.valid_data_formats.remove(d_format)
# 0 or 1 argument
def get_number_from_value_data(self, *args):
if len(args) == 0:
return self.get_number_from_value_data(self.value_data)
else: # len(args) == 1
data = args[0]
values_as_numbers = self.get_numbers_from_value_data_set(data)
if values_as_numbers is None:
return None
if len(values_as_numbers) != 1:
return None
return values_as_numbers[0]
# 0 or 1 argument
def get_numbers_from_value_data_set(self, *args):
if len(args) == 0:
return self.get_numbers_from_value_data_set(self.value_data)
else: # len(args) == 1
data_set = args[0]
if data_set is None:
return None
d_format = self.data_format_for_input_format(self.input_format)
if d_format is None:
print("d_format was None")
return None
try:
self.verify_data(data_set, d_format)
result_array = []
current_index = 0
for i in range(0, d_format.dataset_count):
current_index = i * d_format.dataset_size
data_set_bytes = bytearray(data_set[current_index: current_index + d_format.dataset_size])
if d_format.unit == InputFormatUnit.INPUT_FORMAT_UNIT_RAW or \
d_format.unit == InputFormatUnit.INPUT_FORMAT_UNIT_PERCENTAGE:
result_array.append(self.get_integer_from_data(data_set_bytes))
else:
result_array.append(self.get_float_from_data(data_set_bytes))
return result_array
except:
return None
def get_float_from_data(self, data):
if len(data) > 4:
return 0
return byte_utils.get_float(data)
def get_integer_from_data(self, data):
if len(data) == 1:
return data[0]
elif len(data) == 2:
return byte_utils.get_short(data)
elif len(data) == 4:
return byte_utils.get_int(data)
else:
return 0
def __eq__(self, obj):
if obj is None:
return False
elif self.connect_info != obj.connect_info:
return False
else:
return True
def __ne__(self, obj):
return not self.__eq__(obj)
def __hash__(self):
return hash(str(self))
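# A minimal sketch of how a concrete service builds on this class (the
# subclass and its behaviour here are hypothetical):
#
#   class HypotheticalSensor(LegoService):
#       def get_service_name(self):
#           return "Hypothetical Sensor"
#       def read(self):
#           # value_data is populated by the Bluetooth IO layer
#           return self.get_number_from_value_data()
#
# get_integer_from_data decodes 1-, 2- and 4-byte datasets as a byte, short
# and int respectively; any other length falls back to 0.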
| jannopet/LEGO-WeDo-2.0-Python-SDK | wedo2/services/lego_service.py | Python | mit | 5,639 |
""" Adapter for pysqlite backend """
from current.db.pysqlite.pysqlite import PySqliteDB
## END OF LINE ##
| jjneely/current | current/db/pysqlite/__init__.py | Python | gpl-2.0 | 109 |
"""The tests for the analytics ."""
from unittest.mock import AsyncMock, Mock, PropertyMock, patch
import aiohttp
import pytest
from homeassistant.components.analytics.analytics import Analytics
from homeassistant.components.analytics.const import (
ANALYTICS_ENDPOINT_URL,
ANALYTICS_ENDPOINT_URL_DEV,
ATTR_BASE,
ATTR_DIAGNOSTICS,
ATTR_PREFERENCES,
ATTR_STATISTICS,
ATTR_USAGE,
)
from homeassistant.components.api import ATTR_UUID
from homeassistant.const import ATTR_DOMAIN
from homeassistant.loader import IntegrationNotFound
from homeassistant.setup import async_setup_component
MOCK_UUID = "abcdefg"
MOCK_VERSION = "1970.1.0"
MOCK_VERSION_DEV = "1970.1.0.dev0"
MOCK_VERSION_NIGHTLY = "1970.1.0.dev19700101"
async def test_no_send(hass, caplog, aioclient_mock):
"""Test send when no preferences are defined."""
analytics = Analytics(hass)
with patch(
"homeassistant.components.hassio.is_hassio",
side_effect=Mock(return_value=False),
):
assert not analytics.preferences[ATTR_BASE]
await analytics.send_analytics()
assert "Nothing to submit" in caplog.text
assert len(aioclient_mock.mock_calls) == 0
async def test_load_with_supervisor_diagnostics(hass):
"""Test loading with a supervisor that has diagnostics enabled."""
analytics = Analytics(hass)
assert not analytics.preferences[ATTR_DIAGNOSTICS]
with patch(
"homeassistant.components.hassio.get_supervisor_info",
side_effect=Mock(return_value={"diagnostics": True}),
), patch(
"homeassistant.components.hassio.is_hassio",
side_effect=Mock(return_value=True),
):
await analytics.load()
assert analytics.preferences[ATTR_DIAGNOSTICS]
async def test_load_with_supervisor_without_diagnostics(hass):
"""Test loading with a supervisor that has not diagnostics enabled."""
analytics = Analytics(hass)
analytics._data[ATTR_PREFERENCES][ATTR_DIAGNOSTICS] = True
assert analytics.preferences[ATTR_DIAGNOSTICS]
with patch(
"homeassistant.components.hassio.get_supervisor_info",
side_effect=Mock(return_value={"diagnostics": False}),
), patch(
"homeassistant.components.hassio.is_hassio",
side_effect=Mock(return_value=True),
):
await analytics.load()
assert not analytics.preferences[ATTR_DIAGNOSTICS]
async def test_failed_to_send(hass, caplog, aioclient_mock):
"""Test failed to send payload."""
aioclient_mock.post(ANALYTICS_ENDPOINT_URL, status=400)
analytics = Analytics(hass)
await analytics.save_preferences({ATTR_BASE: True})
assert analytics.preferences[ATTR_BASE]
with patch("homeassistant.components.analytics.analytics.HA_VERSION", MOCK_VERSION):
await analytics.send_analytics()
assert (
f"Sending analytics failed with statuscode 400 from {ANALYTICS_ENDPOINT_URL}"
in caplog.text
)
async def test_failed_to_send_raises(hass, caplog, aioclient_mock):
"""Test raises when failed to send payload."""
aioclient_mock.post(ANALYTICS_ENDPOINT_URL, exc=aiohttp.ClientError())
analytics = Analytics(hass)
await analytics.save_preferences({ATTR_BASE: True})
assert analytics.preferences[ATTR_BASE]
with patch("homeassistant.components.analytics.analytics.HA_VERSION", MOCK_VERSION):
await analytics.send_analytics()
assert "Error sending analytics" in caplog.text
async def test_send_base(hass, caplog, aioclient_mock):
"""Test send base preferences are defined."""
aioclient_mock.post(ANALYTICS_ENDPOINT_URL, status=200)
analytics = Analytics(hass)
await analytics.save_preferences({ATTR_BASE: True})
assert analytics.preferences[ATTR_BASE]
with patch("uuid.UUID.hex", new_callable=PropertyMock) as hex, patch(
"homeassistant.components.analytics.analytics.HA_VERSION", MOCK_VERSION
):
hex.return_value = MOCK_UUID
await analytics.send_analytics()
assert f"'uuid': '{MOCK_UUID}'" in caplog.text
assert f"'version': '{MOCK_VERSION}'" in caplog.text
assert "'installation_type':" in caplog.text
assert "'integration_count':" not in caplog.text
assert "'integrations':" not in caplog.text
async def test_send_base_with_supervisor(hass, caplog, aioclient_mock):
"""Test send base preferences are defined."""
aioclient_mock.post(ANALYTICS_ENDPOINT_URL, status=200)
analytics = Analytics(hass)
await analytics.save_preferences({ATTR_BASE: True})
assert analytics.preferences[ATTR_BASE]
with patch(
"homeassistant.components.hassio.get_supervisor_info",
side_effect=Mock(
return_value={"supported": True, "healthy": True, "arch": "amd64"}
),
), patch(
"homeassistant.components.hassio.get_os_info",
side_effect=Mock(return_value={"board": "blue", "version": "123"}),
), patch(
"homeassistant.components.hassio.get_info",
side_effect=Mock(return_value={}),
), patch(
"homeassistant.components.hassio.get_host_info",
side_effect=Mock(return_value={}),
), patch(
"homeassistant.components.hassio.is_hassio",
side_effect=Mock(return_value=True),
), patch(
"uuid.UUID.hex", new_callable=PropertyMock
) as hex, patch(
"homeassistant.components.analytics.analytics.HA_VERSION", MOCK_VERSION
):
hex.return_value = MOCK_UUID
await analytics.load()
await analytics.send_analytics()
assert f"'uuid': '{MOCK_UUID}'" in caplog.text
assert f"'version': '{MOCK_VERSION}'" in caplog.text
assert (
"'supervisor': {'healthy': True, 'supported': True, 'arch': 'amd64'}"
in caplog.text
)
assert "'operating_system': {'board': 'blue', 'version': '123'}" in caplog.text
assert "'installation_type':" in caplog.text
assert "'integration_count':" not in caplog.text
assert "'integrations':" not in caplog.text
async def test_send_usage(hass, caplog, aioclient_mock):
"""Test send usage preferences are defined."""
aioclient_mock.post(ANALYTICS_ENDPOINT_URL, status=200)
analytics = Analytics(hass)
await analytics.save_preferences({ATTR_BASE: True, ATTR_USAGE: True})
assert analytics.preferences[ATTR_BASE]
assert analytics.preferences[ATTR_USAGE]
hass.config.components = ["default_config"]
with patch("homeassistant.components.analytics.analytics.HA_VERSION", MOCK_VERSION):
await analytics.send_analytics()
assert "'integrations': ['default_config']" in caplog.text
assert "'integration_count':" not in caplog.text
async def test_send_usage_with_supervisor(hass, caplog, aioclient_mock):
"""Test send usage with supervisor preferences are defined."""
aioclient_mock.post(ANALYTICS_ENDPOINT_URL, status=200)
analytics = Analytics(hass)
await analytics.save_preferences({ATTR_BASE: True, ATTR_USAGE: True})
assert analytics.preferences[ATTR_BASE]
assert analytics.preferences[ATTR_USAGE]
hass.config.components = ["default_config"]
with patch(
"homeassistant.components.hassio.get_supervisor_info",
side_effect=Mock(
return_value={
"healthy": True,
"supported": True,
"arch": "amd64",
"addons": [{"slug": "test_addon"}],
}
),
), patch(
"homeassistant.components.hassio.get_os_info",
side_effect=Mock(return_value={}),
), patch(
"homeassistant.components.hassio.get_info",
side_effect=Mock(return_value={}),
), patch(
"homeassistant.components.hassio.get_host_info",
side_effect=Mock(return_value={}),
), patch(
"homeassistant.components.hassio.async_get_addon_info",
side_effect=AsyncMock(
return_value={
"slug": "test_addon",
"protected": True,
"version": "1",
"auto_update": False,
}
),
), patch(
"homeassistant.components.hassio.is_hassio",
side_effect=Mock(return_value=True),
), patch(
"homeassistant.components.analytics.analytics.HA_VERSION", MOCK_VERSION
):
await analytics.send_analytics()
assert (
"'addons': [{'slug': 'test_addon', 'protected': True, 'version': '1', 'auto_update': False}]"
in caplog.text
)
assert "'addon_count':" not in caplog.text
async def test_send_statistics(hass, caplog, aioclient_mock):
"""Test send statistics preferences are defined."""
aioclient_mock.post(ANALYTICS_ENDPOINT_URL, status=200)
analytics = Analytics(hass)
await analytics.save_preferences({ATTR_BASE: True, ATTR_STATISTICS: True})
assert analytics.preferences[ATTR_BASE]
assert analytics.preferences[ATTR_STATISTICS]
hass.config.components = ["default_config"]
with patch("homeassistant.components.analytics.analytics.HA_VERSION", MOCK_VERSION):
await analytics.send_analytics()
assert (
"'state_count': 0, 'automation_count': 0, 'integration_count': 1, 'user_count': 0"
in caplog.text
)
assert "'integrations':" not in caplog.text
async def test_send_statistics_one_integration_fails(hass, caplog, aioclient_mock):
"""Test send statistics preferences are defined."""
aioclient_mock.post(ANALYTICS_ENDPOINT_URL, status=200)
analytics = Analytics(hass)
await analytics.save_preferences({ATTR_BASE: True, ATTR_STATISTICS: True})
assert analytics.preferences[ATTR_BASE]
assert analytics.preferences[ATTR_STATISTICS]
hass.config.components = ["default_config"]
with patch(
"homeassistant.components.analytics.analytics.async_get_integration",
side_effect=IntegrationNotFound("any"),
), patch("homeassistant.components.analytics.analytics.HA_VERSION", MOCK_VERSION):
await analytics.send_analytics()
post_call = aioclient_mock.mock_calls[0]
assert "uuid" in post_call[2]
assert post_call[2]["integration_count"] == 0
async def test_send_statistics_async_get_integration_unknown_exception(
hass, caplog, aioclient_mock
):
"""Test send statistics preferences are defined."""
aioclient_mock.post(ANALYTICS_ENDPOINT_URL, status=200)
analytics = Analytics(hass)
await analytics.save_preferences({ATTR_BASE: True, ATTR_STATISTICS: True})
assert analytics.preferences[ATTR_BASE]
assert analytics.preferences[ATTR_STATISTICS]
hass.config.components = ["default_config"]
with pytest.raises(ValueError), patch(
"homeassistant.components.analytics.analytics.async_get_integration",
side_effect=ValueError,
), patch("homeassistant.components.analytics.analytics.HA_VERSION", MOCK_VERSION):
await analytics.send_analytics()
async def test_send_statistics_with_supervisor(hass, caplog, aioclient_mock):
"""Test send statistics preferences are defined."""
aioclient_mock.post(ANALYTICS_ENDPOINT_URL, status=200)
analytics = Analytics(hass)
await analytics.save_preferences({ATTR_BASE: True, ATTR_STATISTICS: True})
assert analytics.preferences[ATTR_BASE]
assert analytics.preferences[ATTR_STATISTICS]
with patch(
"homeassistant.components.hassio.get_supervisor_info",
side_effect=Mock(
return_value={
"healthy": True,
"supported": True,
"arch": "amd64",
"addons": [{"slug": "test_addon"}],
}
),
), patch(
"homeassistant.components.hassio.get_os_info",
side_effect=Mock(return_value={}),
), patch(
"homeassistant.components.hassio.get_info",
side_effect=Mock(return_value={}),
), patch(
"homeassistant.components.hassio.get_host_info",
side_effect=Mock(return_value={}),
), patch(
"homeassistant.components.hassio.async_get_addon_info",
side_effect=AsyncMock(
return_value={
"slug": "test_addon",
"protected": True,
"version": "1",
"auto_update": False,
}
),
), patch(
"homeassistant.components.hassio.is_hassio",
side_effect=Mock(return_value=True),
), patch(
"homeassistant.components.analytics.analytics.HA_VERSION", MOCK_VERSION
):
await analytics.send_analytics()
assert "'addon_count': 1" in caplog.text
assert "'integrations':" not in caplog.text
async def test_reusing_uuid(hass, aioclient_mock):
"""Test reusing the stored UUID."""
aioclient_mock.post(ANALYTICS_ENDPOINT_URL, status=200)
analytics = Analytics(hass)
analytics._data[ATTR_UUID] = "NOT_MOCK_UUID"
await analytics.save_preferences({ATTR_BASE: True})
with patch("uuid.UUID.hex", new_callable=PropertyMock) as hex, patch(
"homeassistant.components.analytics.analytics.HA_VERSION", MOCK_VERSION
):
        # hex is not actually called here; reusing the stored UUID is the point of the test
hex.return_value = MOCK_UUID
await analytics.send_analytics()
assert analytics.uuid == "NOT_MOCK_UUID"
async def test_custom_integrations(hass, aioclient_mock, enable_custom_integrations):
"""Test sending custom integrations."""
aioclient_mock.post(ANALYTICS_ENDPOINT_URL, status=200)
analytics = Analytics(hass)
assert await async_setup_component(hass, "test_package", {"test_package": {}})
await analytics.save_preferences({ATTR_BASE: True, ATTR_USAGE: True})
with patch("homeassistant.components.analytics.analytics.HA_VERSION", MOCK_VERSION):
await analytics.send_analytics()
payload = aioclient_mock.mock_calls[0][2]
assert payload["custom_integrations"][0][ATTR_DOMAIN] == "test_package"
async def test_dev_url(hass, aioclient_mock):
"""Test sending payload to dev url."""
aioclient_mock.post(ANALYTICS_ENDPOINT_URL_DEV, status=200)
analytics = Analytics(hass)
await analytics.save_preferences({ATTR_BASE: True})
with patch(
"homeassistant.components.analytics.analytics.HA_VERSION", MOCK_VERSION_DEV
):
await analytics.send_analytics()
payload = aioclient_mock.mock_calls[0]
assert str(payload[1]) == ANALYTICS_ENDPOINT_URL_DEV
async def test_dev_url_error(hass, aioclient_mock, caplog):
"""Test sending payload to dev url that returns error."""
aioclient_mock.post(ANALYTICS_ENDPOINT_URL_DEV, status=400)
analytics = Analytics(hass)
await analytics.save_preferences({ATTR_BASE: True})
with patch(
"homeassistant.components.analytics.analytics.HA_VERSION", MOCK_VERSION_DEV
):
await analytics.send_analytics()
payload = aioclient_mock.mock_calls[0]
assert str(payload[1]) == ANALYTICS_ENDPOINT_URL_DEV
assert (
f"Sending analytics failed with statuscode 400 from {ANALYTICS_ENDPOINT_URL_DEV}"
in caplog.text
)
async def test_nightly_endpoint(hass, aioclient_mock):
"""Test sending payload to production url when running nightly."""
aioclient_mock.post(ANALYTICS_ENDPOINT_URL, status=200)
analytics = Analytics(hass)
await analytics.save_preferences({ATTR_BASE: True})
with patch(
"homeassistant.components.analytics.analytics.HA_VERSION", MOCK_VERSION_NIGHTLY
):
await analytics.send_analytics()
payload = aioclient_mock.mock_calls[0]
assert str(payload[1]) == ANALYTICS_ENDPOINT_URL
async def test_send_with_no_energy(hass, aioclient_mock):
"""Test send base preferences are defined."""
aioclient_mock.post(ANALYTICS_ENDPOINT_URL, status=200)
analytics = Analytics(hass)
await analytics.save_preferences({ATTR_BASE: True, ATTR_USAGE: True})
with patch("uuid.UUID.hex", new_callable=PropertyMock) as hex, patch(
"homeassistant.components.analytics.analytics.HA_VERSION", MOCK_VERSION
), patch(
"homeassistant.components.analytics.analytics.energy_is_configured", AsyncMock()
) as energy_is_configured:
energy_is_configured.return_value = False
hex.return_value = MOCK_UUID
await analytics.send_analytics()
postdata = aioclient_mock.mock_calls[-1][2]
assert "energy" not in postdata
async def test_send_with_no_energy_config(hass, aioclient_mock):
"""Test send base preferences are defined."""
aioclient_mock.post(ANALYTICS_ENDPOINT_URL, status=200)
analytics = Analytics(hass)
await analytics.save_preferences({ATTR_BASE: True, ATTR_USAGE: True})
assert await async_setup_component(
hass, "energy", {"recorder": {"db_url": "sqlite://"}}
)
with patch("uuid.UUID.hex", new_callable=PropertyMock) as hex, patch(
"homeassistant.components.analytics.analytics.HA_VERSION", MOCK_VERSION
), patch(
"homeassistant.components.analytics.analytics.energy_is_configured", AsyncMock()
) as energy_is_configured:
energy_is_configured.return_value = False
hex.return_value = MOCK_UUID
await analytics.send_analytics()
postdata = aioclient_mock.mock_calls[-1][2]
assert not postdata["energy"]["configured"]
async def test_send_with_energy_config(hass, aioclient_mock):
"""Test send base preferences are defined."""
aioclient_mock.post(ANALYTICS_ENDPOINT_URL, status=200)
analytics = Analytics(hass)
await analytics.save_preferences({ATTR_BASE: True, ATTR_USAGE: True})
assert await async_setup_component(
hass, "energy", {"recorder": {"db_url": "sqlite://"}}
)
with patch("uuid.UUID.hex", new_callable=PropertyMock) as hex, patch(
"homeassistant.components.analytics.analytics.HA_VERSION", MOCK_VERSION
), patch(
"homeassistant.components.analytics.analytics.energy_is_configured", AsyncMock()
) as energy_is_configured:
energy_is_configured.return_value = True
hex.return_value = MOCK_UUID
await analytics.send_analytics()
postdata = aioclient_mock.mock_calls[-1][2]
assert postdata["energy"]["configured"]
| rohitranjan1991/home-assistant | tests/components/analytics/test_analytics.py | Python | mit | 18,101 |
#
# (c) 2016 Red Hat Inc.
#
# This file is part of Ansible
#
# Ansible is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Ansible is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Ansible. If not, see <http://www.gnu.org/licenses/>.
# Make coding more python3-ish
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
import json
from ansible.compat.tests.mock import patch
from ansible.modules.network.onyx import onyx_vlan
from units.modules.utils import set_module_args
from .onyx_module import TestOnyxModule, load_fixture
class TestOnyxVlanModule(TestOnyxModule):
module = onyx_vlan
def setUp(self):
super(TestOnyxVlanModule, self).setUp()
self.mock_get_config = patch.object(
onyx_vlan.OnyxVlanModule, "_get_vlan_config")
self.get_config = self.mock_get_config.start()
self.mock_load_config = patch(
'ansible.module_utils.network.onyx.onyx.load_config')
self.load_config = self.mock_load_config.start()
def tearDown(self):
super(TestOnyxVlanModule, self).tearDown()
self.mock_get_config.stop()
self.mock_load_config.stop()
def load_fixtures(self, commands=None, transport='cli'):
config_file = 'onyx_vlan_show.cfg'
self.get_config.return_value = load_fixture(config_file)
self.load_config.return_value = None
def test_vlan_no_change(self):
set_module_args(dict(vlan_id=20))
self.execute_module(changed=False)
def test_vlan_remove_name(self):
set_module_args(dict(vlan_id=10, name=''))
commands = ['vlan 10 no name']
self.execute_module(changed=True, commands=commands)
def test_vlan_change_name(self):
set_module_args(dict(vlan_id=10, name='test-test'))
commands = ['vlan 10 name test-test']
self.execute_module(changed=True, commands=commands)
def test_vlan_create(self):
set_module_args(dict(vlan_id=30))
commands = ['vlan 30', 'exit']
self.execute_module(changed=True, commands=commands)
def test_vlan_create_with_name(self):
set_module_args(dict(vlan_id=30, name='test-test'))
commands = ['vlan 30', 'exit', 'vlan 30 name test-test']
self.execute_module(changed=True, commands=commands)
def test_vlan_remove(self):
set_module_args(dict(vlan_id=20, state='absent'))
commands = ['no vlan 20']
self.execute_module(changed=True, commands=commands)
def test_vlan_remove_not_exist(self):
set_module_args(dict(vlan_id=30, state='absent'))
self.execute_module(changed=False)
def test_vlan_aggregate(self):
aggregate = list()
aggregate.append(dict(vlan_id=30))
aggregate.append(dict(vlan_id=20))
set_module_args(dict(aggregate=aggregate))
commands = ['vlan 30', 'exit']
self.execute_module(changed=True, commands=commands)
def test_vlan_aggregate_purge(self):
aggregate = list()
aggregate.append(dict(vlan_id=30))
aggregate.append(dict(vlan_id=20))
set_module_args(dict(aggregate=aggregate, purge=True))
commands = ['vlan 30', 'exit', 'no vlan 10', 'no vlan 1']
self.execute_module(changed=True, commands=commands)
| ravibhure/ansible | test/units/modules/network/onyx/test_onyx_vlan.py | Python | gpl-3.0 | 3,728 |
#!/usr/bin/env python
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""Javelin makes resources that should survive an upgrade.
Javelin is a tool for creating, verifying, and deleting a small set of
resources in a declarative way.
"""
import argparse
import collections
import datetime
import os
import sys
import unittest
import yaml
import tempest.auth
from tempest import config
from tempest import exceptions
from tempest.openstack.common import log as logging
from tempest.openstack.common import timeutils
from tempest.services.compute.json import flavors_client
from tempest.services.compute.json import servers_client
from tempest.services.identity.json import identity_client
from tempest.services.image.v2.json import image_client
from tempest.services.object_storage import container_client
from tempest.services.object_storage import object_client
from tempest.services.telemetry.json import telemetry_client
from tempest.services.volume.json import volumes_client
OPTS = {}
USERS = {}
RES = collections.defaultdict(list)
LOG = None
JAVELIN_START = datetime.datetime.utcnow()
class OSClient(object):
_creds = None
identity = None
servers = None
def __init__(self, user, pw, tenant):
_creds = tempest.auth.KeystoneV2Credentials(
username=user,
password=pw,
tenant_name=tenant)
_auth = tempest.auth.KeystoneV2AuthProvider(_creds)
self.identity = identity_client.IdentityClientJSON(_auth)
self.servers = servers_client.ServersClientJSON(_auth)
self.objects = object_client.ObjectClient(_auth)
self.containers = container_client.ContainerClient(_auth)
self.images = image_client.ImageClientV2JSON(_auth)
self.flavors = flavors_client.FlavorsClientJSON(_auth)
self.telemetry = telemetry_client.TelemetryClientJSON(_auth)
self.volumes = volumes_client.VolumesClientJSON(_auth)
def load_resources(fname):
"""Load the expected resources from a yaml flie."""
return yaml.load(open(fname, 'r'))
def keystone_admin():
return OSClient(OPTS.os_username, OPTS.os_password, OPTS.os_tenant_name)
def client_for_user(name):
LOG.debug("Entering client_for_user")
if name in USERS:
user = USERS[name]
LOG.debug("Created client for user %s" % user)
return OSClient(user['name'], user['pass'], user['tenant'])
else:
LOG.error("%s not found in USERS: %s" % (name, USERS))
###################
#
# TENANTS
#
###################
def create_tenants(tenants):
"""Create tenants from resource definition.
Don't create the tenants if they already exist.
"""
admin = keystone_admin()
_, body = admin.identity.list_tenants()
existing = [x['name'] for x in body]
for tenant in tenants:
if tenant not in existing:
admin.identity.create_tenant(tenant)
else:
LOG.warn("Tenant '%s' already exists in this environment" % tenant)
def destroy_tenants(tenants):
admin = keystone_admin()
for tenant in tenants:
tenant_id = admin.identity.get_tenant_by_name(tenant)['id']
r, body = admin.identity.delete_tenant(tenant_id)
##############
#
# USERS
#
##############
def _users_for_tenant(users, tenant):
u_for_t = []
for user in users:
for n in user:
if user[n]['tenant'] == tenant:
u_for_t.append(user[n])
return u_for_t
def _tenants_from_users(users):
tenants = set()
for user in users:
for n in user:
tenants.add(user[n]['tenant'])
return tenants
def _assign_swift_role(user):
admin = keystone_admin()
resp, roles = admin.identity.list_roles()
role = next(r for r in roles if r['name'] == 'Member')
LOG.debug(USERS[user])
try:
admin.identity.assign_user_role(
USERS[user]['tenant_id'],
USERS[user]['id'],
role['id'])
except exceptions.Conflict:
# don't care if it's already assigned
pass
def create_users(users):
"""Create tenants from resource definition.
Don't create the tenants if they already exist.
"""
global USERS
LOG.info("Creating users")
admin = keystone_admin()
for u in users:
try:
tenant = admin.identity.get_tenant_by_name(u['tenant'])
except exceptions.NotFound:
LOG.error("Tenant: %s - not found" % u['tenant'])
continue
try:
admin.identity.get_user_by_username(tenant['id'], u['name'])
LOG.warn("User '%s' already exists in this environment"
% u['name'])
except exceptions.NotFound:
admin.identity.create_user(
u['name'], u['pass'], tenant['id'],
"%s@%s" % (u['name'], tenant['id']),
enabled=True)
def destroy_users(users):
admin = keystone_admin()
for user in users:
tenant_id = admin.identity.get_tenant_by_name(user['tenant'])['id']
user_id = admin.identity.get_user_by_username(tenant_id,
user['name'])['id']
r, body = admin.identity.delete_user(user_id)
def collect_users(users):
global USERS
LOG.info("Collecting users")
admin = keystone_admin()
for u in users:
tenant = admin.identity.get_tenant_by_name(u['tenant'])
u['tenant_id'] = tenant['id']
USERS[u['name']] = u
body = admin.identity.get_user_by_username(tenant['id'], u['name'])
USERS[u['name']]['id'] = body['id']
class JavelinCheck(unittest.TestCase):
def __init__(self, users, resources):
super(JavelinCheck, self).__init__()
self.users = users
self.res = resources
def runTest(self, *args):
pass
def check(self):
self.check_users()
self.check_objects()
self.check_servers()
self.check_volumes()
self.check_telemetry()
def check_users(self):
"""Check that the users we expect to exist, do.
We don't use the resource list for this because we need to validate
that things like tenantId didn't drift across versions.
"""
LOG.info("checking users")
for name, user in self.users.iteritems():
client = keystone_admin()
_, found = client.identity.get_user(user['id'])
self.assertEqual(found['name'], user['name'])
self.assertEqual(found['tenantId'], user['tenant_id'])
# also ensure we can auth with that user, and do something
# on the cloud. We don't care about the results except that it
# remains authorized.
client = client_for_user(user['name'])
resp, body = client.servers.list_servers()
self.assertEqual(resp['status'], '200')
def check_objects(self):
"""Check that the objects created are still there."""
if not self.res.get('objects'):
return
LOG.info("checking objects")
for obj in self.res['objects']:
client = client_for_user(obj['owner'])
r, contents = client.objects.get_object(
obj['container'], obj['name'])
source = _file_contents(obj['file'])
self.assertEqual(contents, source)
def check_servers(self):
"""Check that the servers are still up and running."""
if not self.res.get('servers'):
return
LOG.info("checking servers")
for server in self.res['servers']:
client = client_for_user(server['owner'])
found = _get_server_by_name(client, server['name'])
self.assertIsNotNone(
found,
"Couldn't find expected server %s" % server['name'])
r, found = client.servers.get_server(found['id'])
# get the ipv4 address
addr = found['addresses']['private'][0]['addr']
for count in range(60):
return_code = os.system("ping -c1 " + addr)
                if return_code == 0:
break
self.assertNotEqual(count, 59,
"Server %s is not pingable at %s" % (
server['name'], addr))
def check_telemetry(self):
"""Check that ceilometer provides a sane sample.
        Confirm that there is at least one sample and that it has the
expected metadata.
If in check mode confirm that the oldest sample available is from
before the upgrade.
"""
if not self.res.get('telemetry'):
return
LOG.info("checking telemetry")
for server in self.res['servers']:
client = client_for_user(server['owner'])
response, body = client.telemetry.list_samples(
'instance',
query=('metadata.display_name', 'eq', server['name'])
)
self.assertEqual(response.status, 200)
self.assertTrue(len(body) >= 1, 'expecting at least one sample')
self._confirm_telemetry_sample(server, body[-1])
def check_volumes(self):
"""Check that the volumes are still there and attached."""
if not self.res.get('volumes'):
return
LOG.info("checking volumes")
for volume in self.res['volumes']:
client = client_for_user(volume['owner'])
vol_body = _get_volume_by_name(client, volume['name'])
self.assertIsNotNone(
vol_body,
"Couldn't find expected volume %s" % volume['name'])
            # Verify that the volume's attachment can be retrieved
server_id = _get_server_by_name(client, volume['server'])['id']
attachment = client.volumes.get_attachment_from_volume(vol_body)
self.assertEqual(vol_body['id'], attachment['volume_id'])
self.assertEqual(server_id, attachment['server_id'])
def _confirm_telemetry_sample(self, server, sample):
"""Check this sample matches the expected resource metadata."""
# Confirm display_name
self.assertEqual(server['name'],
sample['resource_metadata']['display_name'])
# Confirm instance_type of flavor
flavor = sample['resource_metadata'].get(
'flavor.name',
sample['resource_metadata'].get('instance_type')
)
self.assertEqual(server['flavor'], flavor)
# Confirm the oldest sample was created before upgrade.
if OPTS.mode == 'check':
oldest_timestamp = timeutils.normalize_time(
timeutils.parse_isotime(sample['timestamp']))
self.assertTrue(
oldest_timestamp < JAVELIN_START,
'timestamp should come before start of second javelin run'
)
#######################
#
# OBJECTS
#
#######################
def _file_contents(fname):
with open(fname, 'r') as f:
return f.read()
def create_objects(objects):
if not objects:
return
LOG.info("Creating objects")
for obj in objects:
LOG.debug("Object %s" % obj)
_assign_swift_role(obj['owner'])
client = client_for_user(obj['owner'])
client.containers.create_container(obj['container'])
client.objects.create_object(
obj['container'], obj['name'],
_file_contents(obj['file']))
def destroy_objects(objects):
for obj in objects:
client = client_for_user(obj['owner'])
r, body = client.objects.delete_object(obj['container'], obj['name'])
        if not (200 <= int(r['status']) < 300):
raise ValueError("unable to destroy object: [%s] %s" % (r, body))
#######################
#
# IMAGES
#
#######################
def _resolve_image(image, imgtype):
name = image[imgtype]
fname = os.path.join(OPTS.devstack_base, image['imgdir'], name)
return name, fname
def _get_image_by_name(client, name):
r, body = client.images.image_list()
for image in body:
if name == image['name']:
return image
return None
def create_images(images):
if not images:
return
LOG.info("Creating images")
for image in images:
client = client_for_user(image['owner'])
# only upload a new image if the name isn't there
if _get_image_by_name(client, image['name']):
LOG.info("Image '%s' already exists" % image['name'])
continue
# special handling for 3 part image
extras = {}
if image['format'] == 'ami':
name, fname = _resolve_image(image, 'aki')
r, aki = client.images.create_image(
'javelin_' + name, 'aki', 'aki')
client.images.store_image(aki.get('id'), open(fname, 'r'))
extras['kernel_id'] = aki.get('id')
name, fname = _resolve_image(image, 'ari')
r, ari = client.images.create_image(
'javelin_' + name, 'ari', 'ari')
client.images.store_image(ari.get('id'), open(fname, 'r'))
extras['ramdisk_id'] = ari.get('id')
_, fname = _resolve_image(image, 'file')
r, body = client.images.create_image(
image['name'], image['format'], image['format'], **extras)
image_id = body.get('id')
client.images.store_image(image_id, open(fname, 'r'))
def destroy_images(images):
if not images:
return
LOG.info("Destroying images")
for image in images:
client = client_for_user(image['owner'])
response = _get_image_by_name(client, image['name'])
if not response:
LOG.info("Image '%s' does not exists" % image['name'])
continue
client.images.delete_image(response['id'])
#######################
#
# SERVERS
#
#######################
def _get_server_by_name(client, name):
r, body = client.servers.list_servers()
for server in body['servers']:
if name == server['name']:
return server
return None
def _get_flavor_by_name(client, name):
r, body = client.flavors.list_flavors()
for flavor in body:
if name == flavor['name']:
return flavor
return None
def create_servers(servers):
if not servers:
return
LOG.info("Creating servers")
for server in servers:
client = client_for_user(server['owner'])
if _get_server_by_name(client, server['name']):
LOG.info("Server '%s' already exists" % server['name'])
continue
image_id = _get_image_by_name(client, server['image'])['id']
flavor_id = _get_flavor_by_name(client, server['flavor'])['id']
resp, body = client.servers.create_server(server['name'], image_id,
flavor_id)
server_id = body['id']
client.servers.wait_for_server_status(server_id, 'ACTIVE')
def destroy_servers(servers):
if not servers:
return
LOG.info("Destroying servers")
for server in servers:
client = client_for_user(server['owner'])
response = _get_server_by_name(client, server['name'])
if not response:
LOG.info("Server '%s' does not exist" % server['name'])
continue
client.servers.delete_server(response['id'])
client.servers.wait_for_server_termination(response['id'],
ignore_error=True)
#######################
#
# VOLUMES
#
#######################
def _get_volume_by_name(client, name):
r, body = client.volumes.list_volumes()
for volume in body:
if name == volume['display_name']:
return volume
return None
def create_volumes(volumes):
if not volumes:
return
LOG.info("Creating volumes")
for volume in volumes:
client = client_for_user(volume['owner'])
        # only create a volume if the name isn't already there
if _get_volume_by_name(client, volume['name']):
LOG.info("volume '%s' already exists" % volume['name'])
continue
size = volume['gb']
v_name = volume['name']
resp, body = client.volumes.create_volume(size=size,
display_name=v_name)
client.volumes.wait_for_volume_status(body['id'], 'available')
def destroy_volumes(volumes):
for volume in volumes:
client = client_for_user(volume['owner'])
volume_id = _get_volume_by_name(client, volume['name'])['id']
client.volumes.detach_volume(volume_id)
client.volumes.delete_volume(volume_id)
def attach_volumes(volumes):
for volume in volumes:
client = client_for_user(volume['owner'])
server_id = _get_server_by_name(client, volume['server'])['id']
volume_id = _get_volume_by_name(client, volume['name'])['id']
device = volume['device']
client.volumes.attach_volume(volume_id, server_id, device)
#######################
#
# MAIN LOGIC
#
#######################
def create_resources():
LOG.info("Creating Resources")
# first create keystone level resources, and we need to be admin
# for those.
create_tenants(RES['tenants'])
create_users(RES['users'])
collect_users(RES['users'])
# next create resources in a well known order
create_objects(RES['objects'])
create_images(RES['images'])
create_servers(RES['servers'])
create_volumes(RES['volumes'])
attach_volumes(RES['volumes'])
def destroy_resources():
LOG.info("Destroying Resources")
# Destroy in inverse order of create
destroy_servers(RES['servers'])
destroy_images(RES['images'])
destroy_objects(RES['objects'])
destroy_volumes(RES['volumes'])
destroy_users(RES['users'])
destroy_tenants(RES['tenants'])
LOG.warn("Destroy mode incomplete")
def get_options():
global OPTS
parser = argparse.ArgumentParser(
description='Create and validate a fixed set of OpenStack resources')
parser.add_argument('-m', '--mode',
metavar='<create|check|destroy>',
required=True,
help=('One of (create, check, destroy)'))
parser.add_argument('-r', '--resources',
required=True,
metavar='resourcefile.yaml',
help='Resources definition yaml file')
parser.add_argument(
'-d', '--devstack-base',
required=True,
metavar='/opt/stack/old',
help='Devstack base directory for retrieving artifacts')
parser.add_argument(
'-c', '--config-file',
metavar='/etc/tempest.conf',
help='path to javelin2(tempest) config file')
# auth bits, letting us also just source the devstack openrc
parser.add_argument('--os-username',
metavar='<auth-user-name>',
default=os.environ.get('OS_USERNAME'),
help=('Defaults to env[OS_USERNAME].'))
parser.add_argument('--os-password',
metavar='<auth-password>',
default=os.environ.get('OS_PASSWORD'),
help=('Defaults to env[OS_PASSWORD].'))
parser.add_argument('--os-tenant-name',
metavar='<auth-tenant-name>',
default=os.environ.get('OS_TENANT_NAME'),
help=('Defaults to env[OS_TENANT_NAME].'))
OPTS = parser.parse_args()
if OPTS.mode not in ('create', 'check', 'destroy'):
print("ERROR: Unknown mode -m %s\n" % OPTS.mode)
parser.print_help()
sys.exit(1)
if OPTS.config_file:
config.CONF.set_config_path(OPTS.config_file)
def setup_logging():
global LOG
logging.setup(__name__)
LOG = logging.getLogger(__name__)
def main():
global RES
get_options()
setup_logging()
RES.update(load_resources(OPTS.resources))
if OPTS.mode == 'create':
create_resources()
# Make sure the resources we just created actually work
checker = JavelinCheck(USERS, RES)
checker.check()
elif OPTS.mode == 'check':
collect_users(RES['users'])
checker = JavelinCheck(USERS, RES)
checker.check()
elif OPTS.mode == 'destroy':
collect_users(RES['users'])
destroy_resources()
else:
LOG.error('Unknown mode %s' % OPTS.mode)
return 1
LOG.info('javelin2 successfully finished')
return 0
if __name__ == "__main__":
sys.exit(main())
| nikolay-fedotov/tempest | tempest/cmd/javelin.py | Python | apache-2.0 | 21,190 |
from setuptools import setup, Extension
from Cython.Distutils import build_ext
import numpy
import os
catchmod = Extension('pycatchmod._catchmod', ['pycatchmod/_catchmod.pyx'],
include_dirs=[numpy.get_include()])
weather_generator = Extension('pycatchmod._weather_generator', ['pycatchmod/_weather_generator.pyx'],
include_dirs=[numpy.get_include()])
with open(os.path.join(os.path.dirname(__file__), "pycatchmod", "__init__.py"), "r") as f:
for line in f:
if line.startswith("__version__"):
_, version, _ = line.split("\"")
break
# needed to compile
setup_requires = [
"cython", "numpy", "setuptools_scm"
]
# needed to run
install_requires = [
"numpy", "pandas", "click", "tables", "xlrd", "scipy", "future", "matplotlib"
]
# only needed for testing
test_requires = [
"pytest"
]
with open('README.rst') as fh:
long_description = fh.read()
setup(
name='pycatchmod',
description='Python implementation of the rainfall runoff model CATCHMOD.',
long_description= long_description,
long_description_content_type='text/x-rst',
author='James E Tomlinson',
author_email='[email protected]',
url="https://github.com/pywr/pycatchmod",
packages=['pycatchmod', "pycatchmod.io"],
install_requires=install_requires,
use_scm_version=True,
setup_requires=setup_requires,
tests_require=test_requires,
ext_modules=[catchmod, weather_generator],
cmdclass = {'build_ext': build_ext},
entry_points={
"console_scripts": [
"pycatchmod = pycatchmod.__main__:main"
]
}
)
| jetuk/pycatchmod | setup.py | Python | gpl-3.0 | 1,639 |
import os
import platform
import sys
import threading
from concurrent.futures import ThreadPoolExecutor
from os import environ, path
from threading import Timer
import grpc
import ptvsd
from getgauge import handlers, logger, processor
from getgauge.impl_loader import copy_skel_files
from getgauge.messages import runner_pb2_grpc
from getgauge.static_loader import load_files
from getgauge.util import get_step_impl_dirs
PLUGIN_JSON = 'python.json'
VERSION = 'version'
ATTACH_DEBUGGER_EVENT = 'Runner Ready for Debugging'
def main():
logger.info("Python: {}".format(platform.python_version()))
if sys.argv[1] == "--init":
logger.debug("Initilizing gauge project.")
copy_skel_files()
else:
load_implementations()
start()
def load_implementations():
d = get_step_impl_dirs()
logger.debug(
"Loading step implemetations from {} dirs.".format(', '.join(d)))
for impl_dir in d:
if not path.exists(impl_dir):
            logger.error('cannot load implementations from {}. {} does not exist.'.format(
                impl_dir, impl_dir))
load_files(d)
def _handle_detached():
logger.info("No debugger attached. Stopping the execution.")
os._exit(1)
def start():
if environ.get('DEBUGGING'):
ptvsd.enable_attach(address=(
'127.0.0.1', int(environ.get('DEBUG_PORT'))))
print(ATTACH_DEBUGGER_EVENT)
t = Timer(int(environ.get("debugger_wait_time", 30)), _handle_detached)
t.start()
ptvsd.wait_for_attach()
t.cancel()
logger.debug('Starting grpc server..')
server = grpc.server(ThreadPoolExecutor(max_workers=1))
p = server.add_insecure_port('127.0.0.1:0')
handler = handlers.RunnerServiceHandler(server)
runner_pb2_grpc.add_RunnerServicer_to_server(handler, server)
logger.info('Listening on port:{}'.format(p))
server.start()
t = threading.Thread(
name="listener", target=handler.wait_for_kill_event)
t.start()
t.join()
os._exit(0)
if __name__ == '__main__':
main()
| kashishm/gauge-python | start.py | Python | mit | 2,076 |
from django.conf import settings
from django.core import management
from django.core.management.base import BaseCommand
from fabric.api import local
from optparse import make_option
class Command(BaseCommand):
help = "Redeploy the current instance"
option_list = BaseCommand.option_list + (
make_option('--servername', action='store', type='string',
dest='servername', default='staging',
help='Server name - to pick which configuration to use'),
make_option('--noreqs', action='store_true', dest='noreqs',
default=False, help='Don\'t update requirements'),
)
def handle(self, *args, **options):
server_name = options.get('servername')
noreqs = options.get('noreqs')
        if server_name not in settings.DEPLOYS:
            self.stderr.write('Non-existent server config')
return
server = settings.DEPLOYS[server_name]
if not noreqs:
local('pip install -r requirements.txt')
management.call_command('syncdb',
interactive=False, stdout=self.stdout)
management.call_command('migrate',
interactive=False, stdout=self.stdout)
management.call_command('collectstatic',
interactive=False, stdout=self.stdout)
# Reload the app in the webserver
ws = server['WEBSERVER']
if ws == 'apache':
local('touch {0}'.format(server['WSGI_FILE'])) # reload wsgi
elif ws == 'gunicorn':
local('sudo kill -HUP `cat {0}`'.format(server['PID_FILE']))
else:
self.stderr.write('Unknown webserver type!')
| yprez/django-sitepush | sitepush/management/commands/deploy_local.py | Python | isc | 1,686 |
from motherbrain.base import models
from motherbrain.base.models.processors import Compose, transform, compose
from motherbrain.models import helpers, legacy_helpers, url
def _type(settings):
order = settings.get('order')
_map = {'chart': ['love', 'numeric'],
'draft': ['recent', 'author'],
'reference': ['color']}
for k, v in _map.iteritems():
if order in v:
return k
return 'draft'
def _comments(data):
if not isinstance(data, list):
return []
def _fmt_time(time_obj):
t = transform.MongoDateTimeToStr()
return t(time_obj)
return [dict(x, **{'created_at': _fmt_time(x.get('created_at'))})
for x in data]
def _followers(followers):
if not followers:
return []
return [dict(user_id=str(x)) for x in followers]
class Model(models.Model):
fields = (
('_id', None),
('list_hash', transform.DoNothing),
('description', transform.DoNothing),
('hash', transform.DoNothing),
('title', transform.DoNothing),
('views_amount', transform.DoNothing),
('is_secret', transform.DoNothing),
('user_id', transform.MongoOidToStr),
('creation_time', transform.MongoDateTimeToStr),
('update_time', transform.MongoDateTimeToStr),
('urls', url.Collection),
('comments', transform.DoNothing),
('comments', Compose('comments', using=_comments)),
('contributors', transform.DoNothing),
('full_url', Compose('hash', using=helpers.full_url)),
('is_unlisted', transform.DoNothing),
('last_visit', [Compose('hash', using=helpers.last_visit),
transform.MongoDateTimeToStr]),
('sections', transform.DoNothing),
('followers', Compose('followers', using=_followers)),
('followers_amount', Compose('followers', using=compose.count)),
('following', Compose('followers', 'contributors', using=helpers.is_favorited)),
('links_amount', Compose('urls', using=compose.count)),
('relist_amount', Compose('hash', using=helpers.list_relist_amount)),
('categories', transform.DoNothing),
('type', Compose('settings', using=_type, override=False)),
('slug', transform.DoNothing)
)
def __init__(self, data):
super(Model, self).__init__(self.fields, data)
def __len__(self):
return len(self.get('urls'))
def __repr__(self):
return u'List {}, {}'.format(self.get('hash'),
self.get('title'))
class Collection(models.Collection):
def __init__(self, models):
super(Collection, self).__init__(Model, models)
| urlist/urlist | motherbrain/models/urlist.py | Python | gpl-3.0 | 3,009 |
# Copyright 2013 New Dream Network, LLC (DreamHost)
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import sys
import eventlet
eventlet.monkey_patch()
from oslo.config import cfg
from neutron.agent.common import config
from neutron.agent.linux import interface
from neutron.common import config as common_config
from neutron.common import rpc as n_rpc
from neutron.common import topics
from neutron.openstack.common import service
from neutron_lbaas.services.loadbalancer.agent import agent_manager as manager
OPTS = [
cfg.IntOpt(
'periodic_interval',
default=10,
help=_('Seconds between periodic task runs')
)
]
class LbaasAgentService(n_rpc.Service):
def start(self):
super(LbaasAgentService, self).start()
self.tg.add_timer(
cfg.CONF.periodic_interval,
self.manager.run_periodic_tasks,
None,
None
)
def main():
cfg.CONF.register_opts(OPTS)
cfg.CONF.register_opts(manager.OPTS)
# import interface options just in case the driver uses namespaces
cfg.CONF.register_opts(interface.OPTS)
config.register_interface_driver_opts_helper(cfg.CONF)
config.register_agent_state_opts_helper(cfg.CONF)
config.register_root_helper(cfg.CONF)
common_config.init(sys.argv[1:])
config.setup_logging()
mgr = manager.LbaasAgentManager(cfg.CONF)
svc = LbaasAgentService(
host=cfg.CONF.host,
topic=topics.LOADBALANCER_AGENT,
manager=mgr
)
service.launch(svc).wait()
| citrix-openstack-build/neutron-lbaas | neutron_lbaas/services/loadbalancer/agent/agent.py | Python | apache-2.0 | 2,067 |
#!/usr/bin/env python
import os
import yaml
import logging
import pymysql
from connections import Connections
__dir__ = os.path.dirname(__file__)
config = yaml.safe_load(open(os.path.join(__dir__, "../default_config.yaml")))
try:
config.update(yaml.safe_load(open(os.path.join(__dir__, "../config.yaml"))))
except IOError:
# is ok if we do not have config.yaml
pass
logging.basicConfig(
filename=config['KILLER_LOG_PATH'],
level=logging.INFO,
format='%(asctime)s pid:%(process)d %(message)s'
)
logging.info("Started killer process, with limit %s", config['QUERY_TIME_LIMIT'])
conn = Connections(config)
cur = conn.replica.cursor()
try:
cur.execute('SHOW PROCESSLIST')
queries = cur.fetchall()
logging.info("Found %s queries running", len(queries))
to_kill = [q for q in queries
if q[5] > config['QUERY_TIME_LIMIT'] and q[4] != 'Sleep']
logging.info("Found %s queries to kill", len(to_kill))
for q in to_kill:
try:
cur.execute('KILL QUERY %s', q[0])
logging.info("Killed query with thread_id:%s" % q[0])
except pymysql.InternalError as e:
if e.args[0] == 1094: # Error code for 'no such thread'
                logging.info('Query with thread_id:%s dead before it could be killed', q[0])
else:
raise
finally:
logging.info("Finished killer process")
cur.close()
conn.close_all()
| wikimedia/analytics-quarry-web | quarry/web/killer.py | Python | mit | 1,430 |
"""
WSGI config for todoapp project.
It exposes the WSGI callable as a module-level variable named ``application``.
For more information on this file, see
https://docs.djangoproject.com/en/1.9/howto/deployment/wsgi/
"""
import os
from django.core.wsgi import get_wsgi_application
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "todoapp.settings")
application = get_wsgi_application()
| erdem/django-selenium-example | todoapp/todoapp/wsgi.py | Python | mit | 391 |
# Copyright 2013-2021 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
from spack import *
class Fakechroot(AutotoolsPackage):
"""fakechroot runs a command in an environment were is additional
possibility to use chroot(8) command without root privileges.This
is useful for allowing users to create own chrooted environment
with possibility to install another packages without need for root
privileges."""
homepage = "https://github.com/dex4er/fakechroot"
url = "https://github.com/dex4er/fakechroot/releases/download/2.20.1/fakechroot-2.20.1.tar.gz"
version('2.20.1', sha256='5abd04323c9ddae06b5dcaa56b2da07728de3fe21007b08bd88a17b2409b32aa')
version('2.20', sha256='5da99358d2a49ddd3dd54ba2ff401d93a8fa641e3754cd058bdf53adb4b7e100')
version('2.19', sha256='39ffbbbe3a823be7450928b8e3b99ae4cb339c47213b2f1d8ff903e0246f2e15')
| LLNL/spack | var/spack/repos/builtin/packages/fakechroot/package.py | Python | lgpl-2.1 | 1,005 |
import branca.element as be
# currently using the static info page ava_info_static.html
class SatSkredInfoPage():
def __init__(self):
self.f = be.Figure()
def make_header(self):
pass
def add_map(self, map_name="./ava_map.html", h="600px"):
self.f.html.add_child(be.Element("<H2>Kart over detekterte skred</H2>"))
# self.f.html.add_child(be.Element('''<iframe src="./ava_map.html:text/html;charset=utf-8;base64,CiAgICAuL2F2YtYXAuaHRtbA==" width="100%" style="border:none !important;" height="600px"></iframe>''')
self.f.html.add_child(be.Element('<iframe src="./ava_map.html", height="600px"></iframe>')
# self.f.html.add_child(be.IFrame(map_name, height=h))
def save(self, filename="ava_info.html"):
self.f.render()
self.f.save(filename)
if __name__ == "__main__":
page = SatSkredInfoPage()
page.add_map()
page.save() | kmunve/APS | aps/satskred/import_skreddb/make_satskred_infopage.py | Python | mit | 917 |
#!/usr/bin/python
#
# Copyright 2015 Google Inc. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""This code example creates a click-to-download ad in a given ad group.
This type of ad is also known as an app promotion ad. To list ad groups, run
get_ad_groups.py.
The LoadFromStorage method is pulling credentials and properties from a
"googleads.yaml" file. By default, it looks for this file in your home
directory. For more information, see the "Caching authentication information"
section of our README.
Tags: AdGroupAdService.mutate
Api: AdWordsOnly
"""
__author__ = 'Joseph DiLallo'
from googleads import adwords
AD_GROUP_ID = 'INSERT_AD_GROUP_ID_HERE'
def main(client, ad_group_id):
# Initialize appropriate service.
ad_group_ad_service = client.GetService('AdGroupAdService', version='v201506')
# Create the template elements for the ad. You can refer to
# https://developers.google.com/adwords/api/docs/appendix/templateads
# for the list of available template fields.
ad_data = {
'uniqueName': 'adData',
'fields': [
{
'name': 'headline',
'fieldText': 'Enjoy your drive in Mars',
'type': 'TEXT'
},
{
'name': 'description1',
'fieldText': 'Realistic physics simulation',
'type': 'TEXT'
},
{
'name': 'description2',
'fieldText': 'Race against players online',
'type': 'TEXT'
},
{
'name': 'appId',
'fieldText': 'com.example.demogame',
'type': 'TEXT'
},
{
'name': 'appStore',
'fieldText': '2',
'type': 'ENUM'
}
]
}
# Create click to download ad.
click_to_download_app_ad = {
'xsi_type': 'TemplateAd',
'name': 'Ad for demo game',
'templateId': '353',
'finalUrls': [
'http://play.google.com/store/apps/details?id=com.example.demogame'
],
'displayUrl': 'play.google.com',
'templateElements': [ad_data]
}
# Create ad group ad.
ad_group_ad = {
'adGroupId': ad_group_id,
'ad': click_to_download_app_ad,
# Optional.
'status': 'PAUSED'
}
# Add ad.
ads = ad_group_ad_service.mutate([
{'operator': 'ADD', 'operand': ad_group_ad}
])
# Display results.
if 'value' in ads:
for ad in ads['value']:
      print ('Added new click-to-download ad with ID \'%s\' '
             'and URL \'%s\'.' % (ad['ad']['id'], ad['ad']['finalUrls'][0]))
else:
print 'No ads were added.'
if __name__ == '__main__':
# Initialize client object.
adwords_client = adwords.AdWordsClient.LoadFromStorage()
main(adwords_client, AD_GROUP_ID)
| coxmediagroup/googleads-python-lib | examples/adwords/v201506/advanced_operations/add_click_to_download_ad.py | Python | apache-2.0 | 3,318 |
"""
Result backend for Easy-Job which uses Python logging.
Available options:
* log_level: logging level; could be logging.DEBUG, logging.ERROR or any other level defined in the logging module
    default: logging.DEBUG
* logger: name of the logger to use; be cautious, the logger must already be defined
    default: no default value available, you must specify either logger or logger_instance
* logger_instance: for testing purposes you can provide the logger object itself
    default: None
* message_template: string template used to log the task and its result; you can use {task_id} and {result} in your template
    default: "{task_id} -> {result}"
"""
from . import BaseResultBackend
import logging
__author__ = 'Apollo'
class LogResultBackend(BaseResultBackend):
def __init__(self, log_level=logging.DEBUG, logger=None, logger_instance=None, message_template=None):
if logger_instance is not None:
self.logger = logger_instance
elif logger is not None:
self.logger = logging.getLogger(logger)
else:
raise ValueError("No logger or logger_instance specified")
self.log_level = log_level
self.message_template = message_template or "{task_id} -> {result}"
def store(self, task_id, result, *args, **kwargs):
self.logger.log(
level=self.log_level,
msg=self.message_template.format(
task_id=task_id,
result=result
)
)
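# --- Usage sketch (added for illustration; the logger name, template, and helper name
# below are assumptions, not part of easy_job). As the module docstring notes, the named
# logger must already be defined before the backend is constructed.
def _example_usage():  # hypothetical helper; never called by the library itself
    logging.basicConfig(level=logging.DEBUG)
    logging.getLogger('easy_job_results')  # make sure the logger is defined
    backend = LogResultBackend(
        log_level=logging.INFO,
        logger='easy_job_results',
        message_template='task {task_id} finished with: {result}',
    )
    backend.store('task-42', 'ok')  # logs: "task task-42 finished with: ok"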
| inb-co/easy-job | easy_job/result_backends/log.py | Python | mit | 1,521 |
from django import forms
from crispy_forms.helper import FormHelper, Layout
from crispy_forms.layout import Field, Div, Row, HTML
from crispy_forms.bootstrap import FormActions, TabHolder, Tab
# from ..models import Pais
from django.utils.translation import ugettext_lazy as _
from apps.utils.forms import smtSave, btnCancel, btnReset
from django.utils.text import capfirst, get_text_list
from unicodedata import normalize
"""
class PaisForm(forms.ModelForm):
class Meta:
model = Pais
exclude = ('usuario',)
widgets = {
'codigo': forms.TextInput(attrs={'class': 'form-control', 'required':'true', 'placeholder': 'Ingrese codigo'}),
            'nombre': forms.TextInput(attrs={'class': 'form-control', 'required': 'true', 'placeholder': 'Ingrese nombre'}),
            'departamento': forms.Select(attrs={'class': 'form-control', 'required': 'true'}),
        }
""" | upeu-jul-20161-epis-ads2/MedicFast | apps/atencion/forms/PaisForm.py | Python | bsd-3-clause | 894 |
from subprocess import Popen, PIPE
from tempfile import NamedTemporaryFile
class Statistic(object):
#TODO: move to a FileStatistic or something like that b/c
# it only deals with files and there are other stats which don't
def set_files(self, files):
self.files = files
    # can be overridden by individual stats
def set_config(self, conf):
pass
#TODO: move to some utils class
def get_result_from_shell(self, cmd):
p = Popen(cmd, shell=True, stdout=PIPE)
(stdout_data, stderr_data) = p.communicate()
return stdout_data
#TODO: move to some utils class
def write_filenames_to_temp_file(self):
filestr = "\n".join(self.files)
f = NamedTemporaryFile(mode='w', delete=False)
f.write(filestr)
f.close()
return f.name
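# --- Illustrative sketch (not part of the original module). The comments above say that
# individual stats override set_config and reuse the file helpers; a minimal, hypothetical
# subclass that counts lines across the configured files might look like this. The class
# name, the get_stat method, and the use of `wc` are assumptions for illustration only.
class LineCountStatistic(Statistic):
    def set_config(self, conf):
        # this particular stat needs no configuration
        pass

    def get_stat(self):
        # write the file names to a temp file and feed them to `wc -l` via xargs;
        # the last line of the output holds the (total) count in its first column
        fname = self.write_filenames_to_temp_file()
        output = self.get_result_from_shell("xargs wc -l < %s | tail -1" % fname)
        return int(output.split()[0])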
| markdrago/caboose | src/stats/statistic.py | Python | mit | 832 |
# vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright 2010 United States Government as represented by the
# Administrator of the National Aeronautics and Space Administration.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""
Base classes for view based unit tests.
"""
import mox
import nova_adminclient as adminclient
from django import test
from django.conf import settings
from django.contrib.auth import models as auth_models
from django_nova import manager
from django_nova import shortcuts
TEST_PROJECT = 'test'
TEST_USER = 'test'
TEST_REGION = 'test'
class BaseViewTests(test.TestCase):
def setUp(self):
self.mox = mox.Mox()
def tearDown(self):
self.mox.UnsetStubs()
def assertRedirectsNoFollow(self, response, expected_url):
self.assertEqual(response._headers['location'],
('Location', settings.TESTSERVER + expected_url))
self.assertEqual(response.status_code, 302)
def authenticateTestUser(self):
user = auth_models.User.objects.create_user(TEST_USER,
'[email protected]',
password='test')
login = self.client.login(username=TEST_USER, password='test')
self.failUnless(login, 'Unable to login')
return user
class BaseProjectViewTests(BaseViewTests):
def setUp(self):
super(BaseProjectViewTests, self).setUp()
project = adminclient.ProjectInfo()
project.projectname = TEST_PROJECT
project.projectManagerId = TEST_USER
self.user = self.authenticateTestUser()
self.region = adminclient.RegionInfo(name=TEST_REGION,
endpoint='http://test:8773/')
self.project = manager.ProjectManager(self.user.username,
project,
self.region)
self.mox.StubOutWithMock(shortcuts, 'get_project_or_404')
shortcuts.get_project_or_404(mox.IgnoreArg(),
'test').AndReturn(self.project)
def create_key_pair_choices(self, key_names):
return [(k, k) for k in key_names]
def create_instance_type_choices(self):
return [('m1.medium', 'm1.medium'),
('m1.large', 'm1.large')]
def create_instance_choices(self, instance_ids):
return [(id, id) for id in instance_ids]
def create_available_volume_choices(self, volumes):
return [(v.id, '%s %s - %dGB' % (v.id, v.displayName, v.size)) \
for v in volumes]
| sleepsonthefloor/openstack-dashboard | django-nova/src/django_nova/tests/view_tests/base.py | Python | apache-2.0 | 3,195 |
#!/usr/bin/env python
#
# Generate seeds.txt from Pieter's DNS seeder
#
NSEEDS=512
MAX_SEEDS_PER_ASN=2
MIN_BLOCKS = 200000
# These are hosts that have been observed to be behaving strangely (e.g.
# aggressively connecting to every node).
SUSPICIOUS_HOSTS = set([
"127.0.0.1" - наш ип основной ноды
])
import re
import sys
import dns.resolver
PATTERN_IPV4 = re.compile(r"^((\d{1,3})\.(\d{1,3})\.(\d{1,3})\.(\d{1,3})):9887$")  # the port is changed here
PATTERN_AGENT = re.compile(r"^(\/Satoshi:0.8.6\/|\/Satoshi:0.9.(2|3)\/|\/Core:0.1(0|1|2).\d{1,2}.\d{1,2}\/)$")
def parseline(line):
sline = line.split()
if len(sline) < 11:
return None
# Match only IPv4
m = PATTERN_IPV4.match(sline[0])
if m is None:
return None
# Do IPv4 sanity check
ip = 0
for i in range(0,4):
if int(m.group(i+2)) < 0 or int(m.group(i+2)) > 255:
return None
ip = ip + (int(m.group(i+2)) << (8*(3-i)))
if ip == 0:
return None
# Skip bad results.
if sline[1] == 0:
return None
# Extract uptime %.
uptime30 = float(sline[7][:-1])
# Extract Unix timestamp of last success.
lastsuccess = int(sline[2])
# Extract protocol version.
version = int(sline[10])
# Extract user agent.
agent = sline[11][1:-1]
# Extract service flags.
service = int(sline[9], 16)
# Extract blocks.
blocks = int(sline[8])
# Construct result.
return {
'ip': m.group(1),
'ipnum': ip,
'uptime': uptime30,
'lastsuccess': lastsuccess,
'version': version,
'agent': agent,
'service': service,
'blocks': blocks,
}
# Based on Greg Maxwell's seed_filter.py
def filterbyasn(ips, max_per_asn, max_total):
result = []
asn_count = {}
for ip in ips:
if len(result) == max_total:
break
try:
asn = int([x.to_text() for x in dns.resolver.query('.'.join(reversed(ip['ip'].split('.'))) + '.origin.asn.cymru.com', 'TXT').response.answer][0].split('\"')[1].split(' ')[0])
if asn not in asn_count:
asn_count[asn] = 0
if asn_count[asn] == max_per_asn:
continue
asn_count[asn] += 1
result.append(ip)
except:
sys.stderr.write('ERR: Could not resolve ASN for "' + ip['ip'] + '"\n')
return result
def main():
lines = sys.stdin.readlines()
ips = [parseline(line) for line in lines]
# Skip entries with valid IPv4 address.
ips = [ip for ip in ips if ip is not None]
# Skip entries from suspicious hosts.
ips = [ip for ip in ips if ip['ip'] not in SUSPICIOUS_HOSTS]
# Enforce minimal number of blocks.
ips = [ip for ip in ips if ip['blocks'] >= MIN_BLOCKS]
# Require service bit 1.
ips = [ip for ip in ips if (ip['service'] & 1) == 1]
# Require at least 50% 30-day uptime.
ips = [ip for ip in ips if ip['uptime'] > 50]
# Require a known and recent user agent.
ips = [ip for ip in ips if PATTERN_AGENT.match(ip['agent'])]
# Sort by availability (and use last success as tie breaker)
ips.sort(key=lambda x: (x['uptime'], x['lastsuccess'], x['ip']), reverse=True)
# Look up ASNs and limit results, both per ASN and globally.
ips = filterbyasn(ips, MAX_SEEDS_PER_ASN, NSEEDS)
# Sort the results by IP address (for deterministic output).
ips.sort(key=lambda x: (x['ipnum']))
for ip in ips:
print ip['ip']
if __name__ == '__main__':
main()
| freelion93/mtucicoin | contrib/seeds/makeseeds.py | Python | mit | 3,563 |
# Created By: Virgil Dupras
# Created On: 2008-07-21
# Copyright 2015 Hardcoded Software (http://www.hardcoded.net)
#
# This software is licensed under the "GPLv3" License as described in the "LICENSE" file,
# which should be included with this package. The terms are also available at
# http://www.gnu.org/licenses/gpl-3.0.html
from hscommon.gui.text_field import TextField
class SearchField(TextField):
def __init__(self, mainwindow):
TextField.__init__(self)
self.document = mainwindow.document
def _update(self, newvalue):
self.document.filter_string = newvalue
def refresh(self):
self._text = self._value = self.document.filter_string
self.view.refresh()
| fokusov/moneyguru | core/gui/search_field.py | Python | gpl-3.0 | 729 |
# -*- coding: utf-8 -*-
#
# Copyright (C) 2000-2005 by Yasushi Saito ([email protected])
#
# Jockey is free software; you can redistribute it and/or modify it
# under the terms of the GNU General Public License as published by the
# Free Software Foundation; either version 2, or (at your option) any
# later version.
#
# Jockey is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or
# FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License
# for more details.
#
class T(object):
def get_canvas_pos(self, size, val, min, max):
"""
Compute the screen location at which a sample value would be drawn.
``size`` is the width or height of the chart, in points.
``val`` is the sample value.
``min`` and ``max`` are the minimum and maximum sample values that
are to be displayed over the length of ``size``.
For example, suppose the width of a chart is 200 points and the
minimum and maximum X values in the sample data are 100 and 150
respectively. When Pychart wants to draw a sample point at the X
value of 120, it will call
area.T.x_coord.get_canvas_pos(size = 200, val = 120, min = 100, max = 150).
"""
raise Exception
def get_tics(self, min, max, interval):
"""Generate the list of places for drawing tick marks."""
raise Exception
def get_min_max(self, min, max, interval):
"""Compute the min/max values to be displayed in the chart.
Parameters ``min`` and ``max`` are the minimum and maximum values
of the sample data passed to the plots. Parameter ``interval`` is
the value of attribute area.T.x_grid_interval (or y_grid_interval).
It is None if these attributes are non-specified.
This method must return tuple (dmin, dmax, dinterval).
dmin should be ``min`` rounded down to some good number.
dmax should be ``max`` rounded up to some good number.
dinterval should be ``interval`` if it is non-None. Otherwise, the
method must compute some good value.
"""
raise Exception
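# --- Illustrative sketch (not part of the original pychart module). The docstrings above
# describe the coordinate-mapping contract; assuming a plain linear mapping of values onto
# the canvas, a concrete subclass could look like this. The name "Linear" is made up here.
class Linear(T):
    def get_canvas_pos(self, size, val, min, max):
        # e.g. size=200, min=100, max=150, val=120 -> 80.0 points from the origin
        return size * (val - min) / float(max - min)

    def get_min_max(self, min, max, interval):
        interval = interval or (max - min) / 5.0 or 1
        dmin = interval * (min // interval)       # round min down to a multiple of interval
        dmax = interval * -(-max // interval)     # round max up to a multiple of interval
        return dmin, dmax, interval

    def get_tics(self, min, max, interval):
        dmin, dmax, interval = self.get_min_max(min, max, interval)
        tics, v = [], dmin
        while v <= dmax:
            tics.append(v)
            v += interval
        return tics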
| BorgERP/borg-erp-6of3 | server/openerp/pychart/coord.py | Python | agpl-3.0 | 2,226 |
###########################################################
#
# Copyright (c) 2009, Southpaw Technology
# All Rights Reserved
#
# PROPRIETARY INFORMATION. This software is proprietary to
# Southpaw Technology, and is not to be reproduced, transmitted,
# or disclosed in any way without written permission.
#
#
#
__all__ = ["ProjectConfigWdg", "UserConfigWdg", "UserPanelWdg"]
from pyasm.common import Common, Environment
from pyasm.search import Search, SearchKey, SearchType
from pyasm.biz import Project
from pyasm.web import DivWdg, Table, WebContainer, SpanWdg
from pyasm.widget import ThumbWdg, IconWdg, CheckboxWdg
from tactic.ui.container import SmartMenu
from tactic.ui.common import BaseRefreshWdg
from tactic.ui.widget import IconButtonWdg, SingleButtonWdg, ActionButtonWdg
class ProjectConfigWdg(BaseRefreshWdg):
ARGS_KEYS = {
}
def get_help_alias(my):
return 'project-startup-configuration'
def get_panel_wdg(my, td, panel):
title = panel.get("title")
widget = panel.get("widget")
width = panel.get("width")
#height = panel.get("height")
#if not height:
# height = "250px"
#td.add_style("height: %s" % height)
if width:
td.add_style("width: %s" % width)
td.add_border()
div = DivWdg()
div.add_style("padding: 5px")
#div.add_style("padding: 10px")
title_wdg = DivWdg()
div.add(title_wdg)
title_wdg.add_style("padding: 5px")
#title_wdg.add_style("margin: -12px -12px 10px -12px")
title_wdg.add_style("margin: -6px -7px 5px -7px")
title_wdg.add_style("font-weight: bold")
title_wdg.add_style("font-size: 14px")
if title:
title_wdg.add_color("background", "background", -5)
title_wdg.add_color("color", "color", -10)
title_wdg.add_border()
title_wdg.add(title)
from tactic.ui.app import HelpButtonWdg
help_wdg = HelpButtonWdg(alias=my.get_help_alias())
help_wdg.add_style("float: right")
help_wdg.add_style("margin-top: -5px")
title_wdg.add(help_wdg)
else:
title_wdg.add_style("height: 10px")
if widget:
div.add(widget)
return div
def get_panels(my):
panels = []
search_type_panel = DivWdg()
search_type_panel.add_style("padding-top: 3px")
search_type_panel.add_style("overflow-y: auto")
search_type_panel.add( SearchTypePanel() )
search_type_panel.add_style("min-height: 100px")
search_type_panel.add_style("height: 300px")
search_type_panel.add_class("spt_resizable")
panel = {
'widget': search_type_panel,
'title': 'List of Searchable Types (sTypes)',
'width': '50%'
}
panels.append(panel)
from tactic.ui.container import TabWdg
config_xml = '''
<config>
<tab>
<element name="Help">
<display class='tactic.ui.app.HelpContentWideWdg'>
<alias>main</alias>
<width>1000px</width>
</display>
</element>
</tab>
</config>
'''
div = DivWdg()
tab = TabWdg(show_add=False, config_xml=config_xml, tab_offset=5)
div.add(tab)
div.add_style("margin: 0px -6px -6px -6px")
panel = {
'widget': div,
#'title': 'Data',
'title': None,
'width': '100%',
}
panels.append(panel)
return panels
def get_display(my):
# set the sobjects to all the widgets then preprocess
for widget in my.widgets:
widget.set_sobjects(my.sobjects)
widget.set_parent_wdg(my)
# preprocess the elements
widget.preprocess()
top = my.top
my.set_as_panel(top)
inner = DivWdg()
top.add(inner)
inner.add_color("background", "background")
inner.add_color("color", "color")
inner.add_class("spt_dashboard_top")
title = DivWdg()
inner.add(title)
title.add(my.get_title())
title.add_style("font-size: 18px")
title.add_style("font-weight: bold")
title.add_style("text-align: center")
title.add_style("padding: 10px")
#title.add_style("margin: -10px -10px 10px -10px")
title.add_color("background", "background3")
#table = Table()
from tactic.ui.container import ResizableTableWdg
table = ResizableTableWdg()
inner.add(table)
table.set_max_width()
panels = my.get_panels()
for panel in panels:
title = panel.get('title')
#if title in ['Data', None]:
# tr, td = table.add_row_cell()
#else:
# td = table.add_cell()
tr = table.add_row()
td = table.add_cell(resize=False)
td.add_style("min-height: 100px")
td.add_style("vertical-align: top")
panel = my.get_panel_wdg(td, panel)
td.add(panel)
return top
def get_title(my):
return "Project Configuration"
class UserConfigWdg(ProjectConfigWdg):
def get_title(my):
return "Manage Users"
def get_help_alias(my):
return 'project-startup-manage-users'
def get_panels(my):
panels = []
show_security = my.kwargs.get("show_security")
user_panel = DivWdg()
user_panel.add_style("padding-top: 3px")
user_panel.add_style("overflow-y: auto")
user_panel.add( UserPanelWdg(show_security=show_security) )
user_panel.add_style("min-height: 100px")
user_panel.add_style("height: 300px")
user_panel.add_class("spt_resizable")
panel = {
'widget': user_panel,
'title': 'List of Users',
}
panels.append(panel)
from tactic.ui.container import TabWdg
config_xml = []
config_xml.append('''
<config>
<tab>
''')
"""
<element name="Help">
<display class='tactic.ui.app.HelpContentWideWdg'>
<alias>main</alias>
<width>1000px</width>
</display>
</element>''')
"""
config_xml.append('''
<element name="Groups">
<display class='tactic.ui.startup.UserSecurityWdg'/>
</element>
''')
config_xml.append('''
</tab>
</config>
''')
config_xml = "\n".join(config_xml)
tab = TabWdg(show_add=False, config_xml=config_xml)
panel = {
'widget': tab,
#'title': 'Data',
'title': None,
'width': '100%',
'height': '100%'
}
panels.append(panel)
return panels
class SearchTypePanel(BaseRefreshWdg):
def get_display(my):
web = WebContainer.get_web()
show_multi_project = web.get_form_value('show_multi_project')
project = Project.get()
search_type_objs = project.get_search_types(include_multi_project=show_multi_project)
top = my.top
top.add_class("spt_panel_stype_list_top")
#top.add_style("min-width: 400px")
#top.add_style("max-width: 1000px")
#top.add_style("width: 100%")
top.center()
button = SingleButtonWdg(title="Advanced Setup", icon=IconWdg.ADVANCED)
top.add(button)
button.add_style("float: right")
button.add_style("margin-top: -8px")
button.add_behavior( {
'type': 'click_up',
'cbjs_action': '''
var class_name = 'tactic.ui.app.ProjectStartWdg';
spt.tab.set_main_body_tab()
spt.tab.add_new("project_setup", "Project Setup", class_name)
'''
} )
button = SingleButtonWdg(title="Add", tip="Add New Searchable Type (sType)", icon=IconWdg.ADD)
top.add(button)
button.add_style("float: left")
button.add_style("margin-top: -8px")
button.add_behavior( {
'type': 'click_up',
'cbjs_action': '''
var class_name = 'tactic.ui.app.SearchTypeCreatorWdg';
var kwargs = {
};
var popup = spt.panel.load_popup("Create New Searchable Type", class_name, kwargs);
var top = bvr.src_el.getParent(".spt_panel_stype_list_top");
popup.on_register_cbk = function() {
spt.panel.refresh(top);
}
'''
} )
cb = CheckboxWdg('show_multi_project', label=' show multi-project')
if show_multi_project:
cb.set_checked()
cb.add_behavior( {
'type': 'click_up',
'cbjs_action': '''
var panel = bvr.src_el.getParent('.spt_panel_stype_list_top')
spt.panel.refresh(panel, {show_multi_project: bvr.src_el.checked});
'''
})
span = SpanWdg(css='small')
top.add(span)
top.add(cb)
top.add("<br clear='all'/>")
#search_type_objs = []
if not search_type_objs:
arrow_div = DivWdg()
top.add(arrow_div)
icon = IconWdg("Click to Add", IconWdg.ARROW_UP_LEFT_32)
icon.add_style("margin-top: -20")
icon.add_style("margin-left: -15")
icon.add_style("position: absolute")
arrow_div.add(icon)
arrow_div.add(" "*5)
arrow_div.add("<b>Click to Add</b>")
arrow_div.add_style("position: relative")
arrow_div.add_style("margin-top: 5px")
arrow_div.add_style("margin-left: 20px")
arrow_div.add_style("float: left")
arrow_div.add_style("padding: 25px")
arrow_div.set_box_shadow("0px 5px 20px")
arrow_div.set_round_corners(30)
arrow_div.add_color("background", "background")
div = DivWdg()
top.add(div)
div.add_border()
div.add_style("min-height: 180px")
div.add_style("width: 600px")
div.add_style("margin: 30px auto")
div.add_style("padding: 20px")
div.add_color("background", "background3")
icon = IconWdg( "WARNING", IconWdg.WARNING )
div.add(icon)
div.add("<b>No Searchable Types have been created</b>")
div.add("<br/><br/>")
div.add("Searchables Types contain lists of items that are managed in this project. Each item will automatically have the ability to have files checked into it, track tasks and status and record work hours.")
div.add("<br/>"*2)
div.add("For more information, read the help docs: ")
from tactic.ui.app import HelpButtonWdg
help = HelpButtonWdg(alias="main")
div.add(help)
div.add("<br/>")
div.add("Click on the 'Add' button above to start adding new types.")
return top
div = DivWdg()
top.add(div)
#div.add_style("max-height: 300px")
#div.add_style("overflow-y: auto")
table = Table()
div.add(table)
table.add_style("margin-top: 10px")
table.set_max_width()
# group mouse over
table.add_relay_behavior( {
'type': "mouseover",
'bvr_match_class': 'spt_row',
'cbjs_action': "spt.mouse.table_layout_hover_over({}, {src_el: bvr.src_el, add_color_modifier: -2})"
} )
table.add_relay_behavior( {
'type': "mouseout",
'bvr_match_class': 'spt_row',
'cbjs_action': "spt.mouse.table_layout_hover_out({}, {src_el: bvr.src_el})"
} )
tr = table.add_row()
tr.add_color("color", "color")
tr.add_gradient("background", "background", -10)
th = table.add_header("")
th.add_style("text-align: left")
th = table.add_header("Title")
th.add_style("text-align: left")
th = table.add_header("# Items")
th.add_style("text-align: left")
th = table.add_header("View")
th.add_style("text-align: left")
th = table.add_header("Add")
th.add_style("text-align: left")
th = table.add_header("Import")
th.add_style("text-align: left")
th = table.add_header("Custom Columns")
th.add_style("text-align: left")
th = table.add_header("Workflow")
th.add_style("text-align: left")
th = table.add_header("Notifications")
th.add_style("text-align: left")
th = table.add_header("Triggers")
th.add_style("text-align: left")
th = table.add_header("Edit")
th.add_style("text-align: left")
#th = table.add_header("Security")
#th.add_style("text-align: left")
for i, search_type_obj in enumerate(search_type_objs):
tr = table.add_row()
tr.add_class("spt_row")
if not i or not i%2:
tr.add_color("background", "background3")
else:
tr.add_color("background", "background", -2 )
thumb = ThumbWdg()
thumb.set_sobject(search_type_obj)
thumb.set_icon_size(30)
td = table.add_cell(thumb)
search_type = search_type_obj.get_value("search_type")
title = search_type_obj.get_title()
table.add_cell(title)
try:
search = Search(search_type)
count = search.get_count()
if count:
table.add_cell("%s item/s" % count)
else:
table.add_cell(" ")
except:
td = table.add_cell("< No table >")
td.add_style("font-style: italic")
td.add_style("color: #F00")
continue
#search = Search(search_type)
#search.add_interval_filter("timestamp", "today")
#created_today = search.get_count()
#table.add_cell(created_today)
td = table.add_cell()
button = IconButtonWdg(title="View", icon=IconWdg.ZOOM)
td.add(button)
button.add_behavior( {
'type': 'click_up',
'search_type': search_type,
'title': title,
'cbjs_action': '''
var class_name = 'tactic.ui.panel.ViewPanelWdg';
var kwargs = {
search_type: bvr.search_type,
view: 'table',
'simple_search_view': 'simple_search'
};
// use tab
var top = bvr.src_el.getParent(".spt_dashboard_top");
spt.tab.set_tab_top(top);
spt.tab.add_new(bvr.title, bvr.title, class_name, kwargs);
//spt.panel.load_popup(bvr.title, class_name, kwargs);
'''
} )
button.add_style("float: left")
arrow_button = IconButtonWdg(tip="More Views", icon=IconWdg.ARROWHEAD_DARK_DOWN)
arrow_button.add_style("margin-left: 20px")
td.add(arrow_button)
cbk = '''
var activator = spt.smenu.get_activator(bvr);
var class_name = bvr.class_name;
var layout = bvr.layout;
var kwargs = {
search_type: bvr.search_type,
layout: layout,
view: bvr.view,
simple_search_view: 'simple_search',
element_names: bvr.element_names,
};
// use tab
var top = activator.getParent(".spt_dashboard_top");
spt.tab.set_tab_top(top);
spt.tab.add_new('%s', '%s', class_name, kwargs);
''' % (title, title)
from tactic.ui.panel import SwitchLayoutMenu
SwitchLayoutMenu(search_type=search_type, activator=arrow_button, cbk=cbk, is_refresh=False)
td = table.add_cell()
button = IconButtonWdg(title="Add", icon=IconWdg.ADD)
td.add(button)
button.add_behavior( {
'type': 'listen',
'search_type': search_type,
'event_name': 'startup_save:' + search_type_obj.get_title(),
'title': search_type_obj.get_title(),
'cbjs_action': '''
var top = bvr.src_el.getParent(".spt_dashboard_top");
spt.tab.set_tab_top(top);
var class_name = 'tactic.ui.panel.ViewPanelWdg';
var kwargs = {
search_type: bvr.search_type,
view: 'table',
'simple_search_view': 'simple_search'
};
spt.tab.add_new(bvr.title, bvr.title, class_name, kwargs);
'''
} )
button.add_behavior( {
'type': 'click_up',
'search_type': search_type,
'title': search_type_obj.get_title(),
'cbjs_action': '''
var top = bvr.src_el.getParent(".spt_dashboard_top");
spt.tab.set_tab_top(top);
var class_name = 'tactic.ui.panel.EditWdg';
var kwargs = {
search_type: bvr.search_type,
view: "insert",
save_event: "startup_save:" + bvr.title
}
spt.panel.load_popup("Add New Items ("+bvr.title+")", class_name, kwargs);
var class_name = 'tactic.ui.panel.ViewPanelWdg';
var kwargs = {
search_type: bvr.search_type,
view: 'table',
'simple_search_view': 'simple_search'
};
spt.tab.add_new(bvr.title, bvr.title, class_name, kwargs);
'''
} )
"""
td = table.add_cell()
button = IconButtonWdg(title="Check-in", icon=IconWdg.PUBLISH)
td.add(button)
button.add_behavior( {
'type': 'click_up',
'search_type': search_type,
'title': title,
'cbjs_action': '''
var class_name = 'tactic.ui.panel.ViewPanelWdg';
var kwargs = {
search_type: bvr.search_type,
view: 'checkin',
element_names: ['preview','code','name','description','history','general_checkin','notes']
};
// use tab
var top = bvr.src_el.getParent(".spt_dashboard_top");
spt.tab.set_tab_top(top);
spt.tab.add_new(bvr.title, bvr.title, class_name, kwargs);
//spt.panel.load_popup(bvr.title, class_name, kwargs);
'''
} )
"""
td = table.add_cell()
button = IconButtonWdg(title="Import", icon=IconWdg.IMPORT)
td.add(button)
button.add_behavior( {
'type': 'click_up',
'search_type': search_type,
'title': "Import Data",
'cbjs_action': '''
var class_name = 'tactic.ui.widget.CsvImportWdg';
var kwargs = {
search_type: bvr.search_type,
};
spt.panel.load_popup(bvr.title, class_name, kwargs);
'''
} )
td = table.add_cell()
button = IconButtonWdg(title="Custom Columns", icon=IconWdg.COLUMNS)
td.add(button)
button.add_behavior( {
'type': 'click_up',
'search_type': search_type,
'title': "Add Custom Columns",
'cbjs_action': '''
var class_name = 'tactic.ui.startup.ColumnEditWdg';
var kwargs = {
search_type: bvr.search_type,
};
spt.panel.load_popup(bvr.title, class_name, kwargs);
'''
} )
td = table.add_cell()
button = IconButtonWdg(title="Workflow", icon=IconWdg.PIPELINE)
button.add_style("float: left")
td.add(button)
search = Search("sthpw/pipeline")
search.add_filter("search_type", search_type)
count = search.get_count()
if count:
check = IconWdg( "Has Items", IconWdg.CHECK, width=8 )
td.add(check)
#check.add_style("margin-left: 0px")
check.add_style("margin-top: 4px")
button.add_behavior( {
'type': 'click_up',
'title': 'Workflow',
'search_type': search_type,
'cbjs_action': '''
var class_name = 'tactic.ui.startup.PipelineEditWdg';
var kwargs = {
search_type: bvr.search_type
};
spt.panel.load_popup(bvr.title, class_name, kwargs);
'''
} )
td = table.add_cell()
button = IconButtonWdg(title="Notifications", icon=IconWdg.MAIL)
button.add_style("float: left")
td.add(button)
search = Search("sthpw/notification")
search.add_filter("search_type", search_type)
count = search.get_count()
if count:
check = IconWdg( "Has Items", IconWdg.CHECK, width=8 )
td.add(check)
#check.add_style("margin-left: 0px")
check.add_style("margin-top: 4px")
button.add_behavior( {
'type': 'click_up',
'title': 'Trigger',
'search_type': search_type,
'cbjs_action': '''
var class_name = 'tactic.ui.tools.TriggerToolWdg';
var kwargs = {
mode: "search_type",
search_type: bvr.search_type
};
spt.panel.load_popup(bvr.title, class_name, kwargs);
'''
} )
td = table.add_cell()
button = IconButtonWdg(title="Triggers", icon=IconWdg.ARROW_OUT)
td.add(button)
button.add_style("float: left")
search = Search("config/trigger")
search.add_filter("search_type", search_type)
count = search.get_count()
if count:
check = IconWdg( "Has Items", IconWdg.CHECK, width=8 )
td.add(check)
#check.add_style("margin-left: 0px")
check.add_style("margin-top: 4px")
button.add_behavior( {
'type': 'click_up',
'title': 'Trigger',
'search_type': search_type,
'cbjs_action': '''
var class_name = 'tactic.ui.tools.TriggerToolWdg';
var kwargs = {
mode: "search_type",
search_type: bvr.search_type
};
spt.panel.load_popup(bvr.title, class_name, kwargs);
'''
} )
td = table.add_cell()
button = IconButtonWdg(title="Edit Searchable Type", icon=IconWdg.EDIT)
td.add(button)
button.add_behavior( {
'type': 'click_up',
'search_key': search_type_obj.get_search_key(),
'cbjs_action': '''
var class_name = 'tactic.ui.panel.EditWdg';
var kwargs = {
search_type: "sthpw/sobject",
view: "edit_startup",
search_key: bvr.search_key
}
spt.panel.load_popup("Edit Searchable Type", class_name, kwargs);
'''
} )
"""
td = table.add_cell()
button = IconButtonWdg(title="Security", icon=IconWdg.LOCK)
td.add(button)
button.add_behavior( {
'type': 'click_up',
'title': 'Trigger',
'search_type': search_type,
'cbjs_action': '''
alert("security");
'''
} )
"""
columns_wdg = DivWdg()
top.add(columns_wdg)
return top
class UserPanelWdg(BaseRefreshWdg):
def get_help_alias(my):
return 'project-startup-manage-users'
def get_display(my):
search = Search("sthpw/login")
search.add_filter("login", "admin", op="!=")
logins = search.get_sobjects()
top = my.top
top.add_class("spt_panel_user_top")
top.add_style("min-width: 400px")
show_security = my.kwargs.get("show_security")
if show_security not in ['false', False]:
button = ActionButtonWdg(title="Security")
top.add(button)
button.add_style("float: right")
#button.add_style("margin-top: -8px")
button.add_behavior( {
'type': 'click_up',
'cbjs_action': '''
var class_name = 'tactic.ui.startup.SecurityWdg';
spt.tab.set_main_body_tab()
spt.tab.add_new("Security", "Security", class_name)
'''
} )
button = ActionButtonWdg(title="Add", tip="Add New User")
top.add(button)
button.add_style("float: left")
button.add_behavior( {
'type': 'click_up',
'cbjs_action': '''
var class_name = 'tactic.ui.panel.EditWdg';
var kwargs = {
search_type: "sthpw/login",
view: "edit",
show_header: false,
}
var popup = spt.panel.load_popup("Create New User", class_name, kwargs);
var top = bvr.src_el.getParent(".spt_panel_user_top");
popup.on_save_cbk = function() {
spt.panel.refresh(top);
}
'''
} )
security = Environment.get_security()
license = security.get_license()
num_left = license.get_num_licenses_left()
current_users = license.get_current_users()
max_users = license.get_max_users()
top.add('''
<span style="margin-left: 20px; margin-top: 10px">
Users
<span class="badge">%s</span>
</span>
''' % current_users)
if num_left < 1000:
top.add('''
<span style="margin-left: 20px; margin-top: 10px">
Users Left
<span class="badge">%s</span>
</span>
''' % num_left)
top.add("<br clear='all'/>")
#logins = []
if not logins:
"""
arrow_div = DivWdg()
top.add(arrow_div)
arrow_div.add("<b><<< Click to Add</b>")
arrow_div.add_style("position: relative")
arrow_div.add_style("margin-top: -35px")
arrow_div.add_style("margin-left: 35px")
arrow_div.add_style("float: left")
arrow_div.add_style("padding: 5px")
arrow_div.set_box_shadow("1px 1px 2px 2px")
arrow_div.set_round_corners(10, corners=['TL','BL'])
"""
div = DivWdg()
top.add(div)
div.add_style("text-align: center")
div.add_border()
div.add_style("min-height: 150px")
div.add_style("margin: 15px 30px 30px 30px")
div.add_style("padding: 30px 20px 0px 20px")
div.add_color("background", "background3")
icon = IconWdg( "WARNING", IconWdg.WARNING )
div.add(icon)
div.add("<b>No users have been added</b>")
div.add("<br/><br/>")
div.add("For more information, read the help docs: ")
from tactic.ui.app import HelpButtonWdg
help = HelpButtonWdg(alias=my.get_help_alias())
div.add(help)
div.add("<br/>")
div.add("Click on the 'Add' button above to start adding new users.")
return top
div = DivWdg()
top.add(div)
#div.add_style("max-height: 300px")
#div.add_style("overflow-y: auto")
table = Table()
table.set_max_width()
table.add_style("margin-top: 10px")
div.add(table)
# group mouse over
table.add_relay_behavior( {
'type': "mouseover",
'bvr_match_class': 'spt_row',
'cbjs_action': "spt.mouse.table_layout_hover_over({}, {src_el: bvr.src_el, add_color_modifier: -2})"
} )
table.add_relay_behavior( {
'type': "mouseout",
'bvr_match_class': 'spt_row',
'cbjs_action': "spt.mouse.table_layout_hover_out({}, {src_el: bvr.src_el})"
} )
tr = table.add_row()
tr.add_color("color", "color")
tr.add_color("background", "background", -10)
th = table.add_header(" ")
th.add_style("padding: 8px 3px")
th.add_style("text-align: left")
th = table.add_header("Login")
th.add_style("padding: 8px 3px")
th.add_style("text-align: left")
th = table.add_header("First Name")
th.add_style("padding: 8px 3px")
th.add_style("text-align: left")
th = table.add_header("Last Name")
th.add_style("padding: 8px 3px")
th.add_style("text-align: left")
th = table.add_header("Display Name")
th.add_style("padding: 8px 3px")
th.add_style("text-align: left")
th = table.add_header("Activity")
th.add_style("padding: 8px 3px")
th.add_style("text-align: left")
th = table.add_header("Groups")
th.add_style("padding: 8px 3px")
th.add_style("text-align: left")
th = table.add_header("Security")
th.add_style("padding: 8px 3px")
th.add_style("text-align: left")
th = table.add_header("Edit")
th.add_style("padding: 8px 3px")
th.add_style("text-align: left")
for i, login in enumerate(logins):
tr = table.add_row()
tr.add_class("spt_row")
if not i or not i%2:
tr.add_color("background", "background")
else:
tr.add_color("background", "background", -2 )
thumb = ThumbWdg()
thumb.set_sobject(login)
thumb.set_icon_size(45)
td = table.add_cell(thumb)
td = table.add_cell(login.get_value("login"))
td.add_style("padding: 3px")
td = table.add_cell(login.get_value("first_name"))
td.add_style("padding: 3px")
td = table.add_cell(login.get_value("last_name"))
td.add_style("padding: 3px")
td = table.add_cell(login.get_value("display_name"))
td.add_style("padding: 3px")
search_key = login.get_search_key()
login_code = login.get_code()
full_name = login.get_full_name()
td = table.add_cell()
button = IconButtonWdg(tip="Activity", icon=IconWdg.CALENDAR)
td.add(button)
button.add_behavior( {
'type': 'click_up',
'login_code': login_code,
'full_name': full_name,
'cbjs_action': '''
var class_name = 'tactic.ui.tools.ScheduleUserToolWdg';
var kwargs = {
login: bvr.login_code
}
var title = bvr.full_name + ' Schedule';
var top = bvr.src_el.getParent(".spt_dashboard_top");
spt.tab.set_tab_top(top);
spt.tab.add_new("user_schedule", title, class_name, kwargs);
//spt.panel.load_popup("Activty", class_name, kwargs);
'''
} )
td = table.add_cell()
button = IconButtonWdg(title="Groups", icon=IconWdg.GROUP_LINK)
td.add(button)
button.add_behavior( {
'type': 'click_up',
'search_key': search_key,
'cbjs_action': '''
var class_name = 'tactic.ui.startup.GroupAssignWdg';
var kwargs = {
search_key: bvr.search_key
};
var popup = spt.panel.load_popup("Group Assignment", class_name, kwargs);
'''
} )
td = table.add_cell()
button = IconButtonWdg(title="Security", icon=IconWdg.LOCK)
td.add(button)
button.add_behavior( {
'type': 'click_up',
'search_key': search_key,
'cbjs_action': '''
var class_name = 'tactic.ui.startup.GroupSummaryWdg';
var kwargs = {
search_key: bvr.search_key
};
var popup = spt.panel.load_popup("Security Summary", class_name, kwargs);
'''
} )
td = table.add_cell()
button = IconButtonWdg(title="Edit User", icon=IconWdg.EDIT)
td.add(button)
button.add_behavior( {
'type': 'click_up',
'search_key': search_key,
'cbjs_action': '''
var top = bvr.src_el.getParent(".spt_panel_user_top");
var class_name = 'tactic.ui.panel.EditWdg';
var kwargs = {
search_type: "sthpw/login",
view: "edit",
search_key: bvr.search_key
}
var popup = spt.panel.load_popup("Create New User", class_name, kwargs);
popup.on_save_cbk = function() {
spt.panel.refresh(top);
}
'''
} )
return top
| southpawtech/TACTIC-DEV | src/tactic/ui/startup/project_config_wdg.py | Python | epl-1.0 | 34,352 |
# Copyright 2012 Nebula, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from django.utils.translation import ugettext_lazy as _
from openstack_auth import utils
import horizon
from django.conf import settings
class Admin(horizon.Dashboard):
name = _("Admin")
slug = "admin"
if getattr(settings, 'POLICY_CHECK_FUNCTION', None):
policy_rules = (('identity', 'admin_required'),
('image', 'context_is_admin'),
('volume', 'context_is_admin'),
('compute', 'context_is_admin'),
('network', 'context_is_admin'),
('orchestration', 'context_is_admin'),)
else:
permissions = (tuple(utils.get_admin_permissions()),)
horizon.register(Admin)
| yeming233/horizon | openstack_dashboard/dashboards/admin/dashboard.py | Python | apache-2.0 | 1,320 |
for _ in range(int(input())):
A, B = map(int, input().split())
print('Tuzik' if A % 2 == 0 or B % 2 == 0 else 'Vanka')
| knuu/competitive-programming | codechef/ltime26_puppygm.py | Python | mit | 127 |
# vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright 2012 Michael Still
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import fixtures
from nova import test
from nova.virt.disk.mount import loop
def _fake_noop(*args, **kwargs):
return
def _fake_trycmd_losetup_works(*args, **kwargs):
return '/dev/loop0', ''
def _fake_trycmd_losetup_fails(*args, **kwargs):
return '', 'doh'
class LoopTestCase(test.NoDBTestCase):
def test_get_dev(self):
tempdir = self.useFixture(fixtures.TempDir()).path
l = loop.LoopMount(None, tempdir)
self.useFixture(fixtures.MonkeyPatch('nova.utils.trycmd',
_fake_trycmd_losetup_works))
self.useFixture(fixtures.MonkeyPatch('nova.utils.execute',
_fake_noop))
# No error logged, device consumed
self.assertTrue(l.get_dev())
self.assertTrue(l.linked)
self.assertEqual('', l.error)
self.assertEqual('/dev/loop0', l.device)
# Free
l.unget_dev()
self.assertFalse(l.linked)
self.assertEqual('', l.error)
self.assertIsNone(l.device)
def test_inner_get_dev_fails(self):
tempdir = self.useFixture(fixtures.TempDir()).path
l = loop.LoopMount(None, tempdir)
self.useFixture(fixtures.MonkeyPatch('nova.utils.trycmd',
_fake_trycmd_losetup_fails))
# No error logged, device consumed
self.assertFalse(l._inner_get_dev())
self.assertFalse(l.linked)
self.assertNotEqual('', l.error)
self.assertIsNone(l.device)
# Free
l.unget_dev()
self.assertFalse(l.linked)
self.assertIsNone(l.device)
def test_get_dev_timeout(self):
tempdir = self.useFixture(fixtures.TempDir()).path
l = loop.LoopMount(None, tempdir)
self.useFixture(fixtures.MonkeyPatch('time.sleep', _fake_noop))
self.useFixture(fixtures.MonkeyPatch('nova.utils.trycmd',
_fake_trycmd_losetup_fails))
self.useFixture(fixtures.MonkeyPatch(('nova.virt.disk.mount.api.'
'MAX_DEVICE_WAIT'), -10))
# Always fail to get a device
def fake_get_dev_fails():
return False
l._inner_get_dev = fake_get_dev_fails
# Fail to get a device
self.assertFalse(l.get_dev())
def test_unget_dev(self):
tempdir = self.useFixture(fixtures.TempDir()).path
l = loop.LoopMount(None, tempdir)
self.useFixture(fixtures.MonkeyPatch('nova.utils.execute',
_fake_noop))
# This just checks that a free of something we don't have doesn't
# throw an exception
l.unget_dev()
| sacharya/nova | nova/tests/virt/disk/test_loop.py | Python | apache-2.0 | 3,418 |
# Webhooks for external integrations.
from django.utils.translation import ugettext as _
from zerver.lib.actions import check_send_stream_message
from zerver.lib.response import json_success, json_error
from zerver.decorator import REQ, has_request_variables, api_key_only_webhook_view
from zerver.lib.validator import check_dict, check_string
from zerver.models import UserProfile
from django.http import HttpRequest, HttpResponse
from typing import Dict, Any, Iterable, Optional, Text
@api_key_only_webhook_view('HelloWorld')
@has_request_variables
def api_helloworld_webhook(request, user_profile,
payload=REQ(argument_type='body'), stream=REQ(default='test'),
topic=REQ(default='Hello World')):
# type: (HttpRequest, UserProfile, Dict[str, Iterable[Dict[str, Any]]], Text, Optional[Text]) -> HttpResponse
# construct the body of the message
body = 'Hello! I am happy to be here! :smile:'
# try to add the Wikipedia article of the day
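    # (illustrative payload shape, assumed for this example rather than taken
    #  from upstream docs:
    #    {"featured_title": "Marie Curie",
    #     "featured_url": "https://en.wikipedia.org/wiki/Marie_Curie"})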
body_template = '\nThe Wikipedia featured article for today is **[{featured_title}]({featured_url})**'
body += body_template.format(**payload)
# send the message
check_send_stream_message(user_profile, request.client, stream, topic, body)
return json_success()
| amanharitsh123/zulip | zerver/webhooks/helloworld/view.py | Python | apache-2.0 | 1,295 |
from grid.models import Grid
from django.contrib.auth.models import Group, User, Permission
from package.models import Category, PackageExample, Project
from grid.models import Element, Feature, GridPackage
from core.tests import datautil
def load():
category, created = Category.objects.get_or_create(
pk=1,
slug='apps',
title='App',
description='Small components used to build projects.',
)
package1, created = Project.objects.get_or_create(
pk=1,
category=category,
repo_watchers=0,
title='Testability',
pypi_url='',
participants='malcomt,jacobian',
pypi_downloads=0,
repo_url='https://github.com/pydanny/django-la-facebook',
repo_forks=0,
slug='testability',
repo_description='Increase your testing ability with this steroid free supplement.',
)
package2, created = Project.objects.get_or_create(
pk=2,
category=category,
repo_watchers=0,
title='Supertester',
pypi_url='',
participants='thetestman',
pypi_downloads=0,
repo_url='https://github.com/pydanny/django-uni-form',
repo_forks=0,
slug='supertester',
repo_description='Test everything under the sun with one command!',
)
package3, created = Project.objects.get_or_create(
pk=3,
category=category,
repo_watchers=0,
title='Serious Testing',
pypi_url='',
participants='pydanny',
pypi_downloads=0,
repo_url='https://github.com/opencomparison/opencomparison',
repo_forks=0,
slug='serious-testing',
repo_description='Make testing as painless as waxing your legs.',
)
package4, created = Project.objects.get_or_create(
pk=4,
category=category,
repo_watchers=0,
title='Another Test',
pypi_url='',
participants='pydanny',
pypi_downloads=0,
repo_url='https://github.com/djangopackages/djangopackages',
repo_forks=0,
slug='another-test',
repo_description='Yet another test package, with no grid affiliation.',
)
grid1, created = Grid.objects.get_or_create(
pk=1,
description='A grid for testing.',
title='Testing',
is_locked=False,
slug='testing',
)
grid2, created = Grid.objects.get_or_create(
pk=2,
description='Another grid for testing.',
title='Another Testing',
is_locked=False,
slug='another-testing',
)
gridpackage1, created = GridPackage.objects.get_or_create(
pk=1,
package=package1,
grid=grid1,
)
gridpackage2, created = GridPackage.objects.get_or_create(
pk=2,
package=package1,
grid=grid1,
)
gridpackage3, created = GridPackage.objects.get_or_create(
pk=3,
package=package3,
grid=grid1,
)
gridpackage4, created = GridPackage.objects.get_or_create(
pk=4,
package=package3,
grid=grid2,
)
gridpackage5, created = GridPackage.objects.get_or_create(
pk=5,
package=package2,
grid=grid1,
)
feature1, created = Feature.objects.get_or_create(
pk=1,
title='Has tests?',
grid=grid1,
description='Does this package come with tests?',
)
feature2, created = Feature.objects.get_or_create(
pk=2,
title='Coolness?',
grid=grid1,
description='Is this package cool?',
)
element, created = Element.objects.get_or_create(
pk=1,
text='Yes',
feature=feature1,
grid_package=gridpackage1,
)
group1, created = Group.objects.get_or_create(
pk=1,
name='Moderators',
#permissions=[[u'delete_gridpackage', u'grid', u'gridpackage'], [u'delete_feature', u'grid', u'feature']],
)
group1.permissions.clear()
group1.permissions = [
Permission.objects.get(codename='delete_gridpackage'),
Permission.objects.get(codename='delete_feature')
]
user1, created = User.objects.get_or_create(
pk=1,
username='user',
first_name='',
last_name='',
is_active=True,
is_superuser=False,
is_staff=False,
last_login='2010-01-01 12:00:00',
email='',
date_joined='2010-01-01 12:00:00',
)
user1.set_password('user')
user1.save()
user2, created = User.objects.get_or_create(
pk=2,
username='cleaner',
first_name='',
last_name='',
is_active=True,
is_superuser=False,
is_staff=False,
last_login='2010-01-01 12:00:00',
#groups=[group1],
email='',
date_joined='2010-01-01 12:00:00',
)
user2.groups = [group1]
user2.set_password('cleaner')
user2.save()
user3, created = User.objects.get_or_create(
pk=3,
username='staff',
first_name='',
last_name='',
is_active=True,
is_superuser=False,
is_staff=True,
last_login='2010-01-01 12:00:00',
email='',
date_joined='2010-01-01 12:00:00',
)
user3.set_password('staff')
user3.save()
user4, created = User.objects.get_or_create(
pk=4,
username='admin',
first_name='',
last_name='',
is_active=True,
is_superuser=True,
is_staff=True,
last_login='2010-01-01 12:00:00',
email='',
date_joined='2010-01-01 12:00:00',
)
user4.set_password('admin')
user4.save()
packageexample, created = PackageExample.objects.get_or_create(
pk=1,
package=package1,
url='http://www.example.com/',
active=True,
title='www.example.com',
)
datautil.reset_sequences(Grid, Group, User, Permission, Category, PackageExample,
Project, Element, Feature, GridPackage)
| noisy/steemprojects.com | feeds/tests/data.py | Python | mit | 6,035 |
# Copyright (c) 2016 Hitachi Data Systems.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
"""add_share_snapshot_access
Revision ID: a77e2ad5012d
Revises: e1949a93157a
Create Date: 2016-07-15 13:32:19.417771
"""
# revision identifiers, used by Alembic.
revision = 'a77e2ad5012d'
down_revision = 'e1949a93157a'
from manila.common import constants
from manila.db.migrations import utils
from alembic import op
import sqlalchemy as sa
def upgrade():
op.create_table(
'share_snapshot_access_map',
sa.Column('id', sa.String(36), primary_key=True),
sa.Column('created_at', sa.DateTime),
sa.Column('updated_at', sa.DateTime),
sa.Column('deleted_at', sa.DateTime),
sa.Column('deleted', sa.String(36), default='False'),
sa.Column('share_snapshot_id', sa.String(36),
sa.ForeignKey('share_snapshots.id',
name='ssam_snapshot_fk')),
sa.Column('access_type', sa.String(255)),
sa.Column('access_to', sa.String(255))
)
op.create_table(
'share_snapshot_instance_access_map',
sa.Column('id', sa.String(36), primary_key=True),
sa.Column('created_at', sa.DateTime),
sa.Column('updated_at', sa.DateTime),
sa.Column('deleted_at', sa.DateTime),
sa.Column('deleted', sa.String(36), default='False'),
sa.Column('share_snapshot_instance_id', sa.String(36),
sa.ForeignKey('share_snapshot_instances.id',
name='ssiam_snapshot_instance_fk')),
sa.Column('access_id', sa.String(36),
sa.ForeignKey('share_snapshot_access_map.id',
name='ssam_access_fk')),
sa.Column('state', sa.String(255),
default=constants.ACCESS_STATE_QUEUED_TO_APPLY)
)
op.create_table(
'share_snapshot_instance_export_locations',
sa.Column('id', sa.String(36), primary_key=True),
sa.Column('created_at', sa.DateTime),
sa.Column('updated_at', sa.DateTime),
sa.Column('deleted_at', sa.DateTime),
sa.Column('deleted', sa.String(36), default='False'),
sa.Column('share_snapshot_instance_id', sa.String(36),
sa.ForeignKey('share_snapshot_instances.id',
name='ssiel_snapshot_instance_fk')),
sa.Column('path', sa.String(2000)),
sa.Column('is_admin_only', sa.Boolean, default=False, nullable=False)
)
op.add_column('shares',
sa.Column('mount_snapshot_support', sa.Boolean,
default=False))
connection = op.get_bind()
shares_table = utils.load_table('shares', connection)
# pylint: disable=no-value-for-parameter
op.execute(
shares_table.update().where(
shares_table.c.deleted == 'False').values({
'mount_snapshot_support': False,
})
)
def downgrade():
op.drop_table('share_snapshot_instance_export_locations')
op.drop_table('share_snapshot_instance_access_map')
op.drop_table('share_snapshot_access_map')
op.drop_column('shares', 'mount_snapshot_support')
| openstack/manila | manila/db/migrations/alembic/versions/a77e2ad5012d_add_share_snapshot_access.py | Python | apache-2.0 | 3,742 |
from pyccel.decorators import types
#==============================================================================
@types( int )
def sum_natural_numbers( n ):
x = 0
for i in range( 1, n+1 ):
x += i
return x
# ...
@types( int )
def factorial( n ):
x = 1
for i in range( 2, n+1 ):
x *= i
return x
# ...
@types( int )
def fibonacci( n ):
x = 0
y = 1
for i in range( n ):
z = x+y
x = y
y = z
return x
# ...
@types( int )
def double_loop( n ):
x = 0
for i in range( 3, 10 ):
x += 1
y = n*x
for j in range( 4, 15 ):
z = x-y
return z
# ...
@types( 'int[:,:](order=C)' )
def double_loop_on_2d_array_C( z ):
from numpy import shape
s = shape( z )
m = s[0]
n = s[1]
for i in range( m ):
for j in range( n ):
z[i,j] = i-j
# ...
@types( 'int[:,:](order=F)' )
def double_loop_on_2d_array_F( z ):
from numpy import shape
s = shape( z )
m = s[0]
n = s[1]
for i in range( m ):
for j in range( n ):
z[i,j] = i-j
# ...
@types( 'int[:,:](order=C)' )
def product_loop_on_2d_array_C( z ):
from numpy import shape
from itertools import product
s = shape( z )
m = s[0]
n = s[1]
x = [i for i in range(m)]
y = [j for j in range(n)]
for i,j in product( x, y ):
z[i,j] = i-j
# ...
@types( 'int[:,:](order=F)' )
def product_loop_on_2d_array_F( z ):
from numpy import shape
from itertools import product
s = shape( z )
m = s[0]
n = s[1]
x = [i for i in range(m)]
y = [j for j in range(n)]
for i,j in product( x, y ):
z[i,j] = i-j
# ...
@types( 'int[:]' )
def map_on_1d_array( z ):
@types( int )
def f( x ):
return x+5
res = 0
for v in map( f, z ):
res *= v
return res
# ...
@types( 'int[:]' )
def enumerate_on_1d_array( z ):
res = 0
for i,v in enumerate( z ):
res += v*i
return res
# ...
@types( int )
def zip_prod( m ):
x = [ i for i in range(m)]
y = [2*j for j in range(m)]
res = 0
for i1,i2 in zip( x, y ):
res += i1*i2
return res
| ratnania/pyccel | tests/epyccel/modules/loops.py | Python | mit | 2,225 |
# -*- coding:Utf8 -*-
# Example of a web server serving several simple pages.
import cherrypy
class MonSiteWeb(object):
def index(self):
        # Return an HTML page containing a link to another page
        # (which will be produced by another method):
        return '<h2>Please <a href="unMessage">click here</a> '\
               "to access a piece of crucially important information.</h2>"
index.exposed = True
def unMessage(self):
        # The must-see message:
        return "<h1>Programming is awesome!</h1>"
unMessage.exposed = True
cherrypy.quickstart(MonSiteWeb(), config ="tutoriel.conf")
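# Illustrative sketch of what tutoriel.conf might contain (assumed contents,
# not shipped with this snippet):
#
#   [global]
#   server.socket_host = "127.0.0.1"
#   server.socket_port = 8080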
| widowild/messcripts | exercice/python3/chap17/site_web_02.py | Python | gpl-3.0 | 661 |
#-*- encoding: utf-8 -*-
import ConfigParser
import string
import os
import sys
cf = ConfigParser.ConfigParser()
cf.read("test.conf")
# Return all sections
s = cf.sections()
print 'section:', s
o = cf.options("db")
print 'options:', o
v = cf.items("db")
print 'db:', v
print '-' * 60
# Values can be read back with their proper types
db_host = cf.get("db", "db_host")
db_port = cf.getint("db", "db_port")
db_user = cf.get("db", "db_user")
db_pass = cf.get("db", "db_pass")
# These are returned as integers
threads = cf.getint("concurrent", "thread")
processors = cf.getint("concurrent", "processor")
print "db_host:", db_host
print "db_port:", db_port
print "db_user:", db_user
print "db_pass:", db_pass
print "thread:", threads
print "processor:", processors
# Modify a value and write it back
cf.set("db", "db_pass", "zhaowei")
cf.write(open("test.conf", "w"))
| quchunguang/test | pythonchallenge/config.py | Python | mit | 845 |
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
from enum import Enum, EnumMeta
from six import with_metaclass
class _CaseInsensitiveEnumMeta(EnumMeta):
def __getitem__(self, name):
return super().__getitem__(name.upper())
def __getattr__(cls, name):
"""Return the enum member matching `name`
We use __getattr__ instead of descriptors or inserting into the enum
class' __dict__ in order to support `name` and `value` being both
properties for enum members (which live in the class' __dict__) and
enum members themselves.
"""
try:
return cls._member_map_[name.upper()]
except KeyError:
raise AttributeError(name)
class ActionType(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
"""The type of the action.
"""
EMAIL_CONTACTS = "EmailContacts"
AUTO_RENEW = "AutoRenew"
class DeletionRecoveryLevel(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
"""Reflects the deletion recovery level currently in effect for keys in the current vault. If it
contains 'Purgeable' the key can be permanently deleted by a privileged user; otherwise, only
the system can purge the key, at the end of the retention interval.
"""
#: Soft-delete is not enabled for this vault. A DELETE operation results in immediate and
#: irreversible data loss.
PURGEABLE = "Purgeable"
#: Soft-delete is enabled for this vault; A privileged user may trigger an immediate, irreversible
#: deletion(purge) of a deleted entity.
RECOVERABLE_PURGEABLE = "Recoverable+Purgeable"
#: Soft-delete is enabled for this vault and purge has been disabled. A deleted entity will remain
#: in this state until recovered, or the end of the retention interval.
RECOVERABLE = "Recoverable"
#: Soft-delete is enabled for this vault, and the subscription is protected against immediate
#: deletion.
RECOVERABLE_PROTECTED_SUBSCRIPTION = "Recoverable+ProtectedSubscription"
class JsonWebKeyCurveName(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
"""Elliptic curve name. For valid values, see JsonWebKeyCurveName.
"""
#: The NIST P-256 elliptic curve, AKA SECG curve SECP256R1.
P256 = "P-256"
#: The NIST P-384 elliptic curve, AKA SECG curve SECP384R1.
P384 = "P-384"
#: The NIST P-521 elliptic curve, AKA SECG curve SECP521R1.
P521 = "P-521"
#: The SECG SECP256K1 elliptic curve.
SECP256_K1 = "SECP256K1"
class JsonWebKeyEncryptionAlgorithm(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
"""algorithm identifier
"""
RSA_OAEP = "RSA-OAEP"
RSA_OAEP256 = "RSA-OAEP-256"
RSA1_5 = "RSA1_5"
class JsonWebKeyOperation(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
"""JSON web key operations. For more information, see JsonWebKeyOperation.
"""
ENCRYPT = "encrypt"
DECRYPT = "decrypt"
SIGN = "sign"
VERIFY = "verify"
WRAP_KEY = "wrapKey"
UNWRAP_KEY = "unwrapKey"
class JsonWebKeySignatureAlgorithm(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
"""The signing/verification algorithm identifier. For more information on possible algorithm
types, see JsonWebKeySignatureAlgorithm.
"""
PS256 = "PS256"
PS384 = "PS384"
PS512 = "PS512"
RS256 = "RS256"
RS384 = "RS384"
RS512 = "RS512"
RSNULL = "RSNULL"
ES256 = "ES256"
ES384 = "ES384"
ES512 = "ES512"
ECDSA256 = "ECDSA256"
class JsonWebKeyType(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
"""JsonWebKey key type (kty).
"""
EC = "EC"
EC_HSM = "EC-HSM"
RSA = "RSA"
RSA_HSM = "RSA-HSM"
OCT = "oct"
class KeyUsageType(with_metaclass(_CaseInsensitiveEnumMeta, str, Enum)):
DIGITAL_SIGNATURE = "digitalSignature"
NON_REPUDIATION = "nonRepudiation"
KEY_ENCIPHERMENT = "keyEncipherment"
DATA_ENCIPHERMENT = "dataEncipherment"
KEY_AGREEMENT = "keyAgreement"
KEY_CERT_SIGN = "keyCertSign"
C_RL_SIGN = "cRLSign"
ENCIPHER_ONLY = "encipherOnly"
DECIPHER_ONLY = "decipherOnly"
| Azure/azure-sdk-for-python | sdk/keyvault/azure-keyvault-secrets/azure/keyvault/secrets/_generated/v2016_10_01/models/_key_vault_client_enums.py | Python | mit | 4,522 |
import functools
def normalizeCode(code):
lines = [l for l in code.split('\n')]
if len(lines) < 3:
raise ValueError("Invalid number of lines ({0})".format(len(lines)))
numChars = max([len(l) for l in lines])
# assure that all lines have the same amount of characters
def adjustLine(l):
        return l + ' ' * max(numChars-len(l), 0)
lines = [adjustLine(l) for l in lines]
return '\n'.join(lines)
def splitDigits(code):
lines = [l for l in code.split('\n')]
numChars = max([len(l) for l in lines])
numDigits = numChars//3
digits = ['']*numDigits
for i in range(numDigits):
digits[i] += lines[0][i*3:i*3+3] + '\n'
digits[i] += lines[1][i*3:i*3+3] + '\n'
digits[i] += lines[2][i*3:i*3+3]
return digits
__numbers = '''
_ _ _ _ _ _ _ _
| | | _| _||_||_ |_ ||_||_|
|_| ||_ _| | _||_| ||_| _|
'''[1:]#remove first newline
#create a dict that maps each digit in string representation to its number (also str to keep leading 0)
__digitMap = dict([(d,str(i)) for i,d in enumerate(splitDigits(normalizeCode(__numbers)))])
def convertDigit(digit):
try:
return __digitMap[digit]
except KeyError:
return '?'
def convertDigits(digits):
for d in splitDigits(digits):
yield convertDigit(d)
def checksum(number):
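    # Account-number check as implemented below: weight the nine digits
    # 9..1 from the left and require the weighted sum to be divisible by 11.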
if len(number) == 9:
sum = 0
for i, n in enumerate(number):
try:
sum += int(n) * (9-i)
except ValueError:
                return False
        return sum % 11 == 0
    return False
def recover(code):
recovered = []
def replace(code, i, c):
#print(code[:i] + c + code[i+1:])
number = ''.join(convertDigits(code[:i] + c + code[i+1:]))
if checksum(number):
recovered.append(number)
for i,c in enumerate(code):
if c == '_' or c == '|':
replace(code, i, ' ')
elif c == ' ':
replace(code, i, '_')
replace(code, i, '|')
return recovered
def test(code, expected):
code = normalizeCode(code)
digits = splitDigits(code)
number = ''.join([convertDigit(d) for d in digits])
actual = number
if '?' in actual or not checksum(actual):
recovered = recover(code)
if not recovered:
actual += ' ILL'
elif len(recovered) == 1:
actual = recovered[0]
else:
actual = number + ' AMB ' + str(recovered)
if (actual != expected):
print(code)
print("Got: {0}\n Expected: {1}) ".format(actual, expected))
else:
print(actual)
# for k,v in __digitMap.items():
# print(k)
# print(v)
with open('BankOCR_Test.txt', 'r') as f:
codes = [c for c in f.read().split(';\n') if c]
for code in codes:
lines = code.split('\n')
test('\n'.join(lines[0:3]), lines[3])
| M-Mueller/Python-Exercises | BankOCR.py | Python | mit | 2,891 |
# -*- coding: utf-8 -*-
# Copyright (c) 2014 Docker.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
docker_registry.core.lru
~~~~~~~~~~~~~~~~~~~~~~~~~~
Redis based LRU.
Can be activated or de-activated globally.
Drivers are largely encouraged to use it.
By default, doesn't run, until one calls init().
"""
import functools
import logging
import redis
logger = logging.getLogger(__name__)
redis_conn = None
cache_prefix = None
def init(enable=True,
host='localhost', port=6379, db=0, password=None, path='/'):
global redis_conn, cache_prefix
if not enable:
redis_conn = None
return
logging.info('Enabling storage cache on Redis')
logging.info('Redis config: {0}'.format({
'host': host,
'port': port,
'db': db,
'password': password,
'path': path
}))
redis_conn = redis.StrictRedis(host=host,
port=int(port),
db=int(db),
password=password)
cache_prefix = 'cache_path:{0}'.format(path)
def cache_key(key):
return cache_prefix + key
def set(f):
@functools.wraps(f)
def wrapper(*args):
content = args[-1]
key = args[-2]
key = cache_key(key)
redis_conn.set(key, content)
return f(*args)
if redis_conn is None:
return f
return wrapper
def get(f):
@functools.wraps(f)
def wrapper(*args):
key = args[-1]
key = cache_key(key)
content = redis_conn.get(key)
if content is not None:
return content
# Refresh cache
content = f(*args)
if content is not None:
redis_conn.set(key, content)
return content
if redis_conn is None:
return f
return wrapper
def remove(f):
@functools.wraps(f)
def wrapper(*args):
key = args[-1]
key = cache_key(key)
redis_conn.delete(key)
return f(*args)
if redis_conn is None:
return f
return wrapper
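# Minimal usage sketch (illustrative only, not part of the original module):
# a storage driver calls init() once at startup, then decorates its accessors
# so reads check Redis first and writes refresh the cached copy.
#
#     from docker_registry.core import lru
#
#     lru.init(enable=True, host='localhost', port=6379, db=0, path='/')
#
#     class Storage(object):
#         @lru.get
#         def get_content(self, path):
#             ...  # backend read, only reached on a cache miss
#
#         @lru.set
#         def put_content(self, path, content):
#             ...  # backend write; the cache entry is set first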
| glenux/contrib-docker-registry | depends/docker-registry-core/docker_registry/core/lru.py | Python | apache-2.0 | 2,565 |
#!/usr/bin/env python
"""
Ex 1. Construct a script that retrieves NAPALM facts from two IOS routers, two Arista switches, and one Junos device.
pynet-rtr1 (Cisco IOS) 184.105.247.70
pynet-rtr2 (Cisco IOS) 184.105.247.71
pynet-sw1 (Arista EOS) 184.105.247.72
pynet-sw2 (Arista EOS) 184.105.247.73
juniper-srx 184.105.247.76
Retrieve the 'model' number from each device and print the model to standard out.
As part of this exercise define the devices that you use in a Python file (for example my_devices.py) and import
these devices into your program. Optionally, define the devices in a YAML file and read this my_devices.yml file in.
"""
from __future__ import print_function
from __future__ import unicode_literals
from getpass import getpass
from pprint import pprint
from napalm_base import get_network_driver
from pyeapi.eapilib import CommandError
import yaml
import re
YAML_FILE = 'my_devices.yml'
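# Illustrative my_devices.yml layout (assumed, not part of the exercise files;
# the shared password is injected at runtime below, so each entry only needs
# the NAPALM driver name, hostname and username):
#
#   - device_type: ios
#     hostname: 184.105.247.70
#     username: admin
#   - device_type: eos
#     hostname: 184.105.247.72
#     username: admin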
def main():
with open(YAML_FILE) as f:
my_devices = yaml.load(f)
#pprint(my_devices)
pwd = getpass()
print("{:<20} {:<20} {:<20}".format("Device Type", "Hostname", "Model"))
for device_dict in my_devices:
device_dict['password'] = pwd
device_type = device_dict.pop('device_type')
driver = get_network_driver(device_type)
device=driver(**device_dict)
device.open()
facts = device.get_facts()
print('*' * 80)
print("{:<20} {:<20} {:<20}".format(device_type, device_dict['hostname'], facts['model']))
print('*' * 80)
        print()
if __name__ == "__main__":
main()
| jrslocum17/pynet_test | Bonus3/napalm_get_model.py | Python | apache-2.0 | 1,612 |
# -*- coding: utf8 -*-
# Copyright (c) 2014 by Ecreall under licence AGPL terms
# available on http://www.gnu.org/licenses/agpl.html
# licence: AGPL
# author: Amen Souissi
import datetime
import pytz
from persistent.list import PersistentList
from pyramid.httpexceptions import HTTPFound
from dace.util import getSite
from dace.objectofcollaboration.principal.util import (
has_role,
has_any_roles,
grant_roles,
get_current)
from dace.processinstance.activity import (
InfiniteCardinality,
ActionType)
from ..user_management.behaviors import (
global_user_processsecurity,
access_user_processsecurity)
from novaideo.content.interface import (
INovaIdeoApplication,
IWebAdvertising,
IAdvertising)
from novaideo.core import access_action, serialize_roles
from novaideo import _
def get_access_key(obj):
if 'published' in obj.state:
return ['always']
else:
result = serialize_roles(
(('Owner', obj), 'Moderator'))
return result
def seewebadvertising_processsecurity_validation(process, context):
return access_user_processsecurity(process, context) and \
('published' in context.state or \
has_any_roles(
roles=(('Owner', context), 'Moderator')))
@access_action(access_key=get_access_key)
class SeeWebAdvertising(InfiniteCardinality):
"""SeeFile is the behavior allowing access to context"""
title = _('Details')
context = IWebAdvertising
actionType = ActionType.automatic
processsecurity_validation = seewebadvertising_processsecurity_validation
def start(self, context, request, appstruct, **kw):
return {}
def redirect(self, context, request, **kw):
return HTTPFound(request.resource_url(context, "@@index"))
def createwebadvertising_roles_validation(process, context):
return has_role(role=('Moderator',))
class CreateWebAdvertising(InfiniteCardinality):
style_descriminator = 'admin-action'
style_picto = 'glyphicon glyphicon-picture'
style_order = 100
title = _('Create an announcement')
submission_title = _('Save')
context = INovaIdeoApplication
roles_validation = createwebadvertising_roles_validation
def start(self, context, request, appstruct, **kw):
root = getSite()
newadvertising = appstruct['_object_data']
root.addtoproperty('advertisings', newadvertising)
newadvertising.state.append('editable')
grant_roles(roles=(('Owner', newadvertising), ))
newadvertising.setproperty('author', get_current())
newadvertising.reindex()
return {'newcontext': newadvertising}
def redirect(self, context, request, **kw):
return HTTPFound(request.resource_url(kw['newcontext'], "@@index"))
def edit_roles_validation(process, context):
return has_any_roles(
roles=(('Owner', context), 'Moderator'))
def edit_processsecurity_validation(process, context):
return global_user_processsecurity()
def edit_state_validation(process, context):
if 'editable' in context.state:
return True
return has_role(role=('Moderator', ))
class EditWebAdvertising(InfiniteCardinality):
style = 'button' #TODO add style abstract class
style_descriminator = 'text-action'
style_picto = 'glyphicon glyphicon-pencil'
style_order = 1
submission_title = _('Save')
context = IWebAdvertising
roles_validation = edit_roles_validation
state_validation = edit_state_validation
processsecurity_validation = edit_processsecurity_validation
def start(self, context, request, appstruct, **kw):
context.modified_at = datetime.datetime.now(tz=pytz.UTC)
if context.picture:
context.rename(context.picture.__name__, context.picture.title)
context.reindex()
return {}
def redirect(self, context, request, **kw):
return HTTPFound(request.resource_url(context, "@@index"))
def publish_roles_validation(process, context):
return has_any_roles(roles=('Moderator',))
def publish_processsecurity_validation(process, context):
return global_user_processsecurity()
def publish_state_validation(process, context):
return 'editable' in context.state
class PublishAdvertising(InfiniteCardinality):
style = 'button' #TODO add style abstract class
style_descriminator = 'global-action'
style_interaction = 'ajax-action'
style_picto = 'glyphicon glyphicon-ok'
style_order = 5
submission_title = _('Continue')
context = IAdvertising
roles_validation = publish_roles_validation
state_validation = publish_state_validation
processsecurity_validation = publish_processsecurity_validation
def start(self, context, request, appstruct, **kw):
context.state = PersistentList(['published'])
context.modified_at = datetime.datetime.now(tz=pytz.UTC)
context.reindex()
return {}
def redirect(self, context, request, **kw):
return HTTPFound(request.resource_url(context, "@@index"))
def archive_roles_validation(process, context):
return has_any_roles(roles=('Moderator',))
def archive_processsecurity_validation(process, context):
return global_user_processsecurity()
def archive_state_validation(process, context):
return 'published' in context.state
class ArchiveAdvertising(InfiniteCardinality):
style = 'button' #TODO add style abstract class
style_descriminator = 'global-action'
style_interaction = 'ajax-action'
style_picto = 'glyphicon glyphicon-folder-close'
style_order = 6
submission_title = _('Continue')
context = IAdvertising
roles_validation = archive_roles_validation
state_validation = archive_state_validation
processsecurity_validation = archive_processsecurity_validation
def start(self, context, request, appstruct, **kw):
context.state = PersistentList(['archived'])
context.modified_at = datetime.datetime.now(tz=pytz.UTC)
context.reindex()
return {}
def redirect(self, context, request, **kw):
return HTTPFound(request.resource_url(context, "@@index"))
def remove_roles_validation(process, context):
return has_any_roles(roles=('Moderator',))
def remove_processsecurity_validation(process, context):
return global_user_processsecurity()
class RemoveAdvertising(InfiniteCardinality):
style = 'button' #TODO add style abstract class
style_descriminator = 'global-action'
style_interaction = 'ajax-action'
style_picto = 'glyphicon glyphicon-trash'
style_order = 7
submission_title = _('Continue')
context = IAdvertising
roles_validation = remove_roles_validation
processsecurity_validation = remove_processsecurity_validation
def start(self, context, request, appstruct, **kw):
root = getSite()
root.delfromproperty('advertisings', context)
return {}
def redirect(self, context, request, **kw):
root = getSite()
return HTTPFound(request.resource_url(root, ""))
def seeads_roles_validation(process, context):
return has_role(role=('Moderator',))
def seeads_processsecurity_validation(process, context):
return global_user_processsecurity()
class SeeAdvertisings(InfiniteCardinality):
style = 'button' #TODO add style abstract class
style_descriminator = 'admin-action'
style_picto = 'glyphicon glyphicon-picture'
style_order = 100
isSequential = False
context = INovaIdeoApplication
roles_validation = seeads_roles_validation
processsecurity_validation = seeads_processsecurity_validation
def start(self, context, request, appstruct, **kw):
return {}
def redirect(self, context, request, **kw):
return HTTPFound(request.resource_url(context))
#TODO behaviors
| ecreall/nova-ideo | novaideo/content/processes/advertising_management/behaviors.py | Python | agpl-3.0 | 7,840 |
from __future__ import absolute_import
from django.test import Client
from django.core.handlers.wsgi import WSGIRequest
from django.core.handlers.base import BaseHandler
from celery.utils.compat import WhateverIO
class RequestFactory(Client):
"""Class that lets you create mock Request objects for use in testing.
Usage:
rf = RequestFactory()
get_request = rf.get('/hello/')
post_request = rf.post('/submit/', {'foo': 'bar'})
This class re-uses the django.test.client.Client interface, docs here:
http://www.djangoproject.com/documentation/testing/#the-test-client
Once you have a request object you can pass it to any view function,
just as if that view had been hooked up using a URLconf.
"""
def request(self, **request):
"""Similar to parent class, but returns the request object as
soon as it has created it."""
environ = {
'HTTP_COOKIE': self.cookies,
'HTTP_USER_AGENT': 'Django UnitTest Client 1.0',
'REMOTE_ADDR': '127.0.0.1',
'PATH_INFO': '/',
'QUERY_STRING': '',
'REQUEST_METHOD': 'GET',
'SCRIPT_NAME': '',
'SERVER_NAME': 'testserver',
'SERVER_PORT': 80,
'SERVER_PROTOCOL': 'HTTP/1.1',
'wsgi.input': WhateverIO(),
}
environ.update(self.defaults)
environ.update(request)
return WSGIRequest(environ)
class MockRequest(object):
def __init__(self):
handler = BaseHandler()
handler.load_middleware()
self.request_factory = RequestFactory()
self.middleware = handler._request_middleware
def _make_request(self, request_method, *args, **kwargs):
request_method_handler = getattr(self.request_factory, request_method)
request = request_method_handler(*args, **kwargs)
[middleware_processor(request)
for middleware_processor in self.middleware]
return request
def get(self, *args, **kwargs):
return self._make_request("get", *args, **kwargs)
def post(self, *args, **kwargs):
return self._make_request("post", *args, **kwargs)
def put(self, *args, **kwargs):
return self._make_request("put", *args, **kwargs)
def delete(self, *args, **kwargs):
return self._make_request("delete", *args, **kwargs)
| mozilla/firefox-flicks | vendor-local/lib/python/djcelery/tests/req.py | Python | bsd-3-clause | 2,380 |
#!/usr/bin/env python3
"""Project Euler - Problem 52 Module"""
import pelib
def problem52():
"""Problem 52 - Permuted multiples"""
x = 1
while True:
ss_str_2x = sorted(set(str(2 * x)))
ss_str_3x = sorted(set(str(3 * x)))
ss_str_4x = sorted(set(str(4 * x)))
ss_str_5x = sorted(set(str(5 * x)))
ss_str_6x = sorted(set(str(6 * x)))
if ss_str_2x == ss_str_3x == ss_str_4x == ss_str_5x == ss_str_6x:
#print(x, ss_str_2x)
return x
x += 1
def run():
"""Default Run Method"""
return problem52()
if __name__ == '__main__':
print("Result: ", run())
| rado0x54/project-euler | python/problem0052.py | Python | mit | 651 |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
import functools
def memoize(obj):
"""Memoize decorator, as seen on
`here <https://wiki.python.org/moin/PythonDecoratorLibrary#Memoize>`_
"""
cache = obj._cache = {}
@functools.wraps(obj)
def memoizer(*args, **kwargs):
key = str(args) + str(kwargs)
if key not in cache:
cache[key] = obj(*args, **kwargs)
return cache[key]
return memoizer
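# Illustrative usage sketch (not part of the original module): repeated calls
# with the same arguments are served from the cache keyed on the stringified
# call arguments, so the body runs only once per distinct input.
if __name__ == '__main__':
    @memoize
    def slow_square(x):
        print('computing %s' % x)
        return x * x
    print(slow_square(4))   # computes and prints 16
    print(slow_square(4))   # cache hit: prints 16 without recomputing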
| galactics/space-api | beyond/utils/memoize.py | Python | gpl-3.0 | 453 |
# Copyright 2018 Creu Blanca
# License LGPL-3.0 or later (https://www.gnu.org/licenses/lgpl.html).
from odoo import api, fields, models
class ResConfigSettings(models.TransientModel):
_inherit = 'res.config.settings'
group_stock_request_order = fields.Boolean(
implied_group='stock_request.group_stock_request_order')
module_stock_request_purchase = fields.Boolean(
string='Stock Requests for Purchases')
module_stock_request_kanban = fields.Boolean(
string='Stock Requests Kanban integration')
stock_request_allow_virtual_loc = fields.Boolean(
related='company_id.stock_request_allow_virtual_loc',
readonly=False)
module_stock_request_analytic = fields.Boolean(
string='Stock Requests Analytic integration')
module_stock_request_submit = fields.Boolean(
string='Submitted state in Stock Requests')
# Dependencies
@api.onchange('stock_request_allow_virtual_loc')
def _onchange_stock_request_allow_virtual_loc(self):
if self.stock_request_allow_virtual_loc:
self.group_stock_multi_locations = True
| Vauxoo/stock-logistics-warehouse | stock_request/models/res_config_settings.py | Python | agpl-3.0 | 1,123 |
from sling import Application, logger
from sling import logger
from sling.ext import hello
import localmodule
app = Application([
hello,
])
# Other way of installing a module
app.add_module(localmodule)
# Install a Falcon middleware
class HelloMiddleware(object):
def process_request(self, req, res):
logger.info('hellomiddleware processing request...')
def process_resource(self, req, res, resource):
logger.info('hellomiddleware processing resource...')
def process_response(self, req, res, resource):
logger.info('hellomiddleware processing response...')
app.add_middleware(HelloMiddleware)
# Install a standard WSGI Middleware
from werkzeug.contrib.profiler import ProfilerMiddleware
app.add_wsgi_middleware(ProfilerMiddleware, sort_by=('cumtime',), restrictions=('/opt', 30))
# Install werkzeug debugger
from werkzeug.debug import DebuggedApplication
app.add_wsgi_middleware(DebuggedApplication, evalex=True)
wsgi = app.wsgi
if __name__ == '__main__':
app.manage()
| slinghq/sling | examples/app/app.py | Python | apache-2.0 | 1,030 |
# -*- coding: utf-8 -*-
class LineCallback(object):
def __init__(self, callback):
self.callback = callback
def Pinverified(self, pin):
self.callback("Enter PinCode '" + pin + "' to your mobile phone in 2 minutes")
def QrUrl(self, url):
self.callback("Login qrcode to your smartphone in 2 minutes\nModify By Hyunakyu\n" + url)
def default(self, str):
self.callback(str)
| Hyunakyu/FFR | LineAlpha/LineApi/LineCallback.py | Python | gpl-3.0 | 435 |
# -*- coding: utf-8 -*-
import scrapy
from rrs.items import RrsItem, FoodItem
class OnepageSpider(scrapy.Spider):
name = "onepage"
allowed_domains = ["cd.meituan.com"]
start_urls = [
'http://cd.meituan.com/category/meishi/'
]
def parse(self, response):
for sel in response.css("div.basic.cf > a"):
rrs_item = RrsItem()
rrs_item['r_name'] = sel.xpath('text()').extract()
rrs_item['r_link'] = sel.xpath('@href').extract()
#url = response.urljoin(sel.xpath('@href').extract())
yield scrapy.Request(rrs_item['r_link'][0], meta = {'rrs_item' : rrs_item},
callback = self.parse_food)
def parse_food(self, response):
rrs_item = response.meta['rrs_item']
r_foods = []
for sel in response.css("div.menu__items > table > tr > td"):
r_food = FoodItem()
r_food['f_name'] = sel.xpath('text()').extract()
r_food['f_price'] = sel.xpath('span/text()').extract()
r_foods.append(r_food)
rrs_item['r_foods'] = r_foods
yield rrs_item
| tensorchen/spider | rrs/rrs/spiders/one_page.py | Python | artistic-2.0 | 1,136 |
#
# SSSD LOCAL domain tests
#
# Copyright (c) 2015 Red Hat, Inc.
# Author: Michal Zidek <[email protected]>
#
# This is free software; you can redistribute it and/or modify it
# under the terms of the GNU General Public License as published by
# the Free Software Foundation; version 2 only
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
# General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
import os
import stat
import pwd
import time
import config
import signal
import subprocess
import pytest
from util import unindent
def stop_sssd():
pid_file = open(config.PIDFILE_PATH, "r")
pid = int(pid_file.read())
os.kill(pid, signal.SIGTERM)
while True:
try:
os.kill(pid, signal.SIGCONT)
except:
break
time.sleep(1)
def create_conf_fixture(request, contents):
"""Generate sssd.conf and add teardown for removing it"""
conf = open(config.CONF_PATH, "w")
conf.write(contents)
conf.close()
os.chmod(config.CONF_PATH, stat.S_IRUSR | stat.S_IWUSR)
request.addfinalizer(lambda: os.unlink(config.CONF_PATH))
def create_sssd_fixture(request):
"""Start sssd and add teardown for stopping it and removing state"""
if subprocess.call(["sssd", "-D", "-f"]) != 0:
raise Exception("sssd start failed")
def teardown():
try:
stop_sssd()
except:
pass
for path in os.listdir(config.DB_PATH):
os.unlink(config.DB_PATH + "/" + path)
for path in os.listdir(config.MCACHE_PATH):
os.unlink(config.MCACHE_PATH + "/" + path)
request.addfinalizer(teardown)
@pytest.fixture
def local_domain_only(request):
conf = unindent("""\
[sssd]
domains = LOCAL
services = nss
[nss]
memcache_timeout = 0
[domain/LOCAL]
id_provider = local
min_id = 10000
max_id = 20000
""").format(**locals())
create_conf_fixture(request, conf)
create_sssd_fixture(request)
return None
def assert_nonexistent_user(name):
with pytest.raises(KeyError):
pwd.getpwnam(name)
def test_wrong_LC_ALL(local_domain_only):
"""
Regression test for ticket
https://fedorahosted.org/sssd/ticket/2785
"""
subprocess.check_call(["sss_useradd", "foo", "-M"])
pwd.getpwnam("foo")
# Change the LC_ALL variable to nonexistent locale
oldvalue = os.environ.get("LC_ALL", "")
os.environ["LC_ALL"] = "nonexistent_locale"
# sss_userdel must remove the user despite wrong LC_ALL
subprocess.check_call(["sss_userdel", "foo", "-R"])
assert_nonexistent_user("foo")
os.environ["LC_LOCAL"] = oldvalue
| sgallagher/sssd | src/tests/intg/test_local_domain.py | Python | gpl-3.0 | 2,993 |
def main() -> None:
N = int(input())
ans = 3 ** N
odd = 1
for a in map(int, input().split()):
if a % 2 == 0:
odd *= 2
print(ans - odd)
if __name__ == '__main__':
main()
| knuu/competitive-programming | atcoder/corp/code-festival-2017-qualc_b.py | Python | mit | 215 |
#!/usr/bin/env python
import optparse
from sys import *
import os,sys,re
from optparse import OptionParser
import glob
import subprocess
from os import system
import linecache
import time
import datetime
import calendar
#=========================
def setupParserOptions():
parser = optparse.OptionParser()
parser.set_usage("%prog -i <instance type> --days=<days>")
parser.add_option("-i",dest="instance",type="string",metavar="STRING",
help="Amazon instance type (e.g. r3.8xlarge, t2.micro, etc.)")
parser.add_option("--days",dest="days",type="int",metavar="INT",
help="Timeframe over which to retrieve spot instance price history. Maximum is 90 days.")
parser.add_option("-d", action="store_true",dest="debug",default=False,
help="debug")
options,args = parser.parse_args()
if len(args) > 0:
parser.error("Unknown commandline options: " +str(args))
if len(sys.argv) < 3:
parser.print_help()
sys.exit()
params={}
for i in parser.option_list:
if isinstance(i.dest,str):
params[i.dest] = getattr(options,i.dest)
return params
#=============================
def getSpotHistory(params,outdir,timeFrame,currentTime):
#Inputs
instance=params['instance']
#Regions returned from command: $ ec2-describe-regions
if os.path.exists('%s/regions.txt'%(outdir)):
os.remove('%s/regions.txt' %(outdir))
cmd = 'ec2-describe-regions > %s/regions.txt' %(outdir)
subprocess.Popen(cmd,shell=True).wait()
r1 = open('%s/regions.txt' %(outdir),'r')
#Loop over all regions
for regionline in r1:
region=regionline.split()[2]
os.environ["EC2_URL"] = "%s" %(region)
#Get region name
region=region.split('.')[1]
print '\n Working on region %s' %(region)
if os.path.exists('%s/%s.txt' %(outdir,region)):
os.remove('%s/%s.txt' %(outdir,region))
#Get list of availability zones
cmd = 'ec2-describe-availability-zones --region %s > %s/%s.txt' %(region,outdir,region)
if params['debug'] is True:
print cmd
subprocess.Popen(cmd,shell=True).wait()
f1=open('%s/%s.txt' %(outdir,region),'r')
for line in f1:
zone=line.split()[1]
if os.path.exists('%s/%s_%s_%s_to_%s_spotHistory.txt' %(outdir,zone,instance,timeFrame,currentTime)):
os.remove('%s/%s_%s_%s_to_%s_spotHistory.txt' %(outdir,zone,instance,timeFrame,currentTime))
cmd = 'ec2-describe-spot-price-history -t %s -d Linux/UNIX -a %s -s %sT14:10:34-0500 > %s/%s_%s_%s_to_%s_spotHistory.txt' %(instance,zone,timeFrame,outdir,zone,instance,timeFrame,currentTime)
if params['debug'] is True:
print cmd
subprocess.Popen(cmd,shell=True).wait()
f1.close()
os.remove('%s/%s.txt' %(outdir,region))
r1.close()
os.remove('%s/regions.txt' %(outdir))
#==============================
def checkConflicts(params):
instanceList='m3.large, i2.8xlarge, c3.2xlarge, hs1.8xlarge, c1.xlarge, r3.4xlarge, g2.2xlarge, m1.small, c1.medium, m3.2xlarge, c3.8xlarge, m2.xlarge, r3.2xlarge, t1.micro, cr1.8xlarge, r3.8xlarge, cc1.4xlarge, m1.medium, r3.large, c3.xlarge, i2.xlarge, m3.medium, cc2.8xlarge, m1.large, cg1.4xlarge, i2.2xlarge, c3.large, i2.4xlarge, c3.4xlarge, r3.xlarge, m1.xlarge, hi1.4xlarge, m2.4xlarge, m2.2xlarge, m3.xlarge'.split(', ')
if not params['instance'] in instanceList:
print '\nError: Instance %s is not a valid Amazon instance type. Exiting.\n' %(params['instance'])
sys.exit()
if params['days'] >90:
print '\nA larger time frame than 90 days has been specified (%i days). Using 90 day limit instead.\n' %(params['days'])
params['days']=90
if os.path.exists('SpotHistory_%s_%s_last%02ddays' %(params['instance'],datetime.datetime.now().strftime('%Y-%m-%d'),params['days'])):
print '\nError: Directory SpotHistory_%s_%s_last%02ddays already exists. Exiting.\n' %(params['instance'],datetime.datetime.now().strftime('%Y-%m-%d'),params['days'])
sys.exit()
return params
#==============================
def getDates(prevDay):
today = datetime.datetime.now()
dtdelta=datetime.timedelta(days=-prevDay)
prevDate=today+dtdelta
return '%s' %(prevDate.strftime('%Y-%m-%d'))
#=============================
def checkAWSPath():
ec2 = subprocess.Popen("env | grep EC2_HOME", shell=True, stdout=subprocess.PIPE).stdout.read().strip()
if not ec2:
print '\n Error: No AWS CLI tools environment set for $EC2_HOME. Exiting.\n'
key=subprocess.Popen("env | grep AWS_ACCESS_KEY", shell=True, stdout=subprocess.PIPE).stdout.read().strip()
secret=subprocess.Popen("env | grep AWS_SECRET_KEY", shell=True, stdout=subprocess.PIPE).stdout.read().strip()
awsid= subprocess.Popen("env | grep AWS_USER_ID", shell=True, stdout=subprocess.PIPE).stdout.read().strip()
if not key:
print '\n Error: No AWS_ACCESS_KEY specified in the environment as $AWS_ACCESS_KEY. Exiting.\n'
sys.exit()
if not secret:
print '\n Error: No AWS_SECRET_KEY specified in the environment as $AWS_SECRET_KEY. Exiting.\n'
sys.exit()
if not awsid:
print '\n Error: No AWS_USER_ID specified in the environment as $AWS_USER_ID. Exiting.\n'
sys.exit()
#==============================
if __name__ == "__main__":
params=setupParserOptions()
checkAWSPath()
params=checkConflicts(params)
prevDate=getDates(params['days'])
outdir='SpotHistory_%s_%s_last%02ddays' %(params['instance'],datetime.datetime.now().strftime('%Y-%m-%d'),params['days'])
os.makedirs(outdir)
getSpotHistory(params,outdir,prevDate,datetime.datetime.now().strftime('%Y-%m-%d'))
| mcianfrocco/Cianfrocco-and-Leschziner-EMCloudProcessing | get_spot_histories_all_regions_all_zones.py | Python | mit | 5,680 |
from setuptools import setup
setup(
name='python-web-demo-flask',
packages=['demo'],
)
| Appdynamics/Python-Demo-App | setup.py | Python | mit | 96 |
import logging
from src import messagebus, pages
import time
import cherrypy
class WebUI():
@cherrypy.expose
def scan(self):
pages.postOnly()
pages.require("/admin/settings.edit")
import bluetoothctl
bt = bluetoothctl.Bluetoothctl()
try:
bt.start_scan()
time.sleep(15)
bt.stop_scan()
devs = bt.get_discoverable_devices()
paired = bt.get_paired_devices()
finally:
bt.close(force=True)
return pages.get_template("settings/bluetooth/scan.html").render(devs=devs, paired=paired)
@cherrypy.expose
def pair(self, mac):
pages.require("/admin/settings.edit")
pages.postOnly()
import bluetoothctl
bt = bluetoothctl.Bluetoothctl()
bt.set_agent("NoInputNoOutput")
time.sleep(0.5)
try:
            # I think this horribly fussy command needs exactly this order to work.
            if not bt.pair(mac):
                raise RuntimeError("Pairing failed")
            if not bt.connect(mac):
                raise RuntimeError("Pairing succeeded but connection failed")
if not bt.trust(mac):
raise RuntimeError("Trusting failed")
finally:
bt.close(force=True)
devs = []
paired = bt.get_paired_devices()
return pages.get_template("settings/bluetooth/scan.html").render(devs=devs, paired=paired)
@cherrypy.expose
def remove(self, mac):
pages.require("/admin/settings.edit")
pages.postOnly()
import bluetoothctl
bt = bluetoothctl.Bluetoothctl()
time.sleep(0.5)
try:
devs = bt.get_discoverable_devices()
paired = bt.get_paired_devices()
if not bt.remove(mac):
raise RuntimeError("Removing failed")
finally:
bt.close(force=True)
return pages.get_template("settings/bluetooth/scan.html").render(devs=devs, paired=paired)
| EternityForest/KaithemAutomation | kaithem/src/btadmin.py | Python | gpl-3.0 | 2,025 |
"""
sentry.models.event
~~~~~~~~~~~~~~~~~~~
:copyright: (c) 2010-2013 by the Sentry Team, see AUTHORS for more details.
:license: BSD, see LICENSE for more details.
"""
import logging
from django.db import models
from django.utils import timezone
from django.utils.datastructures import SortedDict
from django.utils.translation import ugettext_lazy as _
from sentry.constants import LOG_LEVELS, MAX_CULPRIT_LENGTH
from sentry.db.models import (
Model, NodeField, BoundedIntegerField, BoundedPositiveIntegerField,
BaseManager, sane_repr
)
from sentry.utils.cache import memoize
from sentry.utils.imports import import_string
from sentry.utils.safe import safe_execute
from sentry.utils.strings import truncatechars, strip
class Event(Model):
"""
An individual event.
"""
group = models.ForeignKey('sentry.Group', blank=True, null=True, related_name="event_set")
event_id = models.CharField(max_length=32, null=True, db_column="message_id")
project = models.ForeignKey('sentry.Project', null=True)
logger = models.CharField(
max_length=64, blank=True, default='root', db_index=True)
level = BoundedPositiveIntegerField(
choices=LOG_LEVELS.items(), default=logging.ERROR, blank=True,
db_index=True)
message = models.TextField()
culprit = models.CharField(
max_length=MAX_CULPRIT_LENGTH, blank=True, null=True,
db_column='view')
checksum = models.CharField(max_length=32, db_index=True)
num_comments = BoundedPositiveIntegerField(default=0, null=True)
platform = models.CharField(max_length=64, null=True)
datetime = models.DateTimeField(default=timezone.now, db_index=True)
time_spent = BoundedIntegerField(null=True)
server_name = models.CharField(max_length=128, db_index=True, null=True)
site = models.CharField(max_length=128, db_index=True, null=True)
data = NodeField(blank=True, null=True)
objects = BaseManager()
class Meta:
app_label = 'sentry'
db_table = 'sentry_message'
verbose_name = _('message')
verbose_name_plural = _('messages')
unique_together = ('project', 'event_id')
__repr__ = sane_repr('project_id', 'group_id', 'checksum')
def error(self):
message = strip(self.message)
if not message:
message = '<unlabeled message>'
else:
message = truncatechars(message.splitlines()[0], 100)
return message
error.short_description = _('error')
def has_two_part_message(self):
message = strip(self.message)
return '\n' in message or len(message) > 100
def message_top(self):
culprit = strip(self.culprit)
if culprit:
return culprit
return self.error()
@property
def team(self):
return self.project.team
@memoize
def ip_address(self):
http_data = self.data.get('sentry.interfaces.Http')
if http_data and 'env' in http_data:
value = http_data['env'].get('REMOTE_ADDR')
if value:
return value
user_data = self.data.get('sentry.interfaces.User')
if user_data:
value = user_data.get('ip_address')
if value:
return value
return None
@memoize
def user_ident(self):
"""
The identifier from a user is considered from several interfaces.
In order:
- User.id
- User.email
- User.username
- Http.env.REMOTE_ADDR
"""
user_data = self.data.get('sentry.interfaces.User')
if user_data:
ident = user_data.get('id')
if ident:
return 'id:%s' % (ident,)
ident = user_data.get('email')
if ident:
return 'email:%s' % (ident,)
ident = user_data.get('username')
if ident:
return 'username:%s' % (ident,)
ident = self.ip_address
if ident:
return 'ip:%s' % (ident,)
return None
@memoize
def interfaces(self):
result = []
for key, data in self.data.iteritems():
if '.' not in key:
continue
try:
cls = import_string(key)
except ImportError:
continue # suppress invalid interfaces
value = safe_execute(cls, **data)
if not value:
continue
result.append((key, value))
return SortedDict((k, v) for k, v in sorted(result, key=lambda x: x[1].get_score(), reverse=True))
def get_version(self):
if not self.data:
return
if '__sentry__' not in self.data:
return
if 'version' not in self.data['__sentry__']:
return
module = self.data['__sentry__'].get('module', 'ver')
return module, self.data['__sentry__']['version']
def get_tags(self):
try:
return [
(t, v) for t, v in self.data.get('tags') or ()
if not t.startswith('sentry:')
]
except ValueError:
# at one point Sentry allowed invalid tag sets such as (foo, bar)
# vs ((tag, foo), (tag, bar))
return []
def as_dict(self):
# We use a SortedDict to keep elements ordered for a potential JSON serializer
data = SortedDict()
data['id'] = self.event_id
data['checksum'] = self.checksum
data['project'] = self.project.slug
data['logger'] = self.logger
data['level'] = self.get_level_display()
data['culprit'] = self.culprit
data['datetime'] = self.datetime
data['time_spent'] = self.time_spent
for k, v in sorted(self.data.iteritems()):
data[k] = v
return data
@property
def size(self):
return len(unicode(vars(self)))
| rdio/sentry | src/sentry/models/event.py | Python | bsd-3-clause | 5,941 |