code (string, 3-1.05M chars) | repo_name (string, 5-104 chars) | path (string, 4-251 chars) | language (1 class) | license (15 classes) | size (int64, 3-1.05M)
---|---|---|---|---|---|
## \example buffers.py
# Showing how to read from and write to buffers
import RMF
buf = RMF.BufferHandle()
fw = RMF.create_rmf_buffer(buf)
# do stuff
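# Deleting the write handle closes it; this example relies on that to flush
# the written data into buf before the buffer is reopened read-only below.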
del fw
fr = RMF.open_rmf_buffer_read_only(buf)
# do more stuff
| shanot/imp | modules/rmf/dependency/RMF/examples/buffers.py | Python | gpl-3.0 | 215 |
#=======================================================================
# Author: Donovan Parks
#
# Unit tests for STAMP.
#
# Copyright 2011 Donovan Parks
#
# This file is part of STAMP.
#
# STAMP is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# STAMP is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with STAMP. If not, see <http://www.gnu.org/licenses/>.
#=======================================================================
import unittest
import sys
# test tables (positive samples 1, positive samples 2, total samples 1, total samples 2)
table1 = [10, 8, 30, 40]
table2 = [4000, 5000, 500000, 1000000]
# preferences for statistical tests
preferences = {}
preferences['Pseudocount'] = 0.5
preferences['Executable directory'] = sys.path[0]
preferences['Replicates'] = 1000
class VerifyPostHocTests(unittest.TestCase):
def testGamesHowell(self):
"""Verify computation of Games-Howell post-hoc test"""
from stamp.plugins.multiGroups.postHoc.GamesHowell import GamesHowell
gh = GamesHowell(preferences)
# ground truth found with SPSS v19. Values are not exact since the critical Q values
# are interpolated from tables in the STAMP implementation.
pValues, effectSize, lowerCI, upperCI, labels, _ = gh.run([[1,2,3,4,5],[10,20,30,40,50,60],[1,2,3,4,5,6,7]], 0.95, ['1', '2', '3'])
self.assertEqual(labels[0], '1 : 2')
self.assertAlmostEqual(effectSize[0], -32)
self.assertAlmostEqual(lowerCI[0], -56.836534205367272) # SPSS = -56.80902338101632
self.assertAlmostEqual(upperCI[0], -7.163465794632728) # SPSS = -7.190976618983683
self.assertEqual(pValues[0] == '< 0.02', True) # SPSS = 0.019165308600281317
self.assertEqual(labels[1], '1 : 3')
self.assertAlmostEqual(effectSize[1], -1.0)
self.assertAlmostEqual(lowerCI[1], -3.9627938823820417) # SPSS = -3.962591041989213
self.assertAlmostEqual(upperCI[1], 1.9627938823820417) # SPSS = 1.9625910419892132
self.assertEqual(pValues[1] == '>= 0.1', True) # SPSS = 0.6372223228477465
self.assertEqual(labels[2], '2 : 3')
self.assertAlmostEqual(effectSize[2], 31)
self.assertAlmostEqual(lowerCI[2], 6.1693311597445302) # SPSS = 6.2047330662731035
self.assertAlmostEqual(upperCI[2], 55.83066884025547) # SPSS = 55.79526693372689
self.assertEqual(pValues[2] == '< 0.05', True) # SPSS = 0.021640761239221984
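# A sanity sketch of the numbers above (my own arithmetic, not STAMP code):
# the reported effect size is the plain difference of group means, e.g.
#   mean([1,2,3,4,5]) = 3.0, mean([10,20,30,40,50,60]) = 35.0, 3.0 - 35.0 = -32.0.
# Games-Howell then wraps this difference in a CI of the form
#   diff +/- q_crit * sqrt((s1^2/n1 + s2^2/n2) / 2)
# with Welch-Satterthwaite degrees of freedom, so the interpolated q_crit is
# what makes the bounds differ slightly from SPSS.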
def testTukeyKramer(self):
"""Verify computation of Tukey-Kramer post-hoc test"""
from stamp.plugins.multiGroups.postHoc.TukeyKramer import TukeyKramer
tk = TukeyKramer(preferences)
# ground truth found with the anova1 and multcompare function in MATLAB v7.10.0 and SPSS v19
pValues, effectSize, lowerCI, upperCI, labels, _ = tk.run([[1,2,3,4,5],[10,20,30,40,50,60],[1,2,3,4,5,6,7]], 0.95, ['1', '2', '3'])
self.assertEqual(labels[0], '1 : 2')
self.assertAlmostEqual(effectSize[0], -32)
self.assertAlmostEqual(lowerCI[0], -49.172140035619407)
self.assertAlmostEqual(upperCI[0], -14.827859964380597)
self.assertEqual(pValues[0] == '< 0.001', True) # 5.960611653675896E-4
self.assertEqual(labels[1], '1 : 3')
self.assertAlmostEqual(effectSize[1], -1.0)
self.assertAlmostEqual(lowerCI[1], -17.605245738594071)
self.assertAlmostEqual(upperCI[1], 15.605245738594071)
self.assertEqual(pValues[1] == '>= 0.1', True) # 0.9866130284213506
self.assertEqual(labels[2], '2 : 3')
self.assertAlmostEqual(effectSize[2], 31)
self.assertAlmostEqual(lowerCI[2], 15.222589067602378)
self.assertAlmostEqual(upperCI[2], 46.777410932397622)
self.assertEqual(pValues[2] == '< 0.001', True) # 3.593658536739097E-4
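# For reference (the textbook formula, not taken from STAMP's source):
# Tukey-Kramer uses the pooled within-group variance,
#   diff +/- q_crit * sqrt(MSW/2 * (1/n_i + 1/n_j)),
# so unlike Games-Howell it assumes equal variances across groups.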
def testScheffe(self):
"""Verify computation of Scheffe post-hoc test"""
from stamp.plugins.multiGroups.postHoc.Scheffe import Scheffe
scheffe = Scheffe(preferences)
# ground truth example taken from http://www.mathcs.duq.edu/larget/math225/notes18.html
data = []
data.append([19.65,20.05,20.65,20.85,21.65,21.65,21.65,21.85,21.85,21.85,22.05,22.05,22.05,22.05,22.05,22.05,22.05,22.05,22.05,22.05,22.25,22.25,22.25,22.25,22.25,22.25,22.25,22.25,22.45,22.45,22.45,22.65,22.65,22.85,22.85,22.85,22.85,23.05,23.25,23.25,23.45,23.65,23.85,24.25,24.45])
data.append([21.05,21.85,22.05,22.45,22.65,23.25,23.25,23.25,23.45,23.45,23.65,23.85,24.05,24.05,24.05])
data.append([20.85,21.65,22.05,22.85,23.05,23.05,23.05,23.05,23.45,23.85,23.85,23.85,24.05,25.05])
data.append([21.05,21.85,22.05,22.05,22.05,22.25,22.45,22.45,22.65,23.05,23.05,23.05,23.05,23.05,23.25,23.85])
data.append([21.05,21.85,21.85,21.85,22.05,22.45,22.65,23.05,23.05,23.25,23.45,24.05,24.05,24.05,24.85])
data.append([19.85,20.05,20.25,20.85,20.85,20.85,21.05,21.05,21.05,21.25,21.45,22.05,22.05,22.05,22.25])
pValues, effectSize, lowerCI, upperCI, labels, _ = scheffe.run(data, 0.95, ['MeadowPipet', 'TreePipet', 'Sparrow', 'Robin', 'PiedWagtail', 'Wren'])
self.assertEqual(labels[9], 'Sparrow : Robin')
self.assertAlmostEqual(effectSize[9], 0.546428571)
self.assertAlmostEqual(lowerCI[9], -0.58049475277666573)
self.assertAlmostEqual(upperCI[9], 1.6733518956338074)
self.assertEqual(pValues[9] > 0.05, True)
self.assertEqual(labels[11], 'Sparrow : Wren')
self.assertAlmostEqual(effectSize[11], 1.9914285714285711)
self.assertAlmostEqual(lowerCI[11], 0.84710959211483861)
self.assertAlmostEqual(upperCI[11], 3.1357475507423036)
self.assertEqual(pValues[11] < 0.05, True)
# ground truth found with the anova1 and multcompare function in MATLAB v7.10.0 and SPSS v19
pValues, effectSize, lowerCI, upperCI, labels, _ = scheffe.run([[1,2,3,4,5],[10,20,30,40,50,60],[1,2,3,4,5,6,7]], 0.95, ['1', '2', '3'])
self.assertEqual(labels[0], '1 : 2')
self.assertAlmostEqual(effectSize[0], -32)
self.assertAlmostEqual(lowerCI[0], -49.941123031784372)
self.assertAlmostEqual(upperCI[0], -14.058876968215628)
self.assertAlmostEqual(pValues[0], 8.624781311637033E-4)
self.assertEqual(labels[1], '1 : 3')
self.assertAlmostEqual(effectSize[1], -1.0)
self.assertAlmostEqual(lowerCI[1], -18.348842727299797)
self.assertAlmostEqual(upperCI[1], 16.348842727299797)
self.assertAlmostEqual(pValues[1], 0.9878500418301395)
self.assertEqual(labels[2], '2 : 3')
self.assertAlmostEqual(effectSize[2], 31)
self.assertAlmostEqual(lowerCI[2], 14.51606322368572)
self.assertAlmostEqual(upperCI[2], 47.48393677631428)
self.assertAlmostEqual(pValues[2], 5.261333896968458E-4)
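# For reference (the textbook formula, not taken from STAMP's source): Scheffe's
# critical difference is sqrt((k-1) * F_crit) * sqrt(MSW * (1/n_i + 1/n_j)),
# and since it is F-based the test returns exact p-values here rather than the
# binned '< 0.05' style strings of the studentized-range tests above.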
class VerifyStatisticalTests(unittest.TestCase):
def testANOVA(self):
"""Verify computation of ANOVA"""
from stamp.plugins.multiGroups.statisticalTests.ANOVA import ANOVA
anova = ANOVA(preferences)
# checked against http://turner.faculty.swau.edu/mathematics/math241/materials/anova/
pValue, _ = anova.hypothesisTest([[5,4,6,4,3],[5,2,2,5,6,7],[1,2,3,4,5,6,7]])
self.assertAlmostEqual(pValue, 0.88347274205)
# checked against http://faculty.vassar.edu/lowry/anova1u.html
pValue, _ = anova.hypothesisTest([[1,2,3,4,5],[10,20,30,40,50],[4,5,4], [5,5,5]])
self.assertAlmostEqual(pValue, 0.0018740823031)
pValue, _ = anova.hypothesisTest([[5,4,5,4,5],[6,5,6,5,6,5],[700,800,700]])
self.assertAlmostEqual(pValue, 0.0)
pValue, _ = anova.hypothesisTest([[1,2,3,4,5],[1,2,3,4,5],[1,2,3,4,5]])
self.assertAlmostEqual(pValue, 1.0)
def testKruskalWallis(self):
"""Verify computation of Kruskal-Wallis H-test"""
from stamp.plugins.multiGroups.statisticalTests.KruskalWallis import KruskalWallis
kw = KruskalWallis(preferences)
# checked against http://faculty.vassar.edu/lowry/kw3.html
pValue, _ = kw.hypothesisTest([[5,4,6,4,3],[5,2,2,5,6,7],[1,2,3,4,5,6,7]])
self.assertAlmostEqual(pValue, 0.88173680194259985)
pValue, _ = kw.hypothesisTest([[1,2,3,4,5,6,7],[8,9,10,11,12,13,14,15],[16,17,18,19,20,21,22]])
self.assertAlmostEqual(pValue, 8.8020161301173428e-05)
pValue, _ = kw.hypothesisTest([[1,2,3,4,5],[1,2,3,4,5],[1,2,3,4,5]])
self.assertAlmostEqual(pValue, 1.0)
def testTTest(self):
"""Verify computation of t-test (equal variance assumption) """
from stamp.plugins.groups.statisticalTests.Ttest import Ttest
ttest = Ttest(preferences)
# ground truth found with t.test in R v2.13.0
oneSided, twoSided, lowerCI, upperCI, effectSize, _ = ttest.run([5,4,6,4,3],[5,2,2,5,6,7], [1,1,1,1,1], [1,1,1,1,1,1], None, 0.95)
self.assertAlmostEqual(oneSided, 0.537141726)
self.assertAlmostEqual(twoSided, 0.925716547365)
self.assertAlmostEqual(lowerCI, -245.935268272)
self.assertAlmostEqual(upperCI, 225.935268272)
self.assertAlmostEqual(effectSize, -10.0)
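# The effect size is consistent with each entry being treated as a count over
# its paired total and scaled to a percentage (my reading of the inputs, not
# STAMP's documented API): mean([500,400,600,400,300]) = 440,
# mean([500,200,200,500,600,700]) = 450, and 440 - 450 = -10.0.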
def testWelchTest(self):
"""Verify computation of Welsh's t-test"""
from stamp.plugins.groups.statisticalTests.Welch import Welch
ttest = Welch(preferences)
# ground truth found with t.test in R v2.13.0
oneSided, twoSided, lowerCI, upperCI, effectSize, _ = ttest.run([5,4,6,4,3],[5,2,2,5,6,7], [1,1,1,1,1], [1,1,1,1,1,1], None, 0.95)
self.assertAlmostEqual(oneSided, 0.5390501783)
self.assertAlmostEqual(twoSided, 0.9218996432)
self.assertAlmostEqual(lowerCI, -238.023177152)
self.assertAlmostEqual(upperCI, 218.023177152)
self.assertAlmostEqual(effectSize, -10.0)
oneSided, twoSided, lowerCI, upperCI, effectSize, _ = ttest.run([3.4,6.3,5.3,1.4,6.3,6.3],[3.5,6.4,5.2,1.3,6.4,6.2], [1,1,1,1,1,1], [1,1,1,1,1,1], None, 0.95)
self.assertAlmostEqual(oneSided, 0.5)
self.assertAlmostEqual(twoSided, 1.0)
self.assertAlmostEqual(lowerCI, -262.6606201199)
self.assertAlmostEqual(upperCI, 262.6606201199)
self.assertAlmostEqual(effectSize, 0.0)
oneSided, twoSided, lowerCI, upperCI, effectSize, _ = ttest.run([1,2,3,4,5,6,7,8,9,10],[10,20,30,40,50,60,70,80,90,100], [1,1,1,1,1,1,1,1,1,1], [1,1,1,1,1,1,1,1,1,1], None, 0.95)
self.assertAlmostEqual(oneSided, 0.9997146330)
self.assertAlmostEqual(twoSided, 0.0005707338)
self.assertAlmostEqual(lowerCI, -7120.16500998)
self.assertAlmostEqual(upperCI, -2779.83499002)
self.assertAlmostEqual(effectSize, -4950.0)
def testWhiteTest(self):
"""Verify computation of White's non-parametric test"""
from stamp.plugins.groups.statisticalTests.White import White
white = White(preferences)
# This is a fairly degenerate test since the non-deterministic nature of this test
# makes it difficult to verify under more general conditions
_, pValuesTwoSided, lowerCIs, upperCIs, effectSizes, _ = white.runAll([[5,5,5,5,5]], [[6,6,6,6,6,6,6,6]], [[10,10,10,10,10]], [[10,10,10,10,10,10,10,10]], "DP: bootstrap", 0.95, None)
self.assertAlmostEqual(pValuesTwoSided[0], 0.0)
self.assertAlmostEqual(lowerCIs[0], -10.0)
self.assertAlmostEqual(upperCIs[0], -10.0)
self.assertAlmostEqual(effectSizes[0], -10.0)
#def testBarnard(self):
# """Verify computation of Barnard's exact test"""
# from stamp.plugins.statisticalTests.Barnard import Barnard
# barnard = Barnard(preferences)
# Ground truth obtained from StatXact v8.0.0
# oneSided, twoSided = barnard.hypothesisTest(table1[0], table1[1], table1[2], table1[3])
# self.assertEqual(oneSided, float('inf'))
# self.assertAlmostEqual(twoSided, 0.224594642210276)
def testChiSquare(self):
"""Verify computation of Chi-square test"""
from stamp.plugins.samples.statisticalTests.ChiSquare import ChiSquare
chiSquare = ChiSquare(preferences)
# Ground truth obtained from R version 2.10
oneSided, twoSided, _ = chiSquare.hypothesisTest(table1[0], table1[1], table1[2], table1[3])
self.assertEqual(oneSided, float('inf'))
self.assertAlmostEqual(twoSided, 0.206550401252)
oneSided, twoSided, _ = chiSquare.hypothesisTest(table2[0], table2[1], table2[2], table2[3])
self.assertEqual(oneSided, float('inf'))
self.assertAlmostEqual(twoSided, 2.220446049e-16)
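# 2.220446049e-16 is double-precision machine epsilon (2**-52), the floor R
# uses when it reports 'p-value < 2.2e-16'; the R-derived ground truths here
# pin that exact floor rather than a true p-value.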
def testChiSquareYates(self):
"""Verify computation of Chi-square test with Yates' continuity correction"""
from stamp.plugins.samples.statisticalTests.ChiSquareYates import ChiSquareYates
chiSquareYates = ChiSquareYates(preferences)
# Ground truth obtained from R version 2.10
oneSided, twoSided, _ = chiSquareYates.hypothesisTest(table1[0], table1[1], table1[2], table1[3])
self.assertEqual(oneSided, float('inf'))
self.assertAlmostEqual(twoSided, 0.323739196466)
oneSided, twoSided, _ = chiSquareYates.hypothesisTest(table2[0], table2[1], table2[2], table2[3])
self.assertEqual(oneSided, float('inf'))
self.assertAlmostEqual(twoSided, 2.220446049e-16)
def testDiffBetweenProp(self):
"""Verify computation of Difference between proportions test"""
from stamp.plugins.samples.statisticalTests.DiffBetweenProp import DiffBetweenProp
diffBetweenProp = DiffBetweenProp(preferences)
# Ground truth obtained from R version 2.10
oneSided, twoSided, _ = diffBetweenProp.hypothesisTest(table1[0], table1[1], table1[2], table1[3])
self.assertAlmostEqual(oneSided, 0.103275200626)
self.assertAlmostEqual(twoSided, 0.206550401252)
oneSided, twoSided, _ = diffBetweenProp.hypothesisTest(table2[0], table2[1], table2[2], table2[3])
self.assertAlmostEqual(oneSided, 2.220446049e-16)
self.assertAlmostEqual(twoSided, 2.220446049e-16)
def testFishers(self):
"""Verify computation of Fisher's exact test (minimum-likelihood approach)"""
from stamp.plugins.samples.statisticalTests.Fishers import Fishers
fishers = Fishers(preferences)
# Ground truth obtained from R version 2.10
oneSided, twoSided, _ = fishers.hypothesisTest(table1[0], table1[1], table1[2], table1[3])
self.assertAlmostEqual(oneSided, 0.16187126209690825)
self.assertAlmostEqual(twoSided, 0.2715543327789185)
oneSided, twoSided, _ = fishers.hypothesisTest(table2[0], table2[1], table2[2], table2[3])
self.assertAlmostEqual(oneSided, 2.220446049e-16)
self.assertAlmostEqual(twoSided, 2.220446049e-16)
oneSided, twoSided, _ = fishers.hypothesisTest(0.0, 0.0, 920852.999591, 953828.994346)
self.assertAlmostEqual(oneSided, 1.0)
self.assertAlmostEqual(twoSided, 1.0)
def testGTest(self):
"""Verify computation of G-test"""
from stamp.plugins.samples.statisticalTests.GTest import GTest
gTest = GTest(preferences)
# Ground truth obtained from Peter L. Hurd's R script (http://www.psych.ualberta.ca/~phurd/cruft/g.test.r)
oneSided, twoSided, _ = gTest.hypothesisTest(table1[0], table1[1], table1[2], table1[3])
self.assertEqual(oneSided, float('inf'))
self.assertAlmostEqual(twoSided, 0.208248664458)
oneSided, twoSided, _ = gTest.hypothesisTest(table2[0], table2[1], table2[2], table2[3])
self.assertEqual(oneSided, float('inf'))
self.assertAlmostEqual(twoSided, 2.220446049e-16)
def testGTestYates(self):
"""Verify computation of G-test with Yates' continuity correction"""
from stamp.plugins.samples.statisticalTests.GTestYates import GTestYates
gTestYates = GTestYates(preferences)
# Ground truth obtained from Peter L. Hurd's R script (http://www.psych.ualberta.ca/~phurd/cruft/g.test.r)
oneSided, twoSided, _ = gTestYates.hypothesisTest(table1[0], table1[1], table1[2], table1[3])
self.assertEqual(oneSided, float('inf'))
self.assertAlmostEqual(twoSided, 0.325502240010)
oneSided, twoSided, _ = gTestYates.hypothesisTest(table2[0], table2[1], table2[2], table2[3])
self.assertEqual(oneSided, float('inf'))
self.assertAlmostEqual(twoSided, 2.220446049e-16)
def testHypergeometric(self):
"""Verify computation of Hypergeometric test (Fisher's exact test with p-value doubling approach)"""
from stamp.plugins.samples.statisticalTests.Hypergeometric import Hypergeometric
hypergeometric = Hypergeometric(preferences)
# Ground truth obtained using the phyper() and dhyper() functions in R version 2.10
oneSided, twoSided, _ = hypergeometric.hypothesisTest(table1[0], table1[1], table1[2], table1[3])
self.assertAlmostEqual(oneSided, 0.161871262097)
self.assertAlmostEqual(twoSided, 2 * 0.161871262097)
oneSided, twoSided, _ = hypergeometric.hypothesisTest(table2[0], table2[1], table2[2], table2[3])
self.assertAlmostEqual(oneSided, 2.220446049e-16)
self.assertAlmostEqual(twoSided, 2.220446049e-16)
class VerifyEffectSizeFilters(unittest.TestCase):
def testEtaSquared(self):
"""Verify computation of eta-squared effect size filter"""
from stamp.plugins.multiGroups.effectSizeFilters.EtaSquared import EtaSquared
etaSquared = EtaSquared(preferences)
# ground truth taken from http://turner.faculty.swau.edu/mathematics/math241/materials/anova/
value = etaSquared.run([[1,2,3,4],[2,3,4],[1,2,3,4]])
self.assertAlmostEqual(value, 0.545454545 / 12.545454545)
# ground truth taken from http://faculty.vassar.edu/lowry/anova1u.html
value = etaSquared.run([[1,2,3,4,5],[10,20,30,40,50],[4,5,4], [5,5,5]])
self.assertAlmostEqual(value, 2348.27083333 / 3358.9375)
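# Both assertions spell out the defining ratio eta^2 = SS_between / SS_total
# (standard formula; the SS values come from the cited calculators).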
def testDiffBetweenProp(self):
"""Verify computation of Difference between proportions effect size filter"""
from stamp.plugins.samples.effectSizeFilters.DiffBetweenProp import DiffBetweenProp
diffBetweenProp = DiffBetweenProp(preferences)
# Ground truth calculated by hand
value = diffBetweenProp.run(table1[0], table1[1], table1[2], table1[3])
self.assertAlmostEqual(value, 13.333333333)
value = diffBetweenProp.run(table2[0], table2[1], table2[2], table2[3])
self.assertAlmostEqual(value, 0.3)
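# Hand check (my arithmetic): 10/30 - 8/40 = 33.333% - 20% = 13.333 percentage
# points for table1, and 4000/500000 - 5000/1000000 = 0.8% - 0.5% = 0.3 for table2.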
def testOddsRatio(self):
"""Verify computation of Odds ratio effect size filter"""
from stamp.plugins.samples.effectSizeFilters.OddsRatio import OddsRatio
oddsRatio = OddsRatio(preferences)
# Ground truth calculated by hand
value = oddsRatio.run(table1[0], table1[1], table1[2], table1[3])
self.assertAlmostEqual(value, 2.0)
value = oddsRatio.run(table2[0], table2[1], table2[2], table2[3])
self.assertAlmostEqual(value, 1.60483870968)
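# Hand check (my arithmetic), with the negative counts b = 30-10 = 20 and
# d = 40-8 = 32: OR = (10/20) / (8/32) = 2.0 for table1, and
# (4000/496000) / (5000/995000) = 1.60483870968 for table2.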
def testRatioProportions(self):
"""Verify computation of ratio of proportions effect size filter"""
from stamp.plugins.samples.effectSizeFilters.RatioProportions import RatioProportions
ratioProportions = RatioProportions(preferences)
# Ground truth calculated by hand
value = ratioProportions.run(table1[0], table1[1], table1[2], table1[3])
self.assertAlmostEqual(value, 1.66666666666666)
value = ratioProportions.run(table2[0], table2[1], table2[2], table2[3])
self.assertAlmostEqual(value, 1.6)
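# Hand check (my arithmetic): (10/30) / (8/40) = 1.666... for table1, and
# (4000/500000) / (5000/1000000) = 0.008 / 0.005 = 1.6 for table2.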
def testDiffBetweenPropGroup(self):
"""Verify computation of Difference between proportions group effect size filter"""
from stamp.plugins.groups.effectSizeFilters.DiffBetweenProp import DiffBetweenProp
diffBetweenProp = DiffBetweenProp(preferences)
# Ground truth calculated by hand
value = diffBetweenProp.run([1,2,3,4,5], [2,4,5,8,10])
self.assertAlmostEqual(value, 15.0/5 - 29.0/5)
value = diffBetweenProp.run([1],[1,1])
self.assertAlmostEqual(value, 1.0/1 - 2.0/2)
def testRatioProportionsGroup(self):
"""Verify computation of ratio of proportions group effect size filter"""
from stamp.plugins.groups.effectSizeFilters.RatioProportions import RatioProportions
ratioProportions = RatioProportions(preferences)
# Ground truth calculated by hand
value = ratioProportions.run([1,2,3,4,5], [2,4,5,8,10])
self.assertAlmostEqual(value, (15.0/5) / (29.0/5))
value = ratioProportions.run([1],[1,1])
self.assertAlmostEqual(value, (1.0/1) / (2.0/2))
class VerifyConfidenceIntervalMethods(unittest.TestCase):
def testDiffBetweenPropAsymptotic(self):
"""Verify computation of Difference between proportions asymptotic CI method"""
from stamp.plugins.samples.confidenceIntervalMethods.DiffBetweenPropAsymptotic import DiffBetweenPropAsymptotic
diffBetweenPropAsymptotic = DiffBetweenPropAsymptotic(preferences)
lowerCI, upperCI, effectSize, _ = diffBetweenPropAsymptotic.run(table1[0], table1[1], table1[2], table1[3], 0.95)
self.assertAlmostEqual(lowerCI, -7.60015319099813)
self.assertAlmostEqual(upperCI, 34.2668198576648)
self.assertAlmostEqual(effectSize, 13.333333333)
lowerCI, upperCI, effectSize, _ = diffBetweenPropAsymptotic.run(table2[0], table2[1], table2[2], table2[3], 0.95)
self.assertAlmostEqual(lowerCI, 0.271701079166334)
self.assertAlmostEqual(upperCI, 0.328298920833666)
self.assertAlmostEqual(effectSize, 0.3)
def testDiffBetweenPropAsymptoticCC(self):
"""Verify computation of Difference between proportions asymptotic CI method with continuity correction"""
from stamp.plugins.samples.confidenceIntervalMethods.DiffBetweenPropAsymptoticCC import DiffBetweenPropAsymptoticCC
diffBetweenPropAsymptoticCC = DiffBetweenPropAsymptoticCC(preferences)
lowerCI, upperCI, effectSize, _ = diffBetweenPropAsymptoticCC.run(table1[0], table1[1], table1[2], table1[3], 0.95)
self.assertAlmostEqual(lowerCI, -13.3167148125733)
self.assertAlmostEqual(upperCI, 39.98338147924)
self.assertAlmostEqual(effectSize, 13.333333333)
lowerCI, upperCI, effectSize, _ = diffBetweenPropAsymptoticCC.run(table2[0], table2[1], table2[2], table2[3], 0.95)
self.assertAlmostEqual(lowerCI, 0.271407084568653)
self.assertAlmostEqual(upperCI, 0.328592915431347)
self.assertAlmostEqual(effectSize, 0.3)
def testNewcombeWilson(self):
"""Verify computation of Newcombe-Wilson CI method"""
from stamp.plugins.samples.confidenceIntervalMethods.NewcombeWilson import NewcombeWilson
newcombeWilson = NewcombeWilson(preferences)
lowerCI, upperCI, effectSize, _ = newcombeWilson.run(table1[0], table1[1], table1[2], table1[3], 0.95)
self.assertAlmostEqual(lowerCI, -7.07911677674112)
self.assertAlmostEqual(upperCI, 33.5862638376494)
self.assertAlmostEqual(effectSize, 13.333333333)
lowerCI, upperCI, effectSize, _ = newcombeWilson.run(table2[0], table2[1], table2[2], table2[3], 0.95)
self.assertAlmostEqual(lowerCI, 0.271932757939523)
self.assertAlmostEqual(upperCI, 0.328541077116921)
self.assertAlmostEqual(effectSize, 0.3)
def testOddsRatio(self):
"""Verify computation of Odds ratio CI method"""
from stamp.plugins.samples.confidenceIntervalMethods.OddsRatio import OddsRatio
oddsRatio = OddsRatio(preferences)
# Ground truth calculated by hand
lowerCI, upperCI, effectSize, _ = oddsRatio.run(table1[0], table1[1], table1[2], table1[3], 0.95)
self.assertAlmostEqual(lowerCI, 0.676046021596)
self.assertAlmostEqual(upperCI, 5.91675695474)
self.assertAlmostEqual(effectSize, 2.0)
lowerCI, upperCI, effectSize, _ = oddsRatio.run(table2[0], table2[1], table2[2], table2[3], 0.95)
self.assertAlmostEqual(lowerCI, 1.53926774059)
self.assertAlmostEqual(upperCI, 1.6732029238)
self.assertAlmostEqual(effectSize, 1.60483870968)
def testRatioProportions(self):
"""Verify computation of Ratio of proportions CI method"""
from stamp.plugins.samples.confidenceIntervalMethods.RatioProportions import RatioProportions
ratioProportions = RatioProportions(preferences)
# Ground truth calculated by hand
lowerCI, upperCI, effectSize, _ = ratioProportions.run(table1[0], table1[1], table1[2], table1[3], 0.95)
self.assertAlmostEqual(lowerCI, 0.748767825898)
self.assertAlmostEqual(upperCI, 3.70979852726)
self.assertAlmostEqual(effectSize, 1.66666666666666)
lowerCI, upperCI, effectSize, _ = ratioProportions.run(table2[0], table2[1], table2[2], table2[3], 0.95)
self.assertAlmostEqual(lowerCI, 1.53505365781)
self.assertAlmostEqual(upperCI, 1.6676941467)
self.assertAlmostEqual(effectSize, 1.6)
class VerifyMultipleComparisonCorrectionMethods(unittest.TestCase):
pValues = [1e-6, 1e-5, 1e-4, 1e-3, 1e-2, 1e-1]
def testBenjaminiHochbergFDR(self):
"""Verify computation of Bejamini-Hochberg FDR method"""
from stamp.plugins.common.multipleComparisonCorrections.BenjaminiHochbergFDR import BenjaminiHochbergFDR
benjaminiHochbergFDR = BenjaminiHochbergFDR(preferences)
# Ground truth calculated explicitly
qValues = benjaminiHochbergFDR.correct(list(self.pValues), 0.05)
modifier = 1
for i in xrange(0, len(self.pValues)):
self.assertAlmostEqual(qValues[i], self.pValues[i]*len(self.pValues) / modifier)
modifier += 1
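# This mirrors the Benjamini-Hochberg definition q_(i) = p_(i) * m / i for
# p-values sorted ascending (rank i of m tests); with this already-sorted,
# well-separated input the usual step-up monotonicity adjustment never fires.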
def testBonferroni(self):
"""Verify computation of Bonferroni method"""
from stamp.plugins.common.multipleComparisonCorrections.Bonferroni import Bonferroni
bonferroni = Bonferroni(preferences)
# Ground truth calculated explicitly
correctedValues = bonferroni.correct(list(self.pValues), 0.05)
for i in xrange(0, len(self.pValues)):
self.assertAlmostEqual(correctedValues[i], self.pValues[i]*len(self.pValues))
def testHolmBonferroni(self):
"""Verify computation of Holm-Bonferroni method"""
from stamp.plugins.common.multipleComparisonCorrections.additional.HolmBonferroni import HolmBonferroni
holmBonferroni = HolmBonferroni(preferences)
# Ground truth calculated by hand
correctedValues = holmBonferroni.correct(list(self.pValues), 0.05)
self.assertAlmostEqual(correctedValues[0], self.pValues[0])
self.assertAlmostEqual(correctedValues[1], self.pValues[1])
self.assertAlmostEqual(correctedValues[2], self.pValues[2])
self.assertAlmostEqual(correctedValues[3], self.pValues[3])
self.assertAlmostEqual(correctedValues[4], self.pValues[4])
self.assertEqual(correctedValues[5], float('inf'))
def testNoCorrection(self):
"""Verify computation of No multiple comparison correction method"""
from stamp.plugins.common.multipleComparisonCorrections.NoCorrection import NoCorrection
noCorrection = NoCorrection(preferences)
# Ground truth calculated explicitly
correctedValues = noCorrection.correct(list(self.pValues), 0.05)
for i in xrange(0, len(self.pValues)):
self.assertAlmostEqual(correctedValues[i], self.pValues[i])
def testSidak(self):
"""Verify computation of Sidak method"""
from stamp.plugins.common.multipleComparisonCorrections.Sidak import Sidak
sidak = Sidak(preferences)
# Ground truth calculated explicitly
correctedValues = sidak.correct(list(self.pValues), 0.05)
for i in xrange(0, len(self.pValues)):
self.assertAlmostEqual(correctedValues[i], 1.0 - (1.0 - self.pValues[i])**len(self.pValues))
def testStoreyFDR(self):
"""Verify computation of Storey FDR method"""
# This method is based on a bootstrapping approach and as such does not always produce
# identical results. It has been tested against the results given by the qvalue
# R package by Alan Dabney and John Storey (http://cran.r-project.org/web/packages/qvalue/)
pass
class VerifyOther(unittest.TestCase):
def testNormalDist(self):
"""Verify computation of normal distribution methods"""
from stamp.metagenomics.stats.distributions.NormalDist import standardNormalCDF, zScore
self.assertAlmostEqual(standardNormalCDF(-2), 0.022750131948179209)
self.assertAlmostEqual(standardNormalCDF(-1), 0.15865525393145705)
self.assertAlmostEqual(standardNormalCDF(0), 0.5)
self.assertAlmostEqual(standardNormalCDF(1), 0.84134474606854293)
self.assertAlmostEqual(standardNormalCDF(2), 0.97724986805182079)
self.assertAlmostEqual(standardNormalCDF(-1e-6), 1.0 - standardNormalCDF(1e-6))
self.assertAlmostEqual(standardNormalCDF(-1e-12), 1.0 - standardNormalCDF(1e-12))
self.assertAlmostEqual(zScore(0.90), 1.6448536269514722)
self.assertAlmostEqual(zScore(0.95), 1.959963984540054)
self.assertAlmostEqual(zScore(0.98), 2.3263478740408408)
self.assertAlmostEqual(zScore(0.99), 2.5758293035489004)
self.assertAlmostEqual(zScore(0.80), 1.2815515655446004)
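# The zScore values match Phi^-1((1 + c) / 2), i.e. the two-sided critical z
# for confidence level c (e.g. zScore(0.95) = 1.96 = Phi^-1(0.975)).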
if __name__ == "__main__":
unittest.main()
| dparks1134/STAMP | STAMP_test.py | Python | gpl-3.0 | 27,791 |
import h5py
import argparse
import numpy as np
def process_arguments():
parser = argparse.ArgumentParser(description="Heat Equation IC generator")
parser.add_argument('-x', '--cols', type=int, default=31, help='Simulation columns')
parser.add_argument('-y', '--rows', type=int, default=31, help='Simulation rows')
parser.add_argument('-w', '--width', type=float, default=2, help='Simulation domain width')
parser.add_argument('-d', '--depth', type=float, default=2, help='Simulation domain depth (height)')
parser.add_argument('-s', '--sigma', type=float, default=.25, help='Sigma')
parser.add_argument('-n', '--nu', type=float, default=.05, help='Nu')
parser.add_argument('-g', '--generator', type=str, choices=['barbra', 'ones', 'hotcorner'], default='barbra')
parser.add_argument("-of", "--outfile", type=str, required=True, help="Path to data file to write to")
return parser.parse_args()
def ones_gen(rows, cols):
data = np.ones((rows, cols))
return data
def barbra_gen(rows, cols):
data = np.ones((rows, cols))
dx = 2.0 / (cols - 1)
dy = 2.0 / (rows - 1)
data[int(.5/dy):int(1/dy)+1, int(.5/dx):int(1/dx)+1] = 2  # integer indices; float slices are rejected by modern numpy
return data
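# The slice marks the subdomain 0.5 <= x,y <= 1 of the 2x2 grid as a hot patch
# (value 2 on a background of 1). This resembles the hat-function initial
# condition from Lorena Barba's "12 steps to Navier-Stokes" lessons, which the
# generator name presumably nods to.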
def hotcorner_gen(rows, cols):
data = np.zeros((rows, cols))
data[0:rows//3, 0:cols//3] = 2  # floor division keeps integer indices under Python 3
return data
def main():
args = process_arguments()
if args.generator == 'barbra':
data = barbra_gen(args.rows, args.cols)
elif args.generator == 'ones':
data = ones_gen(args.rows, args.cols)
elif args.generator == 'hotcorner':
data = hotcorner_gen(args.rows, args.cols)
with h5py.File(args.outfile, 'w-') as of:
dom = of.create_group('domain')
dom.attrs['width'] = args.width
dom.attrs['depth'] = args.depth
prp = of.create_group('properties')
prp.attrs['sigma'] = args.sigma
prp.attrs['nu'] = args.nu
ds = of.create_dataset('temperature', data=data)
ds.attrs['generator'] = np.string_(args.generator)
if __name__ == '__main__':
main()
| csrhau/castle | testcases/input-gen/h5gen.py | Python | apache-2.0 | 1,940 |
import subprocess
import django
if django.VERSION[0:2] >= (1, 8):
from django.db.backends.base.client import BaseDatabaseClient
else:
from django.db.backends import BaseDatabaseClient
class CassandraDatabaseClient(BaseDatabaseClient):
executable_name = 'cqlsh'
def runshell(self):
settings_dict = self.connection.settings_dict
args = [self.executable_name]
if settings_dict['HOST']:
args.extend([settings_dict['HOST'].split(',')[0]])
if settings_dict['PORT']:
args.extend([str(settings_dict['PORT'])])
if settings_dict['USER']:
args += ["-u", settings_dict['USER']]
args += ["-k", settings_dict['NAME']]
subprocess.call(args)
| paksu/django-cassandra-engine | django_cassandra_engine/base/client.py | Python | bsd-2-clause | 739 |
from __future__ import unicode_literals
import datetime
import pickle
from decimal import Decimal
from operator import attrgetter
from django.core.exceptions import FieldError
from django.contrib.contenttypes.models import ContentType
from django.db.models import Count, Max, Avg, Sum, StdDev, Variance, F, Q
from django.test import TestCase, skipUnlessDBFeature
from django.test.utils import Approximate
from django.utils import six
from .models import (Author, Book, Publisher, Clues, Entries, HardbackBook,
ItemTag, WithManualPK, Alfa, Bravo, Charlie)
class AggregationTests(TestCase):
fixtures = ["aggregation_regress.json"]
def assertObjectAttrs(self, obj, **kwargs):
for attr, value in six.iteritems(kwargs):
self.assertEqual(getattr(obj, attr), value)
def test_aggregates_in_where_clause(self):
"""
Regression test for #12822: DatabaseError: aggregates not allowed in
WHERE clause
Tests that the subselect works and returns results equivalent to a
query with the IDs listed.
Before the corresponding fix for this bug, this test passed in 1.1 and
failed in 1.2-beta (trunk).
"""
qs = Book.objects.values('contact').annotate(Max('id'))
qs = qs.order_by('contact').values_list('id__max', flat=True)
# don't do anything with the queryset (qs) before including it as a
# subquery
books = Book.objects.order_by('id')
qs1 = books.filter(id__in=qs)
qs2 = books.filter(id__in=list(qs))
self.assertEqual(list(qs1), list(qs2))
def test_aggregates_in_where_clause_pre_eval(self):
"""
Regression test for #12822: DatabaseError: aggregates not allowed in
WHERE clause
Same as the above test, but evaluates the queryset for the subquery
before it's used as a subquery.
Before the corresponding fix for this bug, this test failed in both
1.1 and 1.2-beta (trunk).
"""
qs = Book.objects.values('contact').annotate(Max('id'))
qs = qs.order_by('contact').values_list('id__max', flat=True)
# force the queryset (qs) for the subquery to be evaluated in its
# current state
list(qs)
books = Book.objects.order_by('id')
qs1 = books.filter(id__in=qs)
qs2 = books.filter(id__in=list(qs))
self.assertEqual(list(qs1), list(qs2))
@skipUnlessDBFeature('supports_subqueries_in_group_by')
def test_annotate_with_extra(self):
"""
Regression test for #11916: Extra params + aggregation creates
incorrect SQL.
"""
# oracle doesn't support subqueries in group by clause
shortest_book_sql = """
SELECT name
FROM aggregation_regress_book b
WHERE b.publisher_id = aggregation_regress_publisher.id
ORDER BY b.pages
LIMIT 1
"""
# tests that this query does not raise a DatabaseError due to the full
# subselect being (erroneously) added to the GROUP BY parameters
qs = Publisher.objects.extra(select={
'name_of_shortest_book': shortest_book_sql,
}).annotate(total_books=Count('book'))
# force execution of the query
list(qs)
def test_aggregate(self):
# Ordering requests are ignored
self.assertEqual(
Author.objects.order_by("name").aggregate(Avg("age")),
{"age__avg": Approximate(37.444, places=1)}
)
# Implicit ordering is also ignored
self.assertEqual(
Book.objects.aggregate(Sum("pages")),
{"pages__sum": 3703},
)
# Baseline results
self.assertEqual(
Book.objects.aggregate(Sum('pages'), Avg('pages')),
{'pages__sum': 3703, 'pages__avg': Approximate(617.166, places=2)}
)
# Empty values query doesn't affect grouping or results
self.assertEqual(
Book.objects.values().aggregate(Sum('pages'), Avg('pages')),
{'pages__sum': 3703, 'pages__avg': Approximate(617.166, places=2)}
)
# Aggregate overrides extra selected column
self.assertEqual(
Book.objects.extra(select={'price_per_page': 'price / pages'}).aggregate(Sum('pages')),
{'pages__sum': 3703}
)
def test_annotation(self):
# Annotations get combined with extra select clauses
obj = Book.objects.annotate(mean_auth_age=Avg("authors__age")).extra(select={"manufacture_cost": "price * .5"}).get(pk=2)
self.assertObjectAttrs(obj,
contact_id=3,
id=2,
isbn='067232959',
mean_auth_age=45.0,
name='Sams Teach Yourself Django in 24 Hours',
pages=528,
price=Decimal("23.09"),
pubdate=datetime.date(2008, 3, 3),
publisher_id=2,
rating=3.0
)
# Different DB backends return different types for the extra select computation
self.assertTrue(obj.manufacture_cost == 11.545 or obj.manufacture_cost == Decimal('11.545'))
# Order of the annotate/extra in the query doesn't matter
obj = Book.objects.extra(select={'manufacture_cost': 'price * .5'}).annotate(mean_auth_age=Avg('authors__age')).get(pk=2)
self.assertObjectAttrs(obj,
contact_id=3,
id=2,
isbn='067232959',
mean_auth_age=45.0,
name='Sams Teach Yourself Django in 24 Hours',
pages=528,
price=Decimal("23.09"),
pubdate=datetime.date(2008, 3, 3),
publisher_id=2,
rating=3.0
)
# Different DB backends return different types for the extra select computation
self.assertTrue(obj.manufacture_cost == 11.545 or obj.manufacture_cost == Decimal('11.545'))
# Values queries can be combined with annotate and extra
obj = Book.objects.annotate(mean_auth_age=Avg('authors__age')).extra(select={'manufacture_cost': 'price * .5'}).values().get(pk=2)
manufacture_cost = obj['manufacture_cost']
self.assertTrue(manufacture_cost == 11.545 or manufacture_cost == Decimal('11.545'))
del obj['manufacture_cost']
self.assertEqual(obj, {
"contact_id": 3,
"id": 2,
"isbn": "067232959",
"mean_auth_age": 45.0,
"name": "Sams Teach Yourself Django in 24 Hours",
"pages": 528,
"price": Decimal("23.09"),
"pubdate": datetime.date(2008, 3, 3),
"publisher_id": 2,
"rating": 3.0,
})
# The order of the (empty) values, annotate and extra clauses doesn't
# matter
obj = Book.objects.values().annotate(mean_auth_age=Avg('authors__age')).extra(select={'manufacture_cost': 'price * .5'}).get(pk=2)
manufacture_cost = obj['manufacture_cost']
self.assertTrue(manufacture_cost == 11.545 or manufacture_cost == Decimal('11.545'))
del obj['manufacture_cost']
self.assertEqual(obj, {
'contact_id': 3,
'id': 2,
'isbn': '067232959',
'mean_auth_age': 45.0,
'name': 'Sams Teach Yourself Django in 24 Hours',
'pages': 528,
'price': Decimal("23.09"),
'pubdate': datetime.date(2008, 3, 3),
'publisher_id': 2,
'rating': 3.0
})
# If the annotation precedes the values clause, it won't be included
# unless it is explicitly named
obj = Book.objects.annotate(mean_auth_age=Avg('authors__age')).extra(select={'price_per_page': 'price / pages'}).values('name').get(pk=1)
self.assertEqual(obj, {
"name": 'The Definitive Guide to Django: Web Development Done Right',
})
obj = Book.objects.annotate(mean_auth_age=Avg('authors__age')).extra(select={'price_per_page': 'price / pages'}).values('name', 'mean_auth_age').get(pk=1)
self.assertEqual(obj, {
'mean_auth_age': 34.5,
'name': 'The Definitive Guide to Django: Web Development Done Right',
})
# If an annotation isn't included in the values, it can still be used
# in a filter
qs = Book.objects.annotate(n_authors=Count('authors')).values('name').filter(n_authors__gt=2)
self.assertQuerysetEqual(
qs, [
{"name": 'Python Web Development with Django'}
],
lambda b: b,
)
# The annotations are added to values output if values() precedes
# annotate()
obj = Book.objects.values('name').annotate(mean_auth_age=Avg('authors__age')).extra(select={'price_per_page': 'price / pages'}).get(pk=1)
self.assertEqual(obj, {
'mean_auth_age': 34.5,
'name': 'The Definitive Guide to Django: Web Development Done Right',
})
# Check that all of the objects are getting counted (allow_nulls) and
# that values respects the amount of objects
self.assertEqual(
len(Author.objects.annotate(Avg('friends__age')).values()),
9
)
# Check that consecutive calls to annotate accumulate in the query
qs = Book.objects.values('price').annotate(oldest=Max('authors__age')).order_by('oldest', 'price').annotate(Max('publisher__num_awards'))
self.assertQuerysetEqual(
qs, [
{'price': Decimal("30"), 'oldest': 35, 'publisher__num_awards__max': 3},
{'price': Decimal("29.69"), 'oldest': 37, 'publisher__num_awards__max': 7},
{'price': Decimal("23.09"), 'oldest': 45, 'publisher__num_awards__max': 1},
{'price': Decimal("75"), 'oldest': 57, 'publisher__num_awards__max': 9},
{'price': Decimal("82.8"), 'oldest': 57, 'publisher__num_awards__max': 7}
],
lambda b: b,
)
def test_aggregate_annotation(self):
# Aggregates can be composed over annotations.
# The return type is derived from the composed aggregate
vals = Book.objects.all().annotate(num_authors=Count('authors__id')).aggregate(Max('pages'), Max('price'), Sum('num_authors'), Avg('num_authors'))
self.assertEqual(vals, {
'num_authors__sum': 10,
'num_authors__avg': Approximate(1.666, places=2),
'pages__max': 1132,
'price__max': Decimal("82.80")
})
# Regression for #15624 - Missing SELECT columns when using values, annotate
# and aggregate in a single query
self.assertEqual(
Book.objects.annotate(c=Count('authors')).values('c').aggregate(Max('c')),
{'c__max': 3}
)
def test_field_error(self):
# Bad field requests in aggregates are caught and reported
self.assertRaises(
FieldError,
lambda: Book.objects.all().aggregate(num_authors=Count('foo'))
)
self.assertRaises(
FieldError,
lambda: Book.objects.all().annotate(num_authors=Count('foo'))
)
self.assertRaises(
FieldError,
lambda: Book.objects.all().annotate(num_authors=Count('authors__id')).aggregate(Max('foo'))
)
def test_more(self):
# Old-style count aggregations can be mixed with new-style
self.assertEqual(
Book.objects.annotate(num_authors=Count('authors')).count(),
6
)
# Non-ordinal, non-computed Aggregates over annotations correctly
# inherit the annotation's internal type if the annotation is ordinal
# or computed
vals = Book.objects.annotate(num_authors=Count('authors')).aggregate(Max('num_authors'))
self.assertEqual(
vals,
{'num_authors__max': 3}
)
vals = Publisher.objects.annotate(avg_price=Avg('book__price')).aggregate(Max('avg_price'))
self.assertEqual(
vals,
{'avg_price__max': 75.0}
)
# Aliases are quoted to protect aliases that might be reserved names
vals = Book.objects.aggregate(number=Max('pages'), select=Max('pages'))
self.assertEqual(
vals,
{'number': 1132, 'select': 1132}
)
# Regression for #10064: select_related() plays nice with aggregates
obj = Book.objects.select_related('publisher').annotate(num_authors=Count('authors')).values()[0]
self.assertEqual(obj, {
'contact_id': 8,
'id': 5,
'isbn': '013790395',
'name': 'Artificial Intelligence: A Modern Approach',
'num_authors': 2,
'pages': 1132,
'price': Decimal("82.8"),
'pubdate': datetime.date(1995, 1, 15),
'publisher_id': 3,
'rating': 4.0,
})
# Regression for #10010: exclude on an aggregate field is correctly
# negated
self.assertEqual(
len(Book.objects.annotate(num_authors=Count('authors'))),
6
)
self.assertEqual(
len(Book.objects.annotate(num_authors=Count('authors')).filter(num_authors__gt=2)),
1
)
self.assertEqual(
len(Book.objects.annotate(num_authors=Count('authors')).exclude(num_authors__gt=2)),
5
)
self.assertEqual(
len(Book.objects.annotate(num_authors=Count('authors')).filter(num_authors__lt=3).exclude(num_authors__lt=2)),
2
)
self.assertEqual(
len(Book.objects.annotate(num_authors=Count('authors')).exclude(num_authors__lt=2).filter(num_authors__lt=3)),
2
)
def test_aggregate_fexpr(self):
# Aggregates can be used with F() expressions
# ... where the F() is pushed into the HAVING clause
qs = Publisher.objects.annotate(num_books=Count('book')).filter(num_books__lt=F('num_awards') / 2).order_by('name').values('name', 'num_books', 'num_awards')
self.assertQuerysetEqual(
qs, [
{'num_books': 1, 'name': 'Morgan Kaufmann', 'num_awards': 9},
{'num_books': 2, 'name': 'Prentice Hall', 'num_awards': 7}
],
lambda p: p,
)
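# The generated SQL is roughly (a sketch, not the exact compiler output):
#   SELECT name, COUNT(book.id) AS num_books, num_awards FROM publisher
#   LEFT JOIN book ON ... GROUP BY ... HAVING COUNT(book.id) < num_awards / 2
# i.e. a comparison against an aggregate cannot live in WHERE, so it lands
# in HAVING.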
qs = Publisher.objects.annotate(num_books=Count('book')).exclude(num_books__lt=F('num_awards') / 2).order_by('name').values('name', 'num_books', 'num_awards')
self.assertQuerysetEqual(
qs, [
{'num_books': 2, 'name': 'Apress', 'num_awards': 3},
{'num_books': 0, 'name': "Jonno's House of Books", 'num_awards': 0},
{'num_books': 1, 'name': 'Sams', 'num_awards': 1}
],
lambda p: p,
)
# ... and where the F() references an aggregate
qs = Publisher.objects.annotate(num_books=Count('book')).filter(num_awards__gt=2 * F('num_books')).order_by('name').values('name', 'num_books', 'num_awards')
self.assertQuerysetEqual(
qs, [
{'num_books': 1, 'name': 'Morgan Kaufmann', 'num_awards': 9},
{'num_books': 2, 'name': 'Prentice Hall', 'num_awards': 7}
],
lambda p: p,
)
qs = Publisher.objects.annotate(num_books=Count('book')).exclude(num_awards__gt=2 * F('num_books')).order_by('name').values('name', 'num_books', 'num_awards')
self.assertQuerysetEqual(
qs, [
{'num_books': 2, 'name': 'Apress', 'num_awards': 3},
{'num_books': 0, 'name': "Jonno's House of Books", 'num_awards': 0},
{'num_books': 1, 'name': 'Sams', 'num_awards': 1}
],
lambda p: p,
)
def test_db_col_table(self):
# Tests on fields with non-default table and column names.
qs = Clues.objects.values('EntryID__Entry').annotate(Appearances=Count('EntryID'), Distinct_Clues=Count('Clue', distinct=True))
self.assertQuerysetEqual(qs, [])
qs = Entries.objects.annotate(clue_count=Count('clues__ID'))
self.assertQuerysetEqual(qs, [])
def test_boolean_conversion(self):
# Aggregates mixed up ordering of columns for backend's convert_values
# method. Refs #21126.
e = Entries.objects.create(Entry='foo')
c = Clues.objects.create(EntryID=e, Clue='bar')
qs = Clues.objects.select_related('EntryID').annotate(Count('ID'))
self.assertQuerysetEqual(
qs, [c], lambda x: x)
self.assertEqual(qs[0].EntryID, e)
self.assertIs(qs[0].EntryID.Exclude, False)
def test_empty(self):
# Regression for #10089: Check handling of empty result sets with
# aggregates
self.assertEqual(
Book.objects.filter(id__in=[]).count(),
0
)
vals = Book.objects.filter(id__in=[]).aggregate(num_authors=Count('authors'), avg_authors=Avg('authors'), max_authors=Max('authors'), max_price=Max('price'), max_rating=Max('rating'))
self.assertEqual(
vals,
{'max_authors': None, 'max_rating': None, 'num_authors': 0, 'avg_authors': None, 'max_price': None}
)
qs = Publisher.objects.filter(pk=5).annotate(num_authors=Count('book__authors'), avg_authors=Avg('book__authors'), max_authors=Max('book__authors'), max_price=Max('book__price'), max_rating=Max('book__rating')).values()
self.assertQuerysetEqual(
qs, [
{'max_authors': None, 'name': "Jonno's House of Books", 'num_awards': 0, 'max_price': None, 'num_authors': 0, 'max_rating': None, 'id': 5, 'avg_authors': None}
],
lambda p: p
)
def test_more_more(self):
# Regression for #10113 - Fields mentioned in order_by() must be
# included in the GROUP BY. This only becomes a problem when the
# order_by introduces a new join.
self.assertQuerysetEqual(
Book.objects.annotate(num_authors=Count('authors')).order_by('publisher__name', 'name'), [
"Practical Django Projects",
"The Definitive Guide to Django: Web Development Done Right",
"Paradigms of Artificial Intelligence Programming: Case Studies in Common Lisp",
"Artificial Intelligence: A Modern Approach",
"Python Web Development with Django",
"Sams Teach Yourself Django in 24 Hours",
],
lambda b: b.name
)
# Regression for #10127 - Empty select_related() works with annotate
qs = Book.objects.filter(rating__lt=4.5).select_related().annotate(Avg('authors__age'))
self.assertQuerysetEqual(
qs, [
('Artificial Intelligence: A Modern Approach', 51.5, 'Prentice Hall', 'Peter Norvig'),
('Practical Django Projects', 29.0, 'Apress', 'James Bennett'),
('Python Web Development with Django', Approximate(30.333, places=2), 'Prentice Hall', 'Jeffrey Forcier'),
('Sams Teach Yourself Django in 24 Hours', 45.0, 'Sams', 'Brad Dayley')
],
lambda b: (b.name, b.authors__age__avg, b.publisher.name, b.contact.name)
)
# Regression for #10132 - If the values() clause only mentioned extra
# (select=) columns, those columns are used for grouping
qs = Book.objects.extra(select={'pub': 'publisher_id'}).values('pub').annotate(Count('id')).order_by('pub')
self.assertQuerysetEqual(
qs, [
{'pub': 1, 'id__count': 2},
{'pub': 2, 'id__count': 1},
{'pub': 3, 'id__count': 2},
{'pub': 4, 'id__count': 1}
],
lambda b: b
)
qs = Book.objects.extra(select={'pub': 'publisher_id', 'foo': 'pages'}).values('pub').annotate(Count('id')).order_by('pub')
self.assertQuerysetEqual(
qs, [
{'pub': 1, 'id__count': 2},
{'pub': 2, 'id__count': 1},
{'pub': 3, 'id__count': 2},
{'pub': 4, 'id__count': 1}
],
lambda b: b
)
# Regression for #10182 - Queries with aggregate calls are correctly
# re-aliased when used in a subquery
ids = Book.objects.filter(pages__gt=100).annotate(n_authors=Count('authors')).filter(n_authors__gt=2).order_by('n_authors')
self.assertQuerysetEqual(
Book.objects.filter(id__in=ids), [
"Python Web Development with Django",
],
lambda b: b.name
)
# Regression for #15709 - Ensure each group_by field only exists once
# per query
qs = Book.objects.values('publisher').annotate(max_pages=Max('pages')).order_by()
grouping, gb_params = qs.query.get_compiler(qs.db).get_grouping([], [])
self.assertEqual(len(grouping), 1)
def test_duplicate_alias(self):
# Regression for #11256 - duplicating a default alias raises ValueError.
self.assertRaises(ValueError, Book.objects.all().annotate, Avg('authors__age'), authors__age__avg=Avg('authors__age'))
def test_field_name_conflict(self):
# Regression for #11256 - providing an aggregate name that conflicts with a field name on the model raises ValueError
self.assertRaises(ValueError, Author.objects.annotate, age=Avg('friends__age'))
def test_m2m_name_conflict(self):
# Regression for #11256 - providing an aggregate name that conflicts with an m2m name on the model raises ValueError
self.assertRaises(ValueError, Author.objects.annotate, friends=Count('friends'))
def test_values_queryset_non_conflict(self):
# Regression for #14707 -- If you're using a values query set, some potential conflicts are avoided.
# age is a field on Author, so it shouldn't be allowed as an aggregate.
# But age isn't included in the ValuesQuerySet, so it is allowed.
results = Author.objects.values('name').annotate(age=Count('book_contact_set')).order_by('name')
self.assertEqual(len(results), 9)
self.assertEqual(results[0]['name'], 'Adrian Holovaty')
self.assertEqual(results[0]['age'], 1)
# Same problem, but aggregating over m2m fields
results = Author.objects.values('name').annotate(age=Avg('friends__age')).order_by('name')
self.assertEqual(len(results), 9)
self.assertEqual(results[0]['name'], 'Adrian Holovaty')
self.assertEqual(results[0]['age'], 32.0)
# Same problem, but colliding with an m2m field
results = Author.objects.values('name').annotate(friends=Count('friends')).order_by('name')
self.assertEqual(len(results), 9)
self.assertEqual(results[0]['name'], 'Adrian Holovaty')
self.assertEqual(results[0]['friends'], 2)
def test_reverse_relation_name_conflict(self):
# Regression for #11256 - providing an aggregate name that conflicts with a reverse-related name on the model raises ValueError
self.assertRaises(ValueError, Author.objects.annotate, book_contact_set=Avg('friends__age'))
def test_pickle(self):
# Regression for #10197 -- Queries with aggregates can be pickled.
# First check that pickling is possible at all. No crash = success
qs = Book.objects.annotate(num_authors=Count('authors'))
pickle.dumps(qs)
# Then check that the round trip works.
query = qs.query.get_compiler(qs.db).as_sql()[0]
qs2 = pickle.loads(pickle.dumps(qs))
self.assertEqual(
qs2.query.get_compiler(qs2.db).as_sql()[0],
query,
)
def test_more_more_more(self):
# Regression for #10199 - Aggregate calls clone the original query so
# the original query can still be used
books = Book.objects.all()
books.aggregate(Avg("authors__age"))
self.assertQuerysetEqual(
books.all(), [
'Artificial Intelligence: A Modern Approach',
'Paradigms of Artificial Intelligence Programming: Case Studies in Common Lisp',
'Practical Django Projects',
'Python Web Development with Django',
'Sams Teach Yourself Django in 24 Hours',
'The Definitive Guide to Django: Web Development Done Right'
],
lambda b: b.name
)
# Regression for #10248 - Annotations work with DateQuerySets
qs = Book.objects.annotate(num_authors=Count('authors')).filter(num_authors=2).dates('pubdate', 'day')
self.assertQuerysetEqual(
qs, [
datetime.date(1995, 1, 15),
datetime.date(2007, 12, 6),
],
lambda b: b
)
# Regression for #10290 - extra selects with parameters can be used for
# grouping.
qs = Book.objects.annotate(mean_auth_age=Avg('authors__age')).extra(select={'sheets': '(pages + %s) / %s'}, select_params=[1, 2]).order_by('sheets').values('sheets')
self.assertQuerysetEqual(
qs, [
150,
175,
224,
264,
473,
566
],
lambda b: int(b["sheets"])
)
# Regression for 10425 - annotations don't get in the way of a count()
# clause
self.assertEqual(
Book.objects.values('publisher').annotate(Count('publisher')).count(),
4
)
self.assertEqual(
Book.objects.annotate(Count('publisher')).values('publisher').count(),
6
)
# Note: intentionally no order_by(), that case needs tests, too.
publishers = Publisher.objects.filter(id__in=[1, 2])
self.assertEqual(
sorted(p.name for p in publishers),
[
"Apress",
"Sams"
]
)
publishers = publishers.annotate(n_books=Count("book"))
sorted_publishers = sorted(publishers, key=lambda x: x.name)
self.assertEqual(
sorted_publishers[0].n_books,
2
)
self.assertEqual(
sorted_publishers[1].n_books,
1
)
self.assertEqual(
sorted(p.name for p in publishers),
[
"Apress",
"Sams"
]
)
books = Book.objects.filter(publisher__in=publishers)
self.assertQuerysetEqual(
books, [
"Practical Django Projects",
"Sams Teach Yourself Django in 24 Hours",
"The Definitive Guide to Django: Web Development Done Right",
],
lambda b: b.name
)
self.assertEqual(
sorted(p.name for p in publishers),
[
"Apress",
"Sams"
]
)
# Regression for 10666 - inherited fields work with annotations and
# aggregations
self.assertEqual(
HardbackBook.objects.aggregate(n_pages=Sum('book_ptr__pages')),
{'n_pages': 2078}
)
self.assertEqual(
HardbackBook.objects.aggregate(n_pages=Sum('pages')),
{'n_pages': 2078},
)
qs = HardbackBook.objects.annotate(n_authors=Count('book_ptr__authors')).values('name', 'n_authors')
self.assertQuerysetEqual(
qs, [
{'n_authors': 2, 'name': 'Artificial Intelligence: A Modern Approach'},
{'n_authors': 1, 'name': 'Paradigms of Artificial Intelligence Programming: Case Studies in Common Lisp'}
],
lambda h: h
)
qs = HardbackBook.objects.annotate(n_authors=Count('authors')).values('name', 'n_authors')
self.assertQuerysetEqual(
qs, [
{'n_authors': 2, 'name': 'Artificial Intelligence: A Modern Approach'},
{'n_authors': 1, 'name': 'Paradigms of Artificial Intelligence Programming: Case Studies in Common Lisp'}
],
lambda h: h,
)
# Regression for #10766 - Shouldn't be able to reference an aggregate
# fields in an aggregate() call.
self.assertRaises(
FieldError,
lambda: Book.objects.annotate(mean_age=Avg('authors__age')).annotate(Avg('mean_age'))
)
def test_empty_filter_count(self):
self.assertEqual(
Author.objects.filter(id__in=[]).annotate(Count("friends")).count(),
0
)
def test_empty_filter_aggregate(self):
self.assertEqual(
Author.objects.filter(id__in=[]).annotate(Count("friends")).aggregate(Count("pk")),
{"pk__count": None}
)
def test_none_call_before_aggregate(self):
# Regression for #11789
self.assertEqual(
Author.objects.none().aggregate(Avg('age')),
{'age__avg': None}
)
def test_annotate_and_join(self):
self.assertEqual(
Author.objects.annotate(c=Count("friends__name")).exclude(friends__name="Joe").count(),
Author.objects.count()
)
def test_f_expression_annotation(self):
# Books with less than 200 pages per author.
qs = Book.objects.values("name").annotate(
n_authors=Count("authors")
).filter(
pages__lt=F("n_authors") * 200
).values_list("pk")
self.assertQuerysetEqual(
Book.objects.filter(pk__in=qs), [
"Python Web Development with Django"
],
attrgetter("name")
)
def test_values_annotate_values(self):
qs = Book.objects.values("name").annotate(
n_authors=Count("authors")
).values_list("pk", flat=True)
self.assertEqual(list(qs), list(Book.objects.values_list("pk", flat=True)))
def test_having_group_by(self):
# Test that when a field occurs on the LHS of a HAVING clause that it
# appears correctly in the GROUP BY clause
qs = Book.objects.values_list("name").annotate(
n_authors=Count("authors")
).filter(
pages__gt=F("n_authors")
).values_list("name", flat=True)
# Results should be the same, all Books have more pages than authors
self.assertEqual(
list(qs), list(Book.objects.values_list("name", flat=True))
)
def test_annotation_disjunction(self):
qs = Book.objects.annotate(n_authors=Count("authors")).filter(
Q(n_authors=2) | Q(name="Python Web Development with Django")
)
self.assertQuerysetEqual(
qs, [
"Artificial Intelligence: A Modern Approach",
"Python Web Development with Django",
"The Definitive Guide to Django: Web Development Done Right",
],
attrgetter("name")
)
qs = Book.objects.annotate(n_authors=Count("authors")).filter(
Q(name="The Definitive Guide to Django: Web Development Done Right") | (Q(name="Artificial Intelligence: A Modern Approach") & Q(n_authors=3))
)
self.assertQuerysetEqual(
qs, [
"The Definitive Guide to Django: Web Development Done Right",
],
attrgetter("name")
)
qs = Publisher.objects.annotate(
rating_sum=Sum("book__rating"),
book_count=Count("book")
).filter(
Q(rating_sum__gt=5.5) | Q(rating_sum__isnull=True)
).order_by('pk')
self.assertQuerysetEqual(
qs, [
"Apress",
"Prentice Hall",
"Jonno's House of Books",
],
attrgetter("name")
)
qs = Publisher.objects.annotate(
rating_sum=Sum("book__rating"),
book_count=Count("book")
).filter(
Q(pk__lt=F("book_count")) | Q(rating_sum=None)
).order_by("pk")
self.assertQuerysetEqual(
qs, [
"Apress",
"Jonno's House of Books",
],
attrgetter("name")
)
def test_quoting_aggregate_order_by(self):
qs = Book.objects.filter(
name="Python Web Development with Django"
).annotate(
authorCount=Count("authors")
).order_by("authorCount")
self.assertQuerysetEqual(
qs, [
("Python Web Development with Django", 3),
],
lambda b: (b.name, b.authorCount)
)
@skipUnlessDBFeature('supports_stddev')
def test_stddev(self):
self.assertEqual(
Book.objects.aggregate(StdDev('pages')),
{'pages__stddev': Approximate(311.46, 1)}
)
self.assertEqual(
Book.objects.aggregate(StdDev('rating')),
{'rating__stddev': Approximate(0.60, 1)}
)
self.assertEqual(
Book.objects.aggregate(StdDev('price')),
{'price__stddev': Approximate(24.16, 2)}
)
self.assertEqual(
Book.objects.aggregate(StdDev('pages', sample=True)),
{'pages__stddev': Approximate(341.19, 2)}
)
self.assertEqual(
Book.objects.aggregate(StdDev('rating', sample=True)),
{'rating__stddev': Approximate(0.66, 2)}
)
self.assertEqual(
Book.objects.aggregate(StdDev('price', sample=True)),
{'price__stddev': Approximate(26.46, 1)}
)
self.assertEqual(
Book.objects.aggregate(Variance('pages')),
{'pages__variance': Approximate(97010.80, 1)}
)
self.assertEqual(
Book.objects.aggregate(Variance('rating')),
{'rating__variance': Approximate(0.36, 1)}
)
self.assertEqual(
Book.objects.aggregate(Variance('price')),
{'price__variance': Approximate(583.77, 1)}
)
self.assertEqual(
Book.objects.aggregate(Variance('pages', sample=True)),
{'pages__variance': Approximate(116412.96, 1)}
)
self.assertEqual(
Book.objects.aggregate(Variance('rating', sample=True)),
{'rating__variance': Approximate(0.44, 2)}
)
self.assertEqual(
Book.objects.aggregate(Variance('price', sample=True)),
{'price__variance': Approximate(700.53, 2)}
)
def test_filtering_by_annotation_name(self):
# Regression test for #14476
        # The explicitly provided annotation name in this case
        # poses no problem
qs = Author.objects.annotate(book_cnt=Count('book')).filter(book_cnt=2).order_by('name')
self.assertQuerysetEqual(
qs,
['Peter Norvig'],
lambda b: b.name
)
# Neither in this case
qs = Author.objects.annotate(book_count=Count('book')).filter(book_count=2).order_by('name')
self.assertQuerysetEqual(
qs,
['Peter Norvig'],
lambda b: b.name
)
# This case used to fail because the ORM couldn't resolve the
# automatically generated annotation name `book__count`
qs = Author.objects.annotate(Count('book')).filter(book__count=2).order_by('name')
self.assertQuerysetEqual(
qs,
['Peter Norvig'],
lambda b: b.name
)
def test_type_conversion(self):
        # The database backend convert_values function should not try to convert
        # CharFields to float. Refs #13844.
from django.db.models import CharField
from django.db import connection
testData = 'not_a_float_value'
testField = CharField()
self.assertEqual(
connection.ops.convert_values(testData, testField),
testData
)
def test_annotate_joins(self):
"""
        Test that the base table's join isn't promoted to LOUTER. This could
        cause the query generation to fail if there is also an exclude() for
        an fk field in the query. Refs #19087.
"""
qs = Book.objects.annotate(n=Count('pk'))
self.assertIs(qs.query.alias_map['aggregation_regress_book'].join_type, None)
# Check that the query executes without problems.
self.assertEqual(len(qs.exclude(publisher=-1)), 6)
@skipUnlessDBFeature("allows_group_by_pk")
def test_aggregate_duplicate_columns(self):
# Regression test for #17144
results = Author.objects.annotate(num_contacts=Count('book_contact_set'))
# There should only be one GROUP BY clause, for the `id` column.
# `name` and `age` should not be grouped on.
grouping, gb_params = results.query.get_compiler(using='default').get_grouping([], [])
self.assertEqual(len(grouping), 1)
assert 'id' in grouping[0]
assert 'name' not in grouping[0]
assert 'age' not in grouping[0]
# The query group_by property should also only show the `id`.
self.assertEqual(results.query.group_by, [('aggregation_regress_author', 'id')])
# Ensure that we get correct results.
self.assertEqual(
[(a.name, a.num_contacts) for a in results.order_by('name')],
[
('Adrian Holovaty', 1),
('Brad Dayley', 1),
('Jacob Kaplan-Moss', 0),
('James Bennett', 1),
('Jeffrey Forcier', 1),
('Paul Bissex', 0),
('Peter Norvig', 2),
('Stuart Russell', 0),
('Wesley J. Chun', 0),
]
)
@skipUnlessDBFeature("allows_group_by_pk")
def test_aggregate_duplicate_columns_only(self):
# Works with only() too.
results = Author.objects.only('id', 'name').annotate(num_contacts=Count('book_contact_set'))
grouping, gb_params = results.query.get_compiler(using='default').get_grouping([], [])
self.assertEqual(len(grouping), 1)
assert 'id' in grouping[0]
assert 'name' not in grouping[0]
assert 'age' not in grouping[0]
# The query group_by property should also only show the `id`.
self.assertEqual(results.query.group_by, [('aggregation_regress_author', 'id')])
# Ensure that we get correct results.
self.assertEqual(
[(a.name, a.num_contacts) for a in results.order_by('name')],
[
('Adrian Holovaty', 1),
('Brad Dayley', 1),
('Jacob Kaplan-Moss', 0),
('James Bennett', 1),
('Jeffrey Forcier', 1),
('Paul Bissex', 0),
('Peter Norvig', 2),
('Stuart Russell', 0),
('Wesley J. Chun', 0),
]
)
@skipUnlessDBFeature("allows_group_by_pk")
def test_aggregate_duplicate_columns_select_related(self):
# And select_related()
results = Book.objects.select_related('contact').annotate(
num_authors=Count('authors'))
grouping, gb_params = results.query.get_compiler(using='default').get_grouping([], [])
self.assertEqual(len(grouping), 1)
assert 'id' in grouping[0]
assert 'name' not in grouping[0]
assert 'contact' not in grouping[0]
# The query group_by property should also only show the `id`.
self.assertEqual(results.query.group_by, [('aggregation_regress_book', 'id')])
# Ensure that we get correct results.
self.assertEqual(
[(b.name, b.num_authors) for b in results.order_by('name')],
[
('Artificial Intelligence: A Modern Approach', 2),
('Paradigms of Artificial Intelligence Programming: Case Studies in Common Lisp', 1),
('Practical Django Projects', 1),
('Python Web Development with Django', 3),
('Sams Teach Yourself Django in 24 Hours', 1),
('The Definitive Guide to Django: Web Development Done Right', 2)
]
)
def test_reverse_join_trimming(self):
qs = Author.objects.annotate(Count('book_contact_set__contact'))
self.assertIn(' JOIN ', str(qs.query))
def test_aggregation_with_generic_reverse_relation(self):
"""
        Regression test for #10870: Aggregates with joins ignore extra
        filters provided by setup_joins.
        Tests aggregations with generic reverse relations.
"""
django_book = Book.objects.get(name='Practical Django Projects')
ItemTag.objects.create(object_id=django_book.id, tag='intermediate',
content_type=ContentType.objects.get_for_model(django_book))
ItemTag.objects.create(object_id=django_book.id, tag='django',
content_type=ContentType.objects.get_for_model(django_book))
        # Assign a tag to a model with the same PK as the book above. If the
        # JOIN used in the aggregation doesn't include the content type in its
        # condition, the annotation will also count the 'hi mom' tag for the book.
wmpk = WithManualPK.objects.create(id=django_book.pk)
ItemTag.objects.create(object_id=wmpk.id, tag='hi mom',
content_type=ContentType.objects.get_for_model(wmpk))
ai_book = Book.objects.get(name__startswith='Paradigms of Artificial Intelligence')
ItemTag.objects.create(object_id=ai_book.id, tag='intermediate',
content_type=ContentType.objects.get_for_model(ai_book))
self.assertEqual(Book.objects.aggregate(Count('tags')), {'tags__count': 3})
results = Book.objects.annotate(Count('tags')).order_by('-tags__count', 'name')
self.assertEqual(
[(b.name, b.tags__count) for b in results],
[
('Practical Django Projects', 2),
('Paradigms of Artificial Intelligence Programming: Case Studies in Common Lisp', 1),
('Artificial Intelligence: A Modern Approach', 0),
('Python Web Development with Django', 0),
('Sams Teach Yourself Django in 24 Hours', 0),
('The Definitive Guide to Django: Web Development Done Right', 0)
]
)
def test_negated_aggregation(self):
expected_results = Author.objects.exclude(
pk__in=Author.objects.annotate(book_cnt=Count('book')).filter(book_cnt=2)
).order_by('name')
expected_results = [a.name for a in expected_results]
qs = Author.objects.annotate(book_cnt=Count('book')).exclude(
Q(book_cnt=2), Q(book_cnt=2)).order_by('name')
self.assertQuerysetEqual(
qs,
expected_results,
lambda b: b.name
)
expected_results = Author.objects.exclude(
pk__in=Author.objects.annotate(book_cnt=Count('book')).filter(book_cnt=2)
).order_by('name')
expected_results = [a.name for a in expected_results]
qs = Author.objects.annotate(book_cnt=Count('book')).exclude(Q(book_cnt=2) | Q(book_cnt=2)).order_by('name')
self.assertQuerysetEqual(
qs,
expected_results,
lambda b: b.name
)
def test_name_filters(self):
qs = Author.objects.annotate(Count('book')).filter(
Q(book__count__exact=2) | Q(name='Adrian Holovaty')
).order_by('name')
self.assertQuerysetEqual(
qs,
['Adrian Holovaty', 'Peter Norvig'],
lambda b: b.name
)
def test_name_expressions(self):
        # Test that aggregates are spotted correctly from F objects.
# Note that Adrian's age is 34 in the fixtures, and he has one book
# so both conditions match one author.
qs = Author.objects.annotate(Count('book')).filter(
Q(name='Peter Norvig') | Q(age=F('book__count') + 33)
).order_by('name')
self.assertQuerysetEqual(
qs,
['Adrian Holovaty', 'Peter Norvig'],
lambda b: b.name
)
def test_ticket_11293(self):
q1 = Q(price__gt=50)
q2 = Q(authors__count__gt=1)
query = Book.objects.annotate(Count('authors')).filter(
q1 | q2).order_by('pk')
self.assertQuerysetEqual(
query, [1, 4, 5, 6],
lambda b: b.pk)
def test_ticket_11293_q_immutable(self):
"""
        Check that splitting a Q object into parts for where/having doesn't
        alter the original Q object.
"""
q1 = Q(isbn='')
q2 = Q(authors__count__gt=1)
query = Book.objects.annotate(Count('authors'))
query.filter(q1 | q2)
self.assertEqual(len(q2.children), 1)
def test_fobj_group_by(self):
"""
        Check that an F() object referring to a related column works correctly
in group by.
"""
qs = Book.objects.annotate(
acount=Count('authors')
).filter(
acount=F('publisher__num_awards')
)
self.assertQuerysetEqual(
qs, ['Sams Teach Yourself Django in 24 Hours'],
lambda b: b.name)
def test_annotate_reserved_word(self):
"""
Regression #18333 - Ensure annotated column name is properly quoted.
"""
vals = Book.objects.annotate(select=Count('authors__id')).aggregate(Sum('select'), Avg('select'))
self.assertEqual(vals, {
'select__sum': 10,
'select__avg': Approximate(1.666, places=2),
})
class JoinPromotionTests(TestCase):
def test_ticket_21150(self):
b = Bravo.objects.create()
c = Charlie.objects.create(bravo=b)
qs = Charlie.objects.select_related('alfa').annotate(Count('bravo__charlie'))
self.assertQuerysetEqual(
qs, [c], lambda x: x)
self.assertIs(qs[0].alfa, None)
a = Alfa.objects.create()
c.alfa = a
c.save()
# Force re-evaluation
qs = qs.all()
self.assertQuerysetEqual(
qs, [c], lambda x: x)
self.assertEqual(qs[0].alfa, a)
def test_existing_join_not_promoted(self):
# No promotion for existing joins
qs = Charlie.objects.filter(alfa__name__isnull=False).annotate(Count('alfa__name'))
self.assertTrue(' INNER JOIN ' in str(qs.query))
        # Also, the existing join is demoted when filtering on an already
        # promoted join.
qs = Charlie.objects.annotate(Count('alfa__name')).filter(alfa__name__isnull=False)
self.assertTrue(' INNER JOIN ' in str(qs.query))
        # But, as the join is nullable, its first use by annotate() will be LOUTER
qs = Charlie.objects.annotate(Count('alfa__name'))
self.assertTrue(' LEFT OUTER JOIN ' in str(qs.query))
def test_non_nullable_fk_not_promoted(self):
qs = Book.objects.annotate(Count('contact__name'))
self.assertTrue(' INNER JOIN ' in str(qs.query))
| rogerhu/django | tests/aggregation_regress/tests.py | Python | bsd-3-clause | 47,311 |
# -*- coding: utf-8 -*-
# Generated by Django 1.9.4 on 2018-03-23 18:33
from __future__ import unicode_literals
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
('d4s2_api', '0016_email_group_to_set'),
]
operations = [
migrations.AlterUniqueTogether(
name='emailtemplate',
unique_together=set([('template_set', 'template_type')]),
),
migrations.RemoveField(
model_name='historicalemailtemplate',
name='group',
),
migrations.AlterField(
model_name='emailtemplate',
name='template_set',
field=models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='d4s2_api.EmailTemplateSet'),
),
migrations.RemoveField(
model_name='emailtemplate',
name='group',
),
]
| Duke-GCB/DukeDSHandoverService | d4s2_api/migrations/0017_auto_20180323_1833.py | Python | mit | 944 |
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
#
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
# --------------------------------------------------------------------------
from .resource import Resource
class Schema(Resource):
"""Represents a database schema.
Variables are only populated by the server, and will be ignored when
sending a request.
:ivar name: Resource name
:vartype name: str
:ivar id: Resource ID
:vartype id: str
:ivar type: Resource type
:vartype type: str
:param location: Resource location
:type location: str
:param tags: Resource tags
:type tags: dict
:ivar tables: The tables from this database.
:vartype tables: list of :class:`Table <azure.mgmt.sql.models.Table>`
"""
_validation = {
'name': {'readonly': True},
'id': {'readonly': True},
'type': {'readonly': True},
'location': {'required': True},
'tables': {'readonly': True},
}
_attribute_map = {
'name': {'key': 'name', 'type': 'str'},
'id': {'key': 'id', 'type': 'str'},
'type': {'key': 'type', 'type': 'str'},
'location': {'key': 'location', 'type': 'str'},
'tags': {'key': 'tags', 'type': '{str}'},
'tables': {'key': 'properties.tables', 'type': '[Table]'},
}
def __init__(self, location, tags=None):
super(Schema, self).__init__(location=location, tags=tags)
self.tables = None
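# Illustrative usage sketch, not part of the generated model (the location and
# tag values below are assumptions): Schema objects are normally returned by
# the service, so only the writable Resource fields are set at construction.
#
#   schema = Schema(location='westus', tags={'env': 'dev'})
#   schema.tables   # read-only; populated by the server, None until then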
| rjschwei/azure-sdk-for-python | azure-mgmt-sql/azure/mgmt/sql/models/schema.py | Python | mit | 1,759 |
# -*- coding: utf-8 -*-
# Copyright (C) 2006-2008 Vodafone España, S.A.
# Copyright (C) 2008-2009 Warp Networks, S.L.
# Author: Pablo Martí
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License along
# with this program; if not, write to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
"""
setuptools file for Wader
"""
from os.path import join, isdir, walk
import sys
from distutils.core import Extension
from setuptools import setup
from wader.common.consts import (APP_VERSION, APP_NAME,
APP_SLUG_NAME, BASE_DIR)
DATA_DIR = join(BASE_DIR, 'usr', 'share', APP_SLUG_NAME)
BIN_DIR = join(BASE_DIR, 'usr', 'bin')
RESOURCES = join(DATA_DIR, 'resources')
DBUS_SYSTEMD = join(BASE_DIR, 'etc', 'dbus-1', 'system.d')
DBUS_SYSTEM_SERVICES = join(BASE_DIR, 'usr', 'share', 'dbus-1',
'system-services')
UDEV_RULESD = join(BASE_DIR, 'lib', 'udev', 'rules.d')
def list_files(path, exclude=None):
result = []
def walk_callback(arg, directory, files):
for ext in ['.svn', '.git']:
if ext in files:
files.remove(ext)
if exclude:
for f in files:
if f.startswith(exclude):
files.remove(f)
result.extend(join(directory, f) for f in files
if not isdir(join(directory, f)))
walk(path, walk_callback, None)
return result
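# Illustrative sketch, not part of the original setup script (the paths below
# are made up): list_files() walks a tree with os.path.walk (Python 2 only) and
# returns file paths, skipping VCS metadata and, when exclude is given, any
# file name starting with that prefix.
#
#   list_files('resources/config')            # e.g. ['resources/config/foo.conf', ...]
#   list_files('plugins', exclude='test_')    # drops files named test_*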
data_files = [
(join(RESOURCES, 'extra'), list_files('resources/extra')),
(join(RESOURCES, 'config'), list_files('resources/config')),
(join(DATA_DIR, 'plugins'), list_files('plugins/devices')),
(join(DATA_DIR, 'plugins'), list_files('plugins/oses')),
(join(DATA_DIR, 'core'), list_files('core')),
(join(DATA_DIR, 'core', 'backends'), list_files('core/backends')),
(join(DATA_DIR, 'core', 'hardware'), list_files('core/hardware')),
(join(DATA_DIR, 'core', 'oses'), list_files('core/oses')),
(join(DATA_DIR, 'core', 'statem'), list_files('core/statem')),
(join(DATA_DIR, 'test'), ['test/test_dbus.py', 'test/test_dbus_ussd_de.py']),
(DATA_DIR, ['core-tap.py']),
(BIN_DIR, ['bin/wader-core-ctl']),
(DBUS_SYSTEMD, ['resources/dbus/org.freedesktop.ModemManager.conf']),
(DBUS_SYSTEM_SERVICES,
['resources/dbus/org.freedesktop.ModemManager.service']),
]
ext_modules = []
if sys.platform.startswith('linux'):
data_files.append((UDEV_RULESD, list_files('resources/udev')))
elif sys.platform == 'darwin':
# XXX This is broken.
osxserialports = Extension('wader.common.oses.osxserialports',
sources=['wader/common/oses/_osxserialports.c'],
extra_link_args=['-framework', 'CoreFoundation',
'-framework', 'IOKit'])
ext_modules.append(osxserialports)
packages = [
'wader',
'wader.common',
'wader.common.backends',
]
setup(name=APP_NAME,
version=APP_VERSION,
description='3G device manager for Linux and OSX',
download_url="http://www.wader-project.org",
author='Pablo Martí Gamboa',
author_email='[email protected]',
license='GPL',
packages=packages,
data_files=data_files,
ext_modules=ext_modules,
zip_safe=False,
test_suite='test',
classifiers=[
'Development Status :: 4 - Beta',
'Environment :: No Input/Output (Daemon)',
'Framework :: Twisted',
'Intended Audience :: Developers',
'Intended Audience :: Telecommunications Industry',
'License :: OSI Approved :: GNU General Public License (GPL)',
'Natural Language :: English',
'Operating System :: POSIX :: Linux',
'Operating System :: MacOS :: MacOS X',
'Programming Language :: Python :: 2.5',
'Programming Language :: Python :: 2.6',
'Programming Language :: Python :: 2.7',
'Topic :: Communications :: Telephony',
],
)
| andrewbird/wader | setup.py | Python | gpl-2.0 | 4,510 |
from django.http import HttpResponse, Http404
from django.utils.http import urlquote
import os
import re
import mimetypes
from . settings import LASANA_USE_X_SENDFILE, LASANA_NGINX_ACCEL_REDIRECT_BASE_URL
#For no-XSendfile approach
from django.core.servers.basehttp import FileWrapper
CONTENT_RANGE_REGEXP = re.compile(r"bytes=(\d+)?-(\d+)?")
def send(request, file):
if not file:
raise Http404
detected_type = mimetypes.guess_type(file.path)[0]
if detected_type is None:
detected_type = 'application/octet-stream'
if LASANA_USE_X_SENDFILE:
response = HttpResponse(content_type=detected_type)
response['Content-Disposition'] = 'filename=%s' % urlquote(os.path.basename(file.name))
content_range = request.META.get('HTTP_RANGE')
range_begin = None
range_end = None
if content_range is not None:
match = CONTENT_RANGE_REGEXP.match(content_range)
if match is not None:
range_begin, range_end = match.groups()
range_begin = int(range_begin) if range_begin is not None else None
range_end = int(range_end) if range_end is not None else None
if (range_begin is None or range_begin < file.size) and (range_end is None or range_end < file.size):
# Use 206 Partial Content
response.status_code = 206
response['Content-Range'] = "bytes %s-%s/%d" % (range_begin if range_begin is not None else "0",
range_end if range_end is not None else (file.size - 1), file.size)
else:
# Throw 416 Range Not Satisfiable
return HttpResponse(status=416)
if LASANA_USE_X_SENDFILE == 'lighttpd':
response['X-Sendfile2'] = "%s %s-%s" % (urlquote(file.path), str(range_begin) if range_begin is not None else "0",
str(range_end) if range_end is not None else "")
elif LASANA_USE_X_SENDFILE == 'nginx':
response['X-Accel-Redirect'] = (LASANA_NGINX_ACCEL_REDIRECT_BASE_URL + os.path.basename(file.name)).encode('UTF-8')
else:
raise RuntimeError('LASANA_USE_X_SENDFILE must be "lighttpd" or "nginx".')
return response
else:
response = HttpResponse(FileWrapper(file), content_type=detected_type)
response['Content-Disposition'] = 'filename=%s' % urlquote(os.path.basename(file.name))
response['Content-Length'] = file.size
return response
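# Illustrative sketch, not part of the original module: how send() interprets
# the Range header when LASANA_USE_X_SENDFILE is enabled. For a 1000-byte file
# and a request carrying "Range: bytes=100-199", CONTENT_RANGE_REGEXP captures
# ('100', '199'), the response becomes "206 Partial Content" and carries
# "Content-Range: bytes 100-199/1000". An unsatisfiable range such as
# "bytes=2000-" short-circuits to a bare 416 response instead.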
| ntrrgc/lasana | sendfile.py | Python | mit | 2,641 |
# -*- coding: utf-8 -*-
"""
FlyFi - Floppy-Fidelity
@author: Ricardo (XeN) Band <[email protected]>,
Stephan (coon) Thiele <[email protected]>
This file is part of FlyFi.
FlyFi is free software: you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation, either version 3 of the License, or
(at your option) any later version.
FlyFi is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with FlyFi. If not, see <http://www.gnu.org/licenses/>.
FlyFi is using tango icons: <http://tango.freedesktop.org/>.
"""
from PySide import QtGui, QtCore
#import serial.tools.list_ports
import serial
import glob
import ConfigParser
import os, platform
import FloppyOut
#self.settingswindow.cb_update_serial_ports()
class SettingsWindow(QtGui.QMainWindow):
def __init__(self, midi_in, floppy_out):
super(SettingsWindow, self).__init__()
self.midi_in = midi_in
self.fout = floppy_out
self.config = ConfigParser.SafeConfigParser()
#if os.path.isfile(os.path.expanduser('~/.flyfirc')):
# self.config.read(os.path.expanduser('~/.flyfirc'))
#elif os.path.isfile('/etc/flyfirc'):
# self.config.read('/etc/flyfirc')
# fp = open(os.path.expanduser('~/.flyfirc'), 'w')
# self.config.write(fp)
# fp.close()
#else:
# only needed in dev env
self.config.read(os.path.join(os.getcwd(), 'flyfirc'))
fp = open(os.path.expanduser('~/.flyfirc'), 'w')
self.config.write(fp)
fp.close()
self.init_ui()
def setFloatNum(self, float_num):
self.lab_freq.setText( "%.2f" % (float_num / 100.0) )
def pb_play_pressed(self):
"""
send the current settings to floppy out and play the given tone
"""
self.fout.play_tone(self.spb_channel.value(), float(self.lab_freq.text())) # todo: split presentation layer from datamodel(?)
def pb_stop_pressed(self):
"""
stop playing the current tone on the floppy
"""
self.fout.play_tone(self.spb_channel.value(), 0) # playing a tone with 0hz will stop the floppy motor
def init_ui(self):
self.resize(650, 680)
self.setWindowTitle('FlyFi - Settings')
self.setWindowIcon(QtGui.QIcon('images/settings.png'))
self.center()
centralwidget = QtGui.QTabWidget()
tab_channel = QtGui.QWidget()
tab_benchmark = QtGui.QWidget()
# channel tab
channel_vbox = QtGui.QVBoxLayout()
self.channel_table = QtGui.QTableWidget(16, 3)
self.channel_table.cellClicked.connect(self.cell_clicked)
self.channel_table.setHorizontalHeaderLabels(['Active', 'Floppychannel', 'Serial Port'])
for row in range(0, 16):
self.channel_table.setCellWidget(row, 0, QtGui.QCheckBox())
self.channel_table.setCellWidget(row, 1, QtGui.QComboBox())
self.channel_table.setCellWidget(row, 2, QtGui.QComboBox())
self.channel_table.cellWidget(row, 0).setCheckState(
QtCore.Qt.CheckState.Checked if self.fout.midi_channels[row].active else
QtCore.Qt.CheckState.Unchecked )
self.channel_table.cellWidget(row, 1).addItems( [ str(s) for s in range(1, 16 + 1)] )
self.channel_table.cellWidget(row, 1).setCurrentIndex(self.fout.midi_channels[row].floppy_channel - 1)
self.pb_connect_to_serial_ports = QtGui.QPushButton('Connect to selected serial ports')
self.pb_connect_to_serial_ports.clicked.connect(self.cb_connect_to_serial_ports)
self.pb_update_serial_ports = QtGui.QPushButton('Refresh available serial ports')
self.pb_update_serial_ports.clicked.connect(self.cb_update_serial_ports)
        lb_note = QtGui.QLabel("Note: Serial ports which are already in use by another program won't be displayed!")
channel_vbox.addWidget(lb_note)
set_all_ports_hbox = QtGui.QHBoxLayout()
lb_set_all_ports_to = QtGui.QLabel('Set all floppies to serial port:')
self.cbx_set_all_ports = QtGui.QComboBox()
pb_set_all_ports = QtGui.QPushButton('ok')
set_all_ports_hbox.addWidget(lb_set_all_ports_to)
set_all_ports_hbox.addWidget(self.cbx_set_all_ports)
set_all_ports_hbox.addWidget(pb_set_all_ports)
pb_set_all_ports.clicked.connect(self.cb_set_all_serial_ports)
channel_vbox.addLayout(set_all_ports_hbox)
channel_vbox.addWidget(self.channel_table)
channel_vbox.addWidget(self.pb_connect_to_serial_ports)
channel_vbox.addWidget(self.pb_update_serial_ports)
tab_channel.setLayout(channel_vbox)
# benchmark tab
#benchmark_vbox = QtGui.QVBoxLayout()
controls_tones_hbox = QtGui.QHBoxLayout()
vbox = QtGui.QVBoxLayout()
tones_grid = QtGui.QGridLayout()
# generate frequency buttons
letters = ['C', 'C#', 'D', 'D#', 'E', 'F', 'F#', 'G', 'G#', 'A', 'A#', 'B']
        # octaves 0 through 3, twelve semitones each
for octave in range(4):
for tone_of_octave in range(12):
btn = QtGui.QPushButton('%s%d' % (letters[tone_of_octave], octave), self)
btn.setFixedWidth(30)
tones_grid.addWidget(btn, octave, tone_of_octave)
self.lab_freq = QtGui.QLabel()
self.lab_freq.setMinimumWidth(35)
self.lab_freq.setAlignment(QtCore.Qt.AlignRight |
QtCore.Qt.AlignVCenter)
sld_freq = QtGui.QSlider()
sld_freq.setOrientation(QtCore.Qt.Horizontal)
sld_freq.setTracking(True)
sld_freq.setRange(0, 44000)
sld_freq.valueChanged.connect(self.setFloatNum)
sld_freq.setPageStep(1)
sld_freq.setSingleStep(1)
self.setFloatNum(sld_freq.value())
self.spb_channel = QtGui.QSpinBox()
self.spb_channel.setRange(1, 16)
pb_play = QtGui.QPushButton('Play')
pb_play.clicked.connect(self.pb_play_pressed)
pb_play.resize(pb_play.sizeHint())
pb_stop = QtGui.QPushButton('Stop')
pb_stop.clicked.connect(self.pb_stop_pressed)
pb_stop.resize(pb_stop.sizeHint())
# frequency controller
controls_grid = QtGui.QGridLayout()
controls_grid.addWidget(QtGui.QLabel('Frequency:'), 5, 0)
controls_grid.addWidget(sld_freq, 5, 1)
controls_grid.addWidget(self.lab_freq, 5, 2)
controls_grid.addWidget(QtGui.QLabel('Hz'), 5, 3)
controls_grid.addWidget(QtGui.QLabel('Channel:'), 6, 0)
controls_grid.addWidget(self.spb_channel, 6, 1, 1, 3)
controls_grid.addWidget(pb_play, 7, 0, 1, 4)
controls_grid.addWidget(pb_stop, 8, 0, 1, 4)
#building the form
controls_tones_hbox.addLayout(controls_grid)
controls_tones_hbox.addSpacing(15)
controls_tones_hbox.addLayout(tones_grid)
#vbox.addStretch(1)
vbox.addLayout(controls_tones_hbox)
tab_benchmark.setLayout(vbox)
# create tabs
centralwidget.addTab(tab_channel, "MIDI Channels")
centralwidget.addTab(tab_benchmark, "Benchmark")
self.setCentralWidget(centralwidget)
def center(self):
frame_geo = self.frameGeometry()
desktop_center = QtGui.QDesktopWidget().availableGeometry().center()
frame_geo.moveCenter(desktop_center)
self.move(frame_geo.topLeft())
def cell_clicked(self, row, col):
if col == 0:
self.channel_table.cellWidget(row, 0).toggle()
# A function that tries to list serial ports on most common platforms
def list_serial_ports(self):
system_name = platform.system()
if system_name == "Windows": # TODO: dont use system()
# Scan for available ports.
available = []
for i in range(256):
try:
s = serial.Serial("COM%d" % i)
available.append(s.portstr)
s.close()
except serial.SerialException:
                    pass # a SerialException is raised for ports that don't exist and can be safely ignored
return available
elif system_name == "Darwin":
# Mac
return glob.glob('/dev/cu*')
else:
# Assume Linux or something else
return glob.glob('/dev/ttyUSB*')
def cb_update_serial_ports(self):
serialports = self.list_serial_ports()
items = None
if serialports != []:
items = serialports
self.pb_connect_to_serial_ports.setEnabled(True)
else:
items = [ "<no serial ports available>" ]
self.pb_connect_to_serial_ports.setEnabled(False)
for row in range(0, 16):
self.cbx_set_all_ports.clear()
self.channel_table.cellWidget(row, 2).clear()
self.cbx_set_all_ports.addItems( items )
self.channel_table.cellWidget(row, 2).addItems( items )
self.channel_table.resizeColumnsToContents()
def cb_connect_to_serial_ports(self):
for row in range(0, 16):
active = self.channel_table.cellWidget(row, 0).isChecked()
floppy_channel = int(self.channel_table.cellWidget(row, 1).currentText())
port_str = self.channel_table.cellWidget(row, 2).currentText()
self.fout.configure_midi_channel(row, active, floppy_channel, port_str)
self.fout.connect_to_serial_ports()
# TODO: Gray out stuff
    def save_config(self):
        for row in range(0, 16):
            section = 'Channel%d' % (row + 1)
            if not self.config.has_section(section):
                self.config.add_section(section)
            # QCheckBox/QComboBox expose isChecked()/currentText(), not .Value
            self.config.set(section, 'enabled', str(self.channel_table.cellWidget(row, 0).isChecked()))
            self.config.set(section, 'serialport', str(self.channel_table.cellWidget(row, 2).currentText()))
        with open(os.path.expanduser('~/.flyfirc'), 'w') as configfile:
            self.config.write(configfile)
    def load_config(self):
        # list_serial_ports() is used instead of serial.tools.list_ports,
        # whose import is commented out at the top of this file
        serialports = self.list_serial_ports()
        for row in range(0, 16):
            section = 'Channel' + str(row + 1)
            # load channel active state
            if self.config.getboolean(section, 'enabled'):
                self.channel_table.cellWidget(row, 0).setCheckState(QtCore.Qt.Checked)
            else:
                self.channel_table.cellWidget(row, 0).setCheckState(QtCore.Qt.Unchecked)
            # load serial port; ports live in column 2 and the combo boxes are
            # populated with plain text, so findText is the correct lookup
            port = self.config.get(section, 'serialport')
            if port in serialports:
                index = self.channel_table.cellWidget(row, 2).findText(port)
                if index != -1:
                    self.channel_table.cellWidget(row, 2).setCurrentIndex(index)
# ui events
def connect_pressed(self, button_id):
sender_button = self.sender().button(button_id)
if sender_button.text() == "Connect":
self.fout.connect_serial_port(button_id)
sender_button.setText("Disconnect")
else:
self.fout.disconnect_serial_port(button_id)
sender_button.setText("Connect")
def cb_set_all_serial_ports(self):
for row in range(0, 16):
self.channel_table.cellWidget(row, 2).setCurrentIndex(self.cbx_set_all_ports.currentIndex())
def channel_mapping_changed(self, combobox_id):
pass
| coon42/FlyFi | SettingsWindow.py | Python | gpl-3.0 | 13,127 |
import os
from unittest import TestCase
from unittest.mock import patch
from bs4 import BeautifulSoup
from RatS.criticker.criticker_ratings_inserter import CritickerRatingsInserter
TESTDATA_PATH = os.path.abspath(
os.path.join(os.path.dirname(__file__), os.pardir, os.pardir, "assets")
)
class CritickerRatingsInserterTest(TestCase):
def setUp(self):
if not os.path.exists(os.path.join(TESTDATA_PATH, "exports")):
os.makedirs(os.path.join(TESTDATA_PATH, "exports"))
self.movie = dict()
self.movie["title"] = "Fight Club"
self.movie["year"] = 1999
self.movie["imdb"] = dict()
self.movie["imdb"]["id"] = "tt0137523"
self.movie["imdb"]["url"] = "https://www.imdb.com/title/tt0137523"
self.movie["imdb"]["my_rating"] = 9
self.movie["tmdb"] = dict()
self.movie["tmdb"]["id"] = "550"
self.movie["tmdb"]["url"] = "https://www.themoviedb.org/movie/550"
with open(
os.path.join(TESTDATA_PATH, "criticker", "search_result.html"),
encoding="UTF-8",
) as search_results:
self.search_results = search_results.read()
with open(
os.path.join(TESTDATA_PATH, "criticker", "search_result_tile.html"),
encoding="UTF-8",
) as result_tile:
self.search_result_tile_list = [result_tile.read()]
with open(
os.path.join(TESTDATA_PATH, "criticker", "movie_details_page.html"),
encoding="UTF-8",
) as movie_details_page:
self.movie_details_page = movie_details_page.read()
@patch("RatS.base.base_ratings_inserter.RatingsInserter.__init__")
@patch("RatS.utils.browser_handler.Firefox")
def test_init(self, browser_mock, base_init_mock):
CritickerRatingsInserter(None)
self.assertTrue(base_init_mock.called)
@patch("RatS.base.base_ratings_inserter.RatingsInserter._print_progress_bar")
@patch(
"RatS.criticker.criticker_ratings_inserter.CritickerRatingsInserter._is_requested_movie"
)
@patch(
"RatS.criticker.criticker_ratings_inserter.CritickerRatingsInserter._get_search_results"
)
@patch("RatS.criticker.criticker_ratings_inserter.Criticker")
@patch("RatS.base.base_ratings_inserter.RatingsInserter.__init__")
@patch("RatS.utils.browser_handler.Firefox")
def test_insert(
self,
browser_mock,
base_init_mock,
site_mock,
overview_page_mock, # pylint: disable=too-many-arguments
eq_check_mock,
progress_print_mock,
):
overview_page_mock.return_value = self.search_result_tile_list
eq_check_mock.return_value = True
site_mock.browser = browser_mock
inserter = CritickerRatingsInserter(None)
inserter.args = False
inserter.site = site_mock
inserter.site.site_name = "Criticker"
inserter.failed_movies = []
inserter.insert([self.movie], "IMDB")
self.assertTrue(base_init_mock.called)
self.assertTrue(progress_print_mock.called)
@patch("RatS.criticker.criticker_ratings_inserter.Criticker")
@patch("RatS.base.base_ratings_inserter.RatingsInserter.__init__")
@patch("RatS.utils.browser_handler.Firefox")
def test_external_link_compare_imdb_success(
self, browser_mock, base_init_mock, site_mock
):
site_mock.browser = browser_mock
inserter = CritickerRatingsInserter(None)
inserter.site = site_mock
inserter.site.site_name = "Criticker"
inserter.failed_movies = []
result = inserter._compare_external_links(
self.movie_details_page, self.movie, "imdb.com", "imdb"
) # pylint: disable=protected-access
self.assertTrue(result)
@patch("RatS.criticker.criticker_ratings_inserter.Criticker")
@patch("RatS.base.base_ratings_inserter.RatingsInserter.__init__")
@patch("RatS.utils.browser_handler.Firefox")
def test_external_link_compare_imdb_fail(
self, browser_mock, base_init_mock, site_mock
):
site_mock.browser = browser_mock
inserter = CritickerRatingsInserter(None)
inserter.site = site_mock
inserter.site.site_name = "Criticker"
inserter.failed_movies = []
movie2 = dict()
movie2["title"] = "Arrival"
movie2["year"] = 2006
movie2["imdb"] = dict()
movie2["imdb"]["id"] = "tt2543164"
movie2["imdb"]["url"] = "https://www.imdb.com/title/tt2543164"
movie2["imdb"]["my_rating"] = 7
result = inserter._compare_external_links(
self.movie_details_page, movie2, "imdb.com", "imdb"
) # pylint: disable=protected-access
self.assertFalse(result)
@patch(
"RatS.criticker.criticker_ratings_inserter.CritickerRatingsInserter._compare_external_links"
)
@patch("RatS.criticker.criticker_ratings_inserter.Criticker")
@patch("RatS.base.base_ratings_inserter.RatingsInserter.__init__")
@patch("RatS.utils.browser_handler.Firefox")
def test_find_movie_success_by_imdb(
self, browser_mock, base_init_mock, site_mock, compare_mock
):
site_mock.browser = browser_mock
browser_mock.page_source = self.search_results
inserter = CritickerRatingsInserter(None)
inserter.site = site_mock
inserter.site.site_name = "Criticker"
inserter.failed_movies = []
compare_mock.return_value = True
result = inserter._find_movie(self.movie) # pylint: disable=protected-access
self.assertTrue(result)
@patch(
"RatS.criticker.criticker_ratings_inserter.CritickerRatingsInserter._compare_external_links"
)
@patch("RatS.criticker.criticker_ratings_inserter.Criticker")
@patch("RatS.base.base_ratings_inserter.RatingsInserter.__init__")
@patch("RatS.utils.browser_handler.Firefox")
def test_find_movie_success_by_year(
self, browser_mock, base_init_mock, site_mock, compare_mock
):
site_mock.browser = browser_mock
browser_mock.page_source = self.movie_details_page
inserter = CritickerRatingsInserter(None)
inserter.site = site_mock
inserter.site.site_name = "Criticker"
inserter.failed_movies = []
compare_mock.return_value = True
movie2 = dict()
movie2["title"] = "Fight Club"
movie2["year"] = 1999
search_result = BeautifulSoup(self.search_result_tile_list[0], "html.parser")
result = inserter._is_requested_movie(
movie2, search_result
) # pylint: disable=protected-access
self.assertTrue(result)
@patch(
"RatS.criticker.criticker_ratings_inserter.CritickerRatingsInserter._compare_external_links"
)
@patch("RatS.criticker.criticker_ratings_inserter.Criticker")
@patch("RatS.base.base_ratings_inserter.RatingsInserter.__init__")
@patch("RatS.utils.browser_handler.Firefox")
def test_find_movie_fail_by_year(
self, browser_mock, base_init_mock, site_mock, compare_mock
):
site_mock.browser = browser_mock
browser_mock.page_source = self.movie_details_page
inserter = CritickerRatingsInserter(None)
inserter.site = site_mock
inserter.site.site_name = "Criticker"
inserter.failed_movies = []
compare_mock.return_value = True
movie2 = dict()
movie2["title"] = "Fight Club"
movie2["year"] = 1998
search_result = BeautifulSoup(self.search_result_tile_list[0], "html.parser")
result = inserter._is_requested_movie(
movie2, search_result
) # pylint: disable=protected-access
self.assertFalse(result)
@patch(
"RatS.criticker.criticker_ratings_inserter.CritickerRatingsInserter._is_requested_movie"
)
@patch(
"RatS.criticker.criticker_ratings_inserter.CritickerRatingsInserter._get_search_results"
)
@patch(
"RatS.criticker.criticker_ratings_inserter.CritickerRatingsInserter._compare_external_links"
)
@patch("RatS.criticker.criticker_ratings_inserter.Criticker")
@patch("RatS.base.base_ratings_inserter.RatingsInserter.__init__")
@patch("RatS.utils.browser_handler.Firefox")
def test_find_movie_fail(
self,
browser_mock,
base_init_mock,
site_mock,
compare_mock,
tiles_mock,
equality_mock,
): # pylint: disable=too-many-arguments
site_mock.browser = browser_mock
browser_mock.page_source = self.search_results
inserter = CritickerRatingsInserter(None)
inserter.site = site_mock
inserter.site.site_name = "Criticker"
inserter.failed_movies = []
compare_mock.return_value = False
tiles_mock.return_value = self.search_result_tile_list
equality_mock.return_value = False
movie2 = dict()
movie2["title"] = "The Matrix"
movie2["year"] = 1995
movie2["imdb"] = dict()
movie2["imdb"]["id"] = "tt0137523"
movie2["imdb"]["url"] = "https://www.imdb.com/title/tt0137523"
movie2["imdb"]["my_rating"] = 9
result = inserter._find_movie(movie2) # pylint: disable=protected-access
self.assertFalse(result)
| StegSchreck/RatS | tests/unit/criticker/test_criticker_ratings_inserter.py | Python | agpl-3.0 | 9,321 |
#!/usr/bin/env python
from sospex import __main__
__main__()
| LunaAstro/rochelobe | rochelobe.py | Python | gpl-3.0 | 61 |
with open('day8.txt') as f:
    print(sum([len(line.strip()) - len(eval(line)) for line in f]))
| BethyDiakabananas/Advent-of-Code | Day 8/day8_part1.py | Python | mit | 92 |
import cvxpy
GAMMA = 100
solver_map = {
'cvxopt': cvxpy.CVXOPT,
'gurobi': cvxpy.GUROBI
}
'''
- *r_list* is a list of tuples (weight, body, head)
- *body* and *head* are lists of tuples (is_constant, value/id, is_negated)
- *is_constant* is a flag, True if the truth value is known, False otherwise
- *value/id* equals the truth value if it is known,
and is the id of the corresponding variable otherwise
- *is_negated* is a flag, True if the atom is negated in the rule,
False otherwise
'''
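# Illustrative encoding sketch, not part of the original module (the weight and
# variable ids are made up): the weighted rule "0.8 : a(X) & b(X) -> c(X)",
# where a(X) is known to be true and b(X), c(X) are unknowns with ids 0 and 1:
#
#   rule = (0.8,
#           [(True, 1.0, False),    # a(X): known truth value 1.0, not negated
#            (False, 0, False)],    # b(X): variable id 0, not negated
#           [(False, 1, False)])    # c(X): variable id 1, not negated
#   results = map_inference([rule], hard_rules=[])   # {0: ..., 1: ...}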
def map_inference(rules, hard_rules, solver='cvxopt'):
vid_dict = dict()
var_ids = set()
all_rules = rules + hard_rules
for _, body, head in all_rules:
if (len(body)>0):
var_ids |= set([b[1] for b in body if not b[0]])
if (len(head)>0):
var_ids |= set([h[1] for h in head if not h[0]])
f, bounds = psl_objective(var_ids, vid_dict, rules)
hard_constraints = []
if len(hard_rules) > 0:
hard_constraints = psl_hard_constraints(vid_dict, hard_rules)
constraints = bounds + hard_constraints
objective = cvxpy.Minimize(f)
problem = cvxpy.Problem(objective, constraints)
problem.solve(solver=solver_map[solver])
results = dict()
for vid in var_ids:
results[vid] = vid_dict[vid].value
return results
def fair_map_inference(rules, hard_rules, counts, delta, fairness_measure, solver='cvxopt'):
assert(fairness_measure in ('RD', 'RR', 'RC'))
vid_dict = dict()
var_ids = set()
all_rules = rules + hard_rules
for _, body, head in all_rules:
var_ids |= set([b[1] for b in body if not b[0]])
var_ids |= set([h[1] for h in head if not h[0]])
f, bounds = psl_objective(var_ids, vid_dict, rules)
hard_constraints = []
if len(hard_rules) > 0:
hard_constraints = psl_hard_constraints(vid_dict, hard_rules)
fairness_constraints = psl_fairness_constraints(vid_dict, counts, delta, fairness_measure)
constraints= bounds + hard_constraints + fairness_constraints
objective = cvxpy.Minimize(f)
problem = cvxpy.Problem(objective, constraints)
problem.solve(solver=solver_map[solver])
results = dict()
for vid in var_ids:
results[vid] = vid_dict[vid].value
return results
def calculate(counts, vid_dict):
n1 = 0.0
n2 = 0.0
a = 0.0
c = 0.0
for f1, f2, d in counts:
f1f2 = max(f1+f2-1, 0)
nf1f2 = max(-f1+f2, 0)
n1 += f1f2
n2 += nf1f2
if d[0]:
a += max(f1f2 - d[1], 0)
c += max(nf1f2 - d[1], 0)
else:
if f1f2 == 1:
a += 1 - vid_dict[d[1]]
if nf1f2 == 1:
c += 1 - vid_dict[d[1]]
return a,c,n1,n2
def psl_fairness_constraints(vid_dict, counts, delta, fairness_measure):
if fairness_measure=='RD':
return risk_difference_constraints(counts,vid_dict,delta)
elif fairness_measure=='RR':
return risk_ratio_constraints(counts,vid_dict,delta)
elif fairness_measure=='RC':
return risk_chance_constraints(counts,vid_dict,delta)
def risk_difference_constraints(counts,vid_dict,delta):
a,c,n1,n2 = calculate(counts,vid_dict)
constraints = []
constraints.append((n2*a - n1*c - n1*n2*delta) <= 0)
constraints.append((n2*a - n1*c + n1*n2*delta) >= 0)
return constraints
def risk_ratio_constraints(counts,vid_dict,delta):
a,c,n1,n2 = calculate(counts,vid_dict)
constraints = []
constraints.append((n2*a - (1+delta)*n1*c) <= 0)
constraints.append((n2*a - (1-delta)*n1*c) >= 0)
return constraints
def risk_chance_constraints(counts,vid_dict,delta):
a,c,n1,n2 = calculate(counts,vid_dict)
constraints = []
constraints.append((-n2*a + (1+delta)*n1*c - delta*n1*n2) <= 0)
constraints.append((-n2*a + (1-delta)*n1*c + delta*n1*n2) >= 0)
return constraints
def psl_objective(var_ids, vid_dict, r_list):
constraints = []
for vid in var_ids:
var = cvxpy.Variable()
vid_dict[vid] = var
constraints += [0 <= var, var <= 1]
f = 0
for weight, body, head in r_list:
expr = 1
for b in body:
if b[0]:
y = b[1]
else:
y = vid_dict[b[1]]
if b[2]:
expr -= y
else:
expr -= (1-y)
for h in head:
if h[0]:
y = h[1]
else:
y = vid_dict[h[1]]
if h[2]:
expr -= (1-y)
else:
expr -= y
f += weight * cvxpy.pos(expr)
return f, constraints
def psl_hard_constraints(vid_dict, r_list):
constraints = []
for _, body, head in r_list:
expr = 1
for b in body:
if b[0]:
y = b[1]
else:
y = vid_dict[b[1]]
if b[2]:
expr -= y
else:
expr -= (1-y)
for h in head:
if h[0]:
y = h[1]
else:
y = vid_dict[h[1]]
if h[2]:
expr -= (1-y)
else:
expr -= y
constraints.append(expr <= 0)
return constraints
| gfarnadi/FairPSL | engines/fpsl_cvxpy.py | Python | mit | 5,365 |
# Licensed under a 3-clause BSD style license - see LICENSE.rst
from astropy import config as _config
class Conf(_config.ConfigNamespace):
"""
Configuration parameters for `astropy.table`.
"""
auto_colname = _config.ConfigItem(
'col{0}',
'The template that determines the name of a column if it cannot be '
'determined. Uses new-style (format method) string formatting.',
aliases=['astropy.table.column.auto_colname'])
default_notebook_table_class = _config.ConfigItem(
'table-striped table-bordered table-condensed',
'The table class to be used in Jupyter notebooks when displaying '
        'tables (and not overridden). See <http://getbootstrap.com/css/#tables> '
'for a list of useful bootstrap classes.')
replace_warnings = _config.ConfigItem(
['slice'],
'List of conditions for issuing a warning when replacing a table '
"column using setitem, e.g. t['a'] = value. Allowed options are "
"'always', 'slice', 'refcount', 'attributes'.",
'list',
)
replace_inplace = _config.ConfigItem(
False,
'Always use in-place update of a table column when using setitem, '
"e.g. t['a'] = value. This overrides the default behavior of "
"replacing the column entirely with the new value when possible. "
"This configuration option will be deprecated and then removed in "
"subsequent major releases."
)
conf = Conf()
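# Illustrative sketch, not part of the original module: configuration items can
# be overridden at runtime, or scoped temporarily with ConfigNamespace.set_temp.
#
#   from astropy.table import conf
#   conf.auto_colname = 'column_{0}'            # change the default template
#   with conf.set_temp('replace_inplace', True):
#       ...                                     # in-place column replacement here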
from .column import Column, MaskedColumn, StringTruncateWarning, ColumnInfo
from .groups import TableGroups, ColumnGroups
from .table import (Table, QTable, TableColumns, Row, TableFormatter,
NdarrayMixin, TableReplaceWarning)
from .operations import join, setdiff, hstack, cstack, vstack, unique, TableMergeError
from .bst import BST, FastBST, FastRBT
from .sorted_array import SortedArray
from .soco import SCEngine
from .serialize import SerializedColumn, represent_mixins_as_columns
# Finally import the formats for the read and write method but delay building
# the documentation until all are loaded. (#5275)
from astropy.io import registry
with registry.delay_doc_updates(Table):
# Import routines that connect readers/writers to astropy.table
from .jsviewer import JSViewer
from astropy.io.ascii import connect
from astropy.io.fits import connect
from astropy.io.misc import connect
from astropy.io.votable import connect
from astropy.io.misc.asdf import connect
from astropy.io.misc.pandas import connect
| bsipocz/astropy | astropy/table/__init__.py | Python | bsd-3-clause | 2,572 |
# -*- coding: utf-8 -*-
import abc
class IWaitStrategy(object):
__metaclass__ = abc.ABCMeta
@abc.abstractmethod
def compute_wait_time(self, attempt):
"""
Parameters
----------
attempt : clare.common.retry.attempt.Attempt
Returns
-------
float
Wait time in seconds.
"""
pass
class Fixed(IWaitStrategy):
def __init__(self, wait_time):
self._wait_time = wait_time
def compute_wait_time(self, attempt):
return self._wait_time
def __repr__(self):
repr_ = '{}(wait_time={})'
return repr_.format(self.__class__.__name__, self._wait_time)
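# Usage sketch, not part of the original module: Fixed ignores the attempt and
# always returns the configured delay, e.g. inside a retry loop.
#
#   strategy = Fixed(wait_time=1.5)
#   strategy.compute_wait_time(attempt=None)   # -> 1.5, regardless of attempt
#   repr(strategy)                             # -> 'Fixed(wait_time=1.5)'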
| dnguyen0304/clare | clare/clare/common/retry/wait_strategies.py | Python | mit | 684 |
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
from typing import Any, AsyncIterable, Callable, Dict, Generic, Optional, TypeVar, Union
import warnings
from azure.core.async_paging import AsyncItemPaged, AsyncList
from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
from azure.core.pipeline import PipelineResponse
from azure.core.pipeline.transport import AsyncHttpResponse, HttpRequest
from azure.core.polling import AsyncLROPoller, AsyncNoPolling, AsyncPollingMethod
from azure.mgmt.core.exceptions import ARMErrorFormat
from azure.mgmt.core.polling.async_arm_polling import AsyncARMPolling
from ... import models as _models
T = TypeVar('T')
ClsType = Optional[Callable[[PipelineResponse[HttpRequest, AsyncHttpResponse], T, Dict[str, Any]], Any]]
class SubnetsOperations:
"""SubnetsOperations async operations.
You should not instantiate this class directly. Instead, you should create a Client instance that
instantiates it for you and attaches it as an attribute.
:ivar models: Alias to model classes used in this operation group.
:type models: ~azure.mgmt.network.v2020_04_01.models
:param client: Client for service requests.
:param config: Configuration of service client.
:param serializer: An object model serializer.
:param deserializer: An object model deserializer.
"""
models = _models
def __init__(self, client, config, serializer, deserializer) -> None:
self._client = client
self._serialize = serializer
self._deserialize = deserializer
self._config = config
async def _delete_initial(
self,
resource_group_name: str,
virtual_network_name: str,
subnet_name: str,
**kwargs: Any
) -> None:
cls = kwargs.pop('cls', None) # type: ClsType[None]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2020-04-01"
accept = "application/json"
# Construct URL
url = self._delete_initial.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'virtualNetworkName': self._serialize.url("virtual_network_name", virtual_network_name, 'str'),
'subnetName': self._serialize.url("subnet_name", subnet_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
request = self._client.delete(url, query_parameters, header_parameters)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200, 202, 204]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
if cls:
return cls(pipeline_response, None, {})
_delete_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/virtualNetworks/{virtualNetworkName}/subnets/{subnetName}'} # type: ignore
async def begin_delete(
self,
resource_group_name: str,
virtual_network_name: str,
subnet_name: str,
**kwargs: Any
) -> AsyncLROPoller[None]:
"""Deletes the specified subnet.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param virtual_network_name: The name of the virtual network.
:type virtual_network_name: str
:param subnet_name: The name of the subnet.
:type subnet_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: By default, your polling method will be AsyncARMPolling.
Pass in False for this operation to not poll, or pass in your own initialized polling object for a personal polling strategy.
:paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
:return: An instance of AsyncLROPoller that returns either None or the result of cls(response)
:rtype: ~azure.core.polling.AsyncLROPoller[None]
:raises ~azure.core.exceptions.HttpResponseError:
"""
polling = kwargs.pop('polling', True) # type: Union[bool, AsyncPollingMethod]
cls = kwargs.pop('cls', None) # type: ClsType[None]
lro_delay = kwargs.pop(
'polling_interval',
self._config.polling_interval
)
cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
if cont_token is None:
raw_result = await self._delete_initial(
resource_group_name=resource_group_name,
virtual_network_name=virtual_network_name,
subnet_name=subnet_name,
cls=lambda x,y,z: x,
**kwargs
)
kwargs.pop('error_map', None)
kwargs.pop('content_type', None)
def get_long_running_output(pipeline_response):
if cls:
return cls(pipeline_response, None, {})
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'virtualNetworkName': self._serialize.url("virtual_network_name", virtual_network_name, 'str'),
'subnetName': self._serialize.url("subnet_name", subnet_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
if polling is True: polling_method = AsyncARMPolling(lro_delay, lro_options={'final-state-via': 'location'}, path_format_arguments=path_format_arguments, **kwargs)
elif polling is False: polling_method = AsyncNoPolling()
else: polling_method = polling
if cont_token:
return AsyncLROPoller.from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output
)
else:
return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method)
begin_delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/virtualNetworks/{virtualNetworkName}/subnets/{subnetName}'} # type: ignore
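    # Illustrative usage sketch, not from this file (the client construction is
    # an assumption based on the azure-mgmt-network aio package): begin_delete
    # returns an AsyncLROPoller; result() waits for the operation to finish.
    #
    #   client = NetworkManagementClient(credential, subscription_id)
    #   poller = await client.subnets.begin_delete('rg', 'vnet', 'subnet1')
    #   await poller.result()   # None on success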
async def get(
self,
resource_group_name: str,
virtual_network_name: str,
subnet_name: str,
expand: Optional[str] = None,
**kwargs: Any
) -> "_models.Subnet":
"""Gets the specified subnet by virtual network and resource group.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param virtual_network_name: The name of the virtual network.
:type virtual_network_name: str
:param subnet_name: The name of the subnet.
:type subnet_name: str
:param expand: Expands referenced resources.
:type expand: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: Subnet, or the result of cls(response)
:rtype: ~azure.mgmt.network.v2020_04_01.models.Subnet
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.Subnet"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2020-04-01"
accept = "application/json"
# Construct URL
url = self.get.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'virtualNetworkName': self._serialize.url("virtual_network_name", virtual_network_name, 'str'),
'subnetName': self._serialize.url("subnet_name", subnet_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
if expand is not None:
query_parameters['$expand'] = self._serialize.query("expand", expand, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
request = self._client.get(url, query_parameters, header_parameters)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
deserialized = self._deserialize('Subnet', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/virtualNetworks/{virtualNetworkName}/subnets/{subnetName}'} # type: ignore
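    # Illustrative usage sketch, not from this file (names are assumptions):
    # fetching a subnet and expanding referenced resources in one call.
    #
    #   subnet = await client.subnets.get('rg', 'vnet', 'subnet1',
    #                                     expand='ipConfigurations')
    #   subnet.name   # -> 'subnet1'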
async def _create_or_update_initial(
self,
resource_group_name: str,
virtual_network_name: str,
subnet_name: str,
subnet_parameters: "_models.Subnet",
**kwargs: Any
) -> "_models.Subnet":
cls = kwargs.pop('cls', None) # type: ClsType["_models.Subnet"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2020-04-01"
content_type = kwargs.pop("content_type", "application/json")
accept = "application/json"
# Construct URL
url = self._create_or_update_initial.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'virtualNetworkName': self._serialize.url("virtual_network_name", virtual_network_name, 'str'),
'subnetName': self._serialize.url("subnet_name", subnet_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
body_content_kwargs = {} # type: Dict[str, Any]
body_content = self._serialize.body(subnet_parameters, 'Subnet')
body_content_kwargs['content'] = body_content
request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200, 201]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
if response.status_code == 200:
deserialized = self._deserialize('Subnet', pipeline_response)
if response.status_code == 201:
deserialized = self._deserialize('Subnet', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
_create_or_update_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/virtualNetworks/{virtualNetworkName}/subnets/{subnetName}'} # type: ignore
async def begin_create_or_update(
self,
resource_group_name: str,
virtual_network_name: str,
subnet_name: str,
subnet_parameters: "_models.Subnet",
**kwargs: Any
) -> AsyncLROPoller["_models.Subnet"]:
"""Creates or updates a subnet in the specified virtual network.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param virtual_network_name: The name of the virtual network.
:type virtual_network_name: str
:param subnet_name: The name of the subnet.
:type subnet_name: str
:param subnet_parameters: Parameters supplied to the create or update subnet operation.
:type subnet_parameters: ~azure.mgmt.network.v2020_04_01.models.Subnet
:keyword callable cls: A custom type or function that will be passed the direct response
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: By default, your polling method will be AsyncARMPolling.
Pass in False for this operation to not poll, or pass in your own initialized polling object for a personal polling strategy.
:paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
:return: An instance of AsyncLROPoller that returns either Subnet or the result of cls(response)
:rtype: ~azure.core.polling.AsyncLROPoller[~azure.mgmt.network.v2020_04_01.models.Subnet]
:raises ~azure.core.exceptions.HttpResponseError:
"""
polling = kwargs.pop('polling', True) # type: Union[bool, AsyncPollingMethod]
cls = kwargs.pop('cls', None) # type: ClsType["_models.Subnet"]
lro_delay = kwargs.pop(
'polling_interval',
self._config.polling_interval
)
cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
if cont_token is None:
raw_result = await self._create_or_update_initial(
resource_group_name=resource_group_name,
virtual_network_name=virtual_network_name,
subnet_name=subnet_name,
subnet_parameters=subnet_parameters,
cls=lambda x,y,z: x,
**kwargs
)
kwargs.pop('error_map', None)
kwargs.pop('content_type', None)
def get_long_running_output(pipeline_response):
deserialized = self._deserialize('Subnet', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'virtualNetworkName': self._serialize.url("virtual_network_name", virtual_network_name, 'str'),
'subnetName': self._serialize.url("subnet_name", subnet_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
if polling is True: polling_method = AsyncARMPolling(lro_delay, lro_options={'final-state-via': 'azure-async-operation'}, path_format_arguments=path_format_arguments, **kwargs)
elif polling is False: polling_method = AsyncNoPolling()
else: polling_method = polling
if cont_token:
return AsyncLROPoller.from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output
)
else:
return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method)
begin_create_or_update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/virtualNetworks/{virtualNetworkName}/subnets/{subnetName}'} # type: ignore
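    # Illustrative usage sketch (not part of the generated client; ``client``,
    # the resource names, and the address prefix are all hypothetical):
    #
    #     subnet_params = Subnet(address_prefix="10.0.0.0/24")
    #     poller = await client.subnets.begin_create_or_update(
    #         "my-rg", "my-vnet", "my-subnet", subnet_params)
    #     subnet = await poller.result()       # block until the LRO completes
    #     token = poller.continuation_token()  # may be saved to resume later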
async def _prepare_network_policies_initial(
self,
resource_group_name: str,
virtual_network_name: str,
subnet_name: str,
prepare_network_policies_request_parameters: "_models.PrepareNetworkPoliciesRequest",
**kwargs: Any
) -> None:
cls = kwargs.pop('cls', None) # type: ClsType[None]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2020-04-01"
content_type = kwargs.pop("content_type", "application/json")
accept = "application/json"
# Construct URL
url = self._prepare_network_policies_initial.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'virtualNetworkName': self._serialize.url("virtual_network_name", virtual_network_name, 'str'),
'subnetName': self._serialize.url("subnet_name", subnet_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
body_content_kwargs = {} # type: Dict[str, Any]
body_content = self._serialize.body(prepare_network_policies_request_parameters, 'PrepareNetworkPoliciesRequest')
body_content_kwargs['content'] = body_content
request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200, 202]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
if cls:
return cls(pipeline_response, None, {})
_prepare_network_policies_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/virtualNetworks/{virtualNetworkName}/subnets/{subnetName}/PrepareNetworkPolicies'} # type: ignore
async def begin_prepare_network_policies(
self,
resource_group_name: str,
virtual_network_name: str,
subnet_name: str,
prepare_network_policies_request_parameters: "_models.PrepareNetworkPoliciesRequest",
**kwargs: Any
) -> AsyncLROPoller[None]:
"""Prepares a subnet by applying network intent policies.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param virtual_network_name: The name of the virtual network.
:type virtual_network_name: str
:param subnet_name: The name of the subnet.
:type subnet_name: str
:param prepare_network_policies_request_parameters: Parameters supplied to prepare subnet by
applying network intent policies.
:type prepare_network_policies_request_parameters: ~azure.mgmt.network.v2020_04_01.models.PrepareNetworkPoliciesRequest
:keyword callable cls: A custom type or function that will be passed the direct response
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: By default, your polling method will be AsyncARMPolling.
Pass in False for this operation to not poll, or pass in your own initialized polling object for a personal polling strategy.
:paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
:return: An instance of AsyncLROPoller that returns either None or the result of cls(response)
:rtype: ~azure.core.polling.AsyncLROPoller[None]
:raises ~azure.core.exceptions.HttpResponseError:
"""
polling = kwargs.pop('polling', True) # type: Union[bool, AsyncPollingMethod]
cls = kwargs.pop('cls', None) # type: ClsType[None]
lro_delay = kwargs.pop(
'polling_interval',
self._config.polling_interval
)
cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
if cont_token is None:
raw_result = await self._prepare_network_policies_initial(
resource_group_name=resource_group_name,
virtual_network_name=virtual_network_name,
subnet_name=subnet_name,
prepare_network_policies_request_parameters=prepare_network_policies_request_parameters,
cls=lambda x,y,z: x,
**kwargs
)
kwargs.pop('error_map', None)
kwargs.pop('content_type', None)
def get_long_running_output(pipeline_response):
if cls:
return cls(pipeline_response, None, {})
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'virtualNetworkName': self._serialize.url("virtual_network_name", virtual_network_name, 'str'),
'subnetName': self._serialize.url("subnet_name", subnet_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
if polling is True: polling_method = AsyncARMPolling(lro_delay, lro_options={'final-state-via': 'location'}, path_format_arguments=path_format_arguments, **kwargs)
elif polling is False: polling_method = AsyncNoPolling()
else: polling_method = polling
if cont_token:
return AsyncLROPoller.from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output
)
else:
return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method)
begin_prepare_network_policies.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/virtualNetworks/{virtualNetworkName}/subnets/{subnetName}/PrepareNetworkPolicies'} # type: ignore
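    # Resuming from a saved state (a sketch; ``saved_token`` is hypothetical and
    # would come from an earlier ``poller.continuation_token()`` call):
    #
    #     poller = await client.subnets.begin_prepare_network_policies(
    #         "my-rg", "my-vnet", "my-subnet", request_params,
    #         continuation_token=saved_token)
    #     await poller.result()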
async def _unprepare_network_policies_initial(
self,
resource_group_name: str,
virtual_network_name: str,
subnet_name: str,
unprepare_network_policies_request_parameters: "_models.UnprepareNetworkPoliciesRequest",
**kwargs: Any
) -> None:
cls = kwargs.pop('cls', None) # type: ClsType[None]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2020-04-01"
content_type = kwargs.pop("content_type", "application/json")
accept = "application/json"
# Construct URL
url = self._unprepare_network_policies_initial.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'virtualNetworkName': self._serialize.url("virtual_network_name", virtual_network_name, 'str'),
'subnetName': self._serialize.url("subnet_name", subnet_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
body_content_kwargs = {} # type: Dict[str, Any]
body_content = self._serialize.body(unprepare_network_policies_request_parameters, 'UnprepareNetworkPoliciesRequest')
body_content_kwargs['content'] = body_content
request = self._client.post(url, query_parameters, header_parameters, **body_content_kwargs)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200, 202]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
if cls:
return cls(pipeline_response, None, {})
_unprepare_network_policies_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/virtualNetworks/{virtualNetworkName}/subnets/{subnetName}/UnprepareNetworkPolicies'} # type: ignore
async def begin_unprepare_network_policies(
self,
resource_group_name: str,
virtual_network_name: str,
subnet_name: str,
unprepare_network_policies_request_parameters: "_models.UnprepareNetworkPoliciesRequest",
**kwargs: Any
) -> AsyncLROPoller[None]:
"""Unprepares a subnet by removing network intent policies.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param virtual_network_name: The name of the virtual network.
:type virtual_network_name: str
:param subnet_name: The name of the subnet.
:type subnet_name: str
:param unprepare_network_policies_request_parameters: Parameters supplied to unprepare subnet
to remove network intent policies.
:type unprepare_network_policies_request_parameters: ~azure.mgmt.network.v2020_04_01.models.UnprepareNetworkPoliciesRequest
:keyword callable cls: A custom type or function that will be passed the direct response
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: By default, your polling method will be AsyncARMPolling.
Pass in False for this operation to not poll, or pass in your own initialized polling object for a personal polling strategy.
:paramtype polling: bool or ~azure.core.polling.AsyncPollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
:return: An instance of AsyncLROPoller that returns either None or the result of cls(response)
:rtype: ~azure.core.polling.AsyncLROPoller[None]
:raises ~azure.core.exceptions.HttpResponseError:
"""
polling = kwargs.pop('polling', True) # type: Union[bool, AsyncPollingMethod]
cls = kwargs.pop('cls', None) # type: ClsType[None]
lro_delay = kwargs.pop(
'polling_interval',
self._config.polling_interval
)
cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
if cont_token is None:
raw_result = await self._unprepare_network_policies_initial(
resource_group_name=resource_group_name,
virtual_network_name=virtual_network_name,
subnet_name=subnet_name,
unprepare_network_policies_request_parameters=unprepare_network_policies_request_parameters,
cls=lambda x,y,z: x,
**kwargs
)
kwargs.pop('error_map', None)
kwargs.pop('content_type', None)
def get_long_running_output(pipeline_response):
if cls:
return cls(pipeline_response, None, {})
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'virtualNetworkName': self._serialize.url("virtual_network_name", virtual_network_name, 'str'),
'subnetName': self._serialize.url("subnet_name", subnet_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
if polling is True: polling_method = AsyncARMPolling(lro_delay, lro_options={'final-state-via': 'location'}, path_format_arguments=path_format_arguments, **kwargs)
elif polling is False: polling_method = AsyncNoPolling()
else: polling_method = polling
if cont_token:
return AsyncLROPoller.from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output
)
else:
return AsyncLROPoller(self._client, raw_result, get_long_running_output, polling_method)
begin_unprepare_network_policies.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/virtualNetworks/{virtualNetworkName}/subnets/{subnetName}/UnprepareNetworkPolicies'} # type: ignore
def list(
self,
resource_group_name: str,
virtual_network_name: str,
**kwargs: Any
) -> AsyncIterable["_models.SubnetListResult"]:
"""Gets all subnets in a virtual network.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param virtual_network_name: The name of the virtual network.
:type virtual_network_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: An iterator like instance of either SubnetListResult or the result of cls(response)
:rtype: ~azure.core.async_paging.AsyncItemPaged[~azure.mgmt.network.v2020_04_01.models.SubnetListResult]
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.SubnetListResult"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2020-04-01"
accept = "application/json"
def prepare_request(next_link=None):
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
if not next_link:
# Construct URL
url = self.list.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'virtualNetworkName': self._serialize.url("virtual_network_name", virtual_network_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
request = self._client.get(url, query_parameters, header_parameters)
else:
url = next_link
query_parameters = {} # type: Dict[str, Any]
request = self._client.get(url, query_parameters, header_parameters)
return request
async def extract_data(pipeline_response):
deserialized = self._deserialize('SubnetListResult', pipeline_response)
list_of_elem = deserialized.value
if cls:
list_of_elem = cls(list_of_elem)
return deserialized.next_link or None, AsyncList(list_of_elem)
async def get_next(next_link=None):
request = prepare_request(next_link)
pipeline_response = await self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
return pipeline_response
return AsyncItemPaged(
get_next, extract_data
)
list.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/virtualNetworks/{virtualNetworkName}/subnets'} # type: ignore
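    # Illustrative paging sketch (``client`` and the resource names are
    # hypothetical); the returned AsyncItemPaged follows ``next_link``
    # transparently as it is iterated:
    #
    #     async for subnet in client.subnets.list("my-rg", "my-vnet"):
    #         print(subnet.name)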
| Azure/azure-sdk-for-python | sdk/network/azure-mgmt-network/azure/mgmt/network/v2020_04_01/aio/operations/_subnets_operations.py | Python | mit | 35,858 |
# (C) British Crown Copyright 2017, Met Office
#
# This file is part of Iris.
#
# Iris is free software: you can redistribute it and/or modify it under
# the terms of the GNU Lesser General Public License as published by the
# Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Iris is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with Iris. If not, see <http://www.gnu.org/licenses/>.
"""
Test function :func:`iris.util._slice_data_with_keys`.
Note: much of the functionality really belongs to the other routines,
:func:`iris.util._build_full_slice_given_keys`, and
:func:`column_slices_generator`.
However, it is relatively simple to test multiple aspects of all three here
in combination.
"""
from __future__ import (absolute_import, division, print_function)
from six.moves import (filter, input, map, range, zip) # noqa
# import iris tests first so that some things can be initialised before
# importing anything else
import iris.tests as tests
import numpy as np
from iris.util import _slice_data_with_keys
from iris._lazy_data import as_lazy_data, as_concrete_data
class DummyArray(object):
# A dummy array-like that records the keys of indexing calls.
def __init__(self, shape, _indexing_record_list=None):
self.shape = shape
self.ndim = len(shape)
if _indexing_record_list is None:
_indexing_record_list = []
self._getitem_call_keys = _indexing_record_list
def __getitem__(self, keys):
# Add the indexing keys to the call list.
self._getitem_call_keys.append(keys)
# Return a new object with the correct derived shape, and record its
# indexing operations in the same key list as this.
shape_array = np.zeros(self.shape)
shape_array = shape_array.__getitem__(keys)
new_shape = shape_array.shape
return DummyArray(new_shape,
_indexing_record_list=self._getitem_call_keys)
class Indexer(object):
# An object to make __getitem__ arglists from indexing operations.
def __getitem__(self, keys):
return keys
# An Indexer object for generating indexing keys in a nice visible way.
Index = Indexer()
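# For example, ``Index[2, 1:3]`` evaluates to the key tuple
# ``(2, slice(1, 3, None))``, which can be passed straight to
# ``_slice_data_with_keys``.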
class MixinIndexingTest(object):
def check(self, shape, keys, expect_call_keys=None, expect_map=None):
data = DummyArray(shape)
dim_map, _ = _slice_data_with_keys(data, keys)
if expect_call_keys is not None:
calls_got = data._getitem_call_keys
# Check that the indexing keys applied were the expected ones.
equal = len(calls_got) == len(expect_call_keys)
for act_call, expect_call in zip(calls_got, expect_call_keys):
equal &= len(act_call) == len(expect_call)
# A problem here is that in each call, some keys may be
# *arrays*, and arrays can't be compared in the "normal"
# way. So we must use np.all for comparison :-(
for act_key, expect_key in zip(act_call, expect_call):
equal &= (np.asanyarray(act_key).dtype ==
np.asanyarray(expect_key).dtype and
np.all(act_key == expect_key))
errmsg = 'Different key lists:\n{!s}\n!=\n{!s}\n'
def showkeys(keys_list):
msg = '[\n '
msg += '\n '.join(str(x) for x in keys_list)
msg += '\n]'
return msg
self.assertTrue(equal, errmsg.format(showkeys(calls_got),
showkeys(expect_call_keys)))
if expect_map is not None:
self.assertEqual(dim_map, expect_map)
class Test_indexing(MixinIndexingTest, tests.IrisTest):
# Check the indexing operations performed for various requested keys.
def test_0d_nokeys(self):
# Performs *no* underlying indexing operation.
self.check((), Index[()],
[])
def test_1d_int(self):
self.check((4,), Index[2],
[(2,)])
def test_1d_all(self):
self.check((3,), Index[:],
[(slice(None),)])
def test_1d_tuple(self):
# The call makes tuples into 1-D arrays, and a trailing Ellipsis is
# added (for the 1-D case only).
self.check((3,), Index[(2, 0, 1), ],
[(np.array([2, 0, 1]), Ellipsis)])
def test_fail_1d_2keys(self):
msg = 'More slices .* than dimensions'
with self.assertRaisesRegexp(IndexError, msg):
self.check((3,), Index[1, 2])
def test_fail_empty_slice(self):
msg = 'Cannot index with zero length slice'
with self.assertRaisesRegexp(IndexError, msg):
self.check((3,), Index[1:1])
def test_2d_tuple(self):
# Like the above, but there is an extra no-op at the start and no
# trailing Ellipsis is generated.
self.check((3, 2), Index[(2, 0, 1), ],
[(slice(None), slice(None)),
(np.array([2, 0, 1]), slice(None))])
def test_2d_two_tuples(self):
# Could be treated as fancy indexing, but must not be !
# Two separate 2-D indexing operations.
self.check((3, 2), Index[(2, 0, 1, 1), (0, 1, 0, 1)],
[(np.array([2, 0, 1, 1]), slice(None)),
(slice(None), np.array([0, 1, 0, 1]))])
def test_2d_tuple_and_value(self):
# The two keys are applied in separate operations, and in the reverse
# order (?) : The second op is then slicing a 1-D array, not 2-D.
self.check((3, 5), Index[(2, 0, 1), 3],
[(slice(None), 3),
(np.array([2, 0, 1]), Ellipsis)])
def test_2d_single_int(self):
self.check((3, 4), Index[2],
[(2, slice(None))])
def test_2d_multiple_int(self):
self.check((3, 4), Index[2, 1:3],
[(2, slice(1, 3))])
def test_3d_1int(self):
self.check((3, 4, 5), Index[2],
[(2, slice(None), slice(None))])
def test_3d_2int(self):
self.check((3, 4, 5), Index[2, 3],
[(2, 3, slice(None))])
def test_3d_tuple_and_value(self):
# The two keys are applied in separate operations, and in the reverse
# order (?) : The second op is slicing a 2-D array, not 3-D.
self.check((3, 5, 7), Index[(2, 0, 1), 4],
[(slice(None), 4, slice(None)),
(np.array([2, 0, 1]), slice(None))])
def test_3d_ellipsis_last(self):
self.check((3, 4, 5), Index[2, ...],
[(2, slice(None), slice(None))])
def test_3d_ellipsis_first_1int(self):
self.check((3, 4, 5), Index[..., 2],
[(slice(None), slice(None), 2)])
def test_3d_ellipsis_first_2int(self):
self.check((3, 4, 5), Index[..., 2, 3],
[(slice(None), 2, 3)])
def test_3d_multiple_tuples(self):
# Where there are TWO or more tuple keys, this could be misinterpreted
# as 'fancy' indexing : It should resolve into multiple calls.
self.check((3, 4, 5), Index[(1, 2, 1), :, (2, 2, 3)],
[(slice(None), slice(None), slice(None)),
(np.array([1, 2, 1]), slice(None), slice(None)),
(slice(None), slice(None), np.array([2, 2, 3])),
])
    # NOTE: there seems to be an extra initial [:, :, :].
# That's just what it does at present.
class Test_dimensions_mapping(MixinIndexingTest, tests.IrisTest):
# Check the dimensions map returned for various requested keys.
def test_1d_nochange(self):
self.check((3,), Index[1:2],
expect_map={None: None, 0: 0})
def test_1d_1int_losedim0(self):
self.check((3,), Index[1],
expect_map={None: None, 0: None})
def test_1d_tuple_nochange(self):
# A selection index leaves the dimension intact.
self.check((3,), Index[(1, 0, 1, 2), ],
expect_map={None: None, 0: 0})
def test_1d_1tuple_nochange(self):
# A selection index with only one value in it *still* leaves the
# dimension intact.
self.check((3,), Index[(2,), ],
expect_map={None: None, 0: 0})
def test_1d_slice_nochange(self):
# A slice leaves the dimension intact.
self.check((3,), Index[1:7],
expect_map={None: None, 0: 0})
def test_2d_nochange(self):
self.check((3, 4), Index[:, :],
expect_map={None: None, 0: 0, 1: 1})
def test_2d_losedim0(self):
self.check((3, 4), Index[1, :],
expect_map={None: None, 0: None, 1: 0})
def test_2d_losedim1(self):
self.check((3, 4), Index[1:4, 2],
expect_map={None: None, 0: 0, 1: None})
def test_2d_loseboth(self):
# Two indices give scalar result.
self.check((3, 4), Index[1, 2],
expect_map={None: None, 0: None, 1: None})
def test_3d_losedim1(self):
# Cutting out the middle dim.
self.check((3, 4, 2), Index[:, 2],
expect_map={None: None, 0: 0, 1: None, 2: 1})
class TestResults(tests.IrisTest):
# Integration-style test, exercising (mostly) the same cases as above,
# but checking actual results, for both real and lazy array inputs.
def check(self, real_data, keys, expect_result, expect_map):
real_data = np.array(real_data)
lazy_data = as_lazy_data(real_data, real_data.shape)
real_dim_map, real_result = _slice_data_with_keys(real_data, keys)
lazy_dim_map, lazy_result = _slice_data_with_keys(lazy_data, keys)
lazy_result = as_concrete_data(lazy_result)
self.assertArrayEqual(real_result, expect_result)
self.assertArrayEqual(lazy_result, expect_result)
self.assertEqual(real_dim_map, expect_map)
self.assertEqual(lazy_dim_map, expect_map)
def test_1d_int(self):
self.check([1, 2, 3, 4], Index[2],
[3],
{None: None, 0: None})
def test_1d_all(self):
self.check([1, 2, 3], Index[:],
[1, 2, 3],
{None: None, 0: 0})
def test_1d_tuple(self):
self.check([1, 2, 3], Index[(2, 0, 1, 0), ],
[3, 1, 2, 1],
{None: None, 0: 0})
def test_fail_1d_2keys(self):
msg = 'More slices .* than dimensions'
with self.assertRaisesRegexp(IndexError, msg):
self.check([1, 2, 3], Index[1, 2], None, None)
def test_fail_empty_slice(self):
msg = 'Cannot index with zero length slice'
with self.assertRaisesRegexp(IndexError, msg):
self.check([1, 2, 3], Index[1:1], None, None)
def test_2d_tuple(self):
self.check([[11, 12], [21, 22], [31, 32]],
Index[(2, 0, 1), ],
[[31, 32], [11, 12], [21, 22]],
{None: None, 0: 0, 1: 1})
def test_2d_two_tuples(self):
# Could be treated as fancy indexing, but must not be !
# Two separate 2-D indexing operations.
self.check([[11, 12, 13], [21, 22, 23], [31, 32, 33]],
Index[(2, 0), (0, 1, 0, 1)],
[[31, 32, 31, 32], [11, 12, 11, 12]],
{None: None, 0: 0, 1: 1})
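    # For contrast, genuine numpy "fancy" indexing pairs equal-length key
    # tuples element-wise instead of applying them independently, e.g.
    # (a sketch, with ``a`` being the 3x3 array above):
    #
    #     a[np.array([2, 0]), np.array([0, 1])]   # -> [31, 12], not a 2x4 block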
def test_2d_tuple_and_value(self):
# The two keys are applied in separate operations, and in the reverse
# order (?) : The second op is then slicing a 1-D array, not 2-D.
self.check([[11, 12, 13, 14], [21, 22, 23, 24], [31, 32, 33, 34]],
Index[(2, 0, 1), 3],
[34, 14, 24],
{None: None, 0: 0, 1: None})
def test_2d_single_int(self):
self.check([[11, 12, 13], [21, 22, 23], [31, 32, 33]],
Index[1],
[21, 22, 23],
{None: None, 0: None, 1: 0})
def test_2d_int_slice(self):
self.check([[11, 12, 13], [21, 22, 23], [31, 32, 33]],
Index[2, 1:3],
[32, 33],
{None: None, 0: None, 1: 0})
def test_3d_1int(self):
self.check([[[111, 112, 113], [121, 122, 123]],
[[211, 212, 213], [221, 222, 223]],
[[311, 312, 313], [321, 322, 323]]],
Index[1],
[[211, 212, 213], [221, 222, 223]],
{None: None, 0: None, 1: 0, 2: 1})
def test_3d_2int(self):
self.check([[[111, 112, 113], [121, 122, 123], [131, 132, 133]],
[[211, 212, 213], [221, 222, 223], [231, 232, 233]]],
Index[1, 2],
[231, 232, 233],
{None: None, 0: None, 1: None, 2: 0})
def test_3d_tuple_and_value(self):
# The two keys are applied in separate operations, and in the reverse
# order (?) : The second op is slicing a 2-D array, not 3-D.
self.check([[[111, 112, 113, 114], [121, 122, 123, 124]],
[[211, 212, 213, 214], [221, 222, 223, 224]],
[[311, 312, 313, 314], [321, 322, 323, 324]]],
Index[(2, 0, 1), 1],
[[321, 322, 323, 324],
[121, 122, 123, 124],
[221, 222, 223, 224]],
{None: None, 0: 0, 1: None, 2: 1})
def test_3d_ellipsis_last(self):
self.check([[[111, 112, 113], [121, 122, 123]],
[[211, 212, 213], [221, 222, 223]],
[[311, 312, 313], [321, 322, 323]]],
Index[2, ...],
[[311, 312, 313], [321, 322, 323]],
{None: None, 0: None, 1: 0, 2: 1})
def test_3d_ellipsis_first_1int(self):
self.check([[[111, 112, 113, 114], [121, 122, 123, 124]],
[[211, 212, 213, 214], [221, 222, 223, 224]],
[[311, 312, 313, 314], [321, 322, 323, 324]]],
Index[..., 2],
[[113, 123],
[213, 223],
[313, 323]],
{None: None, 0: 0, 1: 1, 2: None})
def test_3d_ellipsis_mid_1int(self):
self.check([[[111, 112, 113], [121, 122, 123]],
[[211, 212, 213], [221, 222, 223]],
[[311, 312, 313], [321, 322, 323]]],
Index[..., 1, ...],
[[121, 122, 123],
[221, 222, 223],
[321, 322, 323]],
{None: None, 0: 0, 1: None, 2: 1})
def test_3d_ellipsis_first_2int(self):
self.check([[[111, 112, 113], [121, 122, 123]],
[[211, 212, 213], [221, 222, 223]],
[[311, 312, 313], [321, 322, 323]]],
Index[..., 1, 2],
[123, 223, 323],
{None: None, 0: 0, 1: None, 2: None})
def test_3d_multiple_tuples(self):
# Where there are TWO or more tuple keys, this could be misinterpreted
# as 'fancy' indexing : It should resolve into multiple calls.
self.check([[[111, 112, 113, 114], [121, 122, 123, 124]],
[[211, 212, 213, 214], [221, 222, 223, 224]],
[[311, 312, 313, 314], [321, 322, 323, 324]]],
Index[(1, 2, 1), :, (2, 2, 3)],
[[[213, 213, 214], [223, 223, 224]],
[[313, 313, 314], [323, 323, 324]],
[[213, 213, 214], [223, 223, 224]]],
{None: None, 0: 0, 1: 1, 2: 2})
        # NOTE: there seems to be an extra initial [:, :, :].
# That's just what it does at present.
if __name__ == '__main__':
tests.main()
| QuLogic/iris | lib/iris/tests/unit/util/test__slice_data_with_keys.py | Python | gpl-3.0 | 16,153 |
#!/usr/bin/env python
#-*- coding: utf-8 -*-
import mock
import unittest
from livespace import Client
from livespace.exceptions import ApiError
class ClientTestCase(unittest.TestCase):
def setUp(self):
self.client = Client('api_url', 'api_key', 'api_secret')
super(ClientTestCase, self).setUp()
@mock.patch('requests.post')
@mock.patch('livespace.Client.get_access_token')
def test_client_raises_api_error(self, get_access_token, post):
get_access_token.return_value = {
'_api_auth': 'key',
'_api_key': 'api key',
'_api_session': 'session',
'_api_sha': 'sha'}
class MyPost(mock.Mock):
def json(self):
return {'data': [], 'error': None, 'result': 561,
'status': False}
post.return_value = MyPost()
with self.assertRaises(ApiError):
self.client('Default', 'ping', {'foo': 'bar'})
if __name__ == '__main__':
unittest.main()
| ra2er/livespace-sdk | livespace/tests.py | Python | mit | 1,010 |
import threading
import logging
import time
import os
logging.basicConfig( level=logging.DEBUG, format='[%(levelname)s] - %(threadName)-10s : %(message)s')
def worker(x):
    logging.debug('Launched')
importer = 'bin/mallet import-svmlight --input archivoEntrenamiento%s.txt --output training%s.mallet' % (x,x)
print importer
os.system(importer)
classifiers = ['NaiveBayes', 'DecisionTree','MaxEntL1','MaxEnt', 'BalancedWinnow', 'Winnow']
for j in range(len(classifiers)):
trainer= 'bin/mallet train-classifier --input training%s.mallet --output-classifier output%s_%s.classifier --trainer %s' % (x,x,classifiers[j],classifiers[j])
print trainer
os.system(trainer)
classify = 'bin/mallet classify-file --input archivo%s.txt --output output%s_%s.txt --classifier output%s_%s.classifier' % (x,x,classifiers[j],x,classifiers[j])
print classify
os.system(classify)
    logging.debug('Stopping')
return
threads = list()
for i in range(1,11):
t = threading.Thread(target=worker, args=(i,))
threads.append(t)
t.start()
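# To block until every worker finishes, the threads could be joined (a sketch;
# the threads above are non-daemon, so the interpreter waits for them anyway):
# for t in threads:
#     t.join()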
| mespinozas/si | t3/t3_marcelo/t3loader.py | Python | gpl-2.0 | 1,105 |
# stdlib
from collections import defaultdict
import time
# 3p
import psutil
# project
from checks import AgentCheck
from config import _is_affirmative
from utils.platform import Platform
DEFAULT_AD_CACHE_DURATION = 120
DEFAULT_PID_CACHE_DURATION = 120
ATTR_TO_METRIC = {
'thr': 'threads',
'cpu': 'cpu.pct',
'rss': 'mem.rss',
'vms': 'mem.vms',
'real': 'mem.real',
'open_fd': 'open_file_descriptors',
'r_count': 'ioread_count', # FIXME: namespace me correctly (6.x), io.r_count
    'w_count': 'iowrite_count', # FIXME: namespace me correctly (6.x) io.w_count
    'r_bytes': 'ioread_bytes', # FIXME: namespace me correctly (6.x) io.r_bytes
'w_bytes': 'iowrite_bytes', # FIXME: namespace me correctly (6.x) io.w_bytes
'ctx_swtch_vol': 'voluntary_ctx_switches', # FIXME: namespace me correctly (6.x), ctx_swt.voluntary
'ctx_swtch_invol': 'involuntary_ctx_switches', # FIXME: namespace me correctly (6.x), ctx_swt.involuntary
}
class ProcessCheck(AgentCheck):
def __init__(self, name, init_config, agentConfig, instances=None):
AgentCheck.__init__(self, name, init_config, agentConfig, instances)
# ad stands for access denied
# We cache the PIDs getting this error and don't iterate on them
# more often than `access_denied_cache_duration`
# This cache is for all PIDs so it's global, but it should
# be refreshed by instance
self.last_ad_cache_ts = {}
self.ad_cache = set()
self.access_denied_cache_duration = int(
init_config.get(
'access_denied_cache_duration',
DEFAULT_AD_CACHE_DURATION
)
)
# By default cache the PID list for a while
# Sometimes it's not wanted b/c it can mess with no-data monitoring
# This cache is indexed per instance
self.last_pid_cache_ts = {}
self.pid_cache = {}
self.pid_cache_duration = int(
init_config.get(
'pid_cache_duration',
DEFAULT_PID_CACHE_DURATION
)
)
if Platform.is_linux():
procfs_path = init_config.get('procfs_path')
if procfs_path:
psutil.PROCFS_PATH = procfs_path
# Process cache, indexed by instance
self.process_cache = defaultdict(dict)
def should_refresh_ad_cache(self, name):
now = time.time()
return now - self.last_ad_cache_ts.get(name, 0) > self.access_denied_cache_duration
def should_refresh_pid_cache(self, name):
now = time.time()
return now - self.last_pid_cache_ts.get(name, 0) > self.pid_cache_duration
def find_pids(self, name, search_string, exact_match, ignore_ad=True):
"""
        Create a set of PIDs for the selected processes,
        matching the strings given in search_string.
"""
if not self.should_refresh_pid_cache(name):
return self.pid_cache[name]
ad_error_logger = self.log.debug
if not ignore_ad:
ad_error_logger = self.log.error
refresh_ad_cache = self.should_refresh_ad_cache(name)
matching_pids = set()
for proc in psutil.process_iter():
# Skip access denied processes
if not refresh_ad_cache and proc.pid in self.ad_cache:
continue
found = False
for string in search_string:
try:
# FIXME 6.x: All has been deprecated from the doc, should be removed
if string == 'All':
found = True
if exact_match:
if proc.name() == string:
found = True
else:
cmdline = proc.cmdline()
if string in ' '.join(cmdline):
found = True
except psutil.NoSuchProcess:
self.log.warning('Process disappeared while scanning')
except psutil.AccessDenied, e:
ad_error_logger('Access denied to process with PID %s', proc.pid)
ad_error_logger('Error: %s', e)
if refresh_ad_cache:
self.ad_cache.add(proc.pid)
if not ignore_ad:
raise
else:
if refresh_ad_cache:
self.ad_cache.discard(proc.pid)
if found:
matching_pids.add(proc.pid)
break
self.pid_cache[name] = matching_pids
self.last_pid_cache_ts[name] = time.time()
if refresh_ad_cache:
self.last_ad_cache_ts[name] = time.time()
return matching_pids
def psutil_wrapper(self, process, method, accessors, *args, **kwargs):
"""
        A psutil wrapper that calls
        * process.method(*args, **kwargs) and returns the result
        OR
        * process.method(*args, **kwargs).accessor for each accessor given in
        a list, the results being indexed in a dictionary by accessor name
"""
if accessors is None:
result = None
else:
result = {}
# Ban certain method that we know fail
if method == 'memory_info_ex'\
and (Platform.is_win32() or Platform.is_solaris()):
return result
elif method == 'num_fds' and not Platform.is_unix():
return result
try:
res = getattr(process, method)(*args, **kwargs)
if accessors is None:
result = res
else:
for acc in accessors:
try:
result[acc] = getattr(res, acc)
except AttributeError:
self.log.debug("psutil.%s().%s attribute does not exist", method, acc)
except (NotImplementedError, AttributeError):
self.log.debug("psutil method %s not implemented", method)
except psutil.AccessDenied:
self.log.debug("psutil was denied acccess for method %s", method)
except psutil.NoSuchProcess:
self.warning("Process {0} disappeared while scanning".format(process.pid))
return result
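    # For example (a sketch): psutil_wrapper(p, 'memory_info', ['rss', 'vms'])
    # returns a dict such as {'rss': 123456, 'vms': 7890123}, whereas
    # psutil_wrapper(p, 'num_threads', None) returns the bare value
    # (or None if the underlying call failed).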
def get_process_state(self, name, pids):
st = defaultdict(list)
# Remove from cache the processes that are not in `pids`
cached_pids = set(self.process_cache[name].keys())
pids_to_remove = cached_pids - pids
for pid in pids_to_remove:
del self.process_cache[name][pid]
for pid in pids:
st['pids'].append(pid)
new_process = False
# If the pid's process is not cached, retrieve it
if pid not in self.process_cache[name] or not self.process_cache[name][pid].is_running():
new_process = True
try:
self.process_cache[name][pid] = psutil.Process(pid)
self.log.debug('New process in cache: %s' % pid)
# Skip processes dead in the meantime
except psutil.NoSuchProcess:
self.warning('Process %s disappeared while scanning' % pid)
# reset the PID cache now, something changed
self.last_pid_cache_ts[name] = 0
continue
p = self.process_cache[name][pid]
meminfo = self.psutil_wrapper(p, 'memory_info', ['rss', 'vms'])
st['rss'].append(meminfo.get('rss'))
st['vms'].append(meminfo.get('vms'))
# will fail on win32 and solaris
shared_mem = self.psutil_wrapper(p, 'memory_info_ex', ['shared']).get('shared')
if shared_mem is not None and meminfo.get('rss') is not None:
st['real'].append(meminfo['rss'] - shared_mem)
else:
st['real'].append(None)
ctxinfo = self.psutil_wrapper(p, 'num_ctx_switches', ['voluntary', 'involuntary'])
st['ctx_swtch_vol'].append(ctxinfo.get('voluntary'))
st['ctx_swtch_invol'].append(ctxinfo.get('involuntary'))
st['thr'].append(self.psutil_wrapper(p, 'num_threads', None))
cpu_percent = self.psutil_wrapper(p, 'cpu_percent', None)
if not new_process:
# psutil returns `0.` for `cpu_percent` the first time it's sampled on a process,
# so save the value only on non-new processes
st['cpu'].append(cpu_percent)
st['open_fd'].append(self.psutil_wrapper(p, 'num_fds', None))
ioinfo = self.psutil_wrapper(p, 'io_counters', ['read_count', 'write_count', 'read_bytes', 'write_bytes'])
st['r_count'].append(ioinfo.get('read_count'))
st['w_count'].append(ioinfo.get('write_count'))
st['r_bytes'].append(ioinfo.get('read_bytes'))
st['w_bytes'].append(ioinfo.get('write_bytes'))
return st
def check(self, instance):
name = instance.get('name', None)
tags = instance.get('tags', [])
exact_match = _is_affirmative(instance.get('exact_match', True))
search_string = instance.get('search_string', None)
ignore_ad = _is_affirmative(instance.get('ignore_denied_access', True))
if not isinstance(search_string, list):
raise KeyError('"search_string" parameter should be a list')
# FIXME 6.x remove me
if "All" in search_string:
self.warning('Deprecated: Having "All" in your search_string will'
'greatly reduce the performance of the check and '
'will be removed in a future version of the agent.')
if name is None:
raise KeyError('The "name" of process groups is mandatory')
if search_string is None:
raise KeyError('The "search_string" is mandatory')
pids = self.find_pids(
name,
search_string,
exact_match,
ignore_ad=ignore_ad
)
proc_state = self.get_process_state(name, pids)
# FIXME 6.x remove the `name` tag
tags.extend(['process_name:%s' % name, name])
self.log.debug('ProcessCheck: process %s analysed', name)
self.gauge('system.processes.number', len(pids), tags=tags)
for attr, mname in ATTR_TO_METRIC.iteritems():
vals = [x for x in proc_state[attr] if x is not None]
# skip []
if vals:
# FIXME 6.x: change this prefix?
self.gauge('system.processes.%s' % mname, sum(vals), tags=tags)
self._process_service_check(name, len(pids), instance.get('thresholds', None))
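    # An illustrative instance configuration for this check (YAML; the values
    # are hypothetical, the keys are the ones read in check() above):
    #
    #   instances:
    #     - name: ssh
    #       search_string: ['ssh', 'sshd']
    #       exact_match: True
    #       thresholds:
    #         warning: [1, 4]
    #         critical: [1, 8]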
def _process_service_check(self, name, nb_procs, bounds):
'''
        Report a service check, for each process in search_string.
        Report as OK if the process count is within the warning thresholds,
        WARNING if it is outside the warning thresholds,
        and CRITICAL if it is outside the critical thresholds.
'''
tag = ["process:%s" % name]
status = AgentCheck.OK
message_str = "PROCS %s: %s processes found for %s"
status_str = {
AgentCheck.OK: "OK",
AgentCheck.WARNING: "WARNING",
AgentCheck.CRITICAL: "CRITICAL"
}
if not bounds and nb_procs < 1:
status = AgentCheck.CRITICAL
elif bounds:
warning = bounds.get('warning', [1, float('inf')])
critical = bounds.get('critical', [1, float('inf')])
if warning[1] < nb_procs or nb_procs < warning[0]:
status = AgentCheck.WARNING
if critical[1] < nb_procs or nb_procs < critical[0]:
status = AgentCheck.CRITICAL
self.service_check(
"process.up",
status,
tags=tag,
message=message_str % (status_str[status], nb_procs, name)
)
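    # Worked example: with thresholds {'warning': [1, 4], 'critical': [1, 8]},
    # 3 matching processes -> OK, 6 -> WARNING, 0 or 9 -> CRITICAL.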
| mertaytore/koding | deployment/datadog/checks.d/process.py | Python | agpl-3.0 | 12,203 |
from kokki import Package
Package("php5-mysql")
| samuel/kokki | kokki/cookbooks/mysql/recipes/php5.py | Python | bsd-3-clause | 50 |
#!/usr/bin/env python
# built-ins
import sys
import os
import ConfigParser
from xml.dom import minidom
import json
import random
import time
import datetime
import re
import traceback
import pickle
import sqlite3
import tempfile
import urllib
# site-packages
import requests
import prettytable
import inflection
# global constants
BASE_PATH = os.path.dirname(os.path.abspath( __file__ ))
CONFIG_PATH = os.path.join("config", "config.ini")
CREDENTIALS_PATH = os.path.join("config", "credentials.ini")
TWITTERGC_PATH = os.path.join("config", "twitter_global_config.json")
PERSIST_PATH = os.path.join("data", "persistence.sqlite3")
TWITTER_RESOURCES = "statuses,help,application,account,trends"
INNOCENCE_PATH = "http://bot-innocence.herokuapp.com/muted"
# local
sys.path.insert(0, os.path.join(BASE_PATH, "lib"))
import twitter
import titlecase
# package
import SimulatorGeneratorImage
# globals
config = None
creds = None
twitterApi = None
twitterGlobalConfig = None
persistenceConnection = None
persistence = None
def setup():
global creds
global config
global twitterApi
global twitterGlobalConfig
global persistenceConnection
global persistence
os.chdir(BASE_PATH)
random.seed()
for crucial in (CONFIG_PATH, CREDENTIALS_PATH):
if not os.path.exists(crucial):
sys.stderr.write("Couldn't load %s; exiting.\n" % (crucial))
config = ConfigParser.ConfigParser()
creds = ConfigParser.ConfigParser()
config.read(CONFIG_PATH)
creds.read(CREDENTIALS_PATH)
creatingDB = not os.path.exists(PERSIST_PATH)
persistenceConnection = sqlite3.connect(PERSIST_PATH)
persistence = persistenceConnection.cursor()
if (creatingDB):
persistence.execute("CREATE TABLE rateLimits (resource text unique, reset int, max int, remaining int)")
persistence.execute("CREATE TABLE intPrefs (name text unique, value int)")
persistence.execute("CREATE TABLE queuedRequests (tweet int unique, job text, user text)")
persistence.execute("CREATE TABLE failedRequests (tweet int unique, job text, user text)")
persistenceConnection.commit()
if (config.getint("services", "twitter_live") == 1):
consumer_key = creds.get("twitter", "consumer_key")
consumer_secret = creds.get("twitter", "consumer_secret")
access_token = creds.get("twitter", "access_token")
access_token_secret = creds.get("twitter", "access_token_secret")
twitterApi = twitter.Api(consumer_key, consumer_secret, access_token, access_token_secret)
# global config data
oneDayAgo = time.time() - 60*60*24
if (not os.path.exists(TWITTERGC_PATH) or os.path.getmtime(TWITTERGC_PATH) < oneDayAgo):
print("Getting configuration data from Twitter.")
# not checking twitter rate limits here since it will only get called once per day
twitterGlobalConfig = twitterApi.GetHelpConfiguration()
with open(TWITTERGC_PATH, "w") as tgcFile:
json.dump(twitterGlobalConfig, tgcFile)
else:
with open(TWITTERGC_PATH, "r") as tgcFile:
twitterGlobalConfig = json.load(tgcFile)
# install the rate limits
if creatingDB:
getTwitterRateLimits()
else:
# cached values will do in a pinch
with open(TWITTERGC_PATH, "r") as tgcFile:
twitterGlobalConfig = json.load(tgcFile)
def getTwitterRateLimits():
persistence.execute("SELECT resource FROM rateLimits")
existingResources = map(lambda x: x[0], persistence.fetchall())
    # not checking twitter rate limits here since it will only get called when
# one of the limits is ready to reset
limits = twitterApi.GetRateLimitStatus(TWITTER_RESOURCES)
for resourceGroup, resources in limits["resources"].items():
for resource, rateValues in resources.items():
if resource not in existingResources:
persistence.execute(
"INSERT INTO rateLimits VALUES (?, ?, ?, ?)",
[
resource,
int(rateValues["reset"]),
int(rateValues["limit"]),
int(rateValues["remaining"]),
]
)
else:
persistence.execute(
"UPDATE rateLimits SET reset=?, max=?, remaining=? WHERE resource=?",
[
int(rateValues["reset"]),
int(rateValues["limit"]),
int(rateValues["remaining"]),
resource,
]
)
persistenceConnection.commit()
defaults = [
("tweetHourlyReset", int(time.time()) + 60*60),
("tweetDailyReset", int(time.time()) + 60*60*24),
("tweetHourlyRemaining", config.getint("services", "twitter_hourly_limit")),
("tweetDailyRemaining", config.getint("services", "twitter_daily_limit")),
]
for default in defaults:
if getIntPref(default[0]) == -1:
setIntPref(default[0], default[1])
def getIntPref(name):
persistence.execute("SELECT value FROM intPrefs WHERE name=?", [name])
pref = persistence.fetchone()
if (pref == None):
return -1
return pref[0]
def setIntPref(name, value):
if getIntPref(name) == -1:
persistence.execute("INSERT INTO intPrefs VALUES (?, ?)", [name, value])
else:
persistence.execute("UPDATE intPrefs SET value=? WHERE name=?", [value, name])
persistenceConnection.commit()
def checkTwitterPostLimit():
currentTime = int(time.time())
hourlyReset = getIntPref("tweetHourlyReset")
if currentTime - hourlyReset > 0:
setIntPref("tweetHourlyReset", currentTime + 60*60)
setIntPref("tweetHourlyRemaining", config.getint("services", "twitter_hourly_limit"))
dailyReset = getIntPref("tweetDailyReset")
if currentTime - dailyReset > 0:
setIntPref("tweetDailyReset", currentTime + 60*60*24)
setIntPref("tweetDailyRemaining", config.getint("services", "twitter_daily_limit"))
hourly = getIntPref("tweetHourlyRemaining")
daily = getIntPref("tweetDailyRemaining")
if hourly > 0 and daily > 0:
return True
else:
return False
def useTweet():
for resource in ["tweetDailyRemaining", "tweetHourlyRemaining"]:
setIntPref(resource, getIntPref(resource) - 1)
def checkTwitterResource(resourceKey, proposedUsage=1):
persistence.execute("SELECT * FROM rateLimits WHERE resource=?", [resourceKey])
resourceData = persistence.fetchone()
if resourceData == None or int(time.time()) - resourceData[1] > 0:
getTwitterRateLimits()
persistence.execute("SELECT * FROM rateLimits WHERE resource=?", [resourceKey])
resourceData = persistence.fetchone()
if (resourceData == None):
sys.stderr.write("Invalid Twitter resource: %s\n" % (resourceKey))
return False
if (resourceData[3] - proposedUsage > 0):
return True
else:
return False
def useTwitterResource(resourceKey, usage=1):
persistence.execute("SELECT * FROM rateLimits WHERE resource=?", [resourceKey])
resourceData = persistence.fetchone()
if (resourceData == None):
sys.stderr.write("Invalid Twitter resource: %s\n" % (resourceKey))
return
newVal = resourceData[3] - usage
persistence.execute("UPDATE rateLimits SET reset=?, max=?, remaining=? WHERE resource=?",
[
resourceData[1],
resourceData[2],
newVal,
resourceKey,
]
)
persistenceConnection.commit()
return newVal
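# Typical pairing (as in getTrends below): guard an API call with
# checkTwitterResource("/trends/place") and, once the call has been made,
# decrement the cached budget with useTwitterResource("/trends/place").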
def getTrends():
location = config.getint("services", "twitter_trends_woeid")
trends = []
if (checkTwitterResource("/trends/place")):
trendsRaw = twitterApi.GetTrendsWoeid(location)
useTwitterResource("/trends/place")
for t in trendsRaw:
trends.append(t.name)
response = urllib.urlopen(INNOCENCE_PATH)
if response.getcode() == 200:
mutedTopics = json.loads(response.read())
trends = filter(lambda x: x not in mutedTopics, trends)
return trends
def shutdown():
if (persistence != None):
persistence.close()
def getRandomCBJobTitle():
# TODO: store off the category, don't repeat job titles, rotate categories
# http://api.careerbuilder.com/CategoryCodes.aspx
cb_apiKey = creds.get("careerbuilder", "apikey")
js_params = {
"DeveloperKey" : cb_apiKey,
"HostSite" : "US",
"OrderBy" : "Date",
}
cb_URL = "http://api.careerbuilder.com/v1/jobsearch?"
if (config.getint("services", "careerbuilder_live") == 1):
response = requests.get(cb_URL, params=js_params)
dom = minidom.parseString(response.content)
else:
with open(os.path.join("offline-samples", "careerbuilder-jobsearch.xml")) as jobFile:
dom = minidom.parse(jobFile)
jobs = []
for node in dom.getElementsByTagName("JobTitle"):
jobs.append(node.firstChild.nodeValue)
# NOTE: in the year 10,000 AD, this will need to be updated
maxLength = twitter.CHARACTER_LIMIT - (len(" Simulator ") + 4 + twitterGlobalConfig["characters_reserved_per_media"])
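    # Worked example (assuming the classic 140-character limit and Twitter's
    # usual 23 characters reserved per media): 140 - (11 + 4 + 23) = 102.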
job = ""
count = 0
while (len(job) == 0 or len(job) > maxLength):
if (count >= 25):
# buncha really long job titles up in here
job = job[:maxLength-1] # (not great, but there are worse things)
break
job = random.choice(jobs)
count += 1
job = job.replace("'", "\\'").replace('"', '\\"')
print("%i iteration(s) found random job title: %s" % (count, job))
return job
def getRandomBLSJobTitle():
with open(os.path.join("data", "bls", "bls_normalized.txt")) as jobList:
jobs = map(str.rstrip, jobList.readlines())
job = ""
# NOTE: in the year 10,000 AD, this will need to be updated
maxLength = twitter.CHARACTER_LIMIT - (len(" Simulator ") + 4 + twitterGlobalConfig["characters_reserved_per_media"])
while (len(job) == 0 or len(job) > maxLength):
job = random.choice(jobs)
job = job.replace("'", "\\'").replace('"', '\\"')
return job
def tweet(job, year, artFile, respondingTo=None):
timestamp = datetime.datetime.now().strftime("%Y-%m-%d-%H%M.%f")
title = "%s Simulator %i" % (job, year)
userName = None
requestId = None
if (respondingTo != None):
userName = respondingTo[0]
requestId = respondingTo[1]
if not os.path.exists("archive"):
os.makedirs("archive")
if (artFile != None and os.path.exists(artFile)):
if (userName != None):
title = "@%s %s" % (userName, title)
posting = True
if config.getint("services", "twitter_live") == 0:
print("Twitter support is disabled; not posting.")
posting = False
elif config.getint("services", "actively_tweet") == 0:
print("Tweeting is turned off; not posting.")
posting = False
elif not checkTwitterPostLimit():
print("Over rate limit; not posting.")
posting = False
if posting:
useTweet()
print("Tweeting '%s' to Twitter with image: %s" % (title.encode("utf8"), artFile))
twitterApi.PostMedia(title, artFile, in_reply_to_status_id=requestId)
else:
print("Would have posted '%s' to Twitter with image: %s" % (title.encode("utf8"), artFile))
os.rename(artFile, os.path.join("archive", "output-%s.png" % timestamp))
with open(os.path.join("archive", "text-%s.txt" % timestamp), "w") as archFile:
archFile.write(title.encode('utf8'))
else:
# don't tweet; something's wrong.
sys.stderr.write("FAILURE: %s\n" % title.encode("utf8"))
with open(os.path.join("archive", "failed-%s.txt" % timestamp), "w") as archFile:
archFile.write(title.encode('utf8'))
def manualJobTweet(job, year=None):
image = SimulatorGeneratorImage.getImageFor(
job,
safeSearchLevel=config.get("services", "google_safesearch"),
referer="http://twitter.com/SimGenerator"
)
if (year == None):
year = random.randint(config.getint("settings", "minyear"), datetime.date.today().year)
artFile = "output-%s.png" % datetime.datetime.now().strftime("%Y-%m-%d-%H%M.%f")
artFile = os.path.join(tempfile.gettempdir(), artFile)
SimulatorGeneratorImage.createBoxArt(
job,
year,
image,
artFile,
maxSize=(
str(twitterGlobalConfig["photo_sizes"]["large"]["w"] - 1),
str(twitterGlobalConfig["photo_sizes"]["large"]["h"] - 1),
),
deleteInputFile=True
)
tweet(titlecase.titlecase(job), year, artFile)
def randomJobTweet(source="BLS"):
if source == "BLS":
job = getRandomBLSJobTitle()
elif source == "CB":
job = getRandomCBJobTitle()
image = SimulatorGeneratorImage.getImageFor(
job,
safeSearchLevel=config.get("services", "google_safesearch"),
referer="http://twitter.com/SimGenerator"
)
year = random.randint(config.getint("settings", "minyear"), datetime.date.today().year)
artFile = "output-%s.png" % datetime.datetime.now().strftime("%Y-%m-%d-%H%M.%f")
artFile = os.path.join(tempfile.gettempdir(), artFile)
SimulatorGeneratorImage.createBoxArt(
job,
year,
image,
artFile,
maxSize=(
str(twitterGlobalConfig["photo_sizes"]["large"]["w"] - 1),
str(twitterGlobalConfig["photo_sizes"]["large"]["h"] - 1),
),
deleteInputFile=True
)
tweet(titlecase.titlecase(job), year, artFile)
def queueMentions():
if (config.getint("settings", "taking_requests") == 0):
print("Not taking requests.")
else:
filterPath = os.path.join("data", "filters")
badWords = []
for filterFile in os.listdir(filterPath):
if filterFile[0] == ".":
continue
fp = os.path.join(filterPath, filterFile)
with open(fp, "r") as f:
loaded = json.load(f)
badWords += loaded['badwords']
print badWords
requestRegex = re.compile('make one about ([^,\.\n@]*)', re.IGNORECASE)
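        # e.g. "@SimGenerator make one about underwater basket weaving" captures
        # "underwater basket weaving" (matching stops at ',', '.', '@' or a newline).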
lastReply = getIntPref("lastReply")
if (lastReply == -1):
lastReply = 0
if (config.getint("services", "twitter_live") == 1):
if checkTwitterResource("/statuses/mentions_timeline"):
mentions = twitterApi.GetMentions(count=100, since_id=lastReply)
useTwitterResource("/statuses/mentions_timeline")
else:
print("Hit the mentions rate limit. Empty mentions.")
mentions = []
else:
with open(os.path.join("offline-samples", "twitter-mentions.pickle"), "rb") as mentionArchive:
mentions = pickle.load(mentionArchive)
print("Processing %i mentions..." % (len(mentions)))
mentions.reverse() # look at the newest one last and hold our place there
for status in mentions:
result = requestRegex.search(status.text)
if (result):
job = result.groups()[0]
# because regex is annoying
if (job.lower().startswith("a ")):
job = job[2:]
elif (job.lower().startswith("an ")):
job = job[3:]
job = titlecase.titlecase(job)
earlyOut = False
jobCheck = job.lower()
# don't accept links
# (fine with it, but Twitter's aggressive URL parsing means it's
# unexpected behavior in many instances)
if "http://" in jobCheck:
earlyOut = True
# check for derogatory speech, shock sites, etc.
for phrase in badWords:
if phrase in jobCheck:
earlyOut = True
break
# see if we'll even be able to post back at them
if len("@%s %s Simulator %i" %
(status.user.screen_name,
job,
datetime.date.today().year)
) + twitterGlobalConfig["characters_reserved_per_media"] > twitter.CHARACTER_LIMIT:
earlyOut = True
# don't let people crowd the queue
persistence.execute("SELECT user FROM queuedRequests")
existingUsers = map(lambda x: x[0], persistence.fetchall())
if status.user.screen_name in existingUsers:
earlyOut = True
if earlyOut:
# TODO: consider tweeting back "no" at them?
continue
# put them in the queue
try:
persistence.execute("INSERT INTO queuedRequests VALUES (?, ?, ?)", [status.id, job, status.user.screen_name])
persistenceConnection.commit()
except sqlite3.IntegrityError:
# already queued this tweet
pass
# even if we don't store it off, still mark the place here
setIntPref("lastReply", status.id)
def fulfillQueue():
# cycle through the queue
if (config.getint("settings", "making_requests") == 0):
print("Not automatically fulfilling requests.")
else:
print("Dequeueing %i request(s) from the backlog." % (config.getint("settings", "requests_per_run")))
persistence.execute("SELECT * FROM queuedRequests LIMIT ?", [config.getint("settings", "requests_per_run")])
artRequests = persistence.fetchall()
for req in artRequests:
takeSpecificRequest(data=req)
def printQueue():
persistence.execute("SELECT * FROM queuedRequests")
tab = prettytable.from_db_cursor(persistence)
print(tab)
def deleteRequest(tweetID=None):
    if tweetID is None:
sys.stderr.write("No tweet ID provided. :-/\n")
return
persistence.execute("DELETE FROM queuedRequests WHERE tweet=?", [tweetID])
persistenceConnection.commit()
printQueue()
def takeSpecificRequest(tweetID=None, data=None):
    if tweetID is not None and isinstance(tweetID, int):
        persistence.execute("SELECT * FROM queuedRequests WHERE tweet=?", [tweetID])
        req = persistence.fetchone()
        if req is None:
            print("Tweet not queued.")
            return
    elif data is not None and isinstance(data, tuple) and len(data) >= 3:
        req = data
    else:
        print(type(data))
sys.stderr.write("Need to pass either tweet ID or data to this function.\n")
return
tweetID = req[0]
job = req[1]
user = req[2]
try:
image = SimulatorGeneratorImage.getImageFor(
job,
safeSearchLevel=config.get("services", "google_safesearch"),
referer="http://twitter.com/SimGenerator"
)
year = random.randint(config.getint("settings", "minyear"), datetime.date.today().year)
artFile = "output-%s.png" % datetime.datetime.now().strftime("%Y-%m-%d-%H%M.%f")
artFile = os.path.join(tempfile.gettempdir(), artFile)
SimulatorGeneratorImage.createBoxArt(
job,
year,
image,
artFile,
maxSize=(
str(twitterGlobalConfig["photo_sizes"]["large"]["w"] - 1),
str(twitterGlobalConfig["photo_sizes"]["large"]["h"] - 1),
),
deleteInputFile=True
)
tweet( titlecase.titlecase(job), year, artFile, (user, str(tweetID)) )
    except Exception as e:
sys.stderr.write("Couldn't respond to request: '%s' from %s in %i\n" %
(
job.encode("utf8"),
user.encode("utf8"),
tweetID
)
)
traceback.print_exc(file=sys.stderr)
persistence.execute("INSERT INTO failedRequests VALUES (?, ?, ?)",
[
tweetID, job, user
]
)
persistenceConnection.commit()
finally:
persistence.execute("DELETE FROM queuedRequests WHERE tweet=?", [tweetID])
persistenceConnection.commit()
printQueue()
def updateQueue():
    # Twitter documentation says this is rate-limited, but it doesn't appear
    # to actually count against any resources. hmmmmm.
# resource should be "/account/update_profile", but that's not
# in the resource list at all.
persistence.execute("SELECT COUNT(*) FROM queuedRequests")
queueCount = persistence.fetchone()[0]
setIntPref("queueCount", queueCount)
print("Backlog is currently %i items." % (queueCount))
locString = "Request queue: %i" % queueCount
if len(locString) > 30:
locString = "Request queue: very, very long"
if queueCount == 0:
locString = "Request queue: EMPTY!"
twitterApi.UpdateProfile(location=locString)
def randomTrendTweet():
trends = getTrends()
if len(trends) == 0:
sys.stderr.write("Couldn't get any trending topics. :-/\n")
return
trend = random.choice(trends)
if trend[0] == "#":
text = trend[1:]
text = inflection.titleize(text)
text = titlecase.titlecase(text)
else:
text = trend
image = SimulatorGeneratorImage.getImageFor(
text,
safeSearchLevel=config.get("services", "google_safesearch"),
referer="http://twitter.com/SimGenerator"
)
year = random.randint(config.getint("settings", "minyear"), datetime.date.today().year)
artFile = "output-%s.png" % datetime.datetime.now().strftime("%Y-%m-%d-%H%M.%f")
artFile = os.path.join(tempfile.gettempdir(), artFile)
SimulatorGeneratorImage.createBoxArt(
text,
year,
image,
artFile,
maxSize=(
str(twitterGlobalConfig["photo_sizes"]["large"]["w"] - 1),
str(twitterGlobalConfig["photo_sizes"]["large"]["h"] - 1),
),
deleteInputFile=True
)
tweetString = text
if trend[0] == "#":
tweetString = trend + " " + tweetString
tweet(tweetString, year, artFile)
if __name__ == '__main__':
setup()
if (config.getint("settings", "faking_requests") == 1 or (len(sys.argv) > 1 and sys.argv[1] == "check")):
queueMentions()
elif (len(sys.argv) > 1 and sys.argv[1] == "fulfill"):
fulfillQueue()
elif (len(sys.argv) > 1 and sys.argv[1] == "updateQueue"):
updateQueue()
elif (len(sys.argv) > 2 and sys.argv[1] == "take"):
takeSpecificRequest(tweetID=int(sys.argv[2]))
elif (len(sys.argv) > 2 and sys.argv[1] == "del"):
deleteRequest(tweetID=int(sys.argv[2]))
elif (len(sys.argv) > 1 and sys.argv[1] == "pq"):
printQueue()
elif (len(sys.argv) > 1 and sys.argv[1] == "trend"):
randomTrendTweet()
elif (len(sys.argv) > 1 and sys.argv[1] == "cb"):
randomJobTweet(source="CB")
else:
randomJobTweet(source="BLS")
shutdown()
| sjml/SimulatorGenerator | SimulatorGeneratorTwitter.py | Python | mit | 23,502 |
from __future__ import division, print_function, absolute_import, unicode_literals
from calendar_cli.operation.operation import Operation
from calendar_cli.setting.arg_parser import parser
from mog_commons.io import print_safe
class HelpOperation(Operation):
def __init__(self, exception=None):
Operation.__init__(self, ('exception', exception))
def run(self):
parser.print_usage()
if self.exception:
print_safe('%s: %s' % (self.exception.__class__.__name__, self.exception))
| mogproject/calendar-cli | src/calendar_cli/operation/help_operation.py | Python | apache-2.0 | 524 |
from PyQt4 import QtCore, QtGui
from PyQt4.Qt import *
import ui_phylo_analysis_details_page
#import python_to_R
from ome_globals import *
RMA_MV_RANDOM_EFFECTS_METHODS_TO_PRETTY_STRS = {
"ML":"maximum-likelihood estimator",
"REML":"restricted maximum likelihood estimator",}
class PhyloAnalysisDetailsPage(QWizardPage, ui_phylo_analysis_details_page.Ui_WizardPage):
def __init__(self, model, default_method="ML",parent=None):
super(PhyloAnalysisDetailsPage, self).__init__(parent)
self.setupUi(self)
self.model=model
self.current_param_vals = {}
continuous_columns = self.model.get_continuous_columns()
# TODO: make a 'species' variable subtype that can be assigned to categorical columns
categorical_columns = self.model.get_categorical_columns()
effect_columns = [col for col in continuous_columns if self._col_assigned_to_effect_variable(col)]
variance_columns = [col for col in continuous_columns if self._col_assigned_to_variance_variable(col)]
# populate effect, variance, species combo boxes
self._populate_data_location_combo_box(self.effect_comboBox, effect_columns)
self._populate_data_location_combo_box(self.variance_comboBox, variance_columns)
self._populate_data_location_combo_box(self.species_comboBox, categorical_columns)
# populate random effects method combo box
self.add_random_effect_methods_to_combo_box(default_method)
self.species_comboBox.currentIndexChanged.connect(self.enable_species_checkbox)
self.completeChanged.emit()
def enable_species_checkbox(self):
# Make sure that species are NOT all unique
included_studies = self.wizard().get_included_studies_in_proper_order()
species_column = self.get_data_location()['species']
species_var = self.model.get_variable_assigned_to_column(species_column)
species_vals = [study.get_var(species_var) for study in included_studies]
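        # duplicates exist iff the list is longer than its de-duplicated set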
enable = len(species_vals) > len(set(species_vals))
if enable:
            self.include_species_checkBox.setEnabled(True)
self.setToolTip("")
else:
self.include_species_checkBox.setChecked(False)
self.include_species_checkBox.setEnabled(False)
self.setToolTip("All the species are unique, cannot include species as a random factor")
def add_random_effect_methods_to_combo_box(self, default_method):
self.random_effects_method_ComboBox.clear()
for short_name, pretty_name in RMA_MV_RANDOM_EFFECTS_METHODS_TO_PRETTY_STRS.items():
self.random_effects_method_ComboBox.addItem(
pretty_name,
userData=QVariant(short_name))
if default_method:
idx = self.random_effects_method_ComboBox.findData(QVariant(default_method))
self.random_effects_method_ComboBox.setCurrentIndex(idx)
def _populate_data_location_combo_box(self, box, columns):
box.blockSignals(True)
box.clear()
key_fn = lambda col: self.model.get_variable_assigned_to_column(col).get_label() # sort by column label
for col in sorted(columns, key=key_fn):
var = self.model.get_variable_assigned_to_column(col)
box.addItem(var.get_label(), col) # store the chosen col
# if there is only one choice for in columns, select it
if box.count() > 0:
box.setCurrentIndex(0)
box.blockSignals(False)
def isComplete(self):
# effect size, variance, and species chosen
data_locations = self.get_data_location()
locations_not_none = [x is not None for x in data_locations.values()]
if all(locations_not_none):
return True
else:
return False
def _col_assigned_to_effect_variable(self, col):
var = self.model.get_variable_assigned_to_column(col)
if var.get_subtype() == TRANS_EFFECT:
return True
else:
return False
def _col_assigned_to_variance_variable(self, col):
var = self.model.get_variable_assigned_to_column(col)
if var.get_subtype() == TRANS_VAR:
return True
else:
return False
############# getters #############
def get_plot_params(self):
self.current_param_vals["fp_show_col1"] = self.show_1.isChecked()
self.current_param_vals["fp_col1_str"] = unicode(self.col1_str_edit.text().toUtf8(), "utf-8")
self.current_param_vals["fp_show_col2"] = self.show_2.isChecked()
self.current_param_vals["fp_col2_str"] = unicode(self.col2_str_edit.text().toUtf8(), "utf-8")
self.current_param_vals["fp_show_col3"] = self.show_3.isChecked()
self.current_param_vals["fp_col3_str"] = unicode(self.col3_str_edit.text().toUtf8(), "utf-8")
self.current_param_vals["fp_show_col4"] = self.show_4.isChecked()
self.current_param_vals["fp_col4_str"] = unicode(self.col4_str_edit.text().toUtf8(), "utf-8")
self.current_param_vals["fp_xlabel"] = unicode(self.x_lbl_le.text().toUtf8(), "utf-8")
self.current_param_vals["fp_outpath"] = unicode(self.image_path.text().toUtf8(), "utf-8")
plot_lb = unicode(self.plot_lb_le.text().toUtf8(), "utf-8")
self.current_param_vals["fp_plot_lb"] = "[default]"
if plot_lb != "[default]" and check_plot_bound(plot_lb):
self.current_param_vals["fp_plot_lb"] = plot_lb
plot_ub = unicode(self.plot_ub_le.text().toUtf8(), "utf-8")
self.current_param_vals["fp_plot_ub"] = "[default]"
if plot_ub != "[default]" and check_plot_bound(plot_ub):
self.current_param_vals["fp_plot_ub"] = plot_ub
xticks = unicode(self.x_ticks_le.text().toUtf8(), "utf-8")
self.current_param_vals["fp_xticks"] = "[default]"
if xticks != "[default]" and seems_sane(xticks):
self.current_param_vals["fp_xticks"] = xticks
self.current_param_vals["fp_show_summary_line"] = self.show_summary_line.isChecked()
return self.current_param_vals
def get_data_location(self):
locations = {'effect_size':self._selected_column(self.effect_comboBox),
'variance':self._selected_column(self.variance_comboBox),
'species':self._selected_column(self.species_comboBox)}
return locations
def _selected_column(self,combo_box):
current_index = combo_box.currentIndex()
if current_index < 0:
return None
item_data = combo_box.itemData(current_index)
selected_column = item_data.toInt()[0]
return selected_column
def get_random_effects_method(self):
current_index = self.random_effects_method_ComboBox.currentIndex()
current_data = self.random_effects_method_ComboBox.itemData(current_index)
method = str(current_data.toString())
return method
def get_conf_level(self):
return self.conf_level_spinbox.value()
def get_include_species_as_random_factor(self):
return self.include_species_checkBox.isChecked()
#######################
def _get_data_location_string(self, data_location):
''' helper for summary '''
get_column_name_for_key = lambda key: self.model.get_variable_assigned_to_column(data_location[key]).get_label()
get_substr_for_key = lambda key: " " + key.replace('_',' ') + ": " + get_column_name_for_key(key)
lines = []
lines.append(get_substr_for_key('effect_size'))
lines.append(get_substr_for_key('variance'))
lines.append(get_substr_for_key('species'))
data_location_str = "\n".join(lines)
return data_location_str
def __str__(self):
# data locations
data_locations_str = self._get_data_location_string(self.get_data_location())
data_locations_output = "Data Location:\n%s" % indent(data_locations_str)
# random_effects_method
random_method_str = "Random Effects Method: %s" % RMA_MV_RANDOM_EFFECTS_METHODS_TO_PRETTY_STRS[self.get_random_effects_method()]
# confidence level
conf_level_str = "Confidence Level: %s%%" % self.get_conf_level()
# Species as random factor
species_as_random_factor_str = "Species " + ("will" if self.get_include_species_as_random_factor() else "will not") + " be included as a random factor"
return "\n".join([data_locations_output, random_method_str, conf_level_str, species_as_random_factor_str]) | gdietz/OpenMEE | phylo/phylo_analysis_details_page.py | Python | gpl-3.0 | 8,970 |
from typeahead import app
if __name__ == "__main__":
app.run()
| mhanline/FlaskTypeahead | wsgi.py | Python | mit | 69 |
# -*- coding: utf-8 -*-
# Copyright(C) 2010-2011 Vincent Paredes
#
# This file is part of a weboob module.
#
# This weboob module is free software: you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This weboob module is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public License
# along with this weboob module. If not, see <http://www.gnu.org/licenses/>.
from __future__ import unicode_literals
import re
try:
from html.parser import HTMLParser
except ImportError:
    from HTMLParser import HTMLParser  # Python 2: import the class, not the module
from weboob.browser.pages import HTMLPage, LoggedPage, JsonPage
from weboob.capabilities.bill import Subscription
from weboob.browser.elements import DictElement, ListElement, ItemElement, method, TableElement
from weboob.browser.filters.standard import (
CleanDecimal, CleanText, Env, Field,
Regexp, Date, Currency, BrowserURL,
Format, Eval, Lower,
)
from weboob.browser.filters.html import Link, TableCell
from weboob.browser.filters.javascript import JSValue
from weboob.browser.filters.json import Dict
from weboob.capabilities.base import NotAvailable
from weboob.capabilities.bill import DocumentTypes, Bill
from weboob.tools.date import parse_french_date
from weboob.tools.compat import urlencode
class BillsApiProPage(LoggedPage, JsonPage):
@method
class get_bills(DictElement):
item_xpath = 'bills'
# orange's API will sometimes return the temporary bill for the current month along with other bills
# in the json. The url will lead to the exact same document, this is probably not intended behaviour and
# causes weboob to raise a DataError as they'll have identical ids.
ignore_duplicate = True
class item(ItemElement):
klass = Bill
obj_date = Date(Dict('dueDate'), parse_func=parse_french_date, default=NotAvailable)
obj_price = CleanDecimal(Dict('amountIncludingTax'))
obj_format = 'pdf'
def obj_label(self):
return 'Facture du %s' % Field('date')(self)
def obj_id(self):
return '%s_%s' % (Env('subid')(self), Field('date')(self).strftime('%d%m%Y'))
def get_params(self):
params = {'billid': Dict('id')(self), 'billDate': Dict('dueDate')(self)}
return urlencode(params)
obj_url = BrowserURL('doc_api_pro', subid=Env('subid'), dir=Dict('documents/0/mainDir'), fact_type=Dict('documents/0/subDir'), billparams=get_params)
obj__is_v2 = False
class BillsApiParPage(LoggedPage, JsonPage):
@method
class get_bills(DictElement):
item_xpath = 'billsHistory/billList'
class item(ItemElement):
klass = Bill
obj_date = Date(Dict('date'), default=NotAvailable)
obj_price = Eval(lambda x: x / 100, CleanDecimal(Dict('amount')))
obj_format = 'pdf'
def obj_label(self):
return 'Facture du %s' % Field('date')(self)
def obj_id(self):
return '%s_%s' % (Env('subid')(self), Field('date')(self).strftime('%d%m%Y'))
obj_url = Format('%s%s', BrowserURL('doc_api_par'), Dict('hrefPdf'))
obj__is_v2 = True
# is BillsPage deprecated?
class BillsPage(LoggedPage, HTMLPage):
@method
class get_bills(TableElement):
item_xpath = '//table[has-class("table-hover")]/div/div/tr | //table[has-class("table-hover")]/div/tr'
head_xpath = '//table[has-class("table-hover")]/thead/tr/th'
col_date = 'Date'
col_amount = ['Montant TTC', 'Montant']
col_ht = 'Montant HT'
col_url = 'Télécharger'
col_infos = 'Infos paiement'
class item(ItemElement):
klass = Bill
obj_type = DocumentTypes.BILL
obj_format = "pdf"
# TableCell('date') can have other info like: 'duplicata'
obj_date = Date(CleanText('./td[@headers="ec-dateCol"]/text()[not(preceding-sibling::br)]'), parse_func=parse_french_date, dayfirst=True)
def obj__cell(self):
# sometimes the link to the bill is not in the right column (Thanks Orange!!)
if CleanText(TableCell('url')(self))(self):
return 'url'
return 'infos'
def obj_price(self):
if CleanText(TableCell('amount')(self))(self):
return CleanDecimal(Regexp(CleanText(TableCell('amount')), '.*?([\d,]+).*', default=NotAvailable), replace_dots=True, default=NotAvailable)(self)
else:
return Field('_ht')(self)
def obj_currency(self):
if CleanText(TableCell('amount')(self))(self):
return Currency(TableCell('amount')(self))(self)
else:
return Currency(TableCell('ht')(self))(self)
# Only when a list of documents is present
obj__url_base = Regexp(CleanText('.//ul[@class="liste"]/script', default=None), '.*?contentList[\d]+ \+= \'<li><a href=".*\"(.*?idDocument=2)"', default=None)
def obj_url(self):
if Field('_url_base')(self):
# URL won't work if HTML is not unescape
return HTMLParser().unescape(str(Field('_url_base')(self)))
return Link(TableCell(Field('_cell')(self))(self)[0].xpath('./a'), default=NotAvailable)(self)
obj__label_base = Regexp(CleanText('.//ul[@class="liste"]/script', default=None), '.*</span>(.*?)</a.*', default=None)
def obj_label(self):
if Field('_label_base')(self):
return HTMLParser().unescape(str(Field('_label_base')(self)))
else:
return CleanText(TableCell(Field('_cell')(self))(self)[0].xpath('.//span[@class="ec_visually_hidden"]'))(self)
obj__ht = CleanDecimal(TableCell('ht', default=NotAvailable), replace_dots=True, default=NotAvailable)
def obj_vat(self):
if Field('_ht')(self) is NotAvailable or Field('price')(self) is NotAvailable:
return
return Field('price')(self) - Field('_ht')(self)
def obj_id(self):
if Field('price')(self) is NotAvailable:
return '%s_%s%s' % (Env('subid')(self), Field('date')(self).strftime('%d%m%Y'), Field('_ht')(self))
else:
return '%s_%s%s' % (Env('subid')(self), Field('date')(self).strftime('%d%m%Y'), Field('price')(self))
class SubscriptionsPage(LoggedPage, HTMLPage):
def build_doc(self, data):
data = data.decode(self.encoding)
for line in data.split('\n'):
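            # The page ships its HTML inside a JS assignment such as
            #   necFe.bandeau.container.innerHTML = stripslashes('<div>...</div>');
            # so pull the quoted payload back out and parse it as HTML.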
mtc = re.match('necFe.bandeau.container.innerHTML\s*=\s*stripslashes\((.*)\);$', line)
if mtc:
html = JSValue().filter(mtc.group(1)).encode(self.encoding)
return super(SubscriptionsPage, self).build_doc(html)
@method
class iter_subscription(ListElement):
item_xpath = '//ul[@id="contractContainer"]//a[starts-with(@id,"carrousel-")]'
class item(ItemElement):
klass = Subscription
obj_id = Regexp(Link('.'), r'\bidContrat=(\d+)', default='')
obj__page = Regexp(Link('.'), r'\bpage=([^&]+)', default='')
obj_label = CleanText('.')
obj__is_pro = False
def validate(self, obj):
            # unsubscribed contracts may still be there; skip them, else
            # facture-historique could yield wrong bills
return bool(obj.id) and obj._page != 'nec-tdb-ouvert'
class SubscriptionsApiPage(LoggedPage, JsonPage):
@method
class iter_subscription(DictElement):
item_xpath = 'contracts'
class item(ItemElement):
klass = Subscription
def condition(self):
return Dict('contractStatus')(self) != 'CLOS'
obj_id = Dict('contractId')
obj_label = Dict('offerName')
obj__is_pro = False
class ContractsPage(LoggedPage, JsonPage):
@method
class iter_subscriptions(DictElement):
item_xpath = 'contracts'
class item(ItemElement):
klass = Subscription
obj_id = Dict('id')
obj_label = Format('%s %s', Dict('name'), Dict('mainLine'))
obj__from_api = False
def condition(self):
return Dict('status')(self) == 'OK'
def obj__is_pro(self):
return Dict('offerNature')(self) == 'PROFESSIONAL'
class ContractsApiPage(LoggedPage, JsonPage):
@method
class iter_subscriptions(DictElement):
item_xpath = 'contracts'
class item(ItemElement):
klass = Subscription
obj_id = CleanText(Dict('cid'))
obj_label = Dict('offerName')
def obj_subscriber(self):
names = (
CleanText(Dict('holder/firstName', default=""))(self),
CleanText(Dict('holder/lastName', default=""))(self),
)
assert any(names), "At least one name field should be populated. Has the page changed?"
return ' '.join([n for n in names if n])
def obj__is_pro(self):
return Dict('telco/marketType', default='PAR')(self) == 'PRO'
obj__from_api = True
def condition(self):
return Lower(Dict('status'))(self) == 'actif'
| laurentb/weboob | modules/orange/pages/bills.py | Python | lgpl-3.0 | 9,949 |
"""
Support for Denon Network Receivers.
For more details about this platform, please refer to the documentation at
https://home-assistant.io/components/media_player.denon/
"""
import logging
import telnetlib
from homeassistant.components.media_player import (
DOMAIN, SUPPORT_NEXT_TRACK, SUPPORT_PAUSE, SUPPORT_PREVIOUS_TRACK,
SUPPORT_TURN_OFF, SUPPORT_TURN_ON, SUPPORT_VOLUME_MUTE, SUPPORT_VOLUME_SET,
MediaPlayerDevice)
from homeassistant.const import CONF_HOST, STATE_OFF, STATE_ON, STATE_UNKNOWN
_LOGGER = logging.getLogger(__name__)
SUPPORT_DENON = SUPPORT_PAUSE | SUPPORT_VOLUME_SET | SUPPORT_VOLUME_MUTE | \
SUPPORT_PREVIOUS_TRACK | SUPPORT_NEXT_TRACK | \
SUPPORT_TURN_ON | SUPPORT_TURN_OFF
def setup_platform(hass, config, add_devices, discovery_info=None):
"""Setup the Denon platform."""
if not config.get(CONF_HOST):
_LOGGER.error(
"Missing required configuration items in %s: %s",
DOMAIN,
CONF_HOST)
return False
denon = DenonDevice(
config.get("name", "Music station"),
config.get("host")
)
if denon.update():
add_devices([denon])
return True
else:
return False
class DenonDevice(MediaPlayerDevice):
"""Representation of a Denon device."""
# pylint: disable=too-many-public-methods, abstract-method
def __init__(self, name, host):
"""Initialize the Denon device."""
self._name = name
self._host = host
self._pwstate = "PWSTANDBY"
self._volume = 0
self._muted = False
self._mediasource = ""
@classmethod
def telnet_request(cls, telnet, command):
"""Execute `command` and return the response."""
telnet.write(command.encode("ASCII") + b"\r")
return telnet.read_until(b"\r", timeout=0.2).decode("ASCII").strip()
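    # Illustrative only -- the "MV45"-style echo format is inferred from the
    # parsing in update() below, not from Denon documentation:
    #   telnet_request(telnet, "MV?") might return "MV45".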
def telnet_command(self, command):
"""Establish a telnet connection and sends `command`."""
telnet = telnetlib.Telnet(self._host)
telnet.write(command.encode("ASCII") + b"\r")
telnet.read_very_eager() # skip response
telnet.close()
def update(self):
"""Get the latest details from the device."""
try:
telnet = telnetlib.Telnet(self._host)
except ConnectionRefusedError:
return False
self._pwstate = self.telnet_request(telnet, "PW?")
# PW? sends also SISTATUS, which is not interesting
telnet.read_until(b"\r", timeout=0.2)
volume_str = self.telnet_request(telnet, "MV?")[len("MV"):]
self._volume = int(volume_str) / 60
self._muted = (self.telnet_request(telnet, "MU?") == "MUON")
self._mediasource = self.telnet_request(telnet, "SI?")[len("SI"):]
telnet.close()
return True
@property
def name(self):
"""Return the name of the device."""
return self._name
@property
def state(self):
"""Return the state of the device."""
if self._pwstate == "PWSTANDBY":
return STATE_OFF
if self._pwstate == "PWON":
return STATE_ON
return STATE_UNKNOWN
@property
def volume_level(self):
"""Volume level of the media player (0..1)."""
return self._volume
@property
def is_volume_muted(self):
"""Boolean if volume is currently muted."""
return self._muted
@property
def media_title(self):
"""Current media source."""
return self._mediasource
@property
def supported_media_commands(self):
"""Flag of media commands that are supported."""
return SUPPORT_DENON
def turn_off(self):
"""Turn off media player."""
self.telnet_command("PWSTANDBY")
def volume_up(self):
"""Volume up media player."""
self.telnet_command("MVUP")
def volume_down(self):
"""Volume down media player."""
self.telnet_command("MVDOWN")
def set_volume_level(self, volume):
"""Set volume level, range 0..1."""
# 60dB max
self.telnet_command("MV" + str(round(volume * 60)).zfill(2))
def mute_volume(self, mute):
"""Mute (true) or unmute (false) media player."""
self.telnet_command("MU" + ("ON" if mute else "OFF"))
def media_play(self):
"""Play media media player."""
self.telnet_command("NS9A")
def media_pause(self):
"""Pause media player."""
self.telnet_command("NS9B")
def media_next_track(self):
"""Send the next track command."""
self.telnet_command("NS9D")
def media_previous_track(self):
"""Send the previous track command."""
self.telnet_command("NS9E")
def turn_on(self):
"""Turn the media player on."""
self.telnet_command("PWON")
| mikaelboman/home-assistant | homeassistant/components/media_player/denon.py | Python | mit | 4,842 |
# This file is part of Indico.
# Copyright (C) 2002 - 2020 CERN
#
# Indico is free software; you can redistribute it and/or
# modify it under the terms of the MIT License; see the
# LICENSE file for more details.
from __future__ import unicode_literals
from flask import session
from indico.core import signals
from indico.core.config import config
from indico.core.logger import Logger
from indico.core.permissions import ManagementPermission, check_permissions
from indico.core.settings import SettingsProxy
from indico.core.settings.converters import ModelListConverter
from indico.modules.categories.models.categories import Category
from indico.modules.rb.models.rooms import Room
from indico.util.i18n import _
from indico.web.flask.util import url_for
from indico.web.menu import SideMenuItem, TopMenuItem
logger = Logger.get('rb')
rb_settings = SettingsProxy('roombooking', {
'managers_edit_rooms': False,
'excluded_categories': [],
'notification_before_days': 2,
'notification_before_days_weekly': 5,
'notification_before_days_monthly': 7,
'notifications_enabled': True,
'end_notification_daily': 1,
'end_notification_weekly': 3,
'end_notification_monthly': 7,
'end_notifications_enabled': True,
'booking_limit': 365,
'tileserver_url': None,
'grace_period': None,
}, acls={
'admin_principals',
'authorized_principals'
}, converters={
'excluded_categories': ModelListConverter(Category)
})
@signals.import_tasks.connect
def _import_tasks(sender, **kwargs):
import indico.modules.rb.tasks # noqa: F401
@signals.users.preferences.connect
def _get_extra_user_prefs(sender, **kwargs):
from indico.modules.rb.user_prefs import RBUserPreferences
if config.ENABLE_ROOMBOOKING:
return RBUserPreferences
@signals.menu.items.connect_via('admin-sidemenu')
def _extend_admin_menu(sender, **kwargs):
if config.ENABLE_ROOMBOOKING and session.user.is_admin:
url = url_for('rb.roombooking', path='admin')
return SideMenuItem('rb', _('Room Booking'), url, 70, icon='location')
@signals.menu.items.connect_via('top-menu')
def _topmenu_items(sender, **kwargs):
if config.ENABLE_ROOMBOOKING:
yield TopMenuItem('room_booking', _('Room booking'), url_for('rb.roombooking'), 80)
@signals.menu.items.connect_via('event-management-sidemenu')
def _sidemenu_items(sender, event, **kwargs):
if config.ENABLE_ROOMBOOKING and event.can_manage(session.user):
yield SideMenuItem('room_booking', _('Room Booking'), url_for('rb.event_booking_list', event), 50,
icon='location')
@signals.users.merged.connect
def _merge_users(target, source, **kwargs):
from indico.modules.rb.models.blocking_principals import BlockingPrincipal
from indico.modules.rb.models.blockings import Blocking
from indico.modules.rb.models.principals import RoomPrincipal
from indico.modules.rb.models.reservations import Reservation
Blocking.query.filter_by(created_by_id=source.id).update({Blocking.created_by_id: target.id})
BlockingPrincipal.merge_users(target, source, 'blocking')
Reservation.query.filter_by(created_by_id=source.id).update({Reservation.created_by_id: target.id})
Reservation.query.filter_by(booked_for_id=source.id).update({Reservation.booked_for_id: target.id})
Room.query.filter_by(owner_id=source.id).update({Room.owner_id: target.id})
RoomPrincipal.merge_users(target, source, 'room')
rb_settings.acls.merge_users(target, source)
@signals.event.deleted.connect
def _event_deleted(event, user, **kwargs):
from indico.modules.rb.models.reservations import Reservation
reservation_links = (event.all_room_reservation_links
.join(Reservation)
.filter(~Reservation.is_rejected, ~Reservation.is_cancelled)
.all())
for link in reservation_links:
link.reservation.cancel(user or session.user, 'Associated event was deleted')
class BookPermission(ManagementPermission):
name = 'book'
friendly_name = _('Book')
description = _('Allows booking the room')
user_selectable = True
color = 'green'
class PrebookPermission(ManagementPermission):
name = 'prebook'
friendly_name = _('Prebook')
description = _('Allows prebooking the room')
user_selectable = True
default = True
color = 'orange'
class OverridePermission(ManagementPermission):
name = 'override'
friendly_name = _('Override')
description = _('Allows overriding restrictions when booking the room')
user_selectable = True
color = 'pink'
class ModeratePermission(ManagementPermission):
name = 'moderate'
friendly_name = _('Moderate')
description = _('Allows moderating bookings (approving/rejecting/editing)')
user_selectable = True
color = 'purple'
@signals.acl.get_management_permissions.connect_via(Room)
def _get_management_permissions(sender, **kwargs):
yield BookPermission
yield PrebookPermission
yield OverridePermission
yield ModeratePermission
@signals.app_created.connect
def _check_permissions(app, **kwargs):
check_permissions(Room)
| mic4ael/indico | indico/modules/rb/__init__.py | Python | mit | 5,197 |
###########################################################
#
# Copyright (c) 2005, Southpaw Technology
# All Rights Reserved
#
# PROPRIETARY INFORMATION. This software is proprietary to
# Southpaw Technology, and is not to be reproduced, transmitted,
# or disclosed in any way without written permission.
#
#
#
import os,unittest
from pyasm.security import Batch
from pyasm.command import Command
from pyasm.prod.biz import Asset
from pyasm.prod.maya import *
from maya_checkin import *
class MayaCheckinTest(unittest.TestCase):
def setUp(my):
batch = Batch()
def test_all(my):
# create a scene that will be checked in
asset_code = "prp101"
sid = "12345"
# create an asset
mel('sphere -n sphere1')
mel('circle -n circle1')
mel('group -n |%s |circle1 |sphere1' % asset_code )
# convert node into a maya asset
node = MayaNode("|%s" % asset_code )
asset_node = MayaAssetNode.add_sid( node, sid )
# checkin the asset
checkin = MayaAssetNodeCheckin(asset_node)
Command.execute_cmd(checkin)
# create a file from this node
asset_node.export()
if __name__ == '__main__':
unittest.main()
| sadanandb/pmt | src/pyasm/prod/checkin/maya_checkin_test.py | Python | epl-1.0 | 1,263 |
# This file is part of Indico.
# Copyright (C) 2002 - 2020 CERN
#
# Indico is free software; you can redistribute it and/or
# modify it under the terms of the MIT License; see the
# LICENSE file for more details.
import re
from datetime import datetime
import dateutil.parser
from lxml import etree
from pytz import timezone, utc
from indico.core.logger import Logger
from indico.util.string import to_unicode
from indico.web.http_api.metadata.serializer import Serializer
def _deserialize_date(date_dict):
dt = datetime.combine(dateutil.parser.parse(date_dict['date']).date(),
dateutil.parser.parse(date_dict['time']).time())
return timezone(date_dict['tz']).localize(dt).astimezone(utc)
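# Example (hypothetical input; the dict shape is the {'date', 'time', 'tz'}
# form checked for in _xmlForFossil below):
#   _deserialize_date({'date': '2020/03/01', 'time': '12:30:00', 'tz': 'Europe/Zurich'})
#   -> datetime(2020, 3, 1, 11, 30, tzinfo=<UTC>)   # 12:30 CET == 11:30 UTC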
class XMLSerializer(Serializer):
"""
Receives a fossil (or a collection of them) and converts them to XML
"""
_mime = 'text/xml'
def __init__(self, query_params, pretty=False, **kwargs):
self._typeMap = kwargs.pop('typeMap', {})
super(XMLSerializer, self).__init__(query_params, pretty, **kwargs)
def _convert(self, value, _control_char_re=re.compile(r'[\x00-\x08\x0b\x0c\x0e-\x1f]')):
if isinstance(value, datetime):
return value.isoformat()
elif isinstance(value, (int, long, float, bool)):
return str(value)
else:
value = to_unicode(value) if isinstance(value, str) else value
if isinstance(value, basestring):
# Get rid of control chars breaking XML conversion
value = _control_char_re.sub(u'', value)
return value
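    # e.g. _convert(datetime(2020, 1, 2, 3, 4, 5)) -> '2020-01-02T03:04:05',
    # _convert(True) -> 'True'; C0 control chars in strings are stripped.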
def _xmlForFossil(self, fossil, doc=None):
attribs = {}
id = None
if '_fossil' in fossil:
attribs['fossil'] = fossil['_fossil']
if 'id' in fossil:
id = attribs['id'] = str(fossil['id'])
if '_type' in fossil:
typeName = self._typeMap.get(fossil['_type'], fossil['_type'])
else:
typeName = 'collection'
felement = etree.Element(typeName.lower(),
attrib=attribs)
if doc:
doc.getroot().append(felement)
for k, v in fossil.iteritems():
if k in ['_fossil', '_type', 'id']:
continue
if isinstance(k, (int, float)) or (isinstance(k, basestring) and k.isdigit()):
elem = etree.SubElement(felement, 'entry', {'key': unicode(k)})
else:
elem = etree.SubElement(felement, k)
if isinstance(v, dict) and set(v.viewkeys()) == {'date', 'time', 'tz'}:
v = _deserialize_date(v)
if isinstance(v, (list, tuple)):
onlyDicts = all(isinstance(subv, dict) for subv in v)
if onlyDicts:
for subv in v:
elem.append(self._xmlForFossil(subv))
else:
for subv in v:
if isinstance(subv, dict):
elem.append(self._xmlForFossil(subv))
else:
subelem = etree.SubElement(elem, 'item')
subelem.text = self._convert(subv)
elif isinstance(v, dict):
elem.append(self._xmlForFossil(v))
else:
txt = self._convert(v)
try:
elem.text = txt
except Exception:
Logger.get('xmlSerializer').exception('Setting XML text value failed (id: %s, value %r)', id, txt)
return felement
def _execute(self, fossil, xml_declaration=True):
if isinstance(fossil, list):
# collection of fossils
doc = etree.ElementTree(etree.Element("collection"))
for elem in fossil:
self._xmlForFossil(elem, doc)
result = doc
else:
result = self._xmlForFossil(fossil)
return etree.tostring(result, pretty_print=self.pretty,
xml_declaration=xml_declaration, encoding='utf-8')
Serializer.register('xml', XMLSerializer)
| mic4ael/indico | indico/web/http_api/metadata/xml.py | Python | mit | 4,159 |
"""Tests for certbot.plugins.util."""
import os
import unittest
import sys
import mock
from six.moves import reload_module # pylint: disable=import-error
from certbot.tests import test_util
class PathSurgeryTest(unittest.TestCase):
"""Tests for certbot.plugins.path_surgery."""
@mock.patch("certbot.plugins.util.logger.warning")
@mock.patch("certbot.plugins.util.logger.debug")
def test_path_surgery(self, mock_debug, mock_warn):
from certbot.plugins.util import path_surgery
all_path = {"PATH": "/usr/local/bin:/bin/:/usr/sbin/:/usr/local/sbin/"}
with mock.patch.dict('os.environ', all_path):
with mock.patch('certbot.util.exe_exists') as mock_exists:
mock_exists.return_value = True
self.assertEqual(path_surgery("eg"), True)
self.assertEqual(mock_debug.call_count, 0)
self.assertEqual(mock_warn.call_count, 0)
self.assertEqual(os.environ["PATH"], all_path["PATH"])
no_path = {"PATH": "/tmp/"}
with mock.patch.dict('os.environ', no_path):
path_surgery("thingy")
self.assertEqual(mock_debug.call_count, 1)
self.assertEqual(mock_warn.call_count, 1)
self.assertTrue("Failed to find" in mock_warn.call_args[0][0])
self.assertTrue("/usr/local/bin" in os.environ["PATH"])
self.assertTrue("/tmp" in os.environ["PATH"])
class AlreadyListeningTestNoPsutil(unittest.TestCase):
"""Tests for certbot.plugins.already_listening when
psutil is not available"""
def setUp(self):
import certbot.plugins.util
# Ensure we get importerror
self.psutil = None
if "psutil" in sys.modules:
self.psutil = sys.modules['psutil']
sys.modules['psutil'] = None
# Reload hackery to ensure getting non-psutil version
# loaded to memory
reload_module(certbot.plugins.util)
def tearDown(self):
# Need to reload the module to ensure
# getting back to normal
import certbot.plugins.util
sys.modules["psutil"] = self.psutil
reload_module(certbot.plugins.util)
@mock.patch("certbot.plugins.util.zope.component.getUtility")
def test_ports_available(self, mock_getutil):
import certbot.plugins.util as plugins_util
# Ensure we don't get error
with mock.patch("socket.socket.bind"):
self.assertFalse(plugins_util.already_listening(80))
self.assertFalse(plugins_util.already_listening(80, True))
self.assertEqual(mock_getutil.call_count, 0)
@mock.patch("certbot.plugins.util.zope.component.getUtility")
def test_ports_blocked(self, mock_getutil):
sys.modules["psutil"] = None
import certbot.plugins.util as plugins_util
import socket
with mock.patch("socket.socket.bind", side_effect=socket.error):
self.assertTrue(plugins_util.already_listening(80))
self.assertTrue(plugins_util.already_listening(80, True))
with mock.patch("socket.socket", side_effect=socket.error):
self.assertFalse(plugins_util.already_listening(80))
self.assertEqual(mock_getutil.call_count, 2)
def psutil_available():
"""Checks if psutil can be imported.
:rtype: bool
:returns: ``True`` if psutil can be imported, otherwise, ``False``
"""
try:
import psutil # pylint: disable=unused-variable
except ImportError:
return False
return True
@test_util.skip_unless(psutil_available(),
"optional dependency psutil is not available")
class AlreadyListeningTestPsutil(unittest.TestCase):
"""Tests for certbot.plugins.already_listening."""
def _call(self, *args, **kwargs):
from certbot.plugins.util import already_listening
return already_listening(*args, **kwargs)
@mock.patch("certbot.plugins.util.psutil.net_connections")
@mock.patch("certbot.plugins.util.psutil.Process")
@mock.patch("certbot.plugins.util.zope.component.getUtility")
def test_race_condition(self, mock_get_utility, mock_process, mock_net):
# This tests a race condition, or permission problem, or OS
# incompatibility in which, for some reason, no process name can be
# found to match the identified listening PID.
import psutil
from psutil._common import sconn
conns = [
sconn(fd=-1, family=2, type=1, laddr=("0.0.0.0", 30),
raddr=(), status="LISTEN", pid=None),
sconn(fd=3, family=2, type=1, laddr=("192.168.5.10", 32783),
raddr=("20.40.60.80", 22), status="ESTABLISHED", pid=1234),
sconn(fd=-1, family=10, type=1, laddr=("::1", 54321),
raddr=("::1", 111), status="CLOSE_WAIT", pid=None),
sconn(fd=3, family=2, type=1, laddr=("0.0.0.0", 17),
raddr=(), status="LISTEN", pid=4416)]
mock_net.return_value = conns
mock_process.side_effect = psutil.NoSuchProcess("No such PID")
# We simulate being unable to find the process name of PID 4416,
# which results in returning False.
self.assertFalse(self._call(17))
self.assertEqual(mock_get_utility.generic_notification.call_count, 0)
mock_process.assert_called_once_with(4416)
@mock.patch("certbot.plugins.util.psutil.net_connections")
@mock.patch("certbot.plugins.util.psutil.Process")
@mock.patch("certbot.plugins.util.zope.component.getUtility")
def test_not_listening(self, mock_get_utility, mock_process, mock_net):
from psutil._common import sconn
conns = [
sconn(fd=-1, family=2, type=1, laddr=("0.0.0.0", 30),
raddr=(), status="LISTEN", pid=None),
sconn(fd=3, family=2, type=1, laddr=("192.168.5.10", 32783),
raddr=("20.40.60.80", 22), status="ESTABLISHED", pid=1234),
sconn(fd=-1, family=10, type=1, laddr=("::1", 54321),
raddr=("::1", 111), status="CLOSE_WAIT", pid=None)]
mock_net.return_value = conns
mock_process.name.return_value = "inetd"
self.assertFalse(self._call(17))
self.assertEqual(mock_get_utility.generic_notification.call_count, 0)
self.assertEqual(mock_process.call_count, 0)
@mock.patch("certbot.plugins.util.psutil.net_connections")
@mock.patch("certbot.plugins.util.psutil.Process")
@mock.patch("certbot.plugins.util.zope.component.getUtility")
def test_listening_ipv4(self, mock_get_utility, mock_process, mock_net):
from psutil._common import sconn
conns = [
sconn(fd=-1, family=2, type=1, laddr=("0.0.0.0", 30),
raddr=(), status="LISTEN", pid=None),
sconn(fd=3, family=2, type=1, laddr=("192.168.5.10", 32783),
raddr=("20.40.60.80", 22), status="ESTABLISHED", pid=1234),
sconn(fd=-1, family=10, type=1, laddr=("::1", 54321),
raddr=("::1", 111), status="CLOSE_WAIT", pid=None),
sconn(fd=3, family=2, type=1, laddr=("0.0.0.0", 17),
raddr=(), status="LISTEN", pid=4416)]
mock_net.return_value = conns
mock_process.name.return_value = "inetd"
result = self._call(17, True)
self.assertTrue(result)
self.assertEqual(mock_get_utility.call_count, 1)
mock_process.assert_called_once_with(4416)
@mock.patch("certbot.plugins.util.psutil.net_connections")
@mock.patch("certbot.plugins.util.psutil.Process")
@mock.patch("certbot.plugins.util.zope.component.getUtility")
def test_listening_ipv6(self, mock_get_utility, mock_process, mock_net):
from psutil._common import sconn
conns = [
sconn(fd=-1, family=2, type=1, laddr=("0.0.0.0", 30),
raddr=(), status="LISTEN", pid=None),
sconn(fd=3, family=2, type=1, laddr=("192.168.5.10", 32783),
raddr=("20.40.60.80", 22), status="ESTABLISHED", pid=1234),
sconn(fd=-1, family=10, type=1, laddr=("::1", 54321),
raddr=("::1", 111), status="CLOSE_WAIT", pid=None),
sconn(fd=3, family=10, type=1, laddr=("::", 12345), raddr=(),
status="LISTEN", pid=4420),
sconn(fd=3, family=2, type=1, laddr=("0.0.0.0", 17),
raddr=(), status="LISTEN", pid=4416)]
mock_net.return_value = conns
mock_process.name.return_value = "inetd"
result = self._call(12345)
self.assertTrue(result)
self.assertEqual(mock_get_utility.call_count, 1)
mock_process.assert_called_once_with(4420)
@mock.patch("certbot.plugins.util.psutil.net_connections")
def test_access_denied_exception(self, mock_net):
import psutil
mock_net.side_effect = psutil.AccessDenied("")
self.assertFalse(self._call(12345))
if __name__ == "__main__":
unittest.main() # pragma: no cover
| jtl999/certbot | certbot/plugins/util_test.py | Python | apache-2.0 | 9,024 |
#!/usr/bin/env python
# -*- coding:utf-8 -*-
from celery import task
@task
def add(x, y):
    # x and y are expected to be sequences here: print their concatenation,
    # then return the sum of their first elements.
    print(x + y)
    return x[0] + y[0]
| schedul-xor/celery-practice | tasks.py | Python | mit | 126 |
from . import db
class Credentials(db.Model):
"""username and password for system"""
__tablename__ = "credentials"
password = db.Column(db.String(20))
username = db.Column(db.String(20), primary_key=True) | RollingThunder6/MidnightInventers | Source Code/Flask/app/models.py | Python | gpl-3.0 | 209 |
from django.contrib import admin
from messaging.models import ServerMOTD, CharacterMessage, MessageRecipient, \
MessageRecipientGroup, MessageRelationship
@admin.register(ServerMOTD)
class MOTDAdmin(admin.ModelAdmin):
list_display = ('title', 'display_order', 'draft')
list_filter = ('draft', )
admin.site.register(CharacterMessage)
admin.site.register(MessageRecipient)
admin.site.register(MessageRecipientGroup)
admin.site.register(MessageRelationship)
| jardiacaj/finem_imperii | messaging/admin.py | Python | agpl-3.0 | 471 |
# coding: utf-8
__author__ = 'edubecks'
from pprint import pprint
# # oauth_access_token = facebook.get_app_access_token(config.DEV_FB_APP_ID, config.DEV_FB_APP_SECRET)
# oauth_access_token = config.OAUTH_TOKEN
# graph = facebook.GraphAPI(oauth_access_token)
# profile = graph.get_object('me')
# group = graph.get_object('641749869191341')
# pprint(group)
import facebook
import urllib
import urlparse
import subprocess
import warnings
# Keep Facebook settings like APP_ID
import configuration
# Hide deprecation warnings. The facebook module isn't that up-to-date (facebook.GraphAPIError).
warnings.filterwarnings('ignore', category=DeprecationWarning)
# Trying to get an access token. Very awkward.
oauth_args = dict(client_id=configuration.DEV_FB_APP_ID,
client_secret=configuration.DEV_FB_APP_SECRET,
grant_type='client_credentials')
oauth_curl_cmd = ['curl',
'https://graph.facebook.com/oauth/access_token?' + urllib.urlencode(oauth_args)]
oauth_response = subprocess.Popen(oauth_curl_cmd,
stdout=subprocess.PIPE,
stderr=subprocess.PIPE).communicate()[0]
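# The app-token endpoint historically returns a form-encoded body, typically
# "access_token=<app-id>|<app-secret>"; parse_qs() below unpacks it.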
try:
oauth_access_token = urlparse.parse_qs(str(oauth_response))['access_token'][0]
except KeyError:
print('Unable to grab an access token!')
exit()
# facebook_graph = facebook.GraphAPI(oauth_access_token)
facebook_graph = facebook.GraphAPI(configuration.LONG_LIVED_OAUTH_TOKEN)
# Try to post something on the wall.
try:
# friends = facebook_graph.get_connections('me', 'friends')
# pprint(friends)
group_feed = facebook_graph.get_connections('641749869191341', 'feed')
pprint(group_feed)
except facebook.GraphAPIError as e:
    print('Something went wrong: %s %s' % (e.type, e.message))
#!/usr/bin/python
"""
Read all the caveman/pindel files, collect gene names
and create a table with genes and mutation counts in
every sample (try to merge pindel and caveman results)
"""
from __future__ import print_function
import sys
import gzip
import re
genepatt = re.compile("VD=([^|]+)")
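# In these annotated VCFs the VD INFO field begins with the gene name,
# e.g. "VD=BRCA2|ENST00000544455|..." (illustrative), so group(1) captures
# everything up to the first '|'.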
table = dict()
header = set()
for sample in sys.argv[1:]:
sampleid = sample.replace(".v1.caveman_c.annot.vcf.gz", "").replace(".v2.pindel.annot.vcf.gz", "")
header.add(sampleid)
for line in gzip.open(sample):
if line.startswith("#"):
continue
fields = line.rstrip().split()
if fields[6] != "PASS":
continue
match = genepatt.search(line)
if match:
genename = match.group(1)
if genename not in table:
table[genename] = dict()
if sampleid not in table[genename]:
table[genename][sampleid] = 0
table[genename][sampleid] += 1
header = list(header)
print( "\t".join(header))
for genename in sorted(table.keys()):
print(genename, end="")
for sample in header:
if sample in table[genename]:
print("\t" + str(table[genename][sample]), end="")
else:
print("\t0", end="")
print()
| TravisCG/SI_scripts | genefound.py | Python | gpl-3.0 | 1,134 |
__source__ = 'https://leetcode.com/problems/detect-capital/'
# Time: O()
# Space: O()
#
# Description: 520. Detect Capital
#
# Given a word, you need to judge whether the usage of capitals in it is right or not.
#
# We define the usage of capitals in a word to be right when one of the following cases holds:
#
# All letters in this word are capitals, like "USA".
# All letters in this word are not capitals, like "leetcode".
# Only the first letter in this word is capital if it has more than one letter, like "Google".
# Otherwise, we define that this word doesn't use capitals in a right way.
# Example 1:
# Input: "USA"
# Output: True
# Example 2:
# Input: "FlaG"
# Output: False
# Note: The input will be a non-empty word consisting of uppercase and lowercase latin letters.
#
# Company Tags: Google
# Tags: String
import unittest
# 24ms 100%
class Solution(object):
def detectCapitalUse(self, word):
"""
:type word: str
:rtype: bool
"""
return word.isupper() or word.islower() or word.istitle()
# your function here
class TestMethods(unittest.TestCase):
def test_Local(self):
self.assertEqual(1, 1)
if __name__ == '__main__':
unittest.main()
Java = '''
# Thought:
# 13ms 86.84%
class Solution {
public boolean detectCapitalUse(String word) {
int cnt = 0;
for(char c: word.toCharArray()) if('Z' - c >= 0) cnt++;
return ((cnt==0 || cnt==word.length()) || (cnt==1 && 'Z' - word.charAt(0)>=0));
}
}
# 21ms 41.31%
class Solution {
public boolean detectCapitalUse(String word) {
return word.equals(word.toUpperCase()) ||
word.equals(word.toLowerCase()) ||
Character.isUpperCase(word.charAt(0)) &&
word.substring(1).equals(word.substring(1).toLowerCase());
}
}
# 21ms 41.31%
class Solution {
public boolean detectCapitalUse(String word) {
return word.matches("[A-Z]+|[a-z]+|[A-Z][a-z]+");
}
}
''' | JulyKikuAkita/PythonPrac | cs15211/DetectCapital.py | Python | apache-2.0 | 1,983 |
# AFM font NewCenturySchlbk-Bold (path: /usr/share/fonts/afms/adobe/pncb8a.afm).
# Derived from Ghostscript distribution.
# Go to www.cs.wisc.edu/~ghost to get the Ghostcript source code.
from . import dir
dir.afm["NewCenturySchlbk-Bold"] = (
500, 500, 500, 500, 500, 500, 500, 500, 500, 500, 500, 500, 500, 500, 500, 500, 500, 500, 500, 500, 500, 500, 500, 500,
500, 500, 500, 500, 500, 500, 500, 500, 287, 296, 333, 574, 574, 833, 852, 241, 389, 389, 500, 606, 278, 333, 278, 278,
574, 574, 574, 574, 574, 574, 574, 574, 574, 574, 278, 278, 606, 606, 606, 500, 747, 759, 778, 778, 833, 759, 722, 833,
870, 444, 648, 815, 722, 981, 833, 833, 759, 833, 815, 667, 722, 833, 759, 981, 722, 722, 667, 389, 606, 389, 606, 500,
241, 611, 648, 556, 667, 574, 389, 611, 685, 370, 352, 667, 352, 963, 685, 611, 667, 648, 519, 500, 426, 685, 611, 889,
611, 611, 537, 389, 606, 389, 606, 500, 500, 500, 500, 500, 500, 500, 500, 500, 500, 500, 500, 500, 500, 500, 500, 500,
500, 500, 500, 500, 500, 500, 500, 500, 500, 500, 500, 500, 500, 500, 500, 500, 500, 296, 574, 574, 167, 574, 574, 500,
574, 241, 481, 500, 333, 333, 685, 685, 500, 500, 500, 500, 278, 500, 747, 606, 241, 481, 481, 500, 1000, 1000, 500,
500, 500, 333, 333, 333, 333, 333, 333, 333, 333, 500, 333, 333, 500, 333, 333, 333, 1000, 500, 500, 500, 500, 500, 500,
500, 500, 500, 500, 500, 500, 500, 500, 500, 500, 981, 500, 367, 500, 500, 500, 500, 722, 833, 1000, 367, 500, 500, 500,
500, 500, 870, 500, 500, 500, 370, 500, 500, 352, 611, 907, 611,)
| ska-sa/purr | Purr/Plugins/local_pychart/afm/NewCenturySchlbk_Bold.py | Python | gpl-2.0 | 1,509 |
import os
import tempfile
import unittest
import logging
from pyidf import ValidationLevel
import pyidf
from pyidf.idf import IDF
from pyidf.thermal_zones_and_surfaces import Door
log = logging.getLogger(__name__)
class TestDoor(unittest.TestCase):
def setUp(self):
self.fd, self.path = tempfile.mkstemp()
def tearDown(self):
os.remove(self.path)
def test_create_door(self):
pyidf.validation_level = ValidationLevel.error
obj = Door()
# alpha
var_name = "Name"
obj.name = var_name
# object-list
var_construction_name = "object-list|Construction Name"
obj.construction_name = var_construction_name
# object-list
var_building_surface_name = "object-list|Building Surface Name"
obj.building_surface_name = var_building_surface_name
# real
var_multiplier = 1.0
obj.multiplier = var_multiplier
# real
var_starting_x_coordinate = 5.5
obj.starting_x_coordinate = var_starting_x_coordinate
# real
var_starting_z_coordinate = 6.6
obj.starting_z_coordinate = var_starting_z_coordinate
# real
var_length = 7.7
obj.length = var_length
# real
var_height = 8.8
obj.height = var_height
idf = IDF()
idf.add(obj)
idf.save(self.path, check=False)
with open(self.path, mode='r') as f:
for line in f:
log.debug(line.strip())
idf2 = IDF(self.path)
self.assertEqual(idf2.doors[0].name, var_name)
self.assertEqual(idf2.doors[0].construction_name, var_construction_name)
self.assertEqual(idf2.doors[0].building_surface_name, var_building_surface_name)
self.assertAlmostEqual(idf2.doors[0].multiplier, var_multiplier)
self.assertAlmostEqual(idf2.doors[0].starting_x_coordinate, var_starting_x_coordinate)
self.assertAlmostEqual(idf2.doors[0].starting_z_coordinate, var_starting_z_coordinate)
self.assertAlmostEqual(idf2.doors[0].length, var_length)
self.assertAlmostEqual(idf2.doors[0].height, var_height) | rbuffat/pyidf | tests/test_door.py | Python | apache-2.0 | 2,158 |
# Copyright (c) 2021 Red Hat Inc.
# All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from neutron_lib.api.definitions import l3 as l3_apidef
from neutron_lib.api.definitions import qos as qos_apidef
from neutron_lib.api.definitions import qos_fip as qos_fip_apidef
from neutron_lib.services.qos import constants as qos_consts
ALIAS = 'qos-fip-network-policy'
IS_SHIM_EXTENSION = False
IS_STANDARD_ATTR_EXTENSION = False
NAME = 'QoS floating IP network policy ID'
DESCRIPTION = 'Adds a the QoS network ID to the floating IP definition'
UPDATED_TIMESTAMP = '2021-11-15T10:00:00-00:00'
RESOURCE_ATTRIBUTE_MAP = {
l3_apidef.FLOATINGIPS: {
qos_consts.QOS_NETWORK_POLICY_ID: {
'allow_post': False,
'allow_put': False,
'is_visible': True,
'default': None
}
}
}
SUB_RESOURCE_ATTRIBUTE_MAP = {}
ACTION_MAP = {}
REQUIRED_EXTENSIONS = [l3_apidef.ALIAS, qos_apidef.ALIAS, qos_fip_apidef.ALIAS]
OPTIONAL_EXTENSIONS = []
ACTION_STATUS = {}
| openstack/neutron-lib | neutron_lib/api/definitions/qos_fip_network_policy.py | Python | apache-2.0 | 1,546 |
#
# Copyright (c) 2008--2012 Red Hat, Inc.
#
# This software is licensed to you under the GNU General Public License,
# version 2 (GPLv2). There is NO WARRANTY for this software, express or
# implied, including the implied warranties of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. You should have received a copy of GPLv2
# along with this software; if not, see
# http://www.gnu.org/licenses/old-licenses/gpl-2.0.txt.
#
# Red Hat trademarks are not licensed under GPLv2. No permission is
# granted to use or replicate Red Hat trademarks that are incorporated
# in this software or its documentation.
#
import sys
import time
from types import ListType, TupleType
from spacewalk.common import rhn_rpm, rhn_mpm
def main():
packages = sys.argv[1:]
if not packages:
return
for pkgfile in packages:
# Try to open the package as a patch first
try:
f = open(pkgfile)
header = rhn_rpm.get_package_header(file_obj=f)
p = rpm_to_mpm(header, f)
dest_filename = _compute_filename(p.header)
print "Writing out the package to %s" % dest_filename
dest_file = open(dest_filename, "w+")
p.write(dest_file)
dest_file.close()
f.close()
except:
raise
def _compute_filename(hdr):
return '%s-%s.%s.mpm' % (hdr['name'], hdr['version'], hdr['arch'])
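# e.g. a header with name 'foo', version '1.0', arch 'noarch' yields 'foo-1.0.noarch.mpm'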
def rpm_to_mpm(header, file_stream):
tag_map = {
'package_group' : 'group',
'rpm_version' : 'rpmversion',
'payload_size' : 'archivesize',
'payload_format': 'payloadformat',
'build_host' : 'buildhost',
'build_time' : 'buildtime',
'source_rpm' : 'sourcerpm',
}
tags = [
'name',
'epoch',
'version',
'release',
'arch',
'description',
'summary',
'license',
'package_group',
'rpm_version',
'payload_size',
'payload_format',
'build_host',
'build_time',
'cookie',
'vendor',
'source_rpm',
'sigmd5',
'sigpgp',
'siggpg',
'sigsize',
]
result = {}
for t in tags:
tt = tag_map.get(t, t)
result[t] = header[tt]
# Add files
result['files'] = _extract_files(header)
# Dependency
result['provides'] = _extract_rpm_requires(header)
result['requires'] = _extract_rpm_provides(header)
result['conflicts'] = _extract_rpm_conflicts(header)
result['obsoletes'] = _extract_rpm_obsoletes(header)
result['changelog'] = _extract_rpm_changelog(header)
# md5sum, package_size
file_stream.seek(0, 2)
file_size = file_stream.tell()
result['package_size'] = file_size
is_source = 0
if header.is_source:
is_source = 1
result['is_source'] = is_source
result['package_type'] = 'rpm'
h = rhn_mpm.MPM_Header(result)
p = rhn_mpm.MPM_Package()
p.header = h
p.payload_stream = file_stream
return p
def _extract_files(header):
tag_maps = {
'name' : 'filenames',
'device' : 'filedevices',
'inode' : 'fileinodes',
'file_mode' : 'filemodes',
'username' : 'fileusername',
'groupname' : 'filegroupname',
'rdev' : 'filerdevs',
'file_size' : 'filesizes',
'mtime' : 'filemtimes',
'md5' : 'filemd5s',
'linkto' : 'filelinktos',
'flags' : 'fileflags',
'verifyflags' : 'fileverifyflags',
'lang' : 'filelangs',
}
files = _extract_array_fields(header, tag_maps)
# Munge the mtime
for f in files:
f['mtime'] = gmtime(f['mtime'])
return files
def _extract_rpm_provides(header):
tag_maps = {
'name' : 'provides',
'version' : 'provideversion',
'flags' : 'provideflags',
}
return _extract_array_fields(header, tag_maps)
def _extract_rpm_requires(header):
tag_maps = {
'name' : 'requirename',
'version' : 'requireversion',
'flags' : 'requireflags',
}
return _extract_array_fields(header, tag_maps)
def _extract_rpm_conflicts(header):
tag_maps = {
'name' : 'conflictname',
'version' : 'conflictversion',
'flags' : 'conflictflags',
}
return _extract_array_fields(header, tag_maps)
def _extract_rpm_obsoletes(header):
tag_maps = {
'name' : 'obsoletename',
'version' : 'obsoleteversion',
'flags' : 'obsoleteflags',
}
return _extract_array_fields(header, tag_maps)
def _extract_rpm_changelog(header):
tag_maps = {
'name' : 'changelogname',
'text' : 'changelogtext',
'time' : 'changelogtime',
}
cl = _extract_array_fields(header, tag_maps)
# Munge the changelog time
for c in cl:
c['time'] = gmtime(c['time'])
return cl
def _extract_array_fields(header, tag_maps):
# First determine the number of entries
key = tag_maps.keys()[0]
rpmtag = tag_maps.get(key)
arr = header[rpmtag]
if arr is None:
# nothing to do
return []
count = len(arr)
result = []
for i in range(count):
tag_dict = {}
for key, rpmtag in tag_maps.items():
arr = header[rpmtag]
if type(arr) not in (ListType, TupleType):
arr = [arr]
tag_dict[key] = arr[i]
result.append(tag_dict)
return result
def gmtime(timestamp):
ttuple = time.gmtime(timestamp)
return "%d-%02d-%02d %02d:%02d:%02d" % ttuple[:6]
if __name__ == '__main__':
sys.exit(main() or 0)
| dmacvicar/spacewalk | client/tools/rhnpush/rpm2mpm.py | Python | gpl-2.0 | 5,730 |
import asyncore
import email.utils
import socket
import smtpd
import smtplib
import StringIO
import sys
import time
import select
import unittest
from test import test_support
try:
import threading
except ImportError:
threading = None
HOST = test_support.HOST
def server(evt, buf, serv):
serv.listen(5)
evt.set()
try:
conn, addr = serv.accept()
except socket.timeout:
pass
else:
n = 500
while buf and n > 0:
r, w, e = select.select([], [conn], [])
if w:
sent = conn.send(buf)
buf = buf[sent:]
n -= 1
conn.close()
finally:
serv.close()
evt.set()
@unittest.skipUnless(threading, 'Threading required for this test.')
class GeneralTests(unittest.TestCase):
def setUp(self):
self._threads = test_support.threading_setup()
self.evt = threading.Event()
self.sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
self.sock.settimeout(15)
self.port = test_support.bind_port(self.sock)
servargs = (self.evt, "220 Hola mundo\n", self.sock)
self.thread = threading.Thread(target=server, args=servargs)
self.thread.start()
self.evt.wait()
self.evt.clear()
def tearDown(self):
self.evt.wait()
self.thread.join()
test_support.threading_cleanup(*self._threads)
def testBasic1(self):
# connects
smtp = smtplib.SMTP(HOST, self.port)
smtp.close()
def testBasic2(self):
# connects, include port in host name
smtp = smtplib.SMTP("%s:%s" % (HOST, self.port))
smtp.close()
def testLocalHostName(self):
# check that supplied local_hostname is used
smtp = smtplib.SMTP(HOST, self.port, local_hostname="testhost")
self.assertEqual(smtp.local_hostname, "testhost")
smtp.close()
def testTimeoutDefault(self):
self.assertIsNone(socket.getdefaulttimeout())
socket.setdefaulttimeout(30)
try:
smtp = smtplib.SMTP(HOST, self.port)
finally:
socket.setdefaulttimeout(None)
self.assertEqual(smtp.sock.gettimeout(), 30)
smtp.close()
def testTimeoutNone(self):
self.assertIsNone(socket.getdefaulttimeout())
socket.setdefaulttimeout(30)
try:
smtp = smtplib.SMTP(HOST, self.port, timeout=None)
finally:
socket.setdefaulttimeout(None)
self.assertIsNone(smtp.sock.gettimeout())
smtp.close()
def testTimeoutValue(self):
smtp = smtplib.SMTP(HOST, self.port, timeout=30)
self.assertEqual(smtp.sock.gettimeout(), 30)
smtp.close()
# Test server thread using the specified SMTP server class
def debugging_server(serv, serv_evt, client_evt):
serv_evt.set()
try:
if hasattr(select, 'poll'):
poll_fun = asyncore.poll2
else:
poll_fun = asyncore.poll
n = 1000
while asyncore.socket_map and n > 0:
poll_fun(0.01, asyncore.socket_map)
# when the client conversation is finished, it will
# set client_evt, and it's then ok to kill the server
if client_evt.is_set():
serv.close()
break
n -= 1
except socket.timeout:
pass
finally:
if not client_evt.is_set():
# allow some time for the client to read the result
time.sleep(0.5)
serv.close()
asyncore.close_all()
serv_evt.set()
MSG_BEGIN = '---------- MESSAGE FOLLOWS ----------\n'
MSG_END = '------------ END MESSAGE ------------\n'
# NOTE: Some SMTP objects in the tests below are created with a non-default
# local_hostname argument to the constructor, since (on some systems) the FQDN
# lookup caused by the default local_hostname sometimes takes so long that the
# test server times out, causing the test to fail.
# Test behavior of smtpd.DebuggingServer
@unittest.skipUnless(threading, 'Threading required for this test.')
class DebuggingServerTests(unittest.TestCase):
def setUp(self):
# temporarily replace sys.stdout to capture DebuggingServer output
self.old_stdout = sys.stdout
self.output = StringIO.StringIO()
sys.stdout = self.output
self._threads = test_support.threading_setup()
self.serv_evt = threading.Event()
self.client_evt = threading.Event()
# Pick a random unused port by passing 0 for the port number
self.serv = smtpd.DebuggingServer((HOST, 0), ('nowhere', -1))
# Keep a note of what port was assigned
self.port = self.serv.socket.getsockname()[1]
serv_args = (self.serv, self.serv_evt, self.client_evt)
self.thread = threading.Thread(target=debugging_server, args=serv_args)
self.thread.start()
# wait until server thread has assigned a port number
self.serv_evt.wait()
self.serv_evt.clear()
def tearDown(self):
# indicate that the client is finished
self.client_evt.set()
# wait for the server thread to terminate
self.serv_evt.wait()
self.thread.join()
test_support.threading_cleanup(*self._threads)
# restore sys.stdout
sys.stdout = self.old_stdout
def testBasic(self):
# connect
smtp = smtplib.SMTP(HOST, self.port, local_hostname='localhost', timeout=3)
smtp.quit()
def testNOOP(self):
smtp = smtplib.SMTP(HOST, self.port, local_hostname='localhost', timeout=3)
expected = (250, 'Ok')
self.assertEqual(smtp.noop(), expected)
smtp.quit()
def testRSET(self):
smtp = smtplib.SMTP(HOST, self.port, local_hostname='localhost', timeout=3)
expected = (250, 'Ok')
self.assertEqual(smtp.rset(), expected)
smtp.quit()
def testNotImplemented(self):
# EHLO isn't implemented in DebuggingServer
smtp = smtplib.SMTP(HOST, self.port, local_hostname='localhost', timeout=3)
expected = (502, 'Error: command "EHLO" not implemented')
self.assertEqual(smtp.ehlo(), expected)
smtp.quit()
def testVRFY(self):
# VRFY isn't implemented in DebuggingServer
smtp = smtplib.SMTP(HOST, self.port, local_hostname='localhost', timeout=3)
expected = (502, 'Error: command "VRFY" not implemented')
self.assertEqual(smtp.vrfy('[email protected]'), expected)
self.assertEqual(smtp.verify('[email protected]'), expected)
smtp.quit()
def testSecondHELO(self):
# check that a second HELO returns a message that it's a duplicate
# (this behavior is specific to smtpd.SMTPChannel)
smtp = smtplib.SMTP(HOST, self.port, local_hostname='localhost', timeout=3)
smtp.helo()
expected = (503, 'Duplicate HELO/EHLO')
self.assertEqual(smtp.helo(), expected)
smtp.quit()
def testHELP(self):
smtp = smtplib.SMTP(HOST, self.port, local_hostname='localhost', timeout=3)
self.assertEqual(smtp.help(), 'Error: command "HELP" not implemented')
smtp.quit()
def testSend(self):
# connect and send mail
m = 'A test message'
smtp = smtplib.SMTP(HOST, self.port, local_hostname='localhost', timeout=3)
smtp.sendmail('John', 'Sally', m)
# XXX(nnorwitz): this test is flaky and dies with a bad file descriptor
# in asyncore. This sleep might help, but should really be fixed
# properly by using an Event variable.
time.sleep(0.01)
smtp.quit()
self.client_evt.set()
self.serv_evt.wait()
self.output.flush()
mexpect = '%s%s\n%s' % (MSG_BEGIN, m, MSG_END)
self.assertEqual(self.output.getvalue(), mexpect)
class NonConnectingTests(unittest.TestCase):
def testNotConnected(self):
# Test various operations on an unconnected SMTP object that
# should raise exceptions (at present the attempt in SMTP.send
# to reference the nonexistent 'sock' attribute of the SMTP object
# causes an AttributeError)
smtp = smtplib.SMTP()
self.assertRaises(smtplib.SMTPServerDisconnected, smtp.ehlo)
self.assertRaises(smtplib.SMTPServerDisconnected,
smtp.send, 'test msg')
def testNonnumericPort(self):
# check that non-numeric port raises socket.error
self.assertRaises(socket.error, smtplib.SMTP,
"localhost", "bogus")
self.assertRaises(socket.error, smtplib.SMTP,
"localhost:bogus")
# test response of client to a non-successful HELO message
@unittest.skipUnless(threading, 'Threading required for this test.')
class BadHELOServerTests(unittest.TestCase):
def setUp(self):
self.old_stdout = sys.stdout
self.output = StringIO.StringIO()
sys.stdout = self.output
self._threads = test_support.threading_setup()
self.evt = threading.Event()
self.sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
self.sock.settimeout(15)
self.port = test_support.bind_port(self.sock)
servargs = (self.evt, "199 no hello for you!\n", self.sock)
self.thread = threading.Thread(target=server, args=servargs)
self.thread.start()
self.evt.wait()
self.evt.clear()
def tearDown(self):
self.evt.wait()
self.thread.join()
test_support.threading_cleanup(*self._threads)
sys.stdout = self.old_stdout
def testFailingHELO(self):
self.assertRaises(smtplib.SMTPConnectError, smtplib.SMTP,
HOST, self.port, 'localhost', 3)
@unittest.skipUnless(threading, 'Threading required for this test.')
class TooLongLineTests(unittest.TestCase):
respdata = '250 OK' + ('.' * smtplib._MAXLINE * 2) + '\n'
def setUp(self):
self.old_stdout = sys.stdout
self.output = StringIO.StringIO()
sys.stdout = self.output
self.evt = threading.Event()
self.sock = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
self.sock.settimeout(15)
self.port = test_support.bind_port(self.sock)
servargs = (self.evt, self.respdata, self.sock)
threading.Thread(target=server, args=servargs).start()
self.evt.wait()
self.evt.clear()
def tearDown(self):
self.evt.wait()
sys.stdout = self.old_stdout
def testLineTooLong(self):
self.assertRaises(smtplib.SMTPResponseException, smtplib.SMTP,
HOST, self.port, 'localhost', 3)
sim_users = {'[email protected]':'John A',
'[email protected]':'Sally B',
'[email protected]':'Ruth C',
}
sim_auth = ('[email protected]', 'somepassword')
sim_cram_md5_challenge = ('PENCeUxFREJoU0NnbmhNWitOMjNGNn'
'dAZWx3b29kLmlubm9zb2Z0LmNvbT4=')
sim_auth_credentials = {
'login': 'TXIuQUBzb21ld2hlcmUuY29t',
'plain': 'AE1yLkFAc29tZXdoZXJlLmNvbQBzb21lcGFzc3dvcmQ=',
'cram-md5': ('TXIUQUBZB21LD2HLCMUUY29TIDG4OWQ0MJ'
'KWZGQ4ODNMNDA4NTGXMDRLZWMYZJDMODG1'),
}
sim_auth_login_password = 'C29TZXBHC3N3B3JK'
sim_lists = {'list-1':['[email protected]','[email protected]'],
'list-2':['[email protected]',],
}
# Simulated SMTP channel & server
class SimSMTPChannel(smtpd.SMTPChannel):
def __init__(self, extra_features, *args, **kw):
self._extrafeatures = ''.join(
[ "250-{0}\r\n".format(x) for x in extra_features ])
smtpd.SMTPChannel.__init__(self, *args, **kw)
def smtp_EHLO(self, arg):
resp = ('250-testhost\r\n'
'250-EXPN\r\n'
'250-SIZE 20000000\r\n'
'250-STARTTLS\r\n'
'250-DELIVERBY\r\n')
resp = resp + self._extrafeatures + '250 HELP'
self.push(resp)
def smtp_VRFY(self, arg):
# For max compatibility smtplib should be sending the raw address.
if arg in sim_users:
self.push('250 %s %s' % (sim_users[arg], smtplib.quoteaddr(arg)))
else:
self.push('550 No such user: %s' % arg)
def smtp_EXPN(self, arg):
list_name = arg.lower()
if list_name in sim_lists:
user_list = sim_lists[list_name]
for n, user_email in enumerate(user_list):
quoted_addr = smtplib.quoteaddr(user_email)
if n < len(user_list) - 1:
self.push('250-%s %s' % (sim_users[user_email], quoted_addr))
else:
self.push('250 %s %s' % (sim_users[user_email], quoted_addr))
else:
self.push('550 No access for you!')
def smtp_AUTH(self, arg):
if arg.strip().lower()=='cram-md5':
self.push('334 {0}'.format(sim_cram_md5_challenge))
return
mech, auth = arg.split()
mech = mech.lower()
if mech not in sim_auth_credentials:
self.push('504 auth type unimplemented')
return
if mech == 'plain' and auth==sim_auth_credentials['plain']:
self.push('235 plain auth ok')
elif mech=='login' and auth==sim_auth_credentials['login']:
self.push('334 Password:')
else:
self.push('550 No access for you!')
def handle_error(self):
raise
class SimSMTPServer(smtpd.SMTPServer):
def __init__(self, *args, **kw):
self._extra_features = []
smtpd.SMTPServer.__init__(self, *args, **kw)
def handle_accept(self):
conn, addr = self.accept()
self._SMTPchannel = SimSMTPChannel(self._extra_features,
self, conn, addr)
def process_message(self, peer, mailfrom, rcpttos, data):
pass
def add_feature(self, feature):
self._extra_features.append(feature)
def handle_error(self):
raise
# Test various SMTP & ESMTP commands/behaviors that require a simulated server
# (i.e., something with more features than DebuggingServer)
@unittest.skipUnless(threading, 'Threading required for this test.')
class SMTPSimTests(unittest.TestCase):
def setUp(self):
self._threads = test_support.threading_setup()
self.serv_evt = threading.Event()
self.client_evt = threading.Event()
# Pick a random unused port by passing 0 for the port number
self.serv = SimSMTPServer((HOST, 0), ('nowhere', -1))
# Keep a note of what port was assigned
self.port = self.serv.socket.getsockname()[1]
serv_args = (self.serv, self.serv_evt, self.client_evt)
self.thread = threading.Thread(target=debugging_server, args=serv_args)
self.thread.start()
# wait until server thread has assigned a port number
self.serv_evt.wait()
self.serv_evt.clear()
def tearDown(self):
# indicate that the client is finished
self.client_evt.set()
# wait for the server thread to terminate
self.serv_evt.wait()
self.thread.join()
test_support.threading_cleanup(*self._threads)
def testBasic(self):
# smoke test
smtp = smtplib.SMTP(HOST, self.port, local_hostname='localhost', timeout=15)
smtp.quit()
def testEHLO(self):
smtp = smtplib.SMTP(HOST, self.port, local_hostname='localhost', timeout=15)
# no features should be present before the EHLO
self.assertEqual(smtp.esmtp_features, {})
# features expected from the test server
expected_features = {'expn':'',
'size': '20000000',
'starttls': '',
'deliverby': '',
'help': '',
}
smtp.ehlo()
self.assertEqual(smtp.esmtp_features, expected_features)
for k in expected_features:
self.assertTrue(smtp.has_extn(k))
self.assertFalse(smtp.has_extn('unsupported-feature'))
smtp.quit()
def testVRFY(self):
smtp = smtplib.SMTP(HOST, self.port, local_hostname='localhost', timeout=15)
for email, name in sim_users.items():
expected_known = (250, '%s %s' % (name, smtplib.quoteaddr(email)))
self.assertEqual(smtp.vrfy(email), expected_known)
u = '[email protected]'
expected_unknown = (550, 'No such user: %s' % u)
self.assertEqual(smtp.vrfy(u), expected_unknown)
smtp.quit()
def testEXPN(self):
smtp = smtplib.SMTP(HOST, self.port, local_hostname='localhost', timeout=15)
for listname, members in sim_lists.items():
users = []
for m in members:
users.append('%s %s' % (sim_users[m], smtplib.quoteaddr(m)))
expected_known = (250, '\n'.join(users))
self.assertEqual(smtp.expn(listname), expected_known)
u = 'PSU-Members-List'
expected_unknown = (550, 'No access for you!')
self.assertEqual(smtp.expn(u), expected_unknown)
smtp.quit()
def testAUTH_PLAIN(self):
self.serv.add_feature("AUTH PLAIN")
smtp = smtplib.SMTP(HOST, self.port, local_hostname='localhost', timeout=15)
expected_auth_ok = (235, b'plain auth ok')
self.assertEqual(smtp.login(sim_auth[0], sim_auth[1]), expected_auth_ok)
# SimSMTPChannel doesn't fully support LOGIN or CRAM-MD5 auth because they
# require a synchronous read to obtain the credentials...so instead smtpd
# sees the credential sent by smtplib's login method as an unknown command,
# which results in smtplib raising an auth error. Fortunately the error
# message contains the encoded credential, so we can partially check that it
# was generated correctly (partially, because the 'word' is uppercased in
# the error message).
def testAUTH_LOGIN(self):
self.serv.add_feature("AUTH LOGIN")
smtp = smtplib.SMTP(HOST, self.port, local_hostname='localhost', timeout=15)
try: smtp.login(sim_auth[0], sim_auth[1])
except smtplib.SMTPAuthenticationError as err:
if sim_auth_login_password not in str(err):
raise "expected encoded password not found in error message"
def testAUTH_CRAM_MD5(self):
self.serv.add_feature("AUTH CRAM-MD5")
smtp = smtplib.SMTP(HOST, self.port, local_hostname='localhost', timeout=15)
try: smtp.login(sim_auth[0], sim_auth[1])
except smtplib.SMTPAuthenticationError as err:
if sim_auth_credentials['cram-md5'] not in str(err):
raise "expected encoded credentials not found in error message"
#TODO: add tests for correct AUTH method fallback now that the
#test infrastructure can support it.
def test_quit_resets_greeting(self):
smtp = smtplib.SMTP(HOST, self.port,
local_hostname='localhost',
timeout=15)
code, message = smtp.ehlo()
self.assertEqual(code, 250)
self.assertIn('size', smtp.esmtp_features)
smtp.quit()
self.assertNotIn('size', smtp.esmtp_features)
smtp.connect(HOST, self.port)
self.assertNotIn('size', smtp.esmtp_features)
smtp.ehlo_or_helo_if_needed()
self.assertIn('size', smtp.esmtp_features)
smtp.quit()
def test_main(verbose=None):
test_support.run_unittest(GeneralTests, DebuggingServerTests,
NonConnectingTests,
BadHELOServerTests, SMTPSimTests,
TooLongLineTests)
if __name__ == '__main__':
test_main()
| Jeff-Tian/mybnb | Python27/Lib/test/test_smtplib.py | Python | apache-2.0 | 20,434 |
from scara5 import FiveBar
# Quick smoke test: build a FiveBar with unit parameters and print its L attribute
b = FiveBar([1, 1, 1, 1, 1], [1, 1, 1, 1, 1])
print(b.L)
| bulski7/ScaraRobot | Test1.py | Python | gpl-2.0 | 78 |
# ***************************************************************************
# * Copyright (c) 2017 Johannes Hartung <[email protected]> *
# * *
# * This file is part of the FreeCAD CAx development system. *
# * *
# * This program is free software; you can redistribute it and/or modify *
# * it under the terms of the GNU Lesser General Public License (LGPL) *
# * as published by the Free Software Foundation; either version 2 of *
# * the License, or (at your option) any later version. *
# * for detail see the LICENCE text file. *
# * *
# * This program is distributed in the hope that it will be useful, *
# * but WITHOUT ANY WARRANTY; without even the implied warranty of *
# * MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the *
# * GNU Library General Public License for more details. *
# * *
# * You should have received a copy of the GNU Library General Public *
# * License along with this program; if not, write to the Free Software *
# * Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 *
# * USA *
# * *
# ***************************************************************************
__title__ = "FreeCAD Fenics XDMF mesh reader"
__author__ = "Johannes Hartung"
__url__ = "https://www.freecadweb.org"
## @package importFenicsXDMF
# \ingroup FEM
# \brief FreeCAD Fenics Mesh XDMF reader for FEM workbench
from FreeCAD import Console
def read_fenics_mesh_xdmf(xdmffilename):
Console.PrintMessage("Not operational, yet\n")
return {
"Nodes": {},
"Hexa8Elem": {},
"Penta6Elem": {},
"Tetra4Elem": {},
"Tetra10Elem": {},
"Penta15Elem": {},
"Hexa20Elem": {},
"Tria3Elem": {},
"Tria6Elem": {},
"Quad4Elem": {},
"Quad8Elem": {},
"Seg2Elem": {}
}
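# Illustrative call, assuming the reader is eventually implemented; the file
# name is hypothetical, and today every returned mapping is empty:
#
#     mesh_data = read_fenics_mesh_xdmf('mesh.xdmf')
#     nodes = mesh_data['Nodes']   # {} until the reader is operational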
| sanguinariojoe/FreeCAD | src/Mod/Fem/feminout/readFenicsXDMF.py | Python | lgpl-2.1 | 2,385 |
# Copyright (C) 2016 Jordan Tardif http://github.com/jordant
# Jordan Tardif <[email protected]>
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
"""
Nagios plugin to check running nova images.
This corresponds to the output of 'nova image-list'.
"""
import time
import openstacknagios.openstacknagios as osnag
from novaclient import client
from novaclient.v2 import images
class NovaImages(osnag.Resource):
"""
Lists nova images and gets timing
"""
def probe(self):
start = time.time()
try:
nova = client.Client(self.api_version, session=self.session, region_name=self.region_name)
images.GlanceManager(nova).list()
except Exception as e:
self.exit_error(str(e))
get_time = time.time()
yield osnag.Metric('gettime', get_time-start, min=0)
@osnag.guarded
def main():
argp = osnag.ArgumentParser(description=__doc__)
    argp.add_argument('-w', '--warn', metavar='RANGE', default='0:',
                      help='return warning if response time is outside RANGE (default: 0:, never warn)')
    argp.add_argument('-c', '--critical', metavar='RANGE', default='0:',
                      help='return critical if response time is outside RANGE (default: 0:, never critical)')
args = argp.parse_args()
check = osnag.Check(
NovaImages(args=args),
osnag.ScalarContext('gettime', args.warn, args.critical),
osnag.Summary(show=['gettime'])
)
check.main(verbose=args.verbose, timeout=args.timeout)
if __name__ == '__main__':
main()
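# Example invocation, assuming the module is exposed as a console script (the
# script name 'check_nova-images' is an assumption; -w/-c are the flags
# defined above):
#
#     check_nova-images -w 0:5 -c 0:10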
| ChameleonCloud/openstack-nagios-plugins | openstacknagios/nova/Images.py | Python | gpl-3.0 | 2,216 |
from setuptools import setup
requirements = {
'install': [
'distribute',
],
'extras': {
'docs': [
'sphinx>=1.1',
'agoraplex.themes.sphinx>=0.1.3',
'pygments',
],
'tests': [
'nose>=1.2.1',
'coverage>=3.6',
'pinocchio>=0.3.1',
'xtraceback>=0.3.3',
'pygments',
],
},
}
# write requirements for Travis and ReadTheDocs to use...
with open("reqs/travis.txt", "w") as travis:
travis.write('\n'.join(requirements['extras']['tests']) + '\n')
with open("reqs/rtfd.txt", "w") as rtfd:
rtfd.write('\n'.join(requirements['extras']['docs']) + '\n')
setup(
name='predicates',
version='0.0.5',
author='Tripp Lilley',
author_email='[email protected]',
packages=['predicates'],
namespace_packages=[],
url='https://github.com/agoraplex/predicates',
license='BSD',
description='A collection of predicate factories, functions, and partials, for functional programming.',
long_description=open('README.rst').read(),
install_requires=requirements.get('install', None),
tests_require=requirements.get('extras', {}).get('tests', None),
extras_require=requirements.get('extras', None),
classifiers=[
"Development Status :: 3 - Alpha",
"Intended Audience :: Developers",
"Operating System :: OS Independent",
"Programming Language :: Python :: 2.7",
"Topic :: Software Development :: Libraries :: Python Modules",
]
)
| agoraplex/predicates | setup.py | Python | bsd-3-clause | 1,584 |
# -*- coding: utf-8 -*-
import unittest
from cwr.parser.decoder.dictionary import GroupTrailerDictionaryDecoder
"""
Dictionary to Message decoding tests.
The following cases are tested:
"""
__author__ = 'Bernardo Martínez Garrido'
__license__ = 'MIT'
__status__ = 'Development'
class TestGroupTrailerDictionaryEncoding(unittest.TestCase):
def setUp(self):
self._decoder = GroupTrailerDictionaryDecoder()
def test_encoded(self):
data = {}
data['record_type'] = 'GRH'
data['group_id'] = 5
data['transaction_type'] = 'AGR'
data['transaction_count'] = 111
data['record_count'] = 222
record = self._decoder.decode(data)
self.assertEqual('GRH', record.record_type)
self.assertEqual(5, record.group_id)
self.assertEqual(111, record.transaction_count)
self.assertEqual(222, record.record_count)
| weso/CWR-DataApi | tests/parser/dictionary/decoder/control/test_group_trailer.py | Python | mit | 903 |
# Copyright (c) 2011 Neal H. Walfield
#
# This software is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 3 of the License, or
# (at your option) any later version.
#
# This software is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
from PySide.QtCore import QTimer
from functools import wraps
import time
import threading
import traceback
import logging
from mainthread import mainthread
from settings import accounts, settings_db
from coroutine import coroutine
# Don't fail if the Woodchuck modules are not available. Just disable
# Woodchuck's functionality.
# Whether we imported the woodchuck modules successfully.
woodchuck_imported = True
try:
import pywoodchuck
from pywoodchuck import PyWoodchuck
from pywoodchuck import woodchuck
except ImportError, e:
import traceback
logging.exception(
"Unable to load Woodchuck modules: disabling Woodchuck support: %s"
% str(e))
print("Unable to load Woodchuck modules: disabling Woodchuck support: %s"
% traceback.format_exc())
woodchuck_imported = False
# Users of this module do: from wc import woodchuck
# Make sure that doesn't gratutiously fail.
woodchuck = None
class PyWoodchuck(object):
def __init__(self, *args, **kwargs):
pass
def available(self):
return False
def refresh_interval():
"""Return the refresh interval (in seconds)."""
settings = settings_db()
if not settings.contains('refresh_interval'):
return 600
else:
return int(settings.value('refresh_interval')) * 60
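# e.g. a stored 'refresh_interval' of 10 (minutes) yields 600 seconds, matching
# the 600-second default used when the setting is absent.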
def stream_id_build(account, feed):
return account.uuid + '::' + feed
def stream_id_split(id):
try:
(account_id, feed) = (id.split('::', 1) + [None, ])[:2]
except (TypeError, ValueError):
return (None, None)
for account in accounts():
if account.uuid == account_id:
break
else:
return (None, None)
return (account, feed)
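# Example (illustrative): for an account whose uuid is 'abc123',
# stream_id_build(account, 'HomeTimeline') returns 'abc123::HomeTimeline';
# stream_id_split() maps that string back to (account, 'HomeTimeline'), or to
# (None, None) when no configured account matches.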
class mywoodchuck(PyWoodchuck):
"""
stream_update is a function that is called when a stream should be
updated. It is passed two arguments: the account (a
settings.Account instance) and the feed (a string) to update.
object_transfer is a function that is called when an object should
be transferred. It is passed three arguments: the account (a
settings.Account instance), the feed (a string) and the tweet
identifier (a string).
If stream_update is None, then no callbacks will be requested.
"""
def __init__(self, stream_update, object_transfer):
if stream_update is None:
# Disable upcalls.
request_feedback = False
else:
request_feedback = True
PyWoodchuck.__init__(self, "Khweeteur", "net.khertan.khweeteur.daemon",
request_feedback=request_feedback)
self.stream_update = stream_update
self.object_transfer = object_transfer
def stream_unregister(self, stream):
try:
logging.debug(
"Unregistering stream %s(%s)"
% (stream.human_readable_name, stream.identifier))
del self[stream.identifier]
except (KeyError, woodchuck.Error), exception:
logging.exception(
"Unregistering stream %s(%s): %s"
% (stream.human_readable_name, stream.identifier,
str(exception)))
# Woodchuck upcalls.
def stream_update_cb(self, stream, *args, **kwargs):
logging.debug("stream update called on %s (%s)"
% (stream.human_readable_name, stream.identifier,))
account, feed = stream_id_split(stream.identifier)
if account is None:
self.stream_unregister(stream)
return
self.stream_update(account, feed)
def object_transfer_cb(self, stream, object,
version, filename, quality, *args, **kwargs):
logging.debug("object transfer called on %s (%s) in stream %s (%s)"
% (object.human_readable_name, object.identifier,
stream.human_readable_name, stream.identifier))
if stream.identifier == 'topost':
account = None
feed = 'topost'
else:
account, feed = stream_id_split(stream.identifier)
if account is None:
del self[stream.identifier]
return
if not (account is None and feed == 'topost'):
# object_transfer should only be called on topost
logging.debug(
"object_transfer_cb called on feed other than topost (%s)!"
% (stream.identifier))
try:
self[stream.identifier][object.identifier].dont_transfer = True
        except Exception, e:
            logging.exception(
                "Setting DontTransfer on %s.%s: %s"
                % (stream.identifier, object.identifier, str(e)))
return
self.object_transfer(account, feed, object.identifier)
@coroutine
def synchronize_config(self):
# Called to synchronize Woodchuck's configuration with our
# configuration.
# The list of known streams.
streams = self.streams_list()
stream_ids = [s.identifier for s in streams]
freshness = refresh_interval()
# Register any unknown streams. Remove known streams from
# STREAMS_IDS.
def check(stream_id, name, freshness):
if stream_id not in stream_ids:
logging.debug(
"Registering previously unknown feed: %s (%s)"
% (name, stream_id))
self.stream_register(stream_identifier=stream_id,
human_readable_name=name,
freshness=freshness)
else:
logging.debug(
"%s (%s) already registered"
% (name, stream_id))
# The account name can change: it's the user's stream
# name.
stream_ids.remove(stream_id)
self[stream_id].human_readable_name = name
self[stream_id].freshness = freshness
for account in accounts():
for feed in account.feeds():
check(stream_id_build(account, feed),
account.name + ': ' + feed,
freshness=freshness)
yield
# The outbox.
check('topost', 'outbox', woodchuck.never_updated)
# Unregister any streams that are no longer subscribed to.
for stream_id in stream_ids:
logging.debug("%s no longer registered." % (stream_id,))
self.stream_unregister(self[stream_id])
yield
_w = None
def wc(stream_update=None, object_transfer=None):
"""
Connect to the woodchuck server and initialize any state.
stream_update is a function that is passed two arguments: an
account identifier and the name of the stream to update (e.g.,
'HomeTimeline').
object_transfer is a function that is passed three arguments: an
account identifier, a name of the stream and the post to transfer.
If channel_update and episode_download are None, then Woodchuck
upcalls will be disabled.
"""
global _w
if _w is not None:
return _w
_w = mywoodchuck(stream_update, object_transfer)
if not _w.available():
logging.info(
"Woodchuck support disabled: unable to contact Woodchuck server.")
print "Woodchuck support disabled: unable to contact Woodchuck server."
return _w
logging.info("Woodchuck appears to be available.")
if stream_update is not None:
QTimer.singleShot(10 * 1000, _w.synchronize_config)
return _w
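# Minimal wiring sketch (assumption: the daemon supplies these two callbacks;
# the names and bodies are illustrative):
#
#     def on_stream_update(account, feed):
#         pass  # refresh 'feed' for 'account'
#
#     def on_object_transfer(account, feed, tweet_id):
#         pass  # handle the identified tweet
#
#     w = wc(on_stream_update, on_object_transfer)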
| khertan/Khweeteur | khweeteur/wc.py | Python | gpl-3.0 | 8,356 |
#!/usr/bin/env python
"""
responses
=========
A utility library for mocking out the `requests` Python library.
:copyright: (c) 2013 Dropbox, Inc.
"""
from setuptools import setup
from setuptools.command.test import test as TestCommand
import sys
setup_requires = []
if 'test' in sys.argv:
setup_requires.append('pytest')
install_requires = [
'requests',
'mock',
'six',
]
tests_require = [
'pytest',
'pytest-cov',
'flake8',
]
class PyTest(TestCommand):
def finalize_options(self):
TestCommand.finalize_options(self)
self.test_args = ['test_responses.py']
self.test_suite = True
def run_tests(self):
# import here, cause outside the eggs aren't loaded
import pytest
errno = pytest.main(self.test_args)
sys.exit(errno)
setup(
name='responses',
version='0.2.2',
author='David Cramer',
description=(
'A utility library for mocking out the `requests` Python library.'
),
long_description=open('README.rst').read(),
py_modules=['responses'],
zip_safe=False,
install_requires=install_requires,
extras_require={
'tests': tests_require,
},
tests_require=tests_require,
setup_requires=setup_requires,
cmdclass={'test': PyTest},
include_package_data=True,
classifiers=[
'Intended Audience :: Developers',
'Intended Audience :: System Administrators',
'Operating System :: OS Independent',
'Topic :: Software Development'
],
)
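# With the PyTest cmdclass registered above, the test suite can be run via
# (assuming execution from the project root):
#
#     python setup.py test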
| cournape/responses | setup.py | Python | apache-2.0 | 1,534 |
# -*- coding: utf-8 -*-
#
# This file is part of INGInious. See the LICENSE and the COPYRIGHTS files for
# more information about the licensing of this file.
import web
from inginious.frontend.pages.course_admin.utils import make_csv, INGIniousAdminPage
class CourseStudentInfoPage(INGIniousAdminPage):
""" List information about a student """
def GET_AUTH(self, courseid, username): # pylint: disable=arguments-differ
""" GET request """
course, __ = self.get_course_and_check_rights(courseid)
return self.page(course, username)
def submission_url_generator(self, username, taskid):
""" Generates a submission url """
return "?format=taskid%2Fusername&tasks=" + taskid + "&users=" + username
def page(self, course, username):
""" Get all data and display the page """
data = list(self.database.user_tasks.find({"username": username, "courseid": course.get_id()}))
tasks = course.get_tasks()
result = dict([(taskid, {"taskid": taskid, "name": tasks[taskid].get_name_or_id(self.user_manager.session_language()),
"tried": 0, "status": "notviewed", "grade": 0,
"url": self.submission_url_generator(username, taskid)}) for taskid in tasks])
for taskdata in data:
if taskdata["taskid"] in result:
result[taskdata["taskid"]]["tried"] = taskdata["tried"]
if taskdata["tried"] == 0:
result[taskdata["taskid"]]["status"] = "notattempted"
elif taskdata["succeeded"]:
result[taskdata["taskid"]]["status"] = "succeeded"
else:
result[taskdata["taskid"]]["status"] = "failed"
result[taskdata["taskid"]]["grade"] = taskdata["grade"]
result[taskdata["taskid"]]["submissionid"] = str(taskdata["submissionid"])
if "csv" in web.input():
return make_csv(result)
results = sorted(list(result.values()), key=lambda result: (tasks[result["taskid"]].get_order(), result["taskid"]))
return self.template_helper.get_renderer().course_admin.student_info(course, username, results)
| JuezUN/INGInious | inginious/frontend/pages/course_admin/student_info.py | Python | agpl-3.0 | 2,233 |
import os
from django.test import TestCase
from django.core.files.uploadedfile import SimpleUploadedFile
from demos.models import Demo, Project
class DemoTestCase(TestCase):
def setUp(self):
super(DemoTestCase, self).setUp()
self.demo = Demo.objects.create(
title='CloudCV Classification',
demo_url='http://cloudcv.org/classify/',
demo_base_url='http://cloudcv.org/',
permalink='classify',
tag_line='Use CloudCV to automatically find which objects are present in an image',
description='Click on one of these images to send it to our servers (Or upload your own images below)',
sample=True,
text_inputs=0,
image_inputs=1,
text_outputs=1,
image_outputs=0,
)
def test__unicode__(self):
title = self.demo.title
demo_url = self.demo.demo_url
final_str = '{0}: {1}'.format(title, demo_url)
self.assertEqual(final_str, self.demo.__str__())
class ProjectTestCase(TestCase):
def setUp(self):
super(ProjectTestCase, self).setUp()
try:
os.makedirs('/tmp/cloudcv')
except OSError:
pass
with self.settings(MEDIA_ROOT='/tmp/cloudcv'):
self.project = Project.objects.create(
title='Origami',
project_url='http://origami.cloudcv.org',
github_url='http://github.com/Cloud-CV/origami',
documentation_url='http://origami.cloudcv.org/libdocs/',
image=SimpleUploadedFile(
name='test_image.jpg',
content=open('frontend/src/images/cloudcv_logo.png', 'rb').read(),
content_type='image/png'
),
)
def test__unicode__(self):
title = self.project.title
project_url = self.project.project_url
final_str = '{0}: {1}'.format(title, project_url)
self.assertEqual(final_str, self.project.__str__())
| Cloud-CV/CloudCV | tests/unit/demos/test_models.py | Python | gpl-3.0 | 2,051 |
NAME="Phone Alert Status"
| brettchien/PyBLEWrapper | pyble/const/profile/phone_alert_status.py | Python | mit | 26 |
from datetime import datetime, timedelta
from flask import Flask, render_template, jsonify
from flask_moment import Moment
app = Flask(__name__)
moment = Moment(app)
@app.route('/')
def index():
now = datetime.utcnow()
midnight = datetime(now.year, now.month, now.day, 0, 0, 0)
epoch = datetime(1970, 1, 1, 0, 0, 0)
next_saturday = now + timedelta(5 - now.weekday())
return render_template('index.html', now=now, midnight=midnight,
epoch=epoch, next_saturday=next_saturday)
@app.route('/ajax')
def ajax():
return jsonify({'timestamp': moment.create(datetime.utcnow()).format(
'LLLL')})
if __name__ == '__main__':
app.run(debug=True)
| miguelgrinberg/Flask-Moment | example/app.py | Python | mit | 703 |
from controller import app
if __name__ == "__main__":
app.run()
| Plezito/TCC_fuji_plez | site/wsgi.py | Python | mit | 69 |
""" support for skip/xfail functions and markers. """
from __future__ import absolute_import, division, print_function
import os
import sys
import traceback
import py
from _pytest.config import hookimpl
from _pytest.mark import MarkInfo, MarkDecorator
from _pytest.runner import fail, skip
def pytest_addoption(parser):
group = parser.getgroup("general")
group.addoption('--runxfail',
action="store_true", dest="runxfail", default=False,
help="run tests even if they are marked xfail")
parser.addini("xfail_strict", "default for the strict parameter of xfail "
"markers when not given explicitly (default: "
"False)",
default=False,
type="bool")
def pytest_configure(config):
if config.option.runxfail:
# yay a hack
import pytest
old = pytest.xfail
config._cleanup.append(lambda: setattr(pytest, "xfail", old))
def nop(*args, **kwargs):
pass
nop.Exception = XFailed
setattr(pytest, "xfail", nop)
config.addinivalue_line("markers",
"skip(reason=None): skip the given test function with an optional reason. "
"Example: skip(reason=\"no way of currently testing this\") skips the "
"test."
)
config.addinivalue_line("markers",
"skipif(condition): skip the given test function if eval(condition) "
"results in a True value. Evaluation happens within the "
"module global context. Example: skipif('sys.platform == \"win32\"') "
"skips the test if we are on the win32 platform. see "
"http://pytest.org/latest/skipping.html"
)
config.addinivalue_line("markers",
"xfail(condition, reason=None, run=True, raises=None, strict=False): "
"mark the test function as an expected failure if eval(condition) "
"has a True value. Optionally specify a reason for better reporting "
"and run=False if you don't even want to execute the test function. "
"If only specific exception(s) are expected, you can list them in "
"raises, and if the test fails in other ways, it will be reported as "
"a true failure. See http://pytest.org/latest/skipping.html"
)
class XFailed(fail.Exception):
""" raised from an explicit call to pytest.xfail() """
def xfail(reason=""):
""" xfail an executing test or setup functions with the given reason."""
__tracebackhide__ = True
raise XFailed(reason)
xfail.Exception = XFailed
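# Usage sketch for the explicit-xfail helper above (run under pytest; the
# 'feature_ready' flag and the test body are illustrative):
#
#     import pytest
#
#     def test_new_feature():
#         if not feature_ready:
#             pytest.xfail("feature not ready on this platform")
#         assert new_feature() == expected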
class MarkEvaluator:
def __init__(self, item, name):
self.item = item
self.name = name
@property
def holder(self):
return self.item.keywords.get(self.name)
def __bool__(self):
return bool(self.holder)
__nonzero__ = __bool__
def wasvalid(self):
return not hasattr(self, 'exc')
def invalidraise(self, exc):
raises = self.get('raises')
if not raises:
return
return not isinstance(exc, raises)
def istrue(self):
try:
return self._istrue()
except Exception:
self.exc = sys.exc_info()
if isinstance(self.exc[1], SyntaxError):
msg = [" " * (self.exc[1].offset + 4) + "^", ]
msg.append("SyntaxError: invalid syntax")
else:
msg = traceback.format_exception_only(*self.exc[:2])
fail("Error evaluating %r expression\n"
" %s\n"
"%s"
% (self.name, self.expr, "\n".join(msg)),
pytrace=False)
def _getglobals(self):
d = {'os': os, 'sys': sys, 'config': self.item.config}
if hasattr(self.item, 'obj'):
d.update(self.item.obj.__globals__)
return d
def _istrue(self):
if hasattr(self, 'result'):
return self.result
if self.holder:
if self.holder.args or 'condition' in self.holder.kwargs:
self.result = False
# "holder" might be a MarkInfo or a MarkDecorator; only
# MarkInfo keeps track of all parameters it received in an
# _arglist attribute
marks = getattr(self.holder, '_marks', None) \
or [self.holder.mark]
for _, args, kwargs in marks:
if 'condition' in kwargs:
args = (kwargs['condition'],)
for expr in args:
self.expr = expr
if isinstance(expr, py.builtin._basestring):
d = self._getglobals()
result = cached_eval(self.item.config, expr, d)
else:
if "reason" not in kwargs:
# XXX better be checked at collection time
msg = "you need to specify reason=STRING " \
"when using booleans as conditions."
fail(msg)
result = bool(expr)
if result:
self.result = True
self.reason = kwargs.get('reason', None)
self.expr = expr
return self.result
else:
self.result = True
return getattr(self, 'result', False)
def get(self, attr, default=None):
return self.holder.kwargs.get(attr, default)
def getexplanation(self):
expl = getattr(self, 'reason', None) or self.get('reason', None)
if not expl:
if not hasattr(self, 'expr'):
return ""
else:
return "condition: " + str(self.expr)
return expl
@hookimpl(tryfirst=True)
def pytest_runtest_setup(item):
# Check if skip or skipif are specified as pytest marks
skipif_info = item.keywords.get('skipif')
if isinstance(skipif_info, (MarkInfo, MarkDecorator)):
eval_skipif = MarkEvaluator(item, 'skipif')
if eval_skipif.istrue():
item._evalskip = eval_skipif
skip(eval_skipif.getexplanation())
skip_info = item.keywords.get('skip')
if isinstance(skip_info, (MarkInfo, MarkDecorator)):
item._evalskip = True
if 'reason' in skip_info.kwargs:
skip(skip_info.kwargs['reason'])
elif skip_info.args:
skip(skip_info.args[0])
else:
skip("unconditional skip")
item._evalxfail = MarkEvaluator(item, 'xfail')
check_xfail_no_run(item)
@hookimpl(hookwrapper=True)
def pytest_pyfunc_call(pyfuncitem):
check_xfail_no_run(pyfuncitem)
outcome = yield
passed = outcome.excinfo is None
if passed:
check_strict_xfail(pyfuncitem)
def check_xfail_no_run(item):
"""check xfail(run=False)"""
if not item.config.option.runxfail:
evalxfail = item._evalxfail
if evalxfail.istrue():
if not evalxfail.get('run', True):
xfail("[NOTRUN] " + evalxfail.getexplanation())
def check_strict_xfail(pyfuncitem):
"""check xfail(strict=True) for the given PASSING test"""
evalxfail = pyfuncitem._evalxfail
if evalxfail.istrue():
strict_default = pyfuncitem.config.getini('xfail_strict')
is_strict_xfail = evalxfail.get('strict', strict_default)
if is_strict_xfail:
del pyfuncitem._evalxfail
explanation = evalxfail.getexplanation()
fail('[XPASS(strict)] ' + explanation, pytrace=False)
@hookimpl(hookwrapper=True)
def pytest_runtest_makereport(item, call):
outcome = yield
rep = outcome.get_result()
evalxfail = getattr(item, '_evalxfail', None)
evalskip = getattr(item, '_evalskip', None)
# unitttest special case, see setting of _unexpectedsuccess
if hasattr(item, '_unexpectedsuccess') and rep.when == "call":
from _pytest.compat import _is_unittest_unexpected_success_a_failure
if item._unexpectedsuccess:
rep.longrepr = "Unexpected success: {0}".format(item._unexpectedsuccess)
else:
rep.longrepr = "Unexpected success"
if _is_unittest_unexpected_success_a_failure():
rep.outcome = "failed"
else:
rep.outcome = "passed"
rep.wasxfail = rep.longrepr
elif item.config.option.runxfail:
        pass  # don't interfere
elif call.excinfo and call.excinfo.errisinstance(xfail.Exception):
rep.wasxfail = "reason: " + call.excinfo.value.msg
rep.outcome = "skipped"
elif evalxfail and not rep.skipped and evalxfail.wasvalid() and \
evalxfail.istrue():
if call.excinfo:
if evalxfail.invalidraise(call.excinfo.value):
rep.outcome = "failed"
else:
rep.outcome = "skipped"
rep.wasxfail = evalxfail.getexplanation()
elif call.when == "call":
strict_default = item.config.getini('xfail_strict')
is_strict_xfail = evalxfail.get('strict', strict_default)
explanation = evalxfail.getexplanation()
if is_strict_xfail:
rep.outcome = "failed"
rep.longrepr = "[XPASS(strict)] {0}".format(explanation)
else:
rep.outcome = "passed"
rep.wasxfail = explanation
elif evalskip is not None and rep.skipped and type(rep.longrepr) is tuple:
# skipped by mark.skipif; change the location of the failure
# to point to the item definition, otherwise it will display
# the location of where the skip exception was raised within pytest
filename, line, reason = rep.longrepr
filename, line = item.location[:2]
rep.longrepr = filename, line, reason
# called by terminalreporter progress reporting
def pytest_report_teststatus(report):
if hasattr(report, "wasxfail"):
if report.skipped:
return "xfailed", "x", "xfail"
elif report.passed:
return "xpassed", "X", ("XPASS", {'yellow': True})
# called by the terminalreporter instance/plugin
def pytest_terminal_summary(terminalreporter):
tr = terminalreporter
if not tr.reportchars:
#for name in "xfailed skipped failed xpassed":
# if not tr.stats.get(name, 0):
# tr.write_line("HINT: use '-r' option to see extra "
# "summary info about tests")
# break
return
lines = []
for char in tr.reportchars:
if char == "x":
show_xfailed(terminalreporter, lines)
elif char == "X":
show_xpassed(terminalreporter, lines)
elif char in "fF":
show_simple(terminalreporter, lines, 'failed', "FAIL %s")
elif char in "sS":
show_skipped(terminalreporter, lines)
elif char == "E":
show_simple(terminalreporter, lines, 'error', "ERROR %s")
elif char == 'p':
show_simple(terminalreporter, lines, 'passed', "PASSED %s")
if lines:
tr._tw.sep("=", "short test summary info")
for line in lines:
tr._tw.line(line)
def show_simple(terminalreporter, lines, stat, format):
failed = terminalreporter.stats.get(stat)
if failed:
for rep in failed:
pos = terminalreporter.config.cwd_relative_nodeid(rep.nodeid)
lines.append(format % (pos,))
def show_xfailed(terminalreporter, lines):
xfailed = terminalreporter.stats.get("xfailed")
if xfailed:
for rep in xfailed:
pos = terminalreporter.config.cwd_relative_nodeid(rep.nodeid)
reason = rep.wasxfail
lines.append("XFAIL %s" % (pos,))
if reason:
lines.append(" " + str(reason))
def show_xpassed(terminalreporter, lines):
xpassed = terminalreporter.stats.get("xpassed")
if xpassed:
for rep in xpassed:
pos = terminalreporter.config.cwd_relative_nodeid(rep.nodeid)
reason = rep.wasxfail
lines.append("XPASS %s %s" % (pos, reason))
def cached_eval(config, expr, d):
if not hasattr(config, '_evalcache'):
config._evalcache = {}
try:
return config._evalcache[expr]
except KeyError:
import _pytest._code
exprcode = _pytest._code.compile(expr, mode="eval")
config._evalcache[expr] = x = eval(exprcode, d)
return x
def folded_skips(skipped):
d = {}
for event in skipped:
key = event.longrepr
assert len(key) == 3, (event, key)
d.setdefault(key, []).append(event)
l = []
for key, events in d.items():
l.append((len(events),) + key)
return l
def show_skipped(terminalreporter, lines):
tr = terminalreporter
skipped = tr.stats.get('skipped', [])
if skipped:
#if not tr.hasopt('skipped'):
# tr.write_line(
# "%d skipped tests, specify -rs for more info" %
# len(skipped))
# return
fskips = folded_skips(skipped)
if fskips:
#tr.write_sep("_", "skipped test summary")
for num, fspath, lineno, reason in fskips:
if reason.startswith("Skipped: "):
reason = reason[9:]
lines.append(
"SKIP [%d] %s:%d: %s" %
(num, fspath, lineno, reason))
| alexzoo/python | selenium_tests/env/lib/python3.6/site-packages/_pytest/skipping.py | Python | apache-2.0 | 13,618 |
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import copy
import uuid
from keystoneclient import access
from keystoneclient import exceptions
from keystoneclient import fixture
from keystoneclient.tests.unit.v3 import utils
from keystoneclient.v3.contrib.federation import base
from keystoneclient.v3.contrib.federation import identity_providers
from keystoneclient.v3.contrib.federation import mappings
from keystoneclient.v3.contrib.federation import protocols
from keystoneclient.v3.contrib.federation import service_providers
from keystoneclient.v3 import domains
from keystoneclient.v3 import projects
class IdentityProviderTests(utils.TestCase, utils.CrudTests):
def setUp(self):
super(IdentityProviderTests, self).setUp()
self.key = 'identity_provider'
self.collection_key = 'identity_providers'
self.model = identity_providers.IdentityProvider
self.manager = self.client.federation.identity_providers
self.path_prefix = 'OS-FEDERATION'
def new_ref(self, **kwargs):
kwargs.setdefault('id', uuid.uuid4().hex)
kwargs.setdefault('description', uuid.uuid4().hex)
kwargs.setdefault('enabled', True)
return kwargs
def test_positional_parameters_expect_fail(self):
"""Ensure CrudManager raises TypeError exceptions.
After passing wrong number of positional arguments
an exception should be raised.
Operations to be tested:
* create()
* get()
* list()
* delete()
* update()
"""
POS_PARAM_1 = uuid.uuid4().hex
POS_PARAM_2 = uuid.uuid4().hex
POS_PARAM_3 = uuid.uuid4().hex
PARAMETERS = {
'create': (POS_PARAM_1, POS_PARAM_2),
'get': (POS_PARAM_1, POS_PARAM_2),
'list': (POS_PARAM_1, POS_PARAM_2),
'update': (POS_PARAM_1, POS_PARAM_2, POS_PARAM_3),
'delete': (POS_PARAM_1, POS_PARAM_2)
}
for f_name, args in PARAMETERS.items():
self.assertRaises(TypeError, getattr(self.manager, f_name),
*args)
def test_create(self):
ref = self.new_ref()
req_ref = ref.copy()
req_ref.pop('id')
self.stub_entity('PUT', entity=ref, id=ref['id'], status_code=201)
returned = self.manager.create(**ref)
self.assertIsInstance(returned, self.model)
for attr in req_ref:
self.assertEqual(
getattr(returned, attr),
req_ref[attr],
'Expected different %s' % attr)
self.assertEntityRequestBodyIs(req_ref)
class MappingTests(utils.TestCase, utils.CrudTests):
def setUp(self):
super(MappingTests, self).setUp()
self.key = 'mapping'
self.collection_key = 'mappings'
self.model = mappings.Mapping
self.manager = self.client.federation.mappings
self.path_prefix = 'OS-FEDERATION'
def new_ref(self, **kwargs):
kwargs.setdefault('id', uuid.uuid4().hex)
kwargs.setdefault('rules', [uuid.uuid4().hex,
uuid.uuid4().hex])
return kwargs
def test_create(self):
ref = self.new_ref()
manager_ref = ref.copy()
mapping_id = manager_ref.pop('id')
req_ref = ref.copy()
self.stub_entity('PUT', entity=req_ref, id=mapping_id,
status_code=201)
returned = self.manager.create(mapping_id=mapping_id, **manager_ref)
self.assertIsInstance(returned, self.model)
for attr in req_ref:
self.assertEqual(
getattr(returned, attr),
req_ref[attr],
'Expected different %s' % attr)
self.assertEntityRequestBodyIs(manager_ref)
class ProtocolTests(utils.TestCase, utils.CrudTests):
def setUp(self):
super(ProtocolTests, self).setUp()
self.key = 'protocol'
self.collection_key = 'protocols'
self.model = protocols.Protocol
self.manager = self.client.federation.protocols
self.path_prefix = 'OS-FEDERATION/identity_providers'
def _transform_to_response(self, ref):
"""Rebuild dictionary so it can be used as a
reference response body.
"""
response = copy.deepcopy(ref)
response['id'] = response.pop('protocol_id')
del response['identity_provider']
return response
def new_ref(self, **kwargs):
kwargs.setdefault('mapping', uuid.uuid4().hex)
kwargs.setdefault('identity_provider', uuid.uuid4().hex)
kwargs.setdefault('protocol_id', uuid.uuid4().hex)
return kwargs
def build_parts(self, identity_provider, protocol_id=None):
"""Build array used to construct mocking URL.
Construct and return array with URL parts later used
by methods like utils.TestCase.stub_entity().
Example of URL:
``OS-FEDERATION/identity_providers/{idp_id}/
protocols/{protocol_id}``
"""
parts = ['OS-FEDERATION', 'identity_providers',
identity_provider, 'protocols']
if protocol_id:
parts.append(protocol_id)
return parts
def test_build_url_provide_base_url(self):
base_url = uuid.uuid4().hex
parameters = {'base_url': base_url}
url = self.manager.build_url(dict_args_in_out=parameters)
self.assertEqual('/'.join([base_url, self.collection_key]), url)
def test_build_url_w_idp_id(self):
"""Test whether kwargs ``base_url`` discards object's base_url
This test shows, that when ``base_url`` is specified in the
dict_args_in_out dictionary, values like ``identity_provider_id``
are not taken into consideration while building the url.
"""
base_url, identity_provider_id = uuid.uuid4().hex, uuid.uuid4().hex
parameters = {
'base_url': base_url,
'identity_provider_id': identity_provider_id
}
url = self.manager.build_url(dict_args_in_out=parameters)
self.assertEqual('/'.join([base_url, self.collection_key]), url)
def test_build_url_default_base_url(self):
identity_provider_id = uuid.uuid4().hex
parameters = {
'identity_provider_id': identity_provider_id
}
url = self.manager.build_url(dict_args_in_out=parameters)
self.assertEqual(
'/'.join([self.manager.base_url, identity_provider_id,
self.manager.collection_key]), url)
def test_create(self):
"""Test creating federation protocol tied to an Identity Provider.
URL to be tested: PUT /OS-FEDERATION/identity_providers/
$identity_provider/protocols/$protocol
"""
request_args = self.new_ref()
expected = self._transform_to_response(request_args)
parts = self.build_parts(request_args['identity_provider'],
request_args['protocol_id'])
self.stub_entity('PUT', entity=expected,
parts=parts, status_code=201)
returned = self.manager.create(**request_args)
self.assertEqual(expected, returned.to_dict())
request_body = {'mapping_id': request_args['mapping']}
self.assertEntityRequestBodyIs(request_body)
def test_get(self):
"""Fetch federation protocol object.
URL to be tested: GET /OS-FEDERATION/identity_providers/
$identity_provider/protocols/$protocol
"""
request_args = self.new_ref()
expected = self._transform_to_response(request_args)
parts = self.build_parts(request_args['identity_provider'],
request_args['protocol_id'])
self.stub_entity('GET', entity=expected,
parts=parts, status_code=201)
returned = self.manager.get(request_args['identity_provider'],
request_args['protocol_id'])
self.assertIsInstance(returned, self.model)
self.assertEqual(expected, returned.to_dict())
def test_delete(self):
"""Delete federation protocol object.
URL to be tested: DELETE /OS-FEDERATION/identity_providers/
$identity_provider/protocols/$protocol
"""
request_args = self.new_ref()
parts = self.build_parts(request_args['identity_provider'],
request_args['protocol_id'])
self.stub_entity('DELETE', parts=parts, status_code=204)
self.manager.delete(request_args['identity_provider'],
request_args['protocol_id'])
def test_list(self):
"""Test listing all federation protocols tied to the Identity Provider.
URL to be tested: GET /OS-FEDERATION/identity_providers/
$identity_provider/protocols
"""
def _ref_protocols():
return {
'id': uuid.uuid4().hex,
'mapping_id': uuid.uuid4().hex
}
request_args = self.new_ref()
expected = [_ref_protocols() for _ in range(3)]
parts = self.build_parts(request_args['identity_provider'])
self.stub_entity('GET', parts=parts,
entity=expected, status_code=200)
returned = self.manager.list(request_args['identity_provider'])
for obj, ref_obj in zip(returned, expected):
self.assertEqual(obj.to_dict(), ref_obj)
def test_list_params(self):
request_args = self.new_ref()
filter_kwargs = {uuid.uuid4().hex: uuid.uuid4().hex}
parts = self.build_parts(request_args['identity_provider'])
# Return HTTP 401 as we don't accept such requests.
self.stub_entity('GET', parts=parts, status_code=401)
self.assertRaises(exceptions.Unauthorized,
self.manager.list,
request_args['identity_provider'],
**filter_kwargs)
self.assertQueryStringContains(**filter_kwargs)
def test_update(self):
"""Test updating federation protocol
URL to be tested: PATCH /OS-FEDERATION/identity_providers/
$identity_provider/protocols/$protocol
"""
request_args = self.new_ref()
expected = self._transform_to_response(request_args)
parts = self.build_parts(request_args['identity_provider'],
request_args['protocol_id'])
self.stub_entity('PATCH', parts=parts,
entity=expected, status_code=200)
returned = self.manager.update(request_args['identity_provider'],
request_args['protocol_id'],
mapping=request_args['mapping'])
self.assertIsInstance(returned, self.model)
self.assertEqual(expected, returned.to_dict())
request_body = {'mapping_id': request_args['mapping']}
self.assertEntityRequestBodyIs(request_body)
class EntityManagerTests(utils.TestCase):
def test_create_object_expect_fail(self):
self.assertRaises(TypeError,
base.EntityManager,
self.client)
class FederationProjectTests(utils.TestCase):
def setUp(self):
super(FederationProjectTests, self).setUp()
self.key = 'project'
self.collection_key = 'projects'
self.model = projects.Project
self.manager = self.client.federation.projects
self.URL = "%s%s" % (self.TEST_URL, '/OS-FEDERATION/projects')
def new_ref(self, **kwargs):
kwargs.setdefault('id', uuid.uuid4().hex)
kwargs.setdefault('domain_id', uuid.uuid4().hex)
kwargs.setdefault('enabled', True)
kwargs.setdefault('name', uuid.uuid4().hex)
return kwargs
def test_list_accessible_projects(self):
projects_ref = [self.new_ref(), self.new_ref()]
projects_json = {
self.collection_key: [self.new_ref(), self.new_ref()]
}
self.requests_mock.get(self.URL, json=projects_json)
returned_list = self.manager.list()
self.assertEqual(len(projects_ref), len(returned_list))
for project in returned_list:
self.assertIsInstance(project, self.model)
class FederationDomainTests(utils.TestCase):
def setUp(self):
super(FederationDomainTests, self).setUp()
self.key = 'domain'
self.collection_key = 'domains'
self.model = domains.Domain
self.manager = self.client.federation.domains
self.URL = "%s%s" % (self.TEST_URL, '/OS-FEDERATION/domains')
def new_ref(self, **kwargs):
kwargs.setdefault('id', uuid.uuid4().hex)
kwargs.setdefault('enabled', True)
kwargs.setdefault('name', uuid.uuid4().hex)
kwargs.setdefault('description', uuid.uuid4().hex)
return kwargs
def test_list_accessible_domains(self):
domains_ref = [self.new_ref(), self.new_ref()]
domains_json = {
self.collection_key: domains_ref
}
self.requests_mock.get(self.URL, json=domains_json)
returned_list = self.manager.list()
self.assertEqual(len(domains_ref), len(returned_list))
for domain in returned_list:
self.assertIsInstance(domain, self.model)
class FederatedTokenTests(utils.TestCase):
def setUp(self):
super(FederatedTokenTests, self).setUp()
token = fixture.V3FederationToken()
token.set_project_scope()
token.add_role()
self.federated_token = access.AccessInfo.factory(body=token)
def test_federated_property_federated_token(self):
"""Check if is_federated property returns expected value."""
self.assertTrue(self.federated_token.is_federated)
def test_get_user_domain_name(self):
"""Ensure a federated user's domain name does not exist."""
self.assertIsNone(self.federated_token.user_domain_name)
def test_get_user_domain_id(self):
"""Ensure a federated user's domain ID does not exist."""
self.assertIsNone(self.federated_token.user_domain_id)
class ServiceProviderTests(utils.TestCase, utils.CrudTests):
def setUp(self):
super(ServiceProviderTests, self).setUp()
self.key = 'service_provider'
self.collection_key = 'service_providers'
self.model = service_providers.ServiceProvider
self.manager = self.client.federation.service_providers
self.path_prefix = 'OS-FEDERATION'
def new_ref(self, **kwargs):
kwargs.setdefault('auth_url', uuid.uuid4().hex)
kwargs.setdefault('description', uuid.uuid4().hex)
kwargs.setdefault('enabled', True)
kwargs.setdefault('id', uuid.uuid4().hex)
kwargs.setdefault('sp_url', uuid.uuid4().hex)
return kwargs
def test_positional_parameters_expect_fail(self):
"""Ensure CrudManager raises TypeError exceptions.
After passing wrong number of positional arguments
an exception should be raised.
Operations to be tested:
* create()
* get()
* list()
* delete()
* update()
"""
POS_PARAM_1 = uuid.uuid4().hex
POS_PARAM_2 = uuid.uuid4().hex
POS_PARAM_3 = uuid.uuid4().hex
PARAMETERS = {
'create': (POS_PARAM_1, POS_PARAM_2),
'get': (POS_PARAM_1, POS_PARAM_2),
'list': (POS_PARAM_1, POS_PARAM_2),
'update': (POS_PARAM_1, POS_PARAM_2, POS_PARAM_3),
'delete': (POS_PARAM_1, POS_PARAM_2)
}
for f_name, args in PARAMETERS.items():
self.assertRaises(TypeError, getattr(self.manager, f_name),
*args)
def test_create(self):
ref = self.new_ref()
# req_ref argument allows you to specify a different
# signature for the request when the manager does some
# conversion before doing the request (e.g. converting
# from datetime object to timestamp string)
req_ref = ref.copy()
req_ref.pop('id')
self.stub_entity('PUT', entity=ref, id=ref['id'], status_code=201)
returned = self.manager.create(**ref)
self.assertIsInstance(returned, self.model)
for attr in req_ref:
self.assertEqual(
getattr(returned, attr),
req_ref[attr],
'Expected different %s' % attr)
self.assertEntityRequestBodyIs(req_ref)
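# A minimal, self-contained illustration of the req_ref pattern used in
# test_create above: the request body omits fields that the manager sends
# elsewhere (here the id, which travels in the PUT URL rather than the
# body). All values are invented.
_ref = {'id': 'abc123', 'sp_url': 'https://sp.example.com'}
_req_ref = _ref.copy()
_req_ref.pop('id')
assert _req_ref == {'sp_url': 'https://sp.example.com'}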
| darren-wang/ksc | keystoneclient/tests/unit/v3/test_federation.py | Python | apache-2.0 | 17,143 |
################################################################################
# Copyright 2015 Samuel Gongora Garcia ([email protected])
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
################################################################################
# Author: s.gongoragarcia[at]gmail.com
################################################################################
class Read_predict_data:
def __init__(self, index_satellite):
from os import getcwd, chdir
index_satellite = index_satellite + 1
directorio_script = getcwd()
# predict routine
self.open_predict(directorio_script)
self.open_files_predict(index_satellite)
chdir(directorio_script)
def open_predict(self, directorio_script):
from os import chdir, listdir, getcwd
chdir(directorio_script + '/results/predict')
self.files_predict = listdir(getcwd())
self.files_predict.sort()
def open_files_predict(self, index_satellite):
for i in range(index_satellite):
self.open_file_predict(self.files_predict[i])
def open_file_predict(self, name):
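        # Parses one tab-separated predict result file with columns
        # (simulation time, elevation, azimuth), keeping only rows whose
        # elevation is non-negative.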
self.predict_simulation_time = []
self.predict_alt_satellite = []
self.predict_az_satellite = []
import csv
with open(name) as tsv:
for line in csv.reader(tsv, delimiter = "\t"):
if float(line[1]) >= 0:
linea0 = float(line[0])
self.predict_simulation_time.append(linea0)
self.predict_alt_satellite.append(float(line[1]))
self.predict_az_satellite.append(float(line[2])) | satnet-project/propagators | output_predict.py | Python | apache-2.0 | 2,281 |
import pyaf.Bench.TS_datasets as tsds
import tests.artificial.process_artificial_dataset as art
art.process_dataset(N = 32 , FREQ = 'D', seed = 0, trendtype = "MovingAverage", cycle_length = 12, transform = "Difference", sigma = 0.0, exog_count = 20, ar_order = 12); | antoinecarme/pyaf | tests/artificial/transf_Difference/trend_MovingAverage/cycle_12/ar_12/test_artificial_32_Difference_MovingAverage_12_12_20.py | Python | bsd-3-clause | 270 |
from .. import config
from .. import fixtures
from ..assertions import eq_
from ..schema import Column
from ..schema import Table
from ... import ForeignKey
from ... import Integer
from ... import select
from ... import String
from ... import testing
class CTETest(fixtures.TablesTest):
__backend__ = True
__requires__ = ("ctes",)
run_inserts = "each"
run_deletes = "each"
@classmethod
def define_tables(cls, metadata):
Table(
"some_table",
metadata,
Column("id", Integer, primary_key=True),
Column("data", String(50)),
Column("parent_id", ForeignKey("some_table.id")),
)
Table(
"some_other_table",
metadata,
Column("id", Integer, primary_key=True),
Column("data", String(50)),
Column("parent_id", Integer),
)
@classmethod
def insert_data(cls):
config.db.execute(
cls.tables.some_table.insert(),
[
{"id": 1, "data": "d1", "parent_id": None},
{"id": 2, "data": "d2", "parent_id": 1},
{"id": 3, "data": "d3", "parent_id": 1},
{"id": 4, "data": "d4", "parent_id": 3},
{"id": 5, "data": "d5", "parent_id": 3},
],
)
def test_select_nonrecursive_round_trip(self):
some_table = self.tables.some_table
with config.db.connect() as conn:
cte = (
select([some_table])
.where(some_table.c.data.in_(["d2", "d3", "d4"]))
.cte("some_cte")
)
result = conn.execute(
select([cte.c.data]).where(cte.c.data.in_(["d4", "d5"]))
)
eq_(result.fetchall(), [("d4",)])
def test_select_recursive_round_trip(self):
some_table = self.tables.some_table
with config.db.connect() as conn:
cte = (
select([some_table])
.where(some_table.c.data.in_(["d2", "d3", "d4"]))
.cte("some_cte", recursive=True)
)
cte_alias = cte.alias("c1")
st1 = some_table.alias()
# note that SQL Server requires this to be UNION ALL,
# can't be UNION
cte = cte.union_all(
select([st1]).where(st1.c.id == cte_alias.c.parent_id)
)
result = conn.execute(
select([cte.c.data])
.where(cte.c.data != "d2")
.order_by(cte.c.data.desc())
)
eq_(
result.fetchall(),
[("d4",), ("d3",), ("d3",), ("d1",), ("d1",), ("d1",)],
)
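    # For reference, the recursive statement built above renders roughly as
    # (sketch only; exact column lists, quoting and aliasing vary by dialect):
    #
    #   WITH RECURSIVE some_cte AS (
    #       SELECT ... FROM some_table
    #       WHERE some_table.data IN ('d2', 'd3', 'd4')
    #       UNION ALL
    #       SELECT ... FROM some_table AS some_table_1, some_cte AS c1
    #       WHERE some_table_1.id = c1.parent_id
    #   )
    #   SELECT some_cte.data FROM some_cte
    #   WHERE some_cte.data != 'd2' ORDER BY some_cte.data DESC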
def test_insert_from_select_round_trip(self):
some_table = self.tables.some_table
some_other_table = self.tables.some_other_table
with config.db.connect() as conn:
cte = (
select([some_table])
.where(some_table.c.data.in_(["d2", "d3", "d4"]))
.cte("some_cte")
)
conn.execute(
some_other_table.insert().from_select(
["id", "data", "parent_id"], select([cte])
)
)
eq_(
conn.execute(
select([some_other_table]).order_by(some_other_table.c.id)
).fetchall(),
[(2, "d2", 1), (3, "d3", 1), (4, "d4", 3)],
)
@testing.requires.ctes_with_update_delete
@testing.requires.update_from
def test_update_from_round_trip(self):
some_table = self.tables.some_table
some_other_table = self.tables.some_other_table
with config.db.connect() as conn:
conn.execute(
some_other_table.insert().from_select(
["id", "data", "parent_id"], select([some_table])
)
)
cte = (
select([some_table])
.where(some_table.c.data.in_(["d2", "d3", "d4"]))
.cte("some_cte")
)
conn.execute(
some_other_table.update()
.values(parent_id=5)
.where(some_other_table.c.data == cte.c.data)
)
eq_(
conn.execute(
select([some_other_table]).order_by(some_other_table.c.id)
).fetchall(),
[
(1, "d1", None),
(2, "d2", 5),
(3, "d3", 5),
(4, "d4", 5),
(5, "d5", 3),
],
)
@testing.requires.ctes_with_update_delete
@testing.requires.delete_from
def test_delete_from_round_trip(self):
some_table = self.tables.some_table
some_other_table = self.tables.some_other_table
with config.db.connect() as conn:
conn.execute(
some_other_table.insert().from_select(
["id", "data", "parent_id"], select([some_table])
)
)
cte = (
select([some_table])
.where(some_table.c.data.in_(["d2", "d3", "d4"]))
.cte("some_cte")
)
conn.execute(
some_other_table.delete().where(
some_other_table.c.data == cte.c.data
)
)
eq_(
conn.execute(
select([some_other_table]).order_by(some_other_table.c.id)
).fetchall(),
[(1, "d1", None), (5, "d5", 3)],
)
@testing.requires.ctes_with_update_delete
def test_delete_scalar_subq_round_trip(self):
some_table = self.tables.some_table
some_other_table = self.tables.some_other_table
with config.db.connect() as conn:
conn.execute(
some_other_table.insert().from_select(
["id", "data", "parent_id"], select([some_table])
)
)
cte = (
select([some_table])
.where(some_table.c.data.in_(["d2", "d3", "d4"]))
.cte("some_cte")
)
conn.execute(
some_other_table.delete().where(
some_other_table.c.data
== select([cte.c.data])
.where(cte.c.id == some_other_table.c.id)
.scalar_subquery()
)
)
eq_(
conn.execute(
select([some_other_table]).order_by(some_other_table.c.id)
).fetchall(),
[(1, "d1", None), (5, "d5", 3)],
)
| graingert/sqlalchemy | lib/sqlalchemy/testing/suite/test_cte.py | Python | mit | 6,802 |
# -*- coding: utf-8 -*-
# Copyright 2022 Google LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# Generated code. DO NOT EDIT!
#
# Snippet for CreateContext
# NOTE: This snippet has been automatically generated for illustrative purposes only.
# It may require modifications to work in your environment.
# To install the latest published package dependency, execute the following:
# python3 -m pip install google-cloud-aiplatform
# [START aiplatform_v1_generated_MetadataService_CreateContext_sync]
from google.cloud import aiplatform_v1
def sample_create_context():
# Create a client
client = aiplatform_v1.MetadataServiceClient()
# Initialize request argument(s)
request = aiplatform_v1.CreateContextRequest(
parent="parent_value",
)
# Make the request
response = client.create_context(request=request)
# Handle the response
print(response)
# [END aiplatform_v1_generated_MetadataService_CreateContext_sync]
| googleapis/python-aiplatform | samples/generated_samples/aiplatform_v1_generated_metadata_service_create_context_sync.py | Python | apache-2.0 | 1,468 |
from __future__ import absolute_import
import os
import sys
import errno
from .common import TERM_SIGNAL
__all__ = ['Popen']
#
# Start child process using fork
#
class Popen(object):
method = 'fork'
sentinel = None
def __init__(self, process_obj):
sys.stdout.flush()
sys.stderr.flush()
self.returncode = None
self._launch(process_obj)
def duplicate_for_child(self, fd):
return fd
def poll(self, flag=os.WNOHANG):
if self.returncode is None:
while True:
try:
pid, sts = os.waitpid(self.pid, flag)
except OSError as e:
if e.errno == errno.EINTR:
continue
# Child process not yet created. See #1731717
# e.errno == errno.ECHILD == 10
return None
else:
break
if pid == self.pid:
if os.WIFSIGNALED(sts):
self.returncode = -os.WTERMSIG(sts)
else:
assert os.WIFEXITED(sts)
self.returncode = os.WEXITSTATUS(sts)
return self.returncode
def wait(self, timeout=None):
if self.returncode is None:
if timeout is not None:
from .connection import wait
if not wait([self.sentinel], timeout):
return None
# This shouldn't block if wait() returned successfully.
return self.poll(os.WNOHANG if timeout == 0.0 else 0)
return self.returncode
def terminate(self):
if self.returncode is None:
try:
os.kill(self.pid, TERM_SIGNAL)
except OSError as exc:
if getattr(exc, 'errno', None) != errno.ESRCH:
if self.wait(timeout=0.1) is None:
raise
def _launch(self, process_obj):
code = 1
parent_r, child_w = os.pipe()
self.pid = os.fork()
if self.pid == 0:
try:
os.close(parent_r)
if 'random' in sys.modules:
import random
random.seed()
code = process_obj._bootstrap()
finally:
os._exit(code)
else:
os.close(child_w)
self.sentinel = parent_r
def close(self):
if self.sentinel is not None:
try:
os.close(self.sentinel)
finally:
self.sentinel = None
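# A self-contained sketch of the sentinel mechanism _launch() sets up above:
# the parent keeps the read end of a pipe whose write end lives only in the
# child, so the pipe reaches EOF (becomes readable) when the child exits.
# The function name below is illustrative, not part of this module.
import select

def _sentinel_ready(sentinel_fd, timeout):
    # Returns True once the child has exited (the fd is readable at EOF).
    readable, _, _ = select.select([sentinel_fd], [], [], timeout)
    return bool(readable)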
| ammarkhann/FinalSeniorCode | lib/python2.7/site-packages/billiard/popen_fork.py | Python | mit | 2,600 |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
'''
Author: João Juíz
'''
import sys,os, argparse
class Main(object):
def argparser(self):
        parser = argparse.ArgumentParser(description='Update the database and feature matrix, then classify files.')
#parser.add_argument("param", type=str, help='Parameter Description')
parser.add_argument("-d",'--database', default=False, action='store_true', help='Update database')
parser.add_argument("-m",'--matrix', default=False, action='store_true', help='Update feature matrix')
parser.add_argument("-c",'--classify', default=False, action='store_true', help='Classify and symlink files predicted as like to config.predictedLikesDir')
#parser.add_argument('--erase', default=False, action='store_true', help='Print destinations of erased links from predictedLikesDir')
parser.add_argument("-x",'--chi2', default=False, action='store_true', help='Print chi² test. deprecated.')
parser.add_argument("-r",'--report', default=False, action='store_true', help='Crossvalidation report')
#parser.add_argument("-v",'--verbose', default=False, action='store_true', help='Fehlerausgabe')
#parser.add_argument("-o",'--destination', type=str, default=".", help='Example Parameter')
self.args = parser.parse_args()
def __init__(self):
pass
def __call__(self):
self.argparser()
from makeMatrix import MatrixFactory
from classify import Classifier
from makeDB import DbPen
dbPen = DbPen()
if self.args.classify:
dbPen.checkEmptyLikesDir()
if self.args.database:
dbPen.writeAllHtml()
dbPen.writeIdsToDb()
if self.args.matrix:
MatrixFactory().writeToFile()
if self.args.chi2:
Classifier().chi2()
if self.args.report:
Classifier().crossvali()
if self.args.classify:
Classifier().classify()
dbPen.mvPredictedLikes()
#if self.args.erase:
# dbPen.eraseFiles()
if __name__ == "__main__":
Main()()
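# Hypothetical example invocations (flags as defined in argparser above):
#   python control.py -d -m   # update the database, then the feature matrix
#   python control.py -r      # print a cross-validation report
#   python control.py -c      # classify and symlink predicted likes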
| juix/scripts | motherless-ai/control.py | Python | gpl-3.0 | 2,116 |
"""
dal_camera v1.0.0
Auteur: Bruno DELATTRE
Date : 16/09/2016
"""
class DAL_Camera:
def __init__(self, connection, cursor):
self.connection = connection
self.cursor = cursor
""" Select"""
def get_last_picture_id(self):
rows = self.cursor.execute('SELECT id_picture FROM camera')
index = 0
for row in rows:
index = row[0]
return index
def get_last_video_id(self):
rows = self.cursor.execute('SELECT id_video FROM camera')
index = 0
for row in rows:
index = row[0]
return index
""" Update """
def set_last_picture_id(self, value):
try:
self.cursor.execute('UPDATE camera SET id_picture = "' + str(value) + '"')
self.connection.commit()
        except Exception:
self.connection.rollback()
def set_last_video_id(self, value):
try:
self.cursor.execute('UPDATE camera SET id_video = "' + str(value) + '"')
self.connection.commit()
        except Exception:
self.connection.rollback()
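# Hypothetical usage sketch with an in-memory SQLite database; the one-row
# ``camera`` schema below is an assumption inferred from the queries above.
if __name__ == '__main__':
    import sqlite3
    connection = sqlite3.connect(':memory:')
    cursor = connection.cursor()
    cursor.execute('CREATE TABLE camera (id_picture INTEGER, id_video INTEGER)')
    cursor.execute('INSERT INTO camera VALUES (0, 0)')
    dal = DAL_Camera(connection, cursor)
    dal.set_last_picture_id(dal.get_last_picture_id() + 1)
    assert dal.get_last_picture_id() == 1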
| delattreb/StratoBalloon | src/dal/dal_camera.py | Python | gpl-3.0 | 1,112 |
"""
WSGI config for leyaproject project.
This module contains the WSGI application used by Django's development server
and any production WSGI deployments. It should expose a module-level variable
named ``application``. Django's ``runserver`` and ``runfcgi`` commands discover
this application via the ``WSGI_APPLICATION`` setting.
Usually you will have the standard Django WSGI application here, but it also
might make sense to replace the whole Django WSGI application with a custom one
that later delegates to the Django one. For example, you could introduce WSGI
middleware here, or combine a Django application with an application of another
framework.
"""
import os
# We defer to a DJANGO_SETTINGS_MODULE already in the environment. This breaks
# if running multiple sites in the same mod_wsgi process. To fix this, use
# mod_wsgi daemon mode with each site in its own daemon process, or use
# os.environ["DJANGO_SETTINGS_MODULE"] = "leyaproject.settings"
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "leya.settings.settings")
# This application object is used by any WSGI server configured to use this
# file. This includes Django's development server, if the WSGI_APPLICATION
# setting points here.
from django.core.wsgi import get_wsgi_application
application = get_wsgi_application()
# Apply WSGI middleware here.
# from helloworld.wsgi import HelloWorldApplication
# application = HelloWorldApplication(application)
| paulormart/leyaproject | leya/wsgi.py | Python | mit | 1,436 |
# -*- coding: utf-8 -*-
"""UI view definitions."""
from logging import getLogger
from urllib.parse import parse_qs
from django.http.request import HttpRequest
from django.http.response import HttpResponse
from eventkit_cloud.core.helpers import get_cached_model
from eventkit_cloud.tasks.models import DataProvider
from eventkit_cloud.utils.map_query import get_map_query
from eventkit_cloud.utils.mapproxy import create_mapproxy_app
logger = getLogger(__file__)
def map(request: HttpRequest, slug: str, path: str) -> HttpResponse:
"""
    Proxies a request to MapProxy in order to fetch map tiles.
    :param request: The HTTP request.
:param slug: A string matching the slug of a DataProvider.
:param path: The rest of the url context (i.e. path to the tile some_service/0/0/0.png).
:return: The HttpResponse.
"""
mapproxy_app = create_mapproxy_app(slug, request.user)
params = parse_qs(request.META["QUERY_STRING"])
script_name = f"/map/{slug}"
mp_response = mapproxy_app.get(
path, params, request.headers, extra_environ=dict(SCRIPT_NAME=script_name), expect_errors=True
)
response = HttpResponse(mp_response.body, status=mp_response.status_int)
for header, value in mp_response.headers.items():
response[header] = value
if params.get("REQUEST") == ["GetFeatureInfo"]:
provider = get_cached_model(DataProvider, "slug", slug)
if response.status_code in [200, 202]:
try:
map_query = get_map_query(provider.metadata.get("type"))
response = map_query().get_geojson(response)
except Exception as e:
logger.error(e)
response.status_code = 500
response.content = "No data available."
else:
if provider.metadata:
response.content = "The service was unable to provide data for this location."
else:
response.content = "No data is available for this service."
response["Content-length"] = len(response.content)
return response
| venicegeo/eventkit-cloud | eventkit_cloud/utils/views.py | Python | bsd-3-clause | 2,084 |
#
#
# Copyright (C) 2006, 2007, 2008, 2009, 2010, 2011, 2012, 2013 Google Inc.
# All rights reserved.
#
# Redistribution and use in source and binary forms, with or without
# modification, are permitted provided that the following conditions are
# met:
#
# 1. Redistributions of source code must retain the above copyright notice,
# this list of conditions and the following disclaimer.
#
# 2. Redistributions in binary form must reproduce the above copyright
# notice, this list of conditions and the following disclaimer in the
# documentation and/or other materials provided with the distribution.
#
# THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS "AS
# IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT LIMITED
# TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR
# PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT HOLDER OR
# CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
# EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
# PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
# PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
# LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
# NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
# SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
"""Logical units dealing with networks."""
from ganeti import constants
from ganeti import errors
from ganeti import locking
from ganeti import network
from ganeti import objects
from ganeti import qlang
from ganeti import query
from ganeti import utils
from ganeti.cmdlib.base import LogicalUnit, NoHooksLU, QueryBase
from ganeti.cmdlib.common import ShareAll
def _BuildNetworkHookEnv(name, subnet, gateway, network6, gateway6,
mac_prefix, tags):
"""Builds network related env variables for hooks
This builds the hook environment from individual variables.
@type name: string
@param name: the name of the network
@type subnet: string
@param subnet: the ipv4 subnet
@type gateway: string
@param gateway: the ipv4 gateway
@type network6: string
@param network6: the ipv6 subnet
@type gateway6: string
@param gateway6: the ipv6 gateway
@type mac_prefix: string
@param mac_prefix: the mac_prefix
@type tags: list
@param tags: the tags of the network
"""
env = {}
if name:
env["NETWORK_NAME"] = name
if subnet:
env["NETWORK_SUBNET"] = subnet
if gateway:
env["NETWORK_GATEWAY"] = gateway
if network6:
env["NETWORK_SUBNET6"] = network6
if gateway6:
env["NETWORK_GATEWAY6"] = gateway6
if mac_prefix:
env["NETWORK_MAC_PREFIX"] = mac_prefix
if tags:
env["NETWORK_TAGS"] = " ".join(tags)
return env
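# Worked example for the helper above (values are invented): parameters
# that are unset simply do not appear in the resulting environment.
assert _BuildNetworkHookEnv("net1", "10.0.0.0/24", "10.0.0.1",
                            None, None, "aa:00:00", ["prod"]) == {
  "NETWORK_NAME": "net1",
  "NETWORK_SUBNET": "10.0.0.0/24",
  "NETWORK_GATEWAY": "10.0.0.1",
  "NETWORK_MAC_PREFIX": "aa:00:00",
  "NETWORK_TAGS": "prod",
  }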
class LUNetworkAdd(LogicalUnit):
"""Logical unit for creating networks.
"""
HPATH = "network-add"
HTYPE = constants.HTYPE_NETWORK
REQ_BGL = False
def BuildHooksNodes(self):
"""Build hooks nodes.
"""
mn = self.cfg.GetMasterNode()
return ([mn], [mn])
def CheckArguments(self):
if self.op.mac_prefix:
self.op.mac_prefix = \
utils.NormalizeAndValidateThreeOctetMacPrefix(self.op.mac_prefix)
def ExpandNames(self):
self.network_uuid = self.cfg.GenerateUniqueID(self.proc.GetECId())
if self.op.conflicts_check:
self.share_locks[locking.LEVEL_NODE] = 1
self.share_locks[locking.LEVEL_NODE_ALLOC] = 1
self.needed_locks = {
locking.LEVEL_NODE: locking.ALL_SET,
locking.LEVEL_NODE_ALLOC: locking.ALL_SET,
}
else:
self.needed_locks = {}
self.add_locks[locking.LEVEL_NETWORK] = self.network_uuid
def CheckPrereq(self):
if self.op.network is None:
raise errors.OpPrereqError("Network must be given",
errors.ECODE_INVAL)
try:
existing_uuid = self.cfg.LookupNetwork(self.op.network_name)
except errors.OpPrereqError:
pass
else:
raise errors.OpPrereqError("Desired network name '%s' already exists as a"
" network (UUID: %s)" %
(self.op.network_name, existing_uuid),
errors.ECODE_EXISTS)
# Check tag validity
for tag in self.op.tags:
objects.TaggableObject.ValidateTag(tag)
def BuildHooksEnv(self):
"""Build hooks env.
"""
args = {
"name": self.op.network_name,
"subnet": self.op.network,
"gateway": self.op.gateway,
"network6": self.op.network6,
"gateway6": self.op.gateway6,
"mac_prefix": self.op.mac_prefix,
"tags": self.op.tags,
}
return _BuildNetworkHookEnv(**args) # pylint: disable=W0142
def Exec(self, feedback_fn):
"""Add the ip pool to the cluster.
"""
nobj = objects.Network(name=self.op.network_name,
network=self.op.network,
gateway=self.op.gateway,
network6=self.op.network6,
gateway6=self.op.gateway6,
mac_prefix=self.op.mac_prefix,
uuid=self.network_uuid)
# Initialize the associated address pool
try:
pool = network.AddressPool.InitializeNetwork(nobj)
except errors.AddressPoolError, err:
raise errors.OpExecError("Cannot create IP address pool for network"
" '%s': %s" % (self.op.network_name, err))
# Check if we need to reserve the nodes and the cluster master IP
# These may not be allocated to any instances in routed mode, as
# they wouldn't function anyway.
if self.op.conflicts_check:
for node in self.cfg.GetAllNodesInfo().values():
for ip in [node.primary_ip, node.secondary_ip]:
try:
if pool.Contains(ip):
pool.Reserve(ip, external=True)
self.LogInfo("Reserved IP address of node '%s' (%s)",
node.name, ip)
except errors.AddressPoolError, err:
self.LogWarning("Cannot reserve IP address '%s' of node '%s': %s",
ip, node.name, err)
master_ip = self.cfg.GetClusterInfo().master_ip
try:
if pool.Contains(master_ip):
pool.Reserve(master_ip, external=True)
self.LogInfo("Reserved cluster master IP address (%s)", master_ip)
except errors.AddressPoolError, err:
self.LogWarning("Cannot reserve cluster master IP address (%s): %s",
master_ip, err)
if self.op.add_reserved_ips:
for ip in self.op.add_reserved_ips:
try:
pool.Reserve(ip, external=True)
except errors.AddressPoolError, err:
raise errors.OpExecError("Cannot reserve IP address '%s': %s" %
(ip, err))
if self.op.tags:
for tag in self.op.tags:
nobj.AddTag(tag)
self.cfg.AddNetwork(nobj, self.proc.GetECId(), check_uuid=False)
del self.remove_locks[locking.LEVEL_NETWORK]
class LUNetworkRemove(LogicalUnit):
HPATH = "network-remove"
HTYPE = constants.HTYPE_NETWORK
REQ_BGL = False
def ExpandNames(self):
self.network_uuid = self.cfg.LookupNetwork(self.op.network_name)
self.share_locks[locking.LEVEL_NODEGROUP] = 1
self.needed_locks = {
locking.LEVEL_NETWORK: [self.network_uuid],
locking.LEVEL_NODEGROUP: locking.ALL_SET,
}
def CheckPrereq(self):
"""Check prerequisites.
    This checks that the network to be removed is not connected to any
    node group.
"""
    # Verify that the network is not connected.
node_groups = [group.name
for group in self.cfg.GetAllNodeGroupsInfo().values()
if self.network_uuid in group.networks]
if node_groups:
self.LogWarning("Network '%s' is connected to the following"
" node groups: %s" %
(self.op.network_name,
utils.CommaJoin(utils.NiceSort(node_groups))))
raise errors.OpPrereqError("Network still connected", errors.ECODE_STATE)
def BuildHooksEnv(self):
"""Build hooks env.
"""
return {
"NETWORK_NAME": self.op.network_name,
}
def BuildHooksNodes(self):
"""Build hooks nodes.
"""
mn = self.cfg.GetMasterNode()
return ([mn], [mn])
def Exec(self, feedback_fn):
"""Remove the network.
"""
try:
self.cfg.RemoveNetwork(self.network_uuid)
except errors.ConfigurationError:
raise errors.OpExecError("Network '%s' with UUID %s disappeared" %
(self.op.network_name, self.network_uuid))
class LUNetworkSetParams(LogicalUnit):
"""Modifies the parameters of a network.
"""
HPATH = "network-modify"
HTYPE = constants.HTYPE_NETWORK
REQ_BGL = False
def CheckArguments(self):
if (self.op.gateway and
(self.op.add_reserved_ips or self.op.remove_reserved_ips)):
raise errors.OpPrereqError("Cannot modify gateway and reserved ips"
" at once", errors.ECODE_INVAL)
def ExpandNames(self):
self.network_uuid = self.cfg.LookupNetwork(self.op.network_name)
self.needed_locks = {
locking.LEVEL_NETWORK: [self.network_uuid],
}
def CheckPrereq(self):
"""Check prerequisites.
"""
self.network = self.cfg.GetNetwork(self.network_uuid)
self.gateway = self.network.gateway
self.mac_prefix = self.network.mac_prefix
self.network6 = self.network.network6
self.gateway6 = self.network.gateway6
self.tags = self.network.tags
self.pool = network.AddressPool(self.network)
if self.op.gateway:
if self.op.gateway == constants.VALUE_NONE:
self.gateway = None
else:
self.gateway = self.op.gateway
if self.pool.IsReserved(self.gateway):
raise errors.OpPrereqError("Gateway IP address '%s' is already"
" reserved" % self.gateway,
errors.ECODE_STATE)
if self.op.mac_prefix:
if self.op.mac_prefix == constants.VALUE_NONE:
self.mac_prefix = None
else:
self.mac_prefix = \
utils.NormalizeAndValidateThreeOctetMacPrefix(self.op.mac_prefix)
if self.op.gateway6:
if self.op.gateway6 == constants.VALUE_NONE:
self.gateway6 = None
else:
self.gateway6 = self.op.gateway6
if self.op.network6:
if self.op.network6 == constants.VALUE_NONE:
self.network6 = None
else:
self.network6 = self.op.network6
def BuildHooksEnv(self):
"""Build hooks env.
"""
args = {
"name": self.op.network_name,
"subnet": self.network.network,
"gateway": self.gateway,
"network6": self.network6,
"gateway6": self.gateway6,
"mac_prefix": self.mac_prefix,
"tags": self.tags,
}
return _BuildNetworkHookEnv(**args) # pylint: disable=W0142
def BuildHooksNodes(self):
"""Build hooks nodes.
"""
mn = self.cfg.GetMasterNode()
return ([mn], [mn])
def Exec(self, feedback_fn):
"""Modifies the network.
"""
#TODO: reserve/release via temporary reservation manager
# extend cfg.ReserveIp/ReleaseIp with the external flag
if self.op.gateway:
if self.gateway == self.network.gateway:
self.LogWarning("Gateway is already %s", self.gateway)
else:
if self.gateway:
self.pool.Reserve(self.gateway, external=True)
if self.network.gateway:
self.pool.Release(self.network.gateway, external=True)
self.network.gateway = self.gateway
if self.op.add_reserved_ips:
for ip in self.op.add_reserved_ips:
try:
self.pool.Reserve(ip, external=True)
except errors.AddressPoolError, err:
self.LogWarning("Cannot reserve IP address %s: %s", ip, err)
if self.op.remove_reserved_ips:
for ip in self.op.remove_reserved_ips:
if ip == self.network.gateway:
self.LogWarning("Cannot unreserve Gateway's IP")
continue
try:
self.pool.Release(ip, external=True)
except errors.AddressPoolError, err:
self.LogWarning("Cannot release IP address %s: %s", ip, err)
if self.op.mac_prefix:
self.network.mac_prefix = self.mac_prefix
if self.op.network6:
self.network.network6 = self.network6
if self.op.gateway6:
self.network.gateway6 = self.gateway6
self.pool.Validate()
self.cfg.Update(self.network, feedback_fn)
class NetworkQuery(QueryBase):
FIELDS = query.NETWORK_FIELDS
def ExpandNames(self, lu):
lu.needed_locks = {}
lu.share_locks = ShareAll()
self.do_locking = self.use_locking
all_networks = lu.cfg.GetAllNetworksInfo()
name_to_uuid = dict((n.name, n.uuid) for n in all_networks.values())
if self.names:
missing = []
self.wanted = []
for name in self.names:
if name in name_to_uuid:
self.wanted.append(name_to_uuid[name])
else:
missing.append(name)
if missing:
raise errors.OpPrereqError("Some networks do not exist: %s" % missing,
errors.ECODE_NOENT)
else:
self.wanted = locking.ALL_SET
if self.do_locking:
lu.needed_locks[locking.LEVEL_NETWORK] = self.wanted
if query.NETQ_INST in self.requested_data:
lu.needed_locks[locking.LEVEL_INSTANCE] = locking.ALL_SET
if query.NETQ_GROUP in self.requested_data:
lu.needed_locks[locking.LEVEL_NODEGROUP] = locking.ALL_SET
def DeclareLocks(self, lu, level):
pass
def _GetQueryData(self, lu):
"""Computes the list of networks and their attributes.
"""
all_networks = lu.cfg.GetAllNetworksInfo()
network_uuids = self._GetNames(lu, all_networks.keys(),
locking.LEVEL_NETWORK)
do_instances = query.NETQ_INST in self.requested_data
do_groups = query.NETQ_GROUP in self.requested_data
network_to_instances = None
network_to_groups = None
# For NETQ_GROUP, we need to map network->[groups]
if do_groups:
all_groups = lu.cfg.GetAllNodeGroupsInfo()
network_to_groups = dict((uuid, []) for uuid in network_uuids)
for _, group in all_groups.iteritems():
for net_uuid in network_uuids:
netparams = group.networks.get(net_uuid, None)
if netparams:
info = (group.name, netparams[constants.NIC_MODE],
netparams[constants.NIC_LINK])
network_to_groups[net_uuid].append(info)
if do_instances:
all_instances = lu.cfg.GetAllInstancesInfo()
network_to_instances = dict((uuid, []) for uuid in network_uuids)
for instance in all_instances.values():
for nic in instance.nics:
if nic.network in network_uuids:
if instance.name not in network_to_instances[nic.network]:
network_to_instances[nic.network].append(instance.name)
if query.NETQ_STATS in self.requested_data:
stats = \
dict((uuid,
self._GetStats(network.AddressPool(all_networks[uuid])))
for uuid in network_uuids)
else:
stats = None
return query.NetworkQueryData([all_networks[uuid]
for uuid in network_uuids],
network_to_groups,
network_to_instances,
stats)
@staticmethod
def _GetStats(pool):
"""Returns statistics for a network address pool.
"""
return {
"free_count": pool.GetFreeCount(),
"reserved_count": pool.GetReservedCount(),
"map": pool.GetMap(),
"external_reservations":
utils.CommaJoin(pool.GetExternalReservations()),
}
class LUNetworkQuery(NoHooksLU):
"""Logical unit for querying networks.
"""
REQ_BGL = False
def CheckArguments(self):
self.nq = NetworkQuery(qlang.MakeSimpleFilter("name", self.op.names),
self.op.output_fields, self.op.use_locking)
def ExpandNames(self):
self.nq.ExpandNames(self)
def Exec(self, feedback_fn):
return self.nq.OldStyleQuery(self)
def _FmtNetworkConflict(details):
"""Utility for L{_NetworkConflictCheck}.
"""
return utils.CommaJoin("nic%s/%s" % (idx, ipaddr)
for (idx, ipaddr) in details)
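# A self-contained sketch of what the helper above produces, assuming
# utils.CommaJoin joins with ", " (the data is invented):
#   _FmtNetworkConflict([(0, "10.0.0.5"), (2, "10.0.0.9")])
#   -> "nic0/10.0.0.5, nic2/10.0.0.9"
_example = ", ".join("nic%s/%s" % (idx, ip)
                     for (idx, ip) in [(0, "10.0.0.5"), (2, "10.0.0.9")])
assert _example == "nic0/10.0.0.5, nic2/10.0.0.9"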
def _NetworkConflictCheck(lu, check_fn, action, instances):
"""Checks for network interface conflicts with a network.
@type lu: L{LogicalUnit}
@type check_fn: callable receiving one parameter (L{objects.NIC}) and
returning boolean
@param check_fn: Function checking for conflict
@type action: string
@param action: Part of error message (see code)
@param instances: the instances to check
@type instances: list of instance objects
@raise errors.OpPrereqError: If conflicting IP addresses are found.
"""
conflicts = []
for instance in instances:
instconflicts = [(idx, nic.ip)
for (idx, nic) in enumerate(instance.nics)
if check_fn(nic)]
if instconflicts:
conflicts.append((instance.name, instconflicts))
if conflicts:
lu.LogWarning("IP addresses from network '%s', which is about to %s"
" node group '%s', are in use: %s" %
(lu.network_name, action, lu.group.name,
utils.CommaJoin(("%s: %s" %
(name, _FmtNetworkConflict(details)))
for (name, details) in conflicts)))
raise errors.OpPrereqError("Conflicting IP addresses found; "
" remove/modify the corresponding network"
" interfaces", errors.ECODE_STATE)
class LUNetworkConnect(LogicalUnit):
"""Connect a network to a nodegroup
"""
HPATH = "network-connect"
HTYPE = constants.HTYPE_NETWORK
REQ_BGL = False
def ExpandNames(self):
self.network_name = self.op.network_name
self.group_name = self.op.group_name
self.network_mode = self.op.network_mode
self.network_link = self.op.network_link
self.network_vlan = self.op.network_vlan
self.network_uuid = self.cfg.LookupNetwork(self.network_name)
self.group_uuid = self.cfg.LookupNodeGroup(self.group_name)
self.needed_locks = {
locking.LEVEL_NODEGROUP: [self.group_uuid],
}
if self.op.conflicts_check:
self.needed_locks[locking.LEVEL_INSTANCE] = locking.ALL_SET
self.needed_locks[locking.LEVEL_NETWORK] = [self.network_uuid]
self.share_locks[locking.LEVEL_NETWORK] = 1
self.share_locks[locking.LEVEL_INSTANCE] = 1
def DeclareLocks(self, level):
pass
def BuildHooksEnv(self):
ret = {
"GROUP_NAME": self.group_name,
"GROUP_NETWORK_MODE": self.network_mode,
"GROUP_NETWORK_LINK": self.network_link,
"GROUP_NETWORK_VLAN": self.network_vlan,
}
return ret
def BuildHooksNodes(self):
node_uuids = self.cfg.GetNodeGroup(self.group_uuid).members
return (node_uuids, node_uuids)
def CheckPrereq(self):
owned_groups = frozenset(self.owned_locks(locking.LEVEL_NODEGROUP))
assert self.group_uuid in owned_groups
# Check if locked instances are still correct
owned_instance_names = frozenset(self.owned_locks(locking.LEVEL_INSTANCE))
self.netparams = {
constants.NIC_MODE: self.network_mode,
constants.NIC_LINK: self.network_link,
constants.NIC_VLAN: self.network_vlan,
}
objects.NIC.CheckParameterSyntax(self.netparams)
self.group = self.cfg.GetNodeGroup(self.group_uuid)
#if self.network_mode == constants.NIC_MODE_BRIDGED:
# _CheckNodeGroupBridgesExist(self, self.network_link, self.group_uuid)
self.connected = False
if self.network_uuid in self.group.networks:
self.LogWarning("Network '%s' is already mapped to group '%s'" %
(self.network_name, self.group.name))
self.connected = True
# check only if not already connected
elif self.op.conflicts_check:
pool = network.AddressPool(self.cfg.GetNetwork(self.network_uuid))
_NetworkConflictCheck(
self, lambda nic: pool.Contains(nic.ip), "connect to",
[instance_info for (_, instance_info) in
self.cfg.GetMultiInstanceInfoByName(owned_instance_names)])
def Exec(self, feedback_fn):
# Connect the network and update the group only if not already connected
if not self.connected:
self.group.networks[self.network_uuid] = self.netparams
self.cfg.Update(self.group, feedback_fn)
class LUNetworkDisconnect(LogicalUnit):
"""Disconnect a network to a nodegroup
"""
HPATH = "network-disconnect"
HTYPE = constants.HTYPE_NETWORK
REQ_BGL = False
def ExpandNames(self):
self.network_name = self.op.network_name
self.group_name = self.op.group_name
self.network_uuid = self.cfg.LookupNetwork(self.network_name)
self.group_uuid = self.cfg.LookupNodeGroup(self.group_name)
self.needed_locks = {
locking.LEVEL_INSTANCE: locking.ALL_SET,
locking.LEVEL_NODEGROUP: [self.group_uuid],
}
self.share_locks[locking.LEVEL_INSTANCE] = 1
def DeclareLocks(self, level):
pass
def BuildHooksEnv(self):
ret = {
"GROUP_NAME": self.group_name,
}
if self.connected:
ret.update({
"GROUP_NETWORK_MODE": self.netparams[constants.NIC_MODE],
"GROUP_NETWORK_LINK": self.netparams[constants.NIC_LINK],
"GROUP_NETWORK_VLAN": self.netparams[constants.NIC_VLAN],
})
return ret
def BuildHooksNodes(self):
nodes = self.cfg.GetNodeGroup(self.group_uuid).members
return (nodes, nodes)
def CheckPrereq(self):
owned_groups = frozenset(self.owned_locks(locking.LEVEL_NODEGROUP))
assert self.group_uuid in owned_groups
# Check if locked instances are still correct
owned_instances = frozenset(self.owned_locks(locking.LEVEL_INSTANCE))
self.group = self.cfg.GetNodeGroup(self.group_uuid)
self.connected = True
if self.network_uuid not in self.group.networks:
self.LogWarning("Network '%s' is not mapped to group '%s'",
self.network_name, self.group.name)
self.connected = False
# We need this check only if network is not already connected
else:
_NetworkConflictCheck(
self, lambda nic: nic.network == self.network_uuid, "disconnect from",
[instance_info for (_, instance_info) in
self.cfg.GetMultiInstanceInfoByName(owned_instances)])
self.netparams = self.group.networks.get(self.network_uuid)
def Exec(self, feedback_fn):
# Disconnect the network and update the group only if network is connected
if self.connected:
del self.group.networks[self.network_uuid]
self.cfg.Update(self.group, feedback_fn)
| apyrgio/snf-ganeti | lib/cmdlib/network.py | Python | bsd-2-clause | 23,099 |
#!/usr/bin/env python
# Copyright (c) 2012 Cloudera, Inc. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# Superclass of all HS2 tests containing commonly used functions.
from getpass import getuser
from TCLIService import TCLIService
from ImpalaService import ImpalaHiveServer2Service
from thrift.transport.TSocket import TSocket
from thrift.transport.TTransport import TBufferedTransport
from thrift.protocol import TBinaryProtocol
from tests.common.impala_test_suite import ImpalaTestSuite, IMPALAD_HS2_HOST_PORT
def needs_session(protocol_version=
TCLIService.TProtocolVersion.HIVE_CLI_SERVICE_PROTOCOL_V6):
def session_decorator(fn):
"""Decorator that establishes a session and sets self.session_handle. When the test is
finished, the session is closed.
"""
def add_session(self):
open_session_req = TCLIService.TOpenSessionReq()
open_session_req.username = getuser()
open_session_req.configuration = dict()
open_session_req.client_protocol = protocol_version
resp = self.hs2_client.OpenSession(open_session_req)
HS2TestSuite.check_response(resp)
self.session_handle = resp.sessionHandle
assert protocol_version <= resp.serverProtocolVersion
try:
fn(self)
finally:
close_session_req = TCLIService.TCloseSessionReq()
close_session_req.sessionHandle = resp.sessionHandle
HS2TestSuite.check_response(self.hs2_client.CloseSession(close_session_req))
self.session_handle = None
return add_session
return session_decorator
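# A self-contained sketch of the open/run/close pattern the decorator above
# implements (no Thrift involved; all names here are invented): the wrapped
# test runs with a handle already opened, and the handle is released even
# if the test raises.
def _with_handle(fn):
  def _wrapped(self):
    self.handle = object()          # stands in for OpenSession()
    try:
      fn(self)
    finally:
      self.handle = None            # stands in for CloseSession()
  return _wrapped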
def operation_id_to_query_id(operation_id):
lo, hi = operation_id.guid[:8], operation_id.guid[8:]
lo = ''.join(['%0.2X' % ord(c) for c in lo[::-1]])
hi = ''.join(['%0.2X' % ord(c) for c in hi[::-1]])
return "%s:%s" % (lo, hi)
class HS2TestSuite(ImpalaTestSuite):
TEST_DB = 'hs2_db'
HS2_V6_COLUMN_TYPES = ['boolVal', 'stringVal', 'byteVal', 'i16Val', 'i32Val', 'i64Val',
'doubleVal', 'binaryVal']
def setup(self):
self.cleanup_db(self.TEST_DB)
host, port = IMPALAD_HS2_HOST_PORT.split(":")
self.socket = TSocket(host, port)
self.transport = TBufferedTransport(self.socket)
self.transport.open()
self.protocol = TBinaryProtocol.TBinaryProtocol(self.transport)
self.hs2_client = ImpalaHiveServer2Service.Client(self.protocol)
def teardown(self):
self.cleanup_db(self.TEST_DB)
if self.socket:
self.socket.close()
@staticmethod
def check_response(response,
expected_status_code = TCLIService.TStatusCode.SUCCESS_STATUS,
expected_error_prefix = None):
assert response.status.statusCode == expected_status_code
if expected_status_code != TCLIService.TStatusCode.SUCCESS_STATUS\
and expected_error_prefix is not None:
assert response.status.errorMessage.startswith(expected_error_prefix)
def close(self, op_handle):
close_op_req = TCLIService.TCloseOperationReq()
close_op_req.operationHandle = op_handle
close_op_resp = self.hs2_client.CloseOperation(close_op_req)
assert close_op_resp.status.statusCode == TCLIService.TStatusCode.SUCCESS_STATUS
def get_num_rows(self, result_set):
# rows will always be set, so the only way to tell if we should use it is to see if
# any columns are set
if result_set.columns is None or len(result_set.columns) == 0:
return len(result_set.rows)
assert result_set.columns is not None
for col_type in HS2TestSuite.HS2_V6_COLUMN_TYPES:
typed_col = getattr(result_set.columns[0], col_type)
      if typed_col is not None:
return len(typed_col.values)
assert False
def fetch(self, handle, orientation, size, expected_num_rows = None):
"""Fetches at most size number of rows from the query identified by the given
operation handle. Uses the given fetch orientation. Asserts that the fetch returns
a success status, and that the number of rows returned is equal to size, or
equal to the given expected_num_rows (if one was given)."""
fetch_results_req = TCLIService.TFetchResultsReq()
fetch_results_req.operationHandle = handle
fetch_results_req.orientation = orientation
fetch_results_req.maxRows = size
fetch_results_resp = self.hs2_client.FetchResults(fetch_results_req)
HS2TestSuite.check_response(fetch_results_resp)
num_rows = size
if expected_num_rows is not None:
num_rows = expected_num_rows
assert self.get_num_rows(fetch_results_resp.results) == num_rows
return fetch_results_resp
def fetch_until(self, handle, orientation, size):
"""Tries to fetch exactly 'size' rows from the given query handle, with the given
fetch orientation. If fewer rows than 'size' are returned by the first fetch, repeated
fetches are issued until either 0 rows are returned, or the number of rows fetched is
equal to 'size'"""
fetch_results_req = TCLIService.TFetchResultsReq()
fetch_results_req.operationHandle = handle
fetch_results_req.orientation = orientation
fetch_results_req.maxRows = size
fetch_results_resp = self.hs2_client.FetchResults(fetch_results_req)
HS2TestSuite.check_response(fetch_results_resp)
num_rows = size
num_rows_fetched = self.get_num_rows(fetch_results_resp.results)
while num_rows_fetched < size:
fetch_results_req.maxRows = size - num_rows_fetched
fetch_results_resp = self.hs2_client.FetchResults(fetch_results_req)
HS2TestSuite.check_response(fetch_results_resp)
last_fetch_size = self.get_num_rows(fetch_results_resp.results)
assert last_fetch_size > 0
num_rows_fetched += last_fetch_size
assert num_rows_fetched == size
def fetch_fail(self, handle, orientation, expected_error_prefix):
"""Attempts to fetch rows from the query identified by the given operation handle.
Asserts that the fetch returns an error with an error message matching the given
expected_error_prefix."""
fetch_results_req = TCLIService.TFetchResultsReq()
fetch_results_req.operationHandle = handle
fetch_results_req.orientation = orientation
fetch_results_req.maxRows = 100
fetch_results_resp = self.hs2_client.FetchResults(fetch_results_req)
HS2TestSuite.check_response(fetch_results_resp, TCLIService.TStatusCode.ERROR_STATUS,
expected_error_prefix)
return fetch_results_resp
def result_metadata(self, handle):
""" Gets the schema for the query identified by the handle """
req = TCLIService.TGetResultSetMetadataReq()
req.operationHandle = handle
resp = self.hs2_client.GetResultSetMetadata(req)
HS2TestSuite.check_response(resp)
return resp
| grundprinzip/Impala | tests/hs2/hs2_test_suite.py | Python | apache-2.0 | 7,230 |
#-----------------------------------------------------------------------------
# Copyright (c) 2012 - 2019, Anaconda, Inc., and Bokeh Contributors.
# All rights reserved.
#
# The full license is in the file LICENSE.txt, distributed with this software.
#-----------------------------------------------------------------------------
''' Provide events that represent various changes to Bokeh Documents.
These events are used internally to signal changes to Documents. For
information about user-facing (e.g. UI or tool) events, see the reference
for :ref:`bokeh.events`.
'''
#-----------------------------------------------------------------------------
# Boilerplate
#-----------------------------------------------------------------------------
from __future__ import absolute_import, division, print_function, unicode_literals
import logging
log = logging.getLogger(__name__)
#-----------------------------------------------------------------------------
# Imports
#-----------------------------------------------------------------------------
# Standard library imports
# External imports
# Bokeh imports
from ..util.dependencies import import_optional
#-----------------------------------------------------------------------------
# Globals and constants
#-----------------------------------------------------------------------------
pd = import_optional('pandas')
__all__ = (
'ColumnDataChangedEvent',
'ColumnsStreamedEvent',
'ColumnsPatchedEvent',
'DocumentChangedEvent',
'DocumentPatchedEvent',
'ModelChangedEvent',
'RootAddedEvent',
'RootRemovedEvent',
'SessionCallbackAdded',
'SessionCallbackRemoved',
    'TitleChangedEvent',
)
#-----------------------------------------------------------------------------
# General API
#-----------------------------------------------------------------------------
#-----------------------------------------------------------------------------
# Dev API
#-----------------------------------------------------------------------------
class DocumentChangedEvent(object):
''' Base class for all internal events representing a change to a
Bokeh Document.
'''
def __init__(self, document, setter=None, callback_invoker=None):
'''
Args:
document (Document) :
A Bokeh document that is to be updated.
setter (ClientSession or ServerSession or None, optional) :
This is used to prevent "boomerang" updates to Bokeh apps.
(default: None)
In the context of a Bokeh server application, incoming updates
to properties will be annotated with the session that is
doing the updating. This value is propagated through any
subsequent change notifications that the update triggers.
The session can compare the event setter to itself, and
suppress any updates that originate from itself.
callback_invoker (callable, optional) :
A callable that will invoke any Model callbacks that should
be executed in response to the change that triggered this
event. (default: None)
'''
self.document = document
self.setter = setter
self.callback_invoker = callback_invoker
def combine(self, event):
'''
'''
return False
def dispatch(self, receiver):
''' Dispatch handling of this event to a receiver.
This method will invoke ``receiver._document_changed`` if it exists.
'''
if hasattr(receiver, '_document_changed'):
receiver._document_changed(self)
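# A minimal sketch of the dispatch protocol described above: receivers opt
# in by defining ``_document_changed``. The class below is illustrative
# only, not part of Bokeh; dispatching an event to an instance of it would
# invoke the method with the event.
class _ExampleReceiver(object):
    ''' Receives document change notifications via double dispatch. '''
    def _document_changed(self, event):
        log.debug("saw change to document %r", event.document)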
class DocumentPatchedEvent(DocumentChangedEvent):
''' A Base class for events that represent updating Bokeh Models and
their properties.
'''
def dispatch(self, receiver):
''' Dispatch handling of this event to a receiver.
This method will invoke ``receiver._document_patched`` if it exists.
'''
super(DocumentPatchedEvent, self).dispatch(receiver)
if hasattr(receiver, '_document_patched'):
receiver._document_patched(self)
def generate(self, references, buffers):
''' Create a JSON representation of this event suitable for sending
to clients.
*Sub-classes must implement this method.*
Args:
references (dict[str, Model]) :
If the event requires references to certain models in order to
function, they may be collected here.
**This is an "out" parameter**. The values it contains will be
modified in-place.
buffers (set) :
If the event needs to supply any additional Bokeh protocol
buffers, they may be added to this set.
**This is an "out" parameter**. The values it contains will be
modified in-place.
'''
raise NotImplementedError()
class ModelChangedEvent(DocumentPatchedEvent):
''' A concrete event representing updating an attribute and value of a
specific Bokeh Model.
This is the "standard" way of updating most Bokeh model attributes. For
special casing situations that can optimized (e.g. streaming, etc.), a
``hint`` may be supplied that overrides normal mechanisms.
'''
def __init__(self, document, model, attr, old, new, serializable_new, hint=None, setter=None, callback_invoker=None):
'''
Args:
document (Document) :
A Bokeh document that is to be updated.
model (Model) :
A Model to update
attr (str) :
The name of the attribute to update on the model.
old (object) :
The old value of the attribute
new (object) :
The new value of the attribute
serializable_new (object) :
A serialized (JSON) version of the new value. It may be
``None`` if a hint is supplied.
hint (DocumentPatchedEvent, optional) :
When appropriate, a secondary event may be supplied that
modifies the normal update process. For example, in order
to stream or patch data more efficiently than the standard
update mechanism.
setter (ClientSession or ServerSession or None, optional) :
This is used to prevent "boomerang" updates to Bokeh apps.
(default: None)
See :class:`~bokeh.document.events.DocumentChangedEvent`
for more details.
callback_invoker (callable, optional) :
A callable that will invoke any Model callbacks that should
be executed in response to the change that triggered this
event. (default: None)
'''
if setter is None and isinstance(hint, (ColumnsStreamedEvent, ColumnsPatchedEvent)):
setter = hint.setter
super(ModelChangedEvent, self).__init__(document, setter, callback_invoker)
self.model = model
self.attr = attr
self.old = old
self.new = new
self.serializable_new = serializable_new
self.hint = hint
def combine(self, event):
'''
'''
if not isinstance(event, ModelChangedEvent): return False
        # If these are not true, something weird is going on (maybe updates
        # from a Python bokeh.client); don't try to combine.
if self.setter != event.setter: return False
if self.document != event.document: return False
if self.hint:
return self.hint.combine(event.hint)
if (self.model == event.model) and (self.attr == event.attr):
self.new = event.new
self.serializable_new = event.serializable_new
self.callback_invoker = event.callback_invoker
return True
return False
def dispatch(self, receiver):
''' Dispatch handling of this event to a receiver.
        This method will invoke ``receiver._document_model_changed`` if it
exists.
'''
super(ModelChangedEvent, self).dispatch(receiver)
if hasattr(receiver, '_document_model_changed'):
receiver._document_model_changed(self)
def generate(self, references, buffers):
''' Create a JSON representation of this event suitable for sending
to clients.
Args:
references (dict[str, Model]) :
If the event requires references to certain models in order to
function, they may be collected here.
**This is an "out" parameter**. The values it contains will be
modified in-place.
buffers (set) :
If the event needs to supply any additional Bokeh protocol
buffers, they may be added to this set.
**This is an "out" parameter**. The values it contains will be
modified in-place.
'''
from ..model import collect_models
if self.hint is not None:
return self.hint.generate(references, buffers)
value = self.serializable_new
# the new value is an object that may have
# not-yet-in-the-remote-doc references, and may also
# itself not be in the remote doc yet. the remote may
# already have some of the references, but
# unfortunately we don't have an easy way to know
# unless we were to check BEFORE the attr gets changed
# (we need the old _all_models before setting the
# property). So we have to send all the references the
# remote could need, even though it could be inefficient.
# If it turns out we need to fix this we could probably
# do it by adding some complexity.
value_refs = set(collect_models(value))
# we know we don't want a whole new copy of the obj we're patching
# unless it's also the new value
if self.model != value:
value_refs.discard(self.model)
references.update(value_refs)
return { 'kind' : 'ModelChanged',
'model' : self.model.ref,
'attr' : self.attr,
'new' : value }
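# A standalone sketch of the coalescing rule ``ModelChangedEvent.combine``
# implements above: successive changes to the same (model, attr) pair
# collapse into one event that keeps only the latest value. Plain dicts
# stand in for events; all names are invented.
def _combine(prev, nxt):
    if prev['model'] == nxt['model'] and prev['attr'] == nxt['attr']:
        prev['new'] = nxt['new']
        return True
    return False

_e1 = {'model': 'm', 'attr': 'x', 'new': 1}
assert _combine(_e1, {'model': 'm', 'attr': 'x', 'new': 2}) and _e1['new'] == 2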
class ColumnDataChangedEvent(DocumentPatchedEvent):
''' A concrete event representing efficiently replacing *all*
existing data for a :class:`~bokeh.models.sources.ColumnDataSource`
'''
def __init__(self, document, column_source, cols=None, setter=None, callback_invoker=None):
'''
Args:
document (Document) :
A Bokeh document that is to be updated.
column_source (ColumnDataSource) :
cols (list[str]) :
optional explicit list of column names to update. If None, all
columns will be updated (default: None)
setter (ClientSession or ServerSession or None, optional) :
This is used to prevent "boomerang" updates to Bokeh apps.
(default: None)
See :class:`~bokeh.document.events.DocumentChangedEvent`
for more details.
callback_invoker (callable, optional) :
A callable that will invoke any Model callbacks that should
be executed in response to the change that triggered this
event. (default: None)
'''
super(ColumnDataChangedEvent, self).__init__(document, setter, callback_invoker)
self.column_source = column_source
self.cols = cols
def dispatch(self, receiver):
''' Dispatch handling of this event to a receiver.
This method will invoke ``receiver._column_data_changed`` if it exists.
'''
super(ColumnDataChangedEvent, self).dispatch(receiver)
if hasattr(receiver, '_column_data_changed'):
receiver._column_data_changed(self)
def generate(self, references, buffers):
''' Create a JSON representation of this event suitable for sending
to clients.
.. code-block:: python
{
'kind' : 'ColumnDataChanged'
'column_source' : <reference to a CDS>
            'new' : <new data to stream to column_source>
'cols' : <specific columns to update>
}
Args:
references (dict[str, Model]) :
If the event requires references to certain models in order to
function, they may be collected here.
**This is an "out" parameter**. The values it contains will be
modified in-place.
buffers (set) :
If the event needs to supply any additional Bokeh protocol
buffers, they may be added to this set.
**This is an "out" parameter**. The values it contains will be
modified in-place.
'''
from ..util.serialization import transform_column_source_data
data_dict = transform_column_source_data(self.column_source.data, buffers=buffers, cols=self.cols)
return { 'kind' : 'ColumnDataChanged',
'column_source' : self.column_source.ref,
'new' : data_dict,
'cols' : self.cols}
class ColumnsStreamedEvent(DocumentPatchedEvent):
''' A concrete event representing efficiently streaming new data
to a :class:`~bokeh.models.sources.ColumnDataSource`
'''
def __init__(self, document, column_source, data, rollover, setter=None, callback_invoker=None):
'''
Args:
document (Document) :
A Bokeh document that is to be updated.
column_source (ColumnDataSource) :
The data source to stream new data to.
data (dict or DataFrame) :
New data to stream.
If a DataFrame, will be stored as ``{c: df[c] for c in df.columns}``
rollover (int) :
A rollover limit. If the data source columns exceed this
limit, earlier values will be discarded to maintain the
column length under the limit.
setter (ClientSession or ServerSession or None, optional) :
This is used to prevent "boomerang" updates to Bokeh apps.
(default: None)
See :class:`~bokeh.document.events.DocumentChangedEvent`
for more details.
callback_invoker (callable, optional) :
A callable that will invoke any Model callbacks that should
be executed in response to the change that triggered this
event. (default: None)
'''
super(ColumnsStreamedEvent, self).__init__(document, setter, callback_invoker)
self.column_source = column_source
if pd and isinstance(data, pd.DataFrame):
data = {c: data[c] for c in data.columns}
self.data = data
self.rollover = rollover
def dispatch(self, receiver):
''' Dispatch handling of this event to a receiver.
This method will invoke ``receiver._columns_streamed`` if it exists.
'''
super(ColumnsStreamedEvent, self).dispatch(receiver)
if hasattr(receiver, '_columns_streamed'):
receiver._columns_streamed(self)
def generate(self, references, buffers):
''' Create a JSON representation of this event suitable for sending
to clients.
.. code-block:: python
{
            'kind' : 'ColumnsStreamed',
            'column_source' : <reference to a CDS>,
            'data' : <new data to stream to column_source>,
            'rollover' : <rollover limit>
}
Args:
references (dict[str, Model]) :
If the event requires references to certain models in order to
function, they may be collected here.
**This is an "out" parameter**. The values it contains will be
modified in-place.
buffers (set) :
If the event needs to supply any additional Bokeh protocol
buffers, they may be added to this set.
**This is an "out" parameter**. The values it contains will be
modified in-place.
'''
return { 'kind' : 'ColumnsStreamed',
'column_source' : self.column_source.ref,
'data' : self.data,
'rollover' : self.rollover }
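# Hedged usage sketch (added): stream two new rows to a hypothetical ``source``
# in ``doc``, discarding older rows beyond a 1000-row window.
def _example_stream_rows(doc, source):
    new_rows = {'x': [10, 11], 'y': [0.5, 0.7]}
    event = ColumnsStreamedEvent(doc, source, new_rows, rollover=1000)
    return event.generate(set(), set())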
class ColumnsPatchedEvent(DocumentPatchedEvent):
''' A concrete event representing efficiently applying data patches
to a :class:`~bokeh.models.sources.ColumnDataSource`
'''
def __init__(self, document, column_source, patches, setter=None, callback_invoker=None):
'''
Args:
document (Document) :
A Bokeh document that is to be updated.
column_source (ColumnDataSource) :
The data source to apply patches to.
            patches (list) :
                A list of patch operations to apply to the data source.
setter (ClientSession or ServerSession or None, optional) :
This is used to prevent "boomerang" updates to Bokeh apps.
(default: None)
See :class:`~bokeh.document.events.DocumentChangedEvent`
for more details.
callback_invoker (callable, optional) :
A callable that will invoke any Model callbacks that should
be executed in response to the change that triggered this
event. (default: None)
'''
super(ColumnsPatchedEvent, self).__init__(document, setter, callback_invoker)
self.column_source = column_source
self.patches = patches
def dispatch(self, receiver):
''' Dispatch handling of this event to a receiver.
This method will invoke ``receiver._columns_patched`` if it exists.
'''
super(ColumnsPatchedEvent, self).dispatch(receiver)
if hasattr(receiver, '_columns_patched'):
receiver._columns_patched(self)
def generate(self, references, buffers):
''' Create a JSON representation of this event suitable for sending
to clients.
.. code-block:: python
{
            'kind' : 'ColumnsPatched',
            'column_source' : <reference to a CDS>,
            'patches' : <patches to apply to column_source>
}
Args:
references (dict[str, Model]) :
If the event requires references to certain models in order to
function, they may be collected here.
**This is an "out" parameter**. The values it contains will be
modified in-place.
buffers (set) :
If the event needs to supply any additional Bokeh protocol
buffers, they may be added to this set.
**This is an "out" parameter**. The values it contains will be
modified in-place.
'''
return { 'kind' : 'ColumnsPatched',
'column_source' : self.column_source.ref,
'patches' : self.patches }
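# Hedged usage sketch (added): the patch shape below follows the usual
# ColumnDataSource convention of {column: [(index, new_value), ...]}; treat
# that shape, and ``doc``/``source``, as assumptions for illustration.
def _example_patch_cells(doc, source):
    patches = {'y': [(0, 3.14), (5, 2.71)]}
    event = ColumnsPatchedEvent(doc, source, patches)
    return event.generate(set(), set())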
class TitleChangedEvent(DocumentPatchedEvent):
''' A concrete event representing a change to the title of a Bokeh
Document.
'''
def __init__(self, document, title, setter=None, callback_invoker=None):
'''
Args:
document (Document) :
A Bokeh document that is to be updated.
title (str) :
The new title to set on the Document
setter (ClientSession or ServerSession or None, optional) :
This is used to prevent "boomerang" updates to Bokeh apps.
(default: None)
See :class:`~bokeh.document.events.DocumentChangedEvent`
for more details.
callback_invoker (callable, optional) :
A callable that will invoke any Model callbacks that should
be executed in response to the change that triggered this
event. (default: None)
'''
super(TitleChangedEvent, self).__init__(document, setter, callback_invoker)
self.title = title
def combine(self, event):
        ''' Coalesce a subsequent ``TitleChangedEvent`` into this one, keeping
        only the most recent title and callback invoker.
        '''
if not isinstance(event, TitleChangedEvent): return False
# If these are not true something weird is going on, maybe updates from
# Python bokeh.client, don't try to combine
if self.setter != event.setter: return False
if self.document != event.document: return False
self.title = event.title
self.callback_invoker = event.callback_invoker
return True
def generate(self, references, buffers):
''' Create a JSON representation of this event suitable for sending
to clients.
.. code-block:: python
{
            'kind' : 'TitleChanged',
            'title' : <new title to set>
}
Args:
references (dict[str, Model]) :
If the event requires references to certain models in order to
function, they may be collected here.
**This is an "out" parameter**. The values it contains will be
modified in-place.
buffers (set) :
If the event needs to supply any additional Bokeh protocol
buffers, they may be added to this set.
**This is an "out" parameter**. The values it contains will be
modified in-place.
'''
return { 'kind' : 'TitleChanged',
'title' : self.title }
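# Hedged usage sketch (added): successive title changes on the same document
# from the same setter coalesce, so only the latest title survives.
def _example_coalesce_titles(doc):
    first = TitleChangedEvent(doc, 'draft title')
    second = TitleChangedEvent(doc, 'final title')
    if first.combine(second):  # True here: same document, same (None) setter
        assert first.title == 'final title'
    return first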
class RootAddedEvent(DocumentPatchedEvent):
''' A concrete event representing a change to add a new Model to a
Document's collection of "root" models.
'''
def __init__(self, document, model, setter=None, callback_invoker=None):
'''
Args:
document (Document) :
A Bokeh document that is to be updated.
model (Model) :
The Bokeh Model to add as a Document root.
setter (ClientSession or ServerSession or None, optional) :
This is used to prevent "boomerang" updates to Bokeh apps.
(default: None)
See :class:`~bokeh.document.events.DocumentChangedEvent`
for more details.
callback_invoker (callable, optional) :
A callable that will invoke any Model callbacks that should
be executed in response to the change that triggered this
event. (default: None)
'''
super(RootAddedEvent, self).__init__(document, setter, callback_invoker)
self.model = model
def generate(self, references, buffers):
''' Create a JSON representation of this event suitable for sending
to clients.
.. code-block:: python
{
            'kind' : 'RootAdded',
            'model' : <reference to a Model>
}
Args:
references (dict[str, Model]) :
If the event requires references to certain models in order to
function, they may be collected here.
**This is an "out" parameter**. The values it contains will be
modified in-place.
buffers (set) :
If the event needs to supply any additional Bokeh protocol
buffers, they may be added to this set.
**This is an "out" parameter**. The values it contains will be
modified in-place.
'''
references.update(self.model.references())
return { 'kind' : 'RootAdded',
'model' : self.model.ref }
class RootRemovedEvent(DocumentPatchedEvent):
''' A concrete event representing a change to remove an existing Model
from a Document's collection of "root" models.
'''
def __init__(self, document, model, setter=None, callback_invoker=None):
'''
Args:
document (Document) :
A Bokeh document that is to be updated.
model (Model) :
The Bokeh Model to remove as a Document root.
setter (ClientSession or ServerSession or None, optional) :
This is used to prevent "boomerang" updates to Bokeh apps.
(default: None)
See :class:`~bokeh.document.events.DocumentChangedEvent`
for more details.
callback_invoker (callable, optional) :
A callable that will invoke any Model callbacks that should
be executed in response to the change that triggered this
event. (default: None)
'''
super(RootRemovedEvent, self).__init__(document, setter, callback_invoker)
self.model = model
def generate(self, references, buffers):
''' Create a JSON representation of this event suitable for sending
to clients.
.. code-block:: python
{
            'kind' : 'RootRemoved',
            'model' : <reference to a Model>
}
Args:
references (dict[str, Model]) :
If the event requires references to certain models in order to
function, they may be collected here.
**This is an "out" parameter**. The values it contains will be
modified in-place.
buffers (set) :
If the event needs to supply any additional Bokeh protocol
buffers, they may be added to this set.
**This is an "out" parameter**. The values it contains will be
modified in-place.
'''
return { 'kind' : 'RootRemoved',
'model' : self.model.ref }
class SessionCallbackAdded(DocumentChangedEvent):
''' A concrete event representing a change to add a new callback (e.g.
periodic, timeout, or "next tick") to a Document.
'''
def __init__(self, document, callback):
'''
Args:
document (Document) :
A Bokeh document that is to be updated.
callback (SessionCallback) :
The callback to add
'''
super(SessionCallbackAdded, self).__init__(document)
self.callback = callback
def dispatch(self, receiver):
''' Dispatch handling of this event to a receiver.
This method will invoke ``receiver._session_callback_added`` if
it exists.
'''
super(SessionCallbackAdded, self).dispatch(receiver)
if hasattr(receiver, '_session_callback_added'):
receiver._session_callback_added(self)
class SessionCallbackRemoved(DocumentChangedEvent):
''' A concrete event representing a change to remove an existing callback
(e.g. periodic, timeout, or "next tick") from a Document.
'''
def __init__(self, document, callback):
'''
Args:
document (Document) :
A Bokeh document that is to be updated.
callback (SessionCallback) :
The callback to remove
'''
super(SessionCallbackRemoved, self).__init__(document)
self.callback = callback
def dispatch(self, receiver):
''' Dispatch handling of this event to a receiver.
This method will invoke ``receiver._session_callback_removed`` if
it exists.
'''
super(SessionCallbackRemoved, self).dispatch(receiver)
if hasattr(receiver, '_session_callback_removed'):
receiver._session_callback_removed(self)
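# Hedged receiver sketch (added): dispatch() duck-types on hook methods, so any
# object defining the matching ``_session_callback_*`` hooks receives these
# events; the class below is illustrative, not part of the module.
class _ExampleSessionCallbackLogger(object):
    def _session_callback_added(self, event):
        print('session callback added: %r' % (event.callback,))
    def _session_callback_removed(self, event):
        print('session callback removed: %r' % (event.callback,))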
#-----------------------------------------------------------------------------
# Private API
#-----------------------------------------------------------------------------
#-----------------------------------------------------------------------------
# Code
#-----------------------------------------------------------------------------
| timsnyder/bokeh | bokeh/document/events.py | Python | bsd-3-clause | 28,451 |
class BankAccount(object):
def __init__(self, initial_balance=0):
self.balance = initial_balance
def deposit(self, amount):
self.balance += amount
def withdraw(self, amount):
self.balance -= amount
def overdrawn(self):
return self.balance < 0
my_account = BankAccount(15)
my_account.withdraw(5)
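# Hedged demo (added): overdrawn() only becomes True once the balance is negative.
print(my_account.overdrawn())  # False -- the balance is still 10 here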
print(my_account.balance)
| 2014c2g5/2014cadp | wsgi/local_data/brython_programs/class2.py | Python | gpl-3.0 | 368 |
# -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
class Migration(migrations.Migration):
dependencies = [
('hhlregistrations', '0004_auto_20150411_1935'),
]
operations = [
migrations.AddField(
model_name='event',
name='payment_due',
field=models.DateTimeField(null=True, blank=True),
),
migrations.AddField(
model_name='event',
name='require_registration',
field=models.BooleanField(default=False),
),
]
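    # Hedged usage note (added): `python manage.py migrate hhlregistrations`
    # applies these two AddField operations on top of 0004_auto_20150411_1935.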
| hacklab-fi/hhlevents | hhlevents/apps/hhlregistrations/migrations/0005_auto_20150412_1806.py | Python | bsd-3-clause | 592 |
#
# Copyright (c) 2008-2015 Citrix Systems, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License")
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from nssrc.com.citrix.netscaler.nitro.resource.base.base_resource import base_resource
from nssrc.com.citrix.netscaler.nitro.resource.base.base_resource import base_response
from nssrc.com.citrix.netscaler.nitro.service.options import options
from nssrc.com.citrix.netscaler.nitro.exception.nitro_exception import nitro_exception
from nssrc.com.citrix.netscaler.nitro.util.nitro_util import nitro_util
class gslbservice(base_resource) :
""" Configuration for GSLB service resource. """
def __init__(self) :
self._servicename = ""
self._cnameentry = ""
self._ip = ""
self._servername = ""
self._servicetype = ""
self._port = 0
self._publicip = ""
self._publicport = 0
self._maxclient = 0
self._healthmonitor = ""
self._sitename = ""
self._state = ""
self._cip = ""
self._cipheader = ""
self._sitepersistence = ""
self._cookietimeout = 0
self._siteprefix = ""
self._clttimeout = 0
self._svrtimeout = 0
self._maxbandwidth = 0
self._downstateflush = ""
self._maxaaausers = 0
self._monthreshold = 0
self._hashid = 0
self._comment = ""
self._appflowlog = ""
self._ipaddress = ""
self._viewname = ""
self._viewip = ""
self._weight = 0
self._monitor_name_svc = ""
self._newname = ""
self._gslb = ""
self._svrstate = ""
self._svreffgslbstate = ""
self._gslbthreshold = 0
self._gslbsvcstats = 0
self._monstate = ""
self._preferredlocation = ""
self._monitor_state = ""
self._threshold = ""
self._clmonowner = 0
self._clmonview = 0
self.___count = 0
@property
def servicename(self) :
"""Name for the GSLB service. Must begin with an ASCII alphanumeric or underscore (_) character, and must contain only ASCII alphanumeric, underscore, hash (#), period (.), space, colon (:), at (@), equals (=), and hyphen (-) characters. Can be changed after the GSLB service is created.
CLI Users: If the name includes one or more spaces, enclose the name in double or single quotation marks (for example, "my gslbsvc" or 'my gslbsvc').<br/>Minimum length = 1.
"""
try :
return self._servicename
except Exception as e:
raise e
@servicename.setter
def servicename(self, servicename) :
"""Name for the GSLB service. Must begin with an ASCII alphanumeric or underscore (_) character, and must contain only ASCII alphanumeric, underscore, hash (#), period (.), space, colon (:), at (@), equals (=), and hyphen (-) characters. Can be changed after the GSLB service is created.
CLI Users: If the name includes one or more spaces, enclose the name in double or single quotation marks (for example, "my gslbsvc" or 'my gslbsvc').<br/>Minimum length = 1
"""
try :
self._servicename = servicename
except Exception as e:
raise e
@property
def cnameentry(self) :
"""Canonical name of the GSLB service. Used in CNAME-based GSLB.<br/>Minimum length = 1.
"""
try :
return self._cnameentry
except Exception as e:
raise e
@cnameentry.setter
def cnameentry(self, cnameentry) :
"""Canonical name of the GSLB service. Used in CNAME-based GSLB.<br/>Minimum length = 1
"""
try :
self._cnameentry = cnameentry
except Exception as e:
raise e
@property
def ip(self) :
"""IP address for the GSLB service. Should represent a load balancing, content switching, or VPN virtual server on the NetScaler appliance, or the IP address of another load balancing device.<br/>Minimum length = 1.
"""
try :
return self._ip
except Exception as e:
raise e
@ip.setter
def ip(self, ip) :
"""IP address for the GSLB service. Should represent a load balancing, content switching, or VPN virtual server on the NetScaler appliance, or the IP address of another load balancing device.<br/>Minimum length = 1
"""
try :
self._ip = ip
except Exception as e:
raise e
@property
def servername(self) :
"""Name of the server hosting the GSLB service.<br/>Minimum length = 1.
"""
try :
return self._servername
except Exception as e:
raise e
@servername.setter
def servername(self, servername) :
"""Name of the server hosting the GSLB service.<br/>Minimum length = 1
"""
try :
self._servername = servername
except Exception as e:
raise e
@property
def servicetype(self) :
"""Type of service to create.<br/>Default value: NSSVC_SERVICE_UNKNOWN<br/>Possible values = HTTP, FTP, TCP, UDP, SSL, SSL_BRIDGE, SSL_TCP, NNTP, ANY, SIP_UDP, RADIUS, RDP, RTSP, MYSQL, MSSQL, ORACLE.
"""
try :
return self._servicetype
except Exception as e:
raise e
@servicetype.setter
def servicetype(self, servicetype) :
"""Type of service to create.<br/>Default value: NSSVC_SERVICE_UNKNOWN<br/>Possible values = HTTP, FTP, TCP, UDP, SSL, SSL_BRIDGE, SSL_TCP, NNTP, ANY, SIP_UDP, RADIUS, RDP, RTSP, MYSQL, MSSQL, ORACLE
"""
try :
self._servicetype = servicetype
except Exception as e:
raise e
@property
def port(self) :
"""Port on which the load balancing entity represented by this GSLB service listens.<br/>Minimum length = 1<br/>Range 1 - 65535.
"""
try :
return self._port
except Exception as e:
raise e
@port.setter
def port(self, port) :
"""Port on which the load balancing entity represented by this GSLB service listens.<br/>Minimum length = 1<br/>Range 1 - 65535
"""
try :
self._port = port
except Exception as e:
raise e
@property
def publicip(self) :
"""The public IP address that a NAT device translates to the GSLB service's private IP address. Optional.
"""
try :
return self._publicip
except Exception as e:
raise e
@publicip.setter
def publicip(self, publicip) :
"""The public IP address that a NAT device translates to the GSLB service's private IP address. Optional.
"""
try :
self._publicip = publicip
except Exception as e:
raise e
@property
def publicport(self) :
"""The public port associated with the GSLB service's public IP address. The port is mapped to the service's private port number. Applicable to the local GSLB service. Optional.
"""
try :
return self._publicport
except Exception as e:
raise e
@publicport.setter
def publicport(self, publicport) :
"""The public port associated with the GSLB service's public IP address. The port is mapped to the service's private port number. Applicable to the local GSLB service. Optional.
"""
try :
self._publicport = publicport
except Exception as e:
raise e
@property
def maxclient(self) :
"""The maximum number of open connections that the service can support at any given time. A GSLB service whose connection count reaches the maximum is not considered when a GSLB decision is made, until the connection count drops below the maximum.<br/>Maximum length = 4294967294.
"""
try :
return self._maxclient
except Exception as e:
raise e
@maxclient.setter
def maxclient(self, maxclient) :
"""The maximum number of open connections that the service can support at any given time. A GSLB service whose connection count reaches the maximum is not considered when a GSLB decision is made, until the connection count drops below the maximum.<br/>Maximum length = 4294967294
"""
try :
self._maxclient = maxclient
except Exception as e:
raise e
@property
def healthmonitor(self) :
"""Monitor the health of the GSLB service.<br/>Default value: YES<br/>Possible values = YES, NO.
"""
try :
return self._healthmonitor
except Exception as e:
raise e
@healthmonitor.setter
def healthmonitor(self, healthmonitor) :
"""Monitor the health of the GSLB service.<br/>Default value: YES<br/>Possible values = YES, NO
"""
try :
self._healthmonitor = healthmonitor
except Exception as e:
raise e
@property
def sitename(self) :
"""Name of the GSLB site to which the service belongs.<br/>Minimum length = 1.
"""
try :
return self._sitename
except Exception as e:
raise e
@sitename.setter
def sitename(self, sitename) :
"""Name of the GSLB site to which the service belongs.<br/>Minimum length = 1
"""
try :
self._sitename = sitename
except Exception as e:
raise e
@property
def state(self) :
"""Enable or disable the service.<br/>Default value: ENABLED<br/>Possible values = ENABLED, DISABLED.
"""
try :
return self._state
except Exception as e:
raise e
@state.setter
def state(self, state) :
"""Enable or disable the service.<br/>Default value: ENABLED<br/>Possible values = ENABLED, DISABLED
"""
try :
self._state = state
except Exception as e:
raise e
@property
def cip(self) :
"""In the request that is forwarded to the GSLB service, insert a header that stores the client's IP address. Client IP header insertion is used in connection-proxy based site persistence.<br/>Default value: DISABLED<br/>Possible values = ENABLED, DISABLED.
"""
try :
return self._cip
except Exception as e:
raise e
@cip.setter
def cip(self, cip) :
"""In the request that is forwarded to the GSLB service, insert a header that stores the client's IP address. Client IP header insertion is used in connection-proxy based site persistence.<br/>Default value: DISABLED<br/>Possible values = ENABLED, DISABLED
"""
try :
self._cip = cip
except Exception as e:
raise e
@property
def cipheader(self) :
"""Name for the HTTP header that stores the client's IP address. Used with the Client IP option. If client IP header insertion is enabled on the service and a name is not specified for the header, the NetScaler appliance uses the name specified by the cipHeader parameter in the set ns param command or, in the GUI, the Client IP Header parameter in the Configure HTTP Parameters dialog box.<br/>Minimum length = 1.
"""
try :
return self._cipheader
except Exception as e:
raise e
@cipheader.setter
def cipheader(self, cipheader) :
"""Name for the HTTP header that stores the client's IP address. Used with the Client IP option. If client IP header insertion is enabled on the service and a name is not specified for the header, the NetScaler appliance uses the name specified by the cipHeader parameter in the set ns param command or, in the GUI, the Client IP Header parameter in the Configure HTTP Parameters dialog box.<br/>Minimum length = 1
"""
try :
self._cipheader = cipheader
except Exception as e:
raise e
@property
def sitepersistence(self) :
"""Use cookie-based site persistence. Applicable only to HTTP and SSL GSLB services.<br/>Possible values = ConnectionProxy, HTTPRedirect, NONE.
"""
try :
return self._sitepersistence
except Exception as e:
raise e
@sitepersistence.setter
def sitepersistence(self, sitepersistence) :
"""Use cookie-based site persistence. Applicable only to HTTP and SSL GSLB services.<br/>Possible values = ConnectionProxy, HTTPRedirect, NONE
"""
try :
self._sitepersistence = sitepersistence
except Exception as e:
raise e
@property
def cookietimeout(self) :
"""Timeout value, in minutes, for the cookie, when cookie based site persistence is enabled.<br/>Maximum length = 1440.
"""
try :
return self._cookietimeout
except Exception as e:
raise e
@cookietimeout.setter
def cookietimeout(self, cookietimeout) :
"""Timeout value, in minutes, for the cookie, when cookie based site persistence is enabled.<br/>Maximum length = 1440
"""
try :
self._cookietimeout = cookietimeout
except Exception as e:
raise e
@property
def siteprefix(self) :
"""The site's prefix string. When the service is bound to a GSLB virtual server, a GSLB site domain is generated internally for each bound service-domain pair by concatenating the site prefix of the service and the name of the domain. If the special string NONE is specified, the site-prefix string is unset. When implementing HTTP redirect site persistence, the NetScaler appliance redirects GSLB requests to GSLB services by using their site domains.
"""
try :
return self._siteprefix
except Exception as e:
raise e
@siteprefix.setter
def siteprefix(self, siteprefix) :
"""The site's prefix string. When the service is bound to a GSLB virtual server, a GSLB site domain is generated internally for each bound service-domain pair by concatenating the site prefix of the service and the name of the domain. If the special string NONE is specified, the site-prefix string is unset. When implementing HTTP redirect site persistence, the NetScaler appliance redirects GSLB requests to GSLB services by using their site domains.
"""
try :
self._siteprefix = siteprefix
except Exception as e:
raise e
@property
def clttimeout(self) :
"""Idle time, in seconds, after which a client connection is terminated. Applicable if connection proxy based site persistence is used.<br/>Maximum length = 31536000.
"""
try :
return self._clttimeout
except Exception as e:
raise e
@clttimeout.setter
def clttimeout(self, clttimeout) :
"""Idle time, in seconds, after which a client connection is terminated. Applicable if connection proxy based site persistence is used.<br/>Maximum length = 31536000
"""
try :
self._clttimeout = clttimeout
except Exception as e:
raise e
@property
def svrtimeout(self) :
"""Idle time, in seconds, after which a server connection is terminated. Applicable if connection proxy based site persistence is used.<br/>Maximum length = 31536000.
"""
try :
return self._svrtimeout
except Exception as e:
raise e
@svrtimeout.setter
def svrtimeout(self, svrtimeout) :
"""Idle time, in seconds, after which a server connection is terminated. Applicable if connection proxy based site persistence is used.<br/>Maximum length = 31536000
"""
try :
self._svrtimeout = svrtimeout
except Exception as e:
raise e
@property
def maxbandwidth(self) :
"""Integer specifying the maximum bandwidth allowed for the service. A GSLB service whose bandwidth reaches the maximum is not considered when a GSLB decision is made, until its bandwidth consumption drops below the maximum.
"""
try :
return self._maxbandwidth
except Exception as e:
raise e
@maxbandwidth.setter
def maxbandwidth(self, maxbandwidth) :
"""Integer specifying the maximum bandwidth allowed for the service. A GSLB service whose bandwidth reaches the maximum is not considered when a GSLB decision is made, until its bandwidth consumption drops below the maximum.
"""
try :
self._maxbandwidth = maxbandwidth
except Exception as e:
raise e
@property
def downstateflush(self) :
"""Flush all active transactions associated with the GSLB service when its state transitions from UP to DOWN. Do not enable this option for services that must complete their transactions. Applicable if connection proxy based site persistence is used.<br/>Possible values = ENABLED, DISABLED.
"""
try :
return self._downstateflush
except Exception as e:
raise e
@downstateflush.setter
def downstateflush(self, downstateflush) :
"""Flush all active transactions associated with the GSLB service when its state transitions from UP to DOWN. Do not enable this option for services that must complete their transactions. Applicable if connection proxy based site persistence is used.<br/>Possible values = ENABLED, DISABLED
"""
try :
self._downstateflush = downstateflush
except Exception as e:
raise e
@property
def maxaaausers(self) :
"""Maximum number of SSL VPN users that can be logged on concurrently to the VPN virtual server that is represented by this GSLB service. A GSLB service whose user count reaches the maximum is not considered when a GSLB decision is made, until the count drops below the maximum.<br/>Maximum length = 65535.
"""
try :
return self._maxaaausers
except Exception as e:
raise e
@maxaaausers.setter
def maxaaausers(self, maxaaausers) :
"""Maximum number of SSL VPN users that can be logged on concurrently to the VPN virtual server that is represented by this GSLB service. A GSLB service whose user count reaches the maximum is not considered when a GSLB decision is made, until the count drops below the maximum.<br/>Maximum length = 65535
"""
try :
self._maxaaausers = maxaaausers
except Exception as e:
raise e
@property
def monthreshold(self) :
"""Monitoring threshold value for the GSLB service. If the sum of the weights of the monitors that are bound to this GSLB service and are in the UP state is not equal to or greater than this threshold value, the service is marked as DOWN.<br/>Maximum length = 65535.
"""
try :
return self._monthreshold
except Exception as e:
raise e
@monthreshold.setter
def monthreshold(self, monthreshold) :
"""Monitoring threshold value for the GSLB service. If the sum of the weights of the monitors that are bound to this GSLB service and are in the UP state is not equal to or greater than this threshold value, the service is marked as DOWN.<br/>Maximum length = 65535
"""
try :
self._monthreshold = monthreshold
except Exception as e:
raise e
@property
def hashid(self) :
"""Unique hash identifier for the GSLB service, used by hash based load balancing methods.<br/>Minimum length = 1.
"""
try :
return self._hashid
except Exception as e:
raise e
@hashid.setter
def hashid(self, hashid) :
"""Unique hash identifier for the GSLB service, used by hash based load balancing methods.<br/>Minimum length = 1
"""
try :
self._hashid = hashid
except Exception as e:
raise e
@property
def comment(self) :
"""Any comments that you might want to associate with the GSLB service.
"""
try :
return self._comment
except Exception as e:
raise e
@comment.setter
def comment(self, comment) :
"""Any comments that you might want to associate with the GSLB service.
"""
try :
self._comment = comment
except Exception as e:
raise e
@property
def appflowlog(self) :
"""Enable logging appflow flow information.<br/>Default value: ENABLED<br/>Possible values = ENABLED, DISABLED.
"""
try :
return self._appflowlog
except Exception as e:
raise e
@appflowlog.setter
def appflowlog(self, appflowlog) :
"""Enable logging appflow flow information.<br/>Default value: ENABLED<br/>Possible values = ENABLED, DISABLED
"""
try :
self._appflowlog = appflowlog
except Exception as e:
raise e
@property
def ipaddress(self) :
"""The new IP address of the service.
"""
try :
return self._ipaddress
except Exception as e:
raise e
@ipaddress.setter
def ipaddress(self, ipaddress) :
"""The new IP address of the service.
"""
try :
self._ipaddress = ipaddress
except Exception as e:
raise e
@property
def viewname(self) :
"""Name of the DNS view of the service. A DNS view is used in global server load balancing (GSLB) to return a predetermined IP address to a specific group of clients, which are identified by using a DNS policy.<br/>Minimum length = 1.
"""
try :
return self._viewname
except Exception as e:
raise e
@viewname.setter
def viewname(self, viewname) :
"""Name of the DNS view of the service. A DNS view is used in global server load balancing (GSLB) to return a predetermined IP address to a specific group of clients, which are identified by using a DNS policy.<br/>Minimum length = 1
"""
try :
self._viewname = viewname
except Exception as e:
raise e
@property
def viewip(self) :
"""IP address to be used for the given view.
"""
try :
return self._viewip
except Exception as e:
raise e
@viewip.setter
def viewip(self, viewip) :
"""IP address to be used for the given view.
"""
try :
self._viewip = viewip
except Exception as e:
raise e
@property
def weight(self) :
"""Weight to assign to the monitor-service binding. A larger number specifies a greater weight. Contributes to the monitoring threshold, which determines the state of the service.<br/>Minimum length = 1<br/>Maximum length = 100.
"""
try :
return self._weight
except Exception as e:
raise e
@weight.setter
def weight(self, weight) :
"""Weight to assign to the monitor-service binding. A larger number specifies a greater weight. Contributes to the monitoring threshold, which determines the state of the service.<br/>Minimum length = 1<br/>Maximum length = 100
"""
try :
self._weight = weight
except Exception as e:
raise e
@property
def monitor_name_svc(self) :
"""Name of the monitor to bind to the service.<br/>Minimum length = 1.
"""
try :
return self._monitor_name_svc
except Exception as e:
raise e
@monitor_name_svc.setter
def monitor_name_svc(self, monitor_name_svc) :
"""Name of the monitor to bind to the service.<br/>Minimum length = 1
"""
try :
self._monitor_name_svc = monitor_name_svc
except Exception as e:
raise e
@property
def newname(self) :
"""New name for the GSLB service.<br/>Minimum length = 1.
"""
try :
return self._newname
except Exception as e:
raise e
@newname.setter
def newname(self, newname) :
"""New name for the GSLB service.<br/>Minimum length = 1
"""
try :
self._newname = newname
except Exception as e:
raise e
@property
def gslb(self) :
""".<br/>Default value: GSLB<br/>Possible values = REMOTE, LOCAL.
"""
try :
return self._gslb
except Exception as e:
raise e
@property
def svrstate(self) :
"""Server state.<br/>Possible values = UP, DOWN, UNKNOWN, BUSY, OUT OF SERVICE, GOING OUT OF SERVICE, DOWN WHEN GOING OUT OF SERVICE, NS_EMPTY_STR, Unknown, DISABLED.
"""
try :
return self._svrstate
except Exception as e:
raise e
@property
def svreffgslbstate(self) :
"""Effective state of the gslb svc.<br/>Possible values = UP, DOWN, UNKNOWN, BUSY, OUT OF SERVICE, GOING OUT OF SERVICE, DOWN WHEN GOING OUT OF SERVICE, NS_EMPTY_STR, Unknown, DISABLED.
"""
try :
return self._svreffgslbstate
except Exception as e:
raise e
@property
def gslbthreshold(self) :
"""Indicates if gslb svc has reached threshold.
"""
try :
return self._gslbthreshold
except Exception as e:
raise e
@property
def gslbsvcstats(self) :
"""Indicates if gslb svc has stats of the primary or the whole chain.
"""
try :
return self._gslbsvcstats
except Exception as e:
raise e
@property
def monstate(self) :
"""State of the monitor bound to gslb service.<br/>Possible values = ENABLED, DISABLED.
"""
try :
return self._monstate
except Exception as e:
raise e
@property
def preferredlocation(self) :
"""Prefered location.
"""
try :
return self._preferredlocation
except Exception as e:
raise e
@property
def monitor_state(self) :
"""The running state of the monitor on this service.<br/>Possible values = UP, DOWN, UNKNOWN, BUSY, OUT OF SERVICE, GOING OUT OF SERVICE, DOWN WHEN GOING OUT OF SERVICE, NS_EMPTY_STR, Unknown, DISABLED.
"""
try :
return self._monitor_state
except Exception as e:
raise e
@property
def threshold(self) :
""".<br/>Possible values = ABOVE, BELOW.
"""
try :
return self._threshold
except Exception as e:
raise e
@property
def clmonowner(self) :
"""Tells the mon owner of the gslb service.<br/>Minimum value = 0<br/>Maximum value = 32.
"""
try :
return self._clmonowner
except Exception as e:
raise e
@property
def clmonview(self) :
"""Tells the view id of the monitoring owner.
"""
try :
return self._clmonview
except Exception as e:
raise e
def _get_nitro_response(self, service, response) :
""" converts nitro response into object and returns the object array in case of get request.
"""
try :
result = service.payload_formatter.string_to_resource(gslbservice_response, response, self.__class__.__name__)
if(result.errorcode != 0) :
if (result.errorcode == 444) :
service.clear_session(self)
if result.severity :
if (result.severity == "ERROR") :
raise nitro_exception(result.errorcode, str(result.message), str(result.severity))
else :
raise nitro_exception(result.errorcode, str(result.message), str(result.severity))
return result.gslbservice
except Exception as e :
raise e
def _get_object_name(self) :
""" Returns the value of object identifier argument
"""
try :
if (self.servicename) :
return str(self.servicename)
return None
except Exception as e :
raise e
@classmethod
def add(cls, client, resource) :
""" Use this API to add gslbservice.
"""
try :
if type(resource) is not list :
addresource = gslbservice()
addresource.servicename = resource.servicename
addresource.cnameentry = resource.cnameentry
addresource.ip = resource.ip
addresource.servername = resource.servername
addresource.servicetype = resource.servicetype
addresource.port = resource.port
addresource.publicip = resource.publicip
addresource.publicport = resource.publicport
addresource.maxclient = resource.maxclient
addresource.healthmonitor = resource.healthmonitor
addresource.sitename = resource.sitename
addresource.state = resource.state
addresource.cip = resource.cip
addresource.cipheader = resource.cipheader
addresource.sitepersistence = resource.sitepersistence
addresource.cookietimeout = resource.cookietimeout
addresource.siteprefix = resource.siteprefix
addresource.clttimeout = resource.clttimeout
addresource.svrtimeout = resource.svrtimeout
addresource.maxbandwidth = resource.maxbandwidth
addresource.downstateflush = resource.downstateflush
addresource.maxaaausers = resource.maxaaausers
addresource.monthreshold = resource.monthreshold
addresource.hashid = resource.hashid
addresource.comment = resource.comment
addresource.appflowlog = resource.appflowlog
return addresource.add_resource(client)
else :
if (resource and len(resource) > 0) :
addresources = [ gslbservice() for _ in range(len(resource))]
for i in range(len(resource)) :
addresources[i].servicename = resource[i].servicename
addresources[i].cnameentry = resource[i].cnameentry
addresources[i].ip = resource[i].ip
addresources[i].servername = resource[i].servername
addresources[i].servicetype = resource[i].servicetype
addresources[i].port = resource[i].port
addresources[i].publicip = resource[i].publicip
addresources[i].publicport = resource[i].publicport
addresources[i].maxclient = resource[i].maxclient
addresources[i].healthmonitor = resource[i].healthmonitor
addresources[i].sitename = resource[i].sitename
addresources[i].state = resource[i].state
addresources[i].cip = resource[i].cip
addresources[i].cipheader = resource[i].cipheader
addresources[i].sitepersistence = resource[i].sitepersistence
addresources[i].cookietimeout = resource[i].cookietimeout
addresources[i].siteprefix = resource[i].siteprefix
addresources[i].clttimeout = resource[i].clttimeout
addresources[i].svrtimeout = resource[i].svrtimeout
addresources[i].maxbandwidth = resource[i].maxbandwidth
addresources[i].downstateflush = resource[i].downstateflush
addresources[i].maxaaausers = resource[i].maxaaausers
addresources[i].monthreshold = resource[i].monthreshold
addresources[i].hashid = resource[i].hashid
addresources[i].comment = resource[i].comment
addresources[i].appflowlog = resource[i].appflowlog
result = cls.add_bulk_request(client, addresources)
return result
except Exception as e :
raise e
@classmethod
def delete(cls, client, resource) :
""" Use this API to delete gslbservice.
"""
try :
if type(resource) is not list :
deleteresource = gslbservice()
if type(resource) != type(deleteresource):
deleteresource.servicename = resource
else :
deleteresource.servicename = resource.servicename
return deleteresource.delete_resource(client)
else :
if type(resource[0]) != cls :
if (resource and len(resource) > 0) :
deleteresources = [ gslbservice() for _ in range(len(resource))]
for i in range(len(resource)) :
deleteresources[i].servicename = resource[i]
else :
if (resource and len(resource) > 0) :
deleteresources = [ gslbservice() for _ in range(len(resource))]
for i in range(len(resource)) :
deleteresources[i].servicename = resource[i].servicename
result = cls.delete_bulk_request(client, deleteresources)
return result
except Exception as e :
raise e
@classmethod
def update(cls, client, resource) :
""" Use this API to update gslbservice.
"""
try :
if type(resource) is not list :
updateresource = gslbservice()
updateresource.servicename = resource.servicename
updateresource.ipaddress = resource.ipaddress
updateresource.publicip = resource.publicip
updateresource.publicport = resource.publicport
updateresource.cip = resource.cip
updateresource.cipheader = resource.cipheader
updateresource.sitepersistence = resource.sitepersistence
updateresource.siteprefix = resource.siteprefix
updateresource.maxclient = resource.maxclient
updateresource.healthmonitor = resource.healthmonitor
updateresource.maxbandwidth = resource.maxbandwidth
updateresource.downstateflush = resource.downstateflush
updateresource.maxaaausers = resource.maxaaausers
updateresource.viewname = resource.viewname
updateresource.viewip = resource.viewip
updateresource.monthreshold = resource.monthreshold
updateresource.weight = resource.weight
updateresource.monitor_name_svc = resource.monitor_name_svc
updateresource.hashid = resource.hashid
updateresource.comment = resource.comment
updateresource.appflowlog = resource.appflowlog
return updateresource.update_resource(client)
else :
if (resource and len(resource) > 0) :
updateresources = [ gslbservice() for _ in range(len(resource))]
for i in range(len(resource)) :
updateresources[i].servicename = resource[i].servicename
updateresources[i].ipaddress = resource[i].ipaddress
updateresources[i].publicip = resource[i].publicip
updateresources[i].publicport = resource[i].publicport
updateresources[i].cip = resource[i].cip
updateresources[i].cipheader = resource[i].cipheader
updateresources[i].sitepersistence = resource[i].sitepersistence
updateresources[i].siteprefix = resource[i].siteprefix
updateresources[i].maxclient = resource[i].maxclient
updateresources[i].healthmonitor = resource[i].healthmonitor
updateresources[i].maxbandwidth = resource[i].maxbandwidth
updateresources[i].downstateflush = resource[i].downstateflush
updateresources[i].maxaaausers = resource[i].maxaaausers
updateresources[i].viewname = resource[i].viewname
updateresources[i].viewip = resource[i].viewip
updateresources[i].monthreshold = resource[i].monthreshold
updateresources[i].weight = resource[i].weight
updateresources[i].monitor_name_svc = resource[i].monitor_name_svc
updateresources[i].hashid = resource[i].hashid
updateresources[i].comment = resource[i].comment
updateresources[i].appflowlog = resource[i].appflowlog
result = cls.update_bulk_request(client, updateresources)
return result
except Exception as e :
raise e
@classmethod
def unset(cls, client, resource, args) :
""" Use this API to unset the properties of gslbservice resource.
Properties that need to be unset are specified in args array.
"""
try :
if type(resource) is not list :
unsetresource = gslbservice()
if type(resource) != type(unsetresource):
unsetresource.servicename = resource
else :
unsetresource.servicename = resource.servicename
return unsetresource.unset_resource(client, args)
else :
if type(resource[0]) != cls :
if (resource and len(resource) > 0) :
unsetresources = [ gslbservice() for _ in range(len(resource))]
for i in range(len(resource)) :
unsetresources[i].servicename = resource[i]
else :
if (resource and len(resource) > 0) :
unsetresources = [ gslbservice() for _ in range(len(resource))]
for i in range(len(resource)) :
unsetresources[i].servicename = resource[i].servicename
result = cls.unset_bulk_request(client, unsetresources, args)
return result
except Exception as e :
raise e
@classmethod
def rename(cls, client, resource, new_servicename) :
""" Use this API to rename a gslbservice resource.
"""
try :
renameresource = gslbservice()
if type(resource) == cls :
renameresource.servicename = resource.servicename
else :
renameresource.servicename = resource
return renameresource.rename_resource(client,new_servicename)
except Exception as e :
raise e
@classmethod
def get(cls, client, name="", option_="") :
""" Use this API to fetch all the gslbservice resources that are configured on netscaler.
"""
try :
if not name :
obj = gslbservice()
response = obj.get_resources(client, option_)
else :
if type(name) != cls :
if type(name) is not list :
obj = gslbservice()
obj.servicename = name
response = obj.get_resource(client, option_)
else :
if name and len(name) > 0 :
response = [gslbservice() for _ in range(len(name))]
obj = [gslbservice() for _ in range(len(name))]
for i in range(len(name)) :
obj[i] = gslbservice()
obj[i].servicename = name[i]
response[i] = obj[i].get_resource(client, option_)
return response
except Exception as e :
raise e
@classmethod
def get_filtered(cls, client, filter_) :
""" Use this API to fetch filtered set of gslbservice resources.
filter string should be in JSON format.eg: "port:80,servicetype:HTTP".
"""
try :
obj = gslbservice()
option_ = options()
option_.filter = filter_
response = obj.getfiltered(client, option_)
return response
except Exception as e :
raise e
@classmethod
def count(cls, client) :
""" Use this API to count the gslbservice resources configured on NetScaler.
"""
try :
obj = gslbservice()
option_ = options()
option_.count = True
response = obj.get_resources(client, option_)
if response :
return response[0].__dict__['___count']
return 0
except Exception as e :
raise e
@classmethod
def count_filtered(cls, client, filter_) :
""" Use this API to count filtered the set of gslbservice resources.
Filter string should be in JSON format.eg: "port:80,servicetype:HTTP".
"""
try :
obj = gslbservice()
option_ = options()
option_.count = True
option_.filter = filter_
response = obj.getfiltered(client, option_)
if response :
return response[0].__dict__['___count']
return 0
except Exception as e :
raise e
class State:
ENABLED = "ENABLED"
DISABLED = "DISABLED"
class Downstateflush:
ENABLED = "ENABLED"
DISABLED = "DISABLED"
class Servicetype:
HTTP = "HTTP"
FTP = "FTP"
TCP = "TCP"
UDP = "UDP"
SSL = "SSL"
SSL_BRIDGE = "SSL_BRIDGE"
SSL_TCP = "SSL_TCP"
NNTP = "NNTP"
ANY = "ANY"
SIP_UDP = "SIP_UDP"
RADIUS = "RADIUS"
RDP = "RDP"
RTSP = "RTSP"
MYSQL = "MYSQL"
MSSQL = "MSSQL"
ORACLE = "ORACLE"
class Svrstate:
UP = "UP"
DOWN = "DOWN"
UNKNOWN = "UNKNOWN"
BUSY = "BUSY"
OUT_OF_SERVICE = "OUT OF SERVICE"
GOING_OUT_OF_SERVICE = "GOING OUT OF SERVICE"
DOWN_WHEN_GOING_OUT_OF_SERVICE = "DOWN WHEN GOING OUT OF SERVICE"
NS_EMPTY_STR = "NS_EMPTY_STR"
Unknown = "Unknown"
DISABLED = "DISABLED"
class Monitor_state:
UP = "UP"
DOWN = "DOWN"
UNKNOWN = "UNKNOWN"
BUSY = "BUSY"
OUT_OF_SERVICE = "OUT OF SERVICE"
GOING_OUT_OF_SERVICE = "GOING OUT OF SERVICE"
DOWN_WHEN_GOING_OUT_OF_SERVICE = "DOWN WHEN GOING OUT OF SERVICE"
NS_EMPTY_STR = "NS_EMPTY_STR"
Unknown = "Unknown"
DISABLED = "DISABLED"
class Gslb:
REMOTE = "REMOTE"
LOCAL = "LOCAL"
class Svreffgslbstate:
UP = "UP"
DOWN = "DOWN"
UNKNOWN = "UNKNOWN"
BUSY = "BUSY"
OUT_OF_SERVICE = "OUT OF SERVICE"
GOING_OUT_OF_SERVICE = "GOING OUT OF SERVICE"
DOWN_WHEN_GOING_OUT_OF_SERVICE = "DOWN WHEN GOING OUT OF SERVICE"
NS_EMPTY_STR = "NS_EMPTY_STR"
Unknown = "Unknown"
DISABLED = "DISABLED"
class Threshold:
ABOVE = "ABOVE"
BELOW = "BELOW"
class Cip:
ENABLED = "ENABLED"
DISABLED = "DISABLED"
class Monstate:
ENABLED = "ENABLED"
DISABLED = "DISABLED"
class Sitepersistence:
ConnectionProxy = "ConnectionProxy"
HTTPRedirect = "HTTPRedirect"
NONE = "NONE"
class Healthmonitor:
YES = "YES"
NO = "NO"
class Appflowlog:
ENABLED = "ENABLED"
DISABLED = "DISABLED"
class gslbservice_response(base_response) :
def __init__(self, length=1) :
self.gslbservice = []
self.errorcode = 0
self.message = ""
self.severity = ""
self.sessionid = ""
self.gslbservice = [gslbservice() for _ in range(length)]
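# ---------------------------------------------------------------------------
# Hedged usage sketch (added, not part of the generated SDK file). The NSIP,
# credentials, and service attributes below are hypothetical placeholders, and
# the nitro_service calls are assumed to follow the SDK's usual login pattern.
# ---------------------------------------------------------------------------
if __name__ == '__main__':
	from nssrc.com.citrix.netscaler.nitro.service.nitro_service import nitro_service
	client = nitro_service("10.0.0.1", "http")  # hypothetical NSIP
	client.login("nsroot", "nsroot")            # hypothetical credentials
	svc = gslbservice()
	svc.servicename = "gslb_svc_web"
	svc.ip = "192.0.2.10"
	svc.port = 80
	svc.servicetype = gslbservice.Servicetype.HTTP
	svc.sitename = "site_east"
	gslbservice.add(client, svc)        # create the GSLB service
	print(gslbservice.count(client))    # count configured GSLB services
	client.logout()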
| mahabs/nitro | nssrc/com/citrix/netscaler/nitro/resource/config/gslb/gslbservice.py | Python | apache-2.0 | 37,663 |
#!/usr/bin/python
# Copyright 2015 The ANGLE Project Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
#
# gen_dxgi_support_tables.py:
# Code generation for the DXGI support tables. Determines which formats
# are natively support in D3D10+.
# NOTE: don't run this script directly. Run scripts/run_code_generation.py.
#
# NOTE: The "never supported" formats should not be combined with the
# "supported" and "optional" ones. At the moment, this does not cause
# any issues as ANGLE does not internally check for "never supported".
#
# MSDN links:
# 10Level9 9_3: https://msdn.microsoft.com/en-us/library/windows/desktop/mt790740.aspx
# 10_0: https://msdn.microsoft.com/en-us/library/windows/desktop/cc627090.aspx
# 10_1: https://msdn.microsoft.com/en-us/library/windows/desktop/cc627091.aspx
# 11_0: https://msdn.microsoft.com/en-us/library/windows/desktop/ff471325.aspx
# 11_1: https://msdn.microsoft.com/en-us/library/windows/desktop/hh404483.aspx
import sys
import json
macro_prefix = 'F_'
template = """// GENERATED FILE - DO NOT EDIT. See dxgi_support_data.json.
//
// Copyright 2015 The ANGLE Project Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.
//
// dxgi_support_table:
// Queries for DXGI support of various texture formats. Depends on DXGI
// version, D3D feature level, and is sometimes guaranteed or optional.
//
#include "libANGLE/renderer/dxgi_support_table.h"
#include "common/debug.h"
namespace rx
{{
namespace d3d11
{{
#define {prefix}2D D3D11_FORMAT_SUPPORT_TEXTURE2D
#define {prefix}3D D3D11_FORMAT_SUPPORT_TEXTURE3D
#define {prefix}CUBE D3D11_FORMAT_SUPPORT_TEXTURECUBE
#define {prefix}SAMPLE D3D11_FORMAT_SUPPORT_SHADER_SAMPLE
#define {prefix}RT D3D11_FORMAT_SUPPORT_RENDER_TARGET
#define {prefix}MS D3D11_FORMAT_SUPPORT_MULTISAMPLE_RENDERTARGET
#define {prefix}DS D3D11_FORMAT_SUPPORT_DEPTH_STENCIL
#define {prefix}MIPGEN D3D11_FORMAT_SUPPORT_MIP_AUTOGEN
namespace
{{
const DXGISupport &GetDefaultSupport()
{{
static UINT AllSupportFlags =
D3D11_FORMAT_SUPPORT_TEXTURE2D | D3D11_FORMAT_SUPPORT_TEXTURE3D |
D3D11_FORMAT_SUPPORT_TEXTURECUBE | D3D11_FORMAT_SUPPORT_SHADER_SAMPLE |
D3D11_FORMAT_SUPPORT_RENDER_TARGET | D3D11_FORMAT_SUPPORT_MULTISAMPLE_RENDERTARGET |
D3D11_FORMAT_SUPPORT_DEPTH_STENCIL | D3D11_FORMAT_SUPPORT_MIP_AUTOGEN;
static const DXGISupport defaultSupport(0, 0, AllSupportFlags);
return defaultSupport;
}}
const DXGISupport &GetDXGISupport_9_3(DXGI_FORMAT dxgiFormat)
{{
// clang-format off
switch (dxgiFormat)
{{
{table_data_9_3}
default:
UNREACHABLE();
return GetDefaultSupport();
}}
// clang-format on
}}
const DXGISupport &GetDXGISupport_10_0(DXGI_FORMAT dxgiFormat)
{{
// clang-format off
switch (dxgiFormat)
{{
{table_data_10_0}
default:
UNREACHABLE();
return GetDefaultSupport();
}}
// clang-format on
}}
const DXGISupport &GetDXGISupport_10_1(DXGI_FORMAT dxgiFormat)
{{
// clang-format off
switch (dxgiFormat)
{{
{table_data_10_1}
default:
UNREACHABLE();
return GetDefaultSupport();
}}
// clang-format on
}}
const DXGISupport &GetDXGISupport_11_0(DXGI_FORMAT dxgiFormat)
{{
// clang-format off
switch (dxgiFormat)
{{
{table_data_11_0}
default:
UNREACHABLE();
return GetDefaultSupport();
}}
// clang-format on
}}
const DXGISupport &GetDXGISupport_11_1(DXGI_FORMAT dxgiFormat)
{{
// clang-format off
switch (dxgiFormat)
{{
{table_data_11_1}
default:
UNREACHABLE();
return GetDefaultSupport();
}}
// clang-format on
}}
}}
#undef {prefix}2D
#undef {prefix}3D
#undef {prefix}CUBE
#undef {prefix}SAMPLE
#undef {prefix}RT
#undef {prefix}MS
#undef {prefix}DS
#undef {prefix}MIPGEN
const DXGISupport &GetDXGISupport(DXGI_FORMAT dxgiFormat, D3D_FEATURE_LEVEL featureLevel)
{{
switch (featureLevel)
{{
case D3D_FEATURE_LEVEL_9_3:
return GetDXGISupport_9_3(dxgiFormat);
case D3D_FEATURE_LEVEL_10_0:
return GetDXGISupport_10_0(dxgiFormat);
case D3D_FEATURE_LEVEL_10_1:
return GetDXGISupport_10_1(dxgiFormat);
case D3D_FEATURE_LEVEL_11_0:
return GetDXGISupport_11_0(dxgiFormat);
case D3D_FEATURE_LEVEL_11_1:
return GetDXGISupport_11_1(dxgiFormat);
default:
return GetDefaultSupport();
}}
}}
}} // namespace d3d11
}} // namespace rx
"""
def do_format(format_data):
table_data = {'9_3': '', '10_0': '', '10_1': '', '11_0': '', '11_1': ''}
json_flag_to_d3d = {
'texture2D': macro_prefix + '2D',
'texture3D': macro_prefix + '3D',
'textureCube': macro_prefix + 'CUBE',
'shaderSample': macro_prefix + 'SAMPLE',
'renderTarget': macro_prefix + 'RT',
'multisampleRT': macro_prefix + 'MS',
'depthStencil': macro_prefix + 'DS',
'mipAutoGen': macro_prefix + 'MIPGEN'
}
    for format_name, format_support in sorted(format_data.items()):
always_supported = set()
never_supported = set()
optionally_supported = set()
fl_9_3_supported = set()
fl_9_3_check = set()
fl_10_0_supported = set()
fl_10_1_supported = set()
fl_11_0_supported = set()
fl_11_0_check = set()
fl_11_1_supported = set()
fl_10_0_check_10_1_supported = set()
fl_10_0_check_11_0_supported = set()
        for json_flag, support in format_support.items():
d3d_flag = [json_flag_to_d3d[json_flag]]
if support == 'check':
optionally_supported.update(d3d_flag)
elif support == 'always':
always_supported.update(d3d_flag)
elif support == 'never':
never_supported.update(d3d_flag)
elif support == '10_0':
fl_10_0_supported.update(d3d_flag)
elif support == '10_1':
fl_10_1_supported.update(d3d_flag)
elif support == '11_0':
fl_11_0_supported.update(d3d_flag)
elif support == '11_1':
fl_11_1_supported.update(d3d_flag)
elif support == 'dxgi1_2':
# TODO(jmadill): DXGI 1.2 handling.
always_supported.update(d3d_flag)
elif support == '10_0check10_1always':
fl_10_0_check_10_1_supported.update(d3d_flag)
elif support == '10_0check11_0always':
fl_10_0_check_11_0_supported.update(d3d_flag)
elif support == '11_0check':
fl_11_0_check.update(d3d_flag)
elif support == '9_3always_10_0check11_0always':
fl_9_3_supported.update(d3d_flag)
fl_10_0_check_11_0_supported.update(d3d_flag)
elif support == '9_3check_10_0always':
fl_9_3_check.update(d3d_flag)
fl_10_0_supported.update(d3d_flag)
elif support == '9_3check11_0always':
fl_9_3_check.update(d3d_flag)
fl_10_0_check_11_0_supported.update(d3d_flag)
else:
print("Data specification error: " + support)
sys.exit(1)
for feature_level in ['9_3', '10_0', '10_1', '11_0', '11_1']:
always_for_fl = always_supported
optional_for_fl = optionally_supported
if feature_level == '9_3':
always_for_fl = fl_9_3_supported.union(always_for_fl)
optional_for_fl = fl_9_3_check.union(optional_for_fl)
elif feature_level == '10_0':
always_for_fl = fl_10_0_supported.union(always_for_fl)
optional_for_fl = fl_10_0_check_10_1_supported.union(optional_for_fl)
optional_for_fl = fl_10_0_check_11_0_supported.union(optional_for_fl)
elif feature_level == '10_1':
always_for_fl = fl_10_0_supported.union(always_for_fl)
always_for_fl = fl_10_1_supported.union(always_for_fl)
always_for_fl = fl_10_0_check_10_1_supported.union(always_for_fl)
optional_for_fl = fl_10_0_check_11_0_supported.union(optional_for_fl)
elif feature_level == '11_0':
always_for_fl = fl_10_0_supported.union(always_for_fl)
always_for_fl = fl_10_0_check_10_1_supported.union(always_for_fl)
always_for_fl = fl_10_0_check_11_0_supported.union(always_for_fl)
always_for_fl = fl_10_1_supported.union(always_for_fl)
always_for_fl = fl_11_0_supported.union(always_for_fl)
elif feature_level == '11_1':
always_for_fl = fl_10_0_supported.union(always_for_fl)
always_for_fl = fl_10_0_check_10_1_supported.union(always_for_fl)
always_for_fl = fl_10_0_check_11_0_supported.union(always_for_fl)
always_for_fl = fl_10_1_supported.union(always_for_fl)
always_for_fl = fl_11_0_supported.union(always_for_fl)
always_for_fl = fl_11_1_supported.union(always_for_fl)
always = ' | '.join(sorted(always_for_fl))
never = ' | '.join(sorted(never_supported))
optional = ' | '.join(sorted(optional_for_fl))
if not always:
always = '0'
if not never:
never = '0'
if not optional:
optional = '0'
table_data[feature_level] += ' case ' + format_name + ':\n'
table_data[feature_level] += ' {\n'
        table_data[feature_level] += '            static const DXGISupport info(' + always + ', ' + never + ', ' + optional + ');\n'
table_data[feature_level] += ' return info;\n'
table_data[feature_level] += ' }\n'
return table_data
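# Hedged illustration (added): for a format whose support entry is
# {'texture2D': 'always'}, do_format() emits a C++ case block per feature
# level containing `static const DXGISupport info(F_2D, 0, 0);`.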
def join_table_data(table_data_1, table_data_2):
return {
'9_3': table_data_1['9_3'] + table_data_2['9_3'],
'10_0': table_data_1['10_0'] + table_data_2['10_0'],
'10_1': table_data_1['10_1'] + table_data_2['10_1'],
'11_0': table_data_1['11_0'] + table_data_2['11_0'],
'11_1': table_data_1['11_1'] + table_data_2['11_1']
}
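# Hedged illustration (added): join_table_data simply concatenates the
# generated case-statement text per feature level, e.g. (other keys elided)
#   join_table_data({'9_3': 'A', ...}, {'9_3': 'B', ...})['9_3'] == 'AB'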
def main():
# auto_script parameters.
if len(sys.argv) > 1:
inputs = ['dxgi_support_data.json']
outputs = ['dxgi_support_table_autogen.cpp']
if sys.argv[1] == 'inputs':
            print(','.join(inputs))
elif sys.argv[1] == 'outputs':
            print(','.join(outputs))
else:
print('Invalid script parameters')
return 1
return 0
with open('dxgi_support_data.json') as dxgi_file:
        file_data = dxgi_file.read()
json_data = json.loads(file_data)
table_data = {'9_3': '', '10_0': '', '10_1': '', '11_0': '', '11_1': ''}
for format_data in json_data:
table_data = join_table_data(table_data, do_format(format_data))
out_data = template.format(
prefix=macro_prefix,
table_data_9_3=table_data['9_3'],
table_data_10_0=table_data['10_0'],
table_data_10_1=table_data['10_1'],
table_data_11_0=table_data['11_0'],
table_data_11_1=table_data['11_1'])
with open('dxgi_support_table_autogen.cpp', 'wt') as out_file:
        out_file.write(out_data)
return 0
if __name__ == '__main__':
sys.exit(main())
| ppy/angle | src/libANGLE/renderer/gen_dxgi_support_tables.py | Python | bsd-3-clause | 11,799 |
import pytest
from sondra.suite import SuiteException
from .api import *
from sondra.collection import Collection
def _ignore_ex(f):
try:
f()
except SuiteException:
pass
@pytest.fixture(scope='module')
def s(request):
v = ConcreteSuite()
_ignore_ex(lambda: EmptyApp(v))
_ignore_ex(lambda: DerivedApp(v))
_ignore_ex(lambda: SimpleApp(v))
_ignore_ex(lambda: SimpleApp(v, "Alt"))
v.ensure_database_objects()
return v
def test_collection_methods_local(s):
assert s['simple-app']['simple-documents'].simple_none_return() is None
assert s['simple-app']['simple-documents'].simple_int_return() == 1
assert s['simple-app']['simple-documents'].simple_number_return() == 1.0
assert s['simple-app']['simple-documents'].simple_str_return() == "String"
assert s['simple-app']['simple-documents'].list_return() == ["0", "1", "2", "3"]
assert s['simple-app']['simple-documents'].dict_return() == {'a': 0, 'b': 1, 'c': 2}
assert s['simple-app']['simple-documents'].operates_on_self() == s['simple-app']['simple-documents'].title
def test_derived_collection_inheritance(s):
"""Make sure that inheritance never modifies the base class and that appropriate attributes are merged"""
base_coll = s['simple-app']['simple-documents']
derived_coll = s['derived-app']['derived-collection']
assert hasattr(derived_coll, 'simple_none_return')
assert hasattr(derived_coll, 'derived_method')
assert not hasattr(base_coll, 'derived_method')
def test_collection_construction(s):
coll = s['simple-app']['simple-documents']
def test_collection_properties(s):
coll = s['simple-app']['simple-documents']
assert coll.suite
assert coll.application
assert coll.url == coll.application.url + '/' + coll.slug
assert coll.table
def test_collection_schema(s):
assert 'id' in s['simple-app']['simple-documents'].schema
assert s['simple-app']['simple-documents'].schema['id'].startswith(s['simple-app']['simple-documents'].url)
def test_abstract_collection(s):
class AbstractCollection(Collection):
"An abstract collection"
@expose_method
def exposed_method(self) -> None:
return None
class ConcreteCollection(AbstractCollection):
document_class = SimpleDocument
assert AbstractCollection.abstract
assert not ConcreteCollection.abstract
def test_collection_help(s):
assert s['simple-app']['simple-documents'].help()
assert s['simple-app']['simple-points'].help()
assert s['simple-app']['foreign-key-docs'].help() | JeffHeard/sondra | sondra/tests/test_collections.py | Python | apache-2.0 | 2,602 |
#! /usr/bin/python
# -*- coding: utf-8 -*-
# $Id: vboxshell.py $
"""
VirtualBox Python Shell.
This program is a simple interactive shell for VirtualBox. You can query
information and issue commands from a simple command line.
It also provides you with examples on how to use VirtualBox's Python API.
This shell is even somewhat documented, and it supports TAB-completion
and command history when the Python readline module is installed.
Finally, the shell allows arbitrary custom extensions: just create
.VirtualBox/shexts/ and drop your extensions there.
Enjoy.
P.S. Our apologies for the code quality.
"""
__copyright__ = \
"""
Copyright (C) 2009-2013 Oracle Corporation
This file is part of VirtualBox Open Source Edition (OSE), as
available from http://www.virtualbox.org. This file is free software;
you can redistribute it and/or modify it under the terms of the GNU
General Public License (GPL) as published by the Free Software
Foundation, in version 2 as it comes in the "COPYING" file of the
VirtualBox OSE distribution. VirtualBox OSE is distributed in the
hope that it will be useful, but WITHOUT ANY WARRANTY of any kind.
"""
__version__ = "$Revision: 92173 $"
import os, sys
import traceback
import shlex
import time
import re
import platform
from optparse import OptionParser
from pprint import pprint
#
# Global Variables
#
g_fBatchMode = False
g_sScriptFile = None
g_sCmd = None
g_fHasReadline = True
try:
import readline
import rlcompleter
except ImportError:
g_fHasReadline = False
g_sPrompt = "vbox> "
g_fHasColors = True
g_dTermColors = {
'red': '\033[31m',
'blue': '\033[94m',
'green': '\033[92m',
'yellow': '\033[93m',
'magenta': '\033[35m',
'cyan': '\033[36m'
}
def colored(strg, color):
"""
Translates a string to one including coloring settings, if enabled.
"""
if not g_fHasColors:
return strg
col = g_dTermColors.get(color, None)
if col:
return col+str(strg)+'\033[0m'
return strg
if g_fHasReadline:
class CompleterNG(rlcompleter.Completer):
def __init__(self, dic, ctx):
self.ctx = ctx
rlcompleter.Completer.__init__(self, dic)
def complete(self, text, state):
"""
taken from:
http://aspn.activestate.com/ASPN/Cookbook/Python/Recipe/496812
"""
if False and text == "":
return ['\t', None][state]
else:
return rlcompleter.Completer.complete(self, text, state)
def canBePath(self, _phrase, word):
return word.startswith('/')
def canBeCommand(self, phrase, _word):
spaceIdx = phrase.find(" ")
begIdx = readline.get_begidx()
firstWord = (spaceIdx == -1 or begIdx < spaceIdx)
if firstWord:
return True
if phrase.startswith('help'):
return True
return False
def canBeMachine(self, phrase, word):
return not self.canBePath(phrase, word) and not self.canBeCommand(phrase, word)
def global_matches(self, text):
"""
Compute matches when text is a simple name.
Return a list of all names currently defined
in self.namespace that match.
"""
matches = []
phrase = readline.get_line_buffer()
try:
if self.canBePath(phrase, text):
(directory, rest) = os.path.split(text)
c = len(rest)
for word in os.listdir(directory):
if c == 0 or word[:c] == rest:
matches.append(os.path.join(directory, word))
if self.canBeCommand(phrase, text):
c = len(text)
for lst in [ self.namespace ]:
for word in lst:
if word[:c] == text:
matches.append(word)
if self.canBeMachine(phrase, text):
c = len(text)
for mach in getMachines(self.ctx, False, True):
# although it has autoconversion, we need to cast
# explicitly for subscripts to work
word = re.sub("(?<!\\\\) ", "\\ ", str(mach.name))
if word[:c] == text:
matches.append(word)
word = str(mach.id)
if word[:c] == text:
matches.append(word)
except Exception, e:
printErr(self.ctx, e)
if g_fVerbose:
traceback.print_exc()
return matches
def autoCompletion(cmds, ctx):
if not g_fHasReadline:
return
comps = {}
for (key, _value) in cmds.items():
comps[key] = None
completer = CompleterNG(comps, ctx)
readline.set_completer(completer.complete)
delims = readline.get_completer_delims()
readline.set_completer_delims(re.sub("[\\./-]", "", delims)) # remove some of the delimiters
readline.parse_and_bind("set editing-mode emacs")
# OSX need it
if platform.system() == 'Darwin':
# see http://www.certif.com/spec_help/readline.html
readline.parse_and_bind ("bind ^I rl_complete")
readline.parse_and_bind ("bind ^W ed-delete-prev-word")
# Doesn't work well
# readline.parse_and_bind ("bind ^R em-inc-search-prev")
readline.parse_and_bind("tab: complete")
g_fVerbose = False
def split_no_quotes(s):
return shlex.split(s)
def progressBar(ctx, progress, wait=1000):
try:
while not progress.completed:
print "%s %%\r" % (colored(str(progress.percent), 'red')),
sys.stdout.flush()
progress.waitForCompletion(wait)
ctx['global'].waitForEvents(0)
if int(progress.resultCode) != 0:
reportError(ctx, progress)
return 1
except KeyboardInterrupt:
print "Interrupted."
ctx['interrupt'] = True
if progress.cancelable:
print "Canceling task..."
progress.cancel()
return 0
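# Typical use, mirroring startVm below: run an asynchronous operation that
# returns an IProgress and poll it, e.g.
#   progress = mach.launchVMProcess(session, 'gui', '')
#   if progressBar(ctx, progress, 100) and int(progress.resultCode) == 0:
#       ...  # operation succeeded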
def printErr(_ctx, e):
oVBoxMgr = _ctx['global'];
if oVBoxMgr.errIsOurXcptKind(e):
print colored('%s: %s' % (oVBoxMgr.xcptToString(e), oVBoxMgr.xcptGetMessage(e)), 'red');
else:
print colored(str(e), 'red')
def reportError(_ctx, progress):
errorinfo = progress.errorInfo
if errorinfo:
print colored("Error in module '%s': %s" % (errorinfo.component, errorinfo.text), 'red')
def colCat(_ctx, strg):
return colored(strg, 'magenta')
def colVm(_ctx, vmname):
return colored(vmname, 'blue')
def colPath(_ctx, path):
return colored(path, 'green')
def colSize(_ctx, byte):
return colored(byte, 'red')
def colPci(_ctx, pcidev):
return colored(pcidev, 'green')
def colDev(_ctx, pcidev):
return colored(pcidev, 'cyan')
def colSizeM(_ctx, mbyte):
return colored(str(mbyte)+'M', 'red')
def createVm(ctx, name, kind):
vbox = ctx['vb']
mach = vbox.createMachine("", name, [], kind, "")
mach.saveSettings()
print "created machine with UUID", mach.id
vbox.registerMachine(mach)
# update cache
getMachines(ctx, True)
def removeVm(ctx, mach):
uuid = mach.id
print "removing machine ", mach.name, "with UUID", uuid
cmdClosedVm(ctx, mach, detachVmDevice, ["ALL"])
mach = mach.unregister(ctx['global'].constants.CleanupMode_Full)
if mach:
mach.deleteSettings()
# update cache
getMachines(ctx, True)
def startVm(ctx, mach, vmtype):
vbox = ctx['vb']
perf = ctx['perf']
session = ctx['global'].getSessionObject(vbox)
progress = mach.launchVMProcess(session, vmtype, "")
if progressBar(ctx, progress, 100) and int(progress.resultCode) == 0:
# we ignore exceptions to allow starting VM even if
# perf collector cannot be started
if perf:
try:
perf.setup(['*'], [mach], 10, 15)
except Exception, e:
printErr(ctx, e)
if g_fVerbose:
traceback.print_exc()
session.unlockMachine()
class CachedMach:
def __init__(self, mach):
if mach.accessible:
self.name = mach.name
else:
self.name = '<inaccessible>'
self.id = mach.id
def cacheMachines(_ctx, lst):
result = []
for mach in lst:
elem = CachedMach(mach)
result.append(elem)
return result
def getMachines(ctx, invalidate = False, simple=False):
if ctx['vb'] is not None:
if ctx['_machlist'] is None or invalidate:
ctx['_machlist'] = ctx['global'].getArray(ctx['vb'], 'machines')
ctx['_machlistsimple'] = cacheMachines(ctx, ctx['_machlist'])
if simple:
return ctx['_machlistsimple']
else:
return ctx['_machlist']
else:
return []
def asState(var):
if var:
return colored('on', 'green')
else:
return colored('off', 'green')
def asFlag(var):
if var:
return 'yes'
else:
return 'no'
def getFacilityStatus(ctx, guest, facilityType):
(status, _timestamp) = guest.getFacilityStatus(facilityType)
return asEnumElem(ctx, 'AdditionsFacilityStatus', status)
def perfStats(ctx, mach):
if not ctx['perf']:
return
for metric in ctx['perf'].query(["*"], [mach]):
print metric['name'], metric['values_as_string']
def guestExec(ctx, machine, console, cmds):
exec cmds
def printMouseEvent(_ctx, mev):
print "Mouse : mode=%d x=%d y=%d z=%d w=%d buttons=%x" % (mev.mode, mev.x, mev.y, mev.z, mev.w, mev.buttons)
def printKbdEvent(ctx, kev):
print "Kbd: ", ctx['global'].getArray(kev, 'scancodes')
def printMultiTouchEvent(ctx, mtev):
print "MultiTouch : contacts=%d time=%d" % (mtev.contactCount, mtev.scanTime)
xPositions = ctx['global'].getArray(mtev, 'xPositions')
yPositions = ctx['global'].getArray(mtev, 'yPositions')
contactIds = ctx['global'].getArray(mtev, 'contactIds')
contactFlags = ctx['global'].getArray(mtev, 'contactFlags')
for i in range(0, mtev.contactCount):
print " [%d] %d,%d %d %d" % (i, xPositions[i], yPositions[i], contactIds[i], contactFlags[i])
def monitorSource(ctx, eventSource, active, dur):
def handleEventImpl(event):
evtype = event.type
print "got event: %s %s" % (str(evtype), asEnumElem(ctx, 'VBoxEventType', evtype))
if evtype == ctx['global'].constants.VBoxEventType_OnMachineStateChanged:
scev = ctx['global'].queryInterface(event, 'IMachineStateChangedEvent')
if scev:
print "machine state event: mach=%s state=%s" % (scev.machineId, scev.state)
elif evtype == ctx['global'].constants.VBoxEventType_OnSnapshotTaken:
stev = ctx['global'].queryInterface(event, 'ISnapshotTakenEvent')
if stev:
print "snapshot taken event: mach=%s snap=%s" % (stev.machineId, stev.snapshotId)
elif evtype == ctx['global'].constants.VBoxEventType_OnGuestPropertyChanged:
gpcev = ctx['global'].queryInterface(event, 'IGuestPropertyChangedEvent')
if gpcev:
print "guest property change: name=%s value=%s" % (gpcev.name, gpcev.value)
elif evtype == ctx['global'].constants.VBoxEventType_OnMousePointerShapeChanged:
psev = ctx['global'].queryInterface(event, 'IMousePointerShapeChangedEvent')
if psev:
shape = ctx['global'].getArray(psev, 'shape')
if shape is None:
print "pointer shape event - empty shape"
else:
print "pointer shape event: w=%d h=%d shape len=%d" % (psev.width, psev.height, len(shape))
elif evtype == ctx['global'].constants.VBoxEventType_OnGuestMouse:
mev = ctx['global'].queryInterface(event, 'IGuestMouseEvent')
if mev:
printMouseEvent(ctx, mev)
elif evtype == ctx['global'].constants.VBoxEventType_OnGuestKeyboard:
kev = ctx['global'].queryInterface(event, 'IGuestKeyboardEvent')
if kev:
printKbdEvent(ctx, kev)
elif evtype == ctx['global'].constants.VBoxEventType_OnGuestMultiTouch:
mtev = ctx['global'].queryInterface(event, 'IGuestMultiTouchEvent')
if mtev:
printMultiTouchEvent(ctx, mtev)
class EventListener:
def __init__(self, arg):
pass
def handleEvent(self, event):
try:
# a bit convoluted QI to make it work with MS COM
handleEventImpl(ctx['global'].queryInterface(event, 'IEvent'))
except:
traceback.print_exc()
pass
if active:
listener = ctx['global'].createListener(EventListener)
else:
listener = eventSource.createListener()
registered = False
if dur == -1:
# not infinity, but close enough
dur = 100000
try:
eventSource.registerListener(listener, [ctx['global'].constants.VBoxEventType_Any], active)
registered = True
end = time.time() + dur
while time.time() < end:
if active:
ctx['global'].waitForEvents(500)
else:
event = eventSource.getEvent(listener, 500)
if event:
handleEventImpl(event)
# otherwise waitable events will leak (active listeners ACK automatically)
eventSource.eventProcessed(listener, event)
# We need to catch all exceptions here, otherwise listener will never be unregistered
except:
traceback.print_exc()
pass
if listener and registered:
eventSource.unregisterListener(listener)
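# Sketch of a direct call (the monitorGuest* commands below wrap this):
#   monitorSource(ctx, console.keyboard.eventSource, False, 5)
# passively polls guest keyboard events for five seconds, while active=True
# registers a callback-style listener instead.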
g_tsLast = 0
def recordDemo(ctx, console, filename, dur):
demo = open(filename, 'w')
header = "VM=" + console.machine.name + "\n"
demo.write(header)
global g_tsLast
g_tsLast = time.time()
def stamp():
global g_tsLast
tsCur = time.time()
timePassed = int((tsCur-g_tsLast)*1000)
g_tsLast = tsCur
return timePassed
def handleEventImpl(event):
evtype = event.type
#print "got event: %s %s" % (str(evtype), asEnumElem(ctx, 'VBoxEventType', evtype))
if evtype == ctx['global'].constants.VBoxEventType_OnGuestMouse:
mev = ctx['global'].queryInterface(event, 'IGuestMouseEvent')
if mev:
line = "%d: m %d %d %d %d %d %d\n" % (stamp(), mev.mode, mev.x, mev.y, mev.z, mev.w, mev.buttons)
demo.write(line)
elif evtype == ctx['global'].constants.VBoxEventType_OnGuestKeyboard:
kev = ctx['global'].queryInterface(event, 'IGuestKeyboardEvent')
if kev:
line = "%d: k %s\n" % (stamp(), str(ctx['global'].getArray(kev, 'scancodes')))
demo.write(line)
listener = console.eventSource.createListener()
registered = False
# we create an aggregated event source to listen for multiple event sources (keyboard and mouse in our case)
agg = console.eventSource.createAggregator([console.keyboard.eventSource, console.mouse.eventSource])
if dur == -1:
# not infinity, but close enough
dur = 100000
try:
agg.registerListener(listener, [ctx['global'].constants.VBoxEventType_Any], False)
registered = True
end = time.time() + dur
while time.time() < end:
event = agg.getEvent(listener, 1000)
if event:
handleEventImpl(event)
# keyboard/mouse events aren't waitable, so no need for eventProcessed
# We need to catch all exceptions here, otherwise listener will never be unregistered
except:
traceback.print_exc()
pass
demo.close()
if listener and registered:
agg.unregisterListener(listener)
def playbackDemo(ctx, console, filename, dur):
demo = open(filename, 'r')
if dur == -1:
# not infinity, but close enough
dur = 100000
header = demo.readline()
print "Header is", header
basere = re.compile(r'(?P<s>\d+): (?P<t>[km]) (?P<p>.*)')
mre = re.compile(r'(?P<a>\d+) (?P<x>-*\d+) (?P<y>-*\d+) (?P<z>-*\d+) (?P<w>-*\d+) (?P<b>-*\d+)')
kre = re.compile(r'\d+')
kbd = console.keyboard
mouse = console.mouse
try:
end = time.time() + dur
for line in demo:
if time.time() > end:
break
match = basere.search(line)
if match is None:
continue
rdict = match.groupdict()
stamp = rdict['s']
params = rdict['p']
rtype = rdict['t']
time.sleep(float(stamp)/1000)
if rtype == 'k':
codes = kre.findall(params)
#print "KBD:", codes
kbd.putScancodes(codes)
elif rtype == 'm':
mm = mre.search(params)
if mm is not None:
mdict = mm.groupdict()
if mdict['a'] == '1':
# absolute
#print "MA: ", mdict['x'], mdict['y'], mdict['z'], mdict['b']
mouse.putMouseEventAbsolute(int(mdict['x']), int(mdict['y']), int(mdict['z']), int(mdict['w']), int(mdict['b']))
else:
#print "MR: ", mdict['x'], mdict['y'], mdict['b']
mouse.putMouseEvent(int(mdict['x']), int(mdict['y']), int(mdict['z']), int(mdict['w']), int(mdict['b']))
# We need to catch all exceptions here, to close file
except KeyboardInterrupt:
ctx['interrupt'] = True
except:
traceback.print_exc()
pass
demo.close()
def takeScreenshotOld(_ctx, console, args):
from PIL import Image
display = console.display
if len(args) > 0:
f = args[0]
else:
f = "/tmp/screenshot.png"
if len(args) > 3:
screen = int(args[3])
else:
screen = 0
(fbw, fbh, _fbbpp, fbx, fby) = display.getScreenResolution(screen)
if len(args) > 1:
w = int(args[1])
else:
w = fbw
if len(args) > 2:
h = int(args[2])
else:
h = fbh
print "Saving screenshot (%d x %d) screen %d in %s..." % (w, h, screen, f)
data = display.takeScreenShotToArray(screen, w, h)
size = (w, h)
mode = "RGBA"
im = Image.frombuffer(mode, size, str(data), "raw", mode, 0, 1)
im.save(f, "PNG")
def takeScreenshot(_ctx, console, args):
display = console.display
if len(args) > 0:
f = args[0]
else:
f = "/tmp/screenshot.png"
if len(args) > 3:
screen = int(args[3])
else:
screen = 0
(fbw, fbh, _fbbpp, fbx, fby) = display.getScreenResolution(screen)
if len(args) > 1:
w = int(args[1])
else:
w = fbw
if len(args) > 2:
h = int(args[2])
else:
h = fbh
print "Saving screenshot (%d x %d) screen %d in %s..." % (w, h, screen, f)
data = display.takeScreenShotPNGToArray(screen, w, h)
pngfile = open(f, 'wb')
pngfile.write(data)
pngfile.close()
def teleport(ctx, _session, console, args):
if args[0].find(":") == -1:
print "Use host:port format for teleport target"
return
(host, port) = args[0].split(":")
if len(args) > 1:
passwd = args[1]
else:
passwd = ""
if len(args) > 2:
maxDowntime = int(args[2])
else:
maxDowntime = 250
port = int(port)
print "Teleporting to %s:%d..." % (host, port)
progress = console.teleport(host, port, passwd, maxDowntime)
if progressBar(ctx, progress, 100) and int(progress.resultCode) == 0:
print "Success!"
else:
reportError(ctx, progress)
def guestStats(ctx, console, args):
guest = console.guest
# we need to set up guest statistics
    if len(args) > 0:
        update = int(args[0])
    else:
        update = 1
    if guest.statisticsUpdateInterval != update:
        guest.statisticsUpdateInterval = update
try:
time.sleep(float(update)+0.1)
except:
# to allow sleep interruption
pass
all_stats = ctx['const'].all_values('GuestStatisticType')
cpu = 0
for s in all_stats.keys():
try:
val = guest.getStatistic( cpu, all_stats[s])
print "%s: %d" % (s, val)
except:
# likely not implemented
pass
def plugCpu(_ctx, machine, _session, args):
cpu = int(args[0])
print "Adding CPU %d..." % (cpu)
machine.hotPlugCPU(cpu)
def unplugCpu(_ctx, machine, _session, args):
cpu = int(args[0])
print "Removing CPU %d..." % (cpu)
machine.hotUnplugCPU(cpu)
def mountIso(_ctx, machine, _session, args):
machine.mountMedium(args[0], args[1], args[2], args[3], args[4])
machine.saveSettings()
def cond(c, v1, v2):
if c:
return v1
else:
return v2
def printHostUsbDev(ctx, ud):
print " %s: %s (vendorId=%d productId=%d serial=%s) %s" % (ud.id, colored(ud.product, 'blue'), ud.vendorId, ud.productId, ud.serialNumber, asEnumElem(ctx, 'USBDeviceState', ud.state))
def printUsbDev(_ctx, ud):
print " %s: %s (vendorId=%d productId=%d serial=%s)" % (ud.id, colored(ud.product, 'blue'), ud.vendorId, ud.productId, ud.serialNumber)
def printSf(ctx, sf):
print " name=%s host=%s %s %s" % (sf.name, colPath(ctx, sf.hostPath), cond(sf.accessible, "accessible", "not accessible"), cond(sf.writable, "writable", "read-only"))
def ginfo(ctx, console, _args):
guest = console.guest
if guest.additionsRunLevel != ctx['const'].AdditionsRunLevelType_None:
print "Additions active, version %s" % (guest.additionsVersion)
print "Support seamless: %s" % (getFacilityStatus(ctx, guest, ctx['const'].AdditionsFacilityType_Seamless))
print "Support graphics: %s" % (getFacilityStatus(ctx, guest, ctx['const'].AdditionsFacilityType_Graphics))
print "Balloon size: %d" % (guest.memoryBalloonSize)
print "Statistic update interval: %d" % (guest.statisticsUpdateInterval)
else:
print "No additions"
usbs = ctx['global'].getArray(console, 'USBDevices')
print "Attached USB:"
for ud in usbs:
printUsbDev(ctx, ud)
rusbs = ctx['global'].getArray(console, 'remoteUSBDevices')
print "Remote USB:"
for ud in rusbs:
printHostUsbDev(ctx, ud)
print "Transient shared folders:"
    sfs = ctx['global'].getArray(console, 'sharedFolders')
for sf in sfs:
printSf(ctx, sf)
def cmdExistingVm(ctx, mach, cmd, args):
session = None
try:
vbox = ctx['vb']
session = ctx['global'].getSessionObject(vbox)
mach.lockMachine(session, ctx['global'].constants.LockType_Shared)
except Exception, e:
printErr(ctx, "Session to '%s' not open: %s" % (mach.name, str(e)))
if g_fVerbose:
traceback.print_exc()
return
if session.state != ctx['const'].SessionState_Locked:
print "Session to '%s' in wrong state: %s" % (mach.name, session.state)
session.unlockMachine()
return
# this could be an example how to handle local only (i.e. unavailable
# in Webservices) functionality
if ctx['remote'] and cmd == 'some_local_only_command':
print 'Trying to use local only functionality, ignored'
session.unlockMachine()
return
console = session.console
ops = {'pause': lambda: console.pause(),
'resume': lambda: console.resume(),
'powerdown': lambda: console.powerDown(),
'powerbutton': lambda: console.powerButton(),
'stats': lambda: perfStats(ctx, mach),
'guest': lambda: guestExec(ctx, mach, console, args),
'ginfo': lambda: ginfo(ctx, console, args),
'guestlambda': lambda: args[0](ctx, mach, console, args[1:]),
'save': lambda: progressBar(ctx, console.saveState()),
'screenshot': lambda: takeScreenshot(ctx, console, args),
'teleport': lambda: teleport(ctx, session, console, args),
'gueststats': lambda: guestStats(ctx, console, args),
'plugcpu': lambda: plugCpu(ctx, session.machine, session, args),
'unplugcpu': lambda: unplugCpu(ctx, session.machine, session, args),
'mountiso': lambda: mountIso(ctx, session.machine, session, args),
}
try:
ops[cmd]()
except KeyboardInterrupt:
ctx['interrupt'] = True
except Exception, e:
printErr(ctx, e)
if g_fVerbose:
traceback.print_exc()
session.unlockMachine()
def cmdClosedVm(ctx, mach, cmd, args=[], save=True):
session = ctx['global'].openMachineSession(mach, True)
mach = session.machine
try:
cmd(ctx, mach, args)
except Exception, e:
save = False
printErr(ctx, e)
if g_fVerbose:
traceback.print_exc()
if save:
try:
mach.saveSettings()
except Exception, e:
printErr(ctx, e)
if g_fVerbose:
traceback.print_exc()
ctx['global'].closeMachineSession(session)
def cmdAnyVm(ctx, mach, cmd, args=[], save=False):
session = ctx['global'].openMachineSession(mach)
mach = session.machine
try:
cmd(ctx, mach, session.console, args)
except Exception, e:
save = False
printErr(ctx, e)
if g_fVerbose:
traceback.print_exc()
if save:
mach.saveSettings()
ctx['global'].closeMachineSession(session)
def machById(ctx, uuid):
try:
mach = ctx['vb'].getMachine(uuid)
except:
mach = ctx['vb'].findMachine(uuid)
return mach
class XPathNode:
def __init__(self, parent, obj, ntype):
self.parent = parent
self.obj = obj
self.ntype = ntype
def lookup(self, subpath):
children = self.enum()
matches = []
for e in children:
if e.matches(subpath):
matches.append(e)
return matches
def enum(self):
return []
def matches(self, subexp):
if subexp == self.ntype:
return True
if not subexp.startswith(self.ntype):
return False
match = re.search(r"@(?P<a>\w+)=(?P<v>[^\'\[\]]+)", subexp)
matches = False
try:
if match is not None:
xdict = match.groupdict()
attr = xdict['a']
val = xdict['v']
matches = (str(getattr(self.obj, attr)) == val)
except:
pass
return matches
def apply(self, cmd):
exec(cmd, {'obj':self.obj, 'node':self, 'ctx':self.getCtx()}, {})
def getCtx(self):
if hasattr(self, 'ctx'):
return self.ctx
return self.parent.getCtx()
class XPathNodeHolder(XPathNode):
def __init__(self, parent, obj, attr, heldClass, xpathname):
XPathNode.__init__(self, parent, obj, 'hld '+xpathname)
self.attr = attr
self.heldClass = heldClass
self.xpathname = xpathname
def enum(self):
children = []
for node in self.getCtx()['global'].getArray(self.obj, self.attr):
nodexml = self.heldClass(self, node)
children.append(nodexml)
return children
def matches(self, subexp):
return subexp == self.xpathname
class XPathNodeValue(XPathNode):
def __init__(self, parent, obj, xpathname):
XPathNode.__init__(self, parent, obj, 'val '+xpathname)
self.xpathname = xpathname
def matches(self, subexp):
return subexp == self.xpathname
class XPathNodeHolderVM(XPathNodeHolder):
def __init__(self, parent, vbox):
XPathNodeHolder.__init__(self, parent, vbox, 'machines', XPathNodeVM, 'vms')
class XPathNodeVM(XPathNode):
def __init__(self, parent, obj):
XPathNode.__init__(self, parent, obj, 'vm')
#def matches(self, subexp):
# return subexp=='vm'
def enum(self):
return [XPathNodeHolderNIC(self, self.obj),
XPathNodeValue(self, self.obj.BIOSSettings, 'bios'), ]
class XPathNodeHolderNIC(XPathNodeHolder):
def __init__(self, parent, mach):
        XPathNodeHolder.__init__(self, parent, mach, 'nics', XPathNodeNIC, 'nics')
self.maxNic = self.getCtx()['vb'].systemProperties.getMaxNetworkAdapters(self.obj.chipsetType)
def enum(self):
children = []
for i in range(0, self.maxNic):
node = XPathNodeNIC(self, self.obj.getNetworkAdapter(i))
children.append(node)
return children
class XPathNodeNIC(XPathNode):
def __init__(self, parent, obj):
XPathNode.__init__(self, parent, obj, 'nic')
def matches(self, subexp):
return subexp == 'nic'
class XPathNodeRoot(XPathNode):
def __init__(self, ctx):
XPathNode.__init__(self, None, None, 'root')
self.ctx = ctx
def enum(self):
return [XPathNodeHolderVM(self, self.ctx['vb'])]
def matches(self, subexp):
return True
def eval_xpath(ctx, scope):
pathnames = scope.split("/")[2:]
nodes = [XPathNodeRoot(ctx)]
for path in pathnames:
seen = []
while len(nodes) > 0:
node = nodes.pop()
seen.append(node)
for s in seen:
matches = s.lookup(path)
for match in matches:
nodes.append(match)
if len(nodes) == 0:
break
return nodes
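# Example (sketch): eval_xpath(ctx, "//vms/vm[@name=ubuntu]") returns every
# VM node whose 'name' attribute is 'ubuntu', and "//vms/vm/nics/nic"
# enumerates all NICs of all VMs; paths must start with "//" because the
# first two components of scope.split("/") are discarded.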
def argsToMach(ctx, args):
if len(args) < 2:
print "usage: %s [vmname|uuid]" % (args[0])
return None
uuid = args[1]
mach = machById(ctx, uuid)
if mach == None:
print "Machine '%s' is unknown, use list command to find available machines" % (uuid)
return mach
def helpSingleCmd(cmd, h, sp):
if sp != 0:
spec = " [ext from "+sp+"]"
else:
spec = ""
print " %s: %s%s" % (colored(cmd, 'blue'), h, spec)
def helpCmd(_ctx, args):
if len(args) == 1:
print "Help page:"
names = commands.keys()
names.sort()
for i in names:
helpSingleCmd(i, commands[i][0], commands[i][2])
else:
cmd = args[1]
c = commands.get(cmd)
if c == None:
print "Command '%s' not known" % (cmd)
else:
helpSingleCmd(cmd, c[0], c[2])
return 0
def asEnumElem(ctx, enum, elem):
enumVals = ctx['const'].all_values(enum)
for e in enumVals.keys():
if str(elem) == str(enumVals[e]):
return colored(e, 'green')
return colored("<unknown>", 'green')
def enumFromString(ctx, enum, strg):
enumVals = ctx['const'].all_values(enum)
return enumVals.get(strg, None)
def listCmd(ctx, _args):
for mach in getMachines(ctx, True):
try:
if mach.teleporterEnabled:
tele = "[T] "
else:
tele = " "
print "%sMachine '%s' [%s], machineState=%s, sessionState=%s" % (tele, colVm(ctx, mach.name), mach.id, asEnumElem(ctx, "MachineState", mach.state), asEnumElem(ctx, "SessionState", mach.sessionState))
except Exception, e:
printErr(ctx, e)
if g_fVerbose:
traceback.print_exc()
return 0
def infoCmd(ctx, args):
if (len(args) < 2):
print "usage: info [vmname|uuid]"
return 0
mach = argsToMach(ctx, args)
if mach == None:
return 0
vmos = ctx['vb'].getGuestOSType(mach.OSTypeId)
print " One can use setvar <mach> <var> <value> to change variable, using name in []."
print " Name [name]: %s" % (colVm(ctx, mach.name))
print " Description [description]: %s" % (mach.description)
print " ID [n/a]: %s" % (mach.id)
print " OS Type [via OSTypeId]: %s" % (vmos.description)
print " Firmware [firmwareType]: %s (%s)" % (asEnumElem(ctx, "FirmwareType", mach.firmwareType), mach.firmwareType)
print
print " CPUs [CPUCount]: %d" % (mach.CPUCount)
print " RAM [memorySize]: %dM" % (mach.memorySize)
print " VRAM [VRAMSize]: %dM" % (mach.VRAMSize)
print " Monitors [monitorCount]: %d" % (mach.monitorCount)
print " Chipset [chipsetType]: %s (%s)" % (asEnumElem(ctx, "ChipsetType", mach.chipsetType), mach.chipsetType)
print
print " Clipboard mode [clipboardMode]: %s (%s)" % (asEnumElem(ctx, "ClipboardMode", mach.clipboardMode), mach.clipboardMode)
print " Machine status [n/a]: %s (%s)" % (asEnumElem(ctx, "SessionState", mach.sessionState), mach.sessionState)
print
if mach.teleporterEnabled:
print " Teleport target on port %d (%s)" % (mach.teleporterPort, mach.teleporterPassword)
print
bios = mach.BIOSSettings
print " ACPI [BIOSSettings.ACPIEnabled]: %s" % (asState(bios.ACPIEnabled))
print " APIC [BIOSSettings.IOAPICEnabled]: %s" % (asState(bios.IOAPICEnabled))
    hwVirtEnabled = mach.getHWVirtExProperty(ctx['global'].constants.HWVirtExPropertyType_Enabled)
    print "  Hardware virtualization [via machine.setHWVirtExProperty(ctx['const'].HWVirtExPropertyType_Enabled, value)]: " + asState(hwVirtEnabled)
    hwVirtVPID = mach.getHWVirtExProperty(ctx['const'].HWVirtExPropertyType_VPID)
    print "  VPID support [via machine.setHWVirtExProperty(ctx['const'].HWVirtExPropertyType_VPID, value)]: " + asState(hwVirtVPID)
    hwVirtNestedPaging = mach.getHWVirtExProperty(ctx['const'].HWVirtExPropertyType_NestedPaging)
    print "  Nested paging [via machine.setHWVirtExProperty(ctx['const'].HWVirtExPropertyType_NestedPaging, value)]: " + asState(hwVirtNestedPaging)
print " Hardware 3d acceleration [accelerate3DEnabled]: " + asState(mach.accelerate3DEnabled)
print " Hardware 2d video acceleration [accelerate2DVideoEnabled]: " + asState(mach.accelerate2DVideoEnabled)
print " Use universal time [RTCUseUTC]: %s" % (asState(mach.RTCUseUTC))
print " HPET [HPETEnabled]: %s" % (asState(mach.HPETEnabled))
if mach.audioAdapter.enabled:
print " Audio [via audioAdapter]: chip %s; host driver %s" % (asEnumElem(ctx, "AudioControllerType", mach.audioAdapter.audioController), asEnumElem(ctx, "AudioDriverType", mach.audioAdapter.audioDriver))
print " CPU hotplugging [CPUHotPlugEnabled]: %s" % (asState(mach.CPUHotPlugEnabled))
print " Keyboard [keyboardHIDType]: %s (%s)" % (asEnumElem(ctx, "KeyboardHIDType", mach.keyboardHIDType), mach.keyboardHIDType)
print " Pointing device [pointingHIDType]: %s (%s)" % (asEnumElem(ctx, "PointingHIDType", mach.pointingHIDType), mach.pointingHIDType)
print " Last changed [n/a]: " + time.asctime(time.localtime(long(mach.lastStateChange)/1000))
# OSE has no VRDE
try:
print " VRDE server [VRDEServer.enabled]: %s" % (asState(mach.VRDEServer.enabled))
except:
pass
print
print colCat(ctx, " USB Controllers:")
for oUsbCtrl in ctx['global'].getArray(mach, 'USBControllers'):
print " '%s': type %s standard: %#x" \
% (oUsbCtrl.name, asEnumElem(ctx, "USBControllerType", oUsbCtrl.type), oUsbCtrl.USBStandard);
print
print colCat(ctx, " I/O subsystem info:")
print " Cache enabled [IOCacheEnabled]: %s" % (asState(mach.IOCacheEnabled))
print " Cache size [IOCacheSize]: %dM" % (mach.IOCacheSize)
controllers = ctx['global'].getArray(mach, 'storageControllers')
if controllers:
print
print colCat(ctx, " Storage Controllers:")
for controller in controllers:
print " '%s': bus %s type %s" % (controller.name, asEnumElem(ctx, "StorageBus", controller.bus), asEnumElem(ctx, "StorageControllerType", controller.controllerType))
attaches = ctx['global'].getArray(mach, 'mediumAttachments')
if attaches:
print
print colCat(ctx, " Media:")
for a in attaches:
print " Controller: '%s' port/device: %d:%d type: %s (%s):" % (a.controller, a.port, a.device, asEnumElem(ctx, "DeviceType", a.type), a.type)
medium = a.medium
if a.type == ctx['global'].constants.DeviceType_HardDisk:
print " HDD:"
print " Id: %s" % (medium.id)
print " Location: %s" % (colPath(ctx, medium.location))
print " Name: %s" % (medium.name)
print " Format: %s" % (medium.format)
if a.type == ctx['global'].constants.DeviceType_DVD:
print " DVD:"
if medium:
print " Id: %s" % (medium.id)
print " Name: %s" % (medium.name)
if medium.hostDrive:
print " Host DVD %s" % (colPath(ctx, medium.location))
if a.passthrough:
print " [passthrough mode]"
else:
print " Virtual image at %s" % (colPath(ctx, medium.location))
print " Size: %s" % (medium.size)
if a.type == ctx['global'].constants.DeviceType_Floppy:
print " Floppy:"
if medium:
print " Id: %s" % (medium.id)
print " Name: %s" % (medium.name)
if medium.hostDrive:
print " Host floppy %s" % (colPath(ctx, medium.location))
else:
print " Virtual image at %s" % (colPath(ctx, medium.location))
print " Size: %s" % (medium.size)
print
print colCat(ctx, " Shared folders:")
for sf in ctx['global'].getArray(mach, 'sharedFolders'):
printSf(ctx, sf)
return 0
def startCmd(ctx, args):
if len(args) < 2:
print "usage: start name <frontend>"
return 0
mach = argsToMach(ctx, args)
if mach == None:
return 0
if len(args) > 2:
vmtype = args[2]
else:
vmtype = "gui"
startVm(ctx, mach, vmtype)
return 0
def createVmCmd(ctx, args):
if (len(args) != 3):
print "usage: createvm name ostype"
return 0
name = args[1]
oskind = args[2]
try:
ctx['vb'].getGuestOSType(oskind)
except Exception:
print 'Unknown OS type:', oskind
return 0
createVm(ctx, name, oskind)
return 0
def ginfoCmd(ctx, args):
if (len(args) < 2):
print "usage: ginfo [vmname|uuid]"
return 0
mach = argsToMach(ctx, args)
if mach == None:
return 0
cmdExistingVm(ctx, mach, 'ginfo', '')
return 0
def execInGuest(ctx, console, args, env, user, passwd, tmo, inputPipe=None, outputPipe=None):
if len(args) < 1:
print "exec in guest needs at least program name"
return
guest = console.guest
guestSession = guest.createSession(user, passwd, "", "vboxshell guest exec")
# shall contain program name as argv[0]
gargs = args
print "executing %s with args %s as %s" % (args[0], gargs, user)
flags = 0
if inputPipe is not None:
flags = 1 # set WaitForProcessStartOnly
    process = guestSession.processCreate(args[0], gargs, env, [], tmo)
    pid = process.PID
    print "executed with pid %d" % (pid)
    # NOTE: the streaming loop below still targets the older IGuest-based
    # guest-control API (guest.setProcessInput/getProcessOutput and a
    # 'progress' object that its executeProcess call used to return); it was
    # never migrated to the IGuestSession/IGuestProcess interface used above,
    # so this path is known to be incomplete.
    if pid != 0:
try:
while True:
if inputPipe is not None:
indata = inputPipe(ctx)
if indata is not None:
write = len(indata)
off = 0
while write > 0:
w = guest.setProcessInput(pid, 0, 10*1000, indata[off:])
off = off + w
write = write - w
else:
# EOF
try:
guest.setProcessInput(pid, 1, 10*1000, " ")
except:
pass
data = guest.getProcessOutput(pid, 0, 10000, 4096)
if data and len(data) > 0:
sys.stdout.write(data)
continue
progress.waitForCompletion(100)
ctx['global'].waitForEvents(0)
data = guest.getProcessOutput(pid, 0, 0, 4096)
if data and len(data) > 0:
if outputPipe is not None:
outputPipe(ctx, data)
else:
sys.stdout.write(data)
continue
if progress.completed:
break
except KeyboardInterrupt:
print "Interrupted."
ctx['interrupt'] = True
if progress.cancelable:
progress.cancel()
(_reason, code, _flags) = guest.getProcessStatus(pid)
print "Exit code: %d" % (code)
return 0
else:
reportError(ctx, progress)
def copyToGuest(ctx, console, args, user, passwd):
src = args[0]
dst = args[1]
flags = 0
print "Copying host %s to guest %s" % (src, dst)
progress = console.guest.copyToGuest(src, dst, user, passwd, flags)
progressBar(ctx, progress)
def nh_raw_input(prompt=""):
stream = sys.stdout
prompt = str(prompt)
if prompt:
stream.write(prompt)
line = sys.stdin.readline()
if not line:
raise EOFError
if line[-1] == '\n':
line = line[:-1]
return line
def getCred(_ctx):
import getpass
user = getpass.getuser()
user_inp = nh_raw_input("User (%s): " % (user))
if len (user_inp) > 0:
user = user_inp
passwd = getpass.getpass()
return (user, passwd)
def gexecCmd(ctx, args):
if (len(args) < 2):
print "usage: gexec [vmname|uuid] command args"
return 0
mach = argsToMach(ctx, args)
if mach == None:
return 0
gargs = args[2:]
env = [] # ["DISPLAY=:0"]
(user, passwd) = getCred(ctx)
gargs.insert(0, lambda ctx, mach, console, args: execInGuest(ctx, console, args, env, user, passwd, 10000))
cmdExistingVm(ctx, mach, 'guestlambda', gargs)
return 0
def gcopyCmd(ctx, args):
if (len(args) < 2):
print "usage: gcopy [vmname|uuid] host_path guest_path"
return 0
mach = argsToMach(ctx, args)
if mach == None:
return 0
gargs = args[2:]
(user, passwd) = getCred(ctx)
gargs.insert(0, lambda ctx, mach, console, args: copyToGuest(ctx, console, args, user, passwd))
cmdExistingVm(ctx, mach, 'guestlambda', gargs)
return 0
def readCmdPipe(ctx, _hcmd):
try:
return ctx['process'].communicate()[0]
except:
return None
def gpipeCmd(ctx, args):
if (len(args) < 4):
print "usage: gpipe [vmname|uuid] hostProgram guestProgram, such as gpipe linux '/bin/uname -a' '/bin/sh -c \"/usr/bin/tee; /bin/uname -a\"'"
return 0
mach = argsToMach(ctx, args)
if mach == None:
return 0
hcmd = args[2]
gcmd = args[3]
(user, passwd) = getCred(ctx)
import subprocess
ctx['process'] = subprocess.Popen(split_no_quotes(hcmd), stdout=subprocess.PIPE)
gargs = split_no_quotes(gcmd)
env = []
gargs.insert(0, lambda ctx, mach, console, args: execInGuest(ctx, console, args, env, user, passwd, 10000, lambda ctx:readCmdPipe(ctx, hcmd)))
cmdExistingVm(ctx, mach, 'guestlambda', gargs)
try:
ctx['process'].terminate()
except:
pass
ctx['process'] = None
return 0
def removeVmCmd(ctx, args):
mach = argsToMach(ctx, args)
if mach == None:
return 0
removeVm(ctx, mach)
return 0
def pauseCmd(ctx, args):
mach = argsToMach(ctx, args)
if mach == None:
return 0
cmdExistingVm(ctx, mach, 'pause', '')
return 0
def powerdownCmd(ctx, args):
mach = argsToMach(ctx, args)
if mach == None:
return 0
cmdExistingVm(ctx, mach, 'powerdown', '')
return 0
def powerbuttonCmd(ctx, args):
mach = argsToMach(ctx, args)
if mach == None:
return 0
cmdExistingVm(ctx, mach, 'powerbutton', '')
return 0
def resumeCmd(ctx, args):
mach = argsToMach(ctx, args)
if mach == None:
return 0
cmdExistingVm(ctx, mach, 'resume', '')
return 0
def saveCmd(ctx, args):
mach = argsToMach(ctx, args)
if mach == None:
return 0
cmdExistingVm(ctx, mach, 'save', '')
return 0
def statsCmd(ctx, args):
mach = argsToMach(ctx, args)
if mach == None:
return 0
cmdExistingVm(ctx, mach, 'stats', '')
return 0
def guestCmd(ctx, args):
if (len(args) < 3):
print "usage: guest name commands"
return 0
mach = argsToMach(ctx, args)
if mach == None:
return 0
if mach.state != ctx['const'].MachineState_Running:
cmdClosedVm(ctx, mach, lambda ctx, mach, a: guestExec (ctx, mach, None, ' '.join(args[2:])))
else:
cmdExistingVm(ctx, mach, 'guest', ' '.join(args[2:]))
return 0
def screenshotCmd(ctx, args):
if (len(args) < 2):
print "usage: screenshot vm <file> <width> <height> <monitor>"
return 0
mach = argsToMach(ctx, args)
if mach == None:
return 0
cmdExistingVm(ctx, mach, 'screenshot', args[2:])
return 0
def teleportCmd(ctx, args):
if (len(args) < 3):
print "usage: teleport name host:port <password>"
return 0
mach = argsToMach(ctx, args)
if mach == None:
return 0
cmdExistingVm(ctx, mach, 'teleport', args[2:])
return 0
def portalsettings(_ctx, mach, args):
enabled = args[0]
mach.teleporterEnabled = enabled
if enabled:
port = args[1]
passwd = args[2]
mach.teleporterPort = port
mach.teleporterPassword = passwd
def openportalCmd(ctx, args):
if (len(args) < 3):
print "usage: openportal name port <password>"
return 0
mach = argsToMach(ctx, args)
if mach == None:
return 0
port = int(args[2])
if (len(args) > 3):
passwd = args[3]
else:
passwd = ""
if not mach.teleporterEnabled or mach.teleporterPort != port or passwd:
cmdClosedVm(ctx, mach, portalsettings, [True, port, passwd])
startVm(ctx, mach, "gui")
return 0
def closeportalCmd(ctx, args):
if (len(args) < 2):
print "usage: closeportal name"
return 0
mach = argsToMach(ctx, args)
if mach == None:
return 0
if mach.teleporterEnabled:
cmdClosedVm(ctx, mach, portalsettings, [False])
return 0
def gueststatsCmd(ctx, args):
if (len(args) < 2):
print "usage: gueststats name <check interval>"
return 0
mach = argsToMach(ctx, args)
if mach == None:
return 0
cmdExistingVm(ctx, mach, 'gueststats', args[2:])
return 0
def plugcpu(_ctx, mach, args):
plug = args[0]
cpu = args[1]
if plug:
print "Adding CPU %d..." % (cpu)
mach.hotPlugCPU(cpu)
else:
print "Removing CPU %d..." % (cpu)
mach.hotUnplugCPU(cpu)
def plugcpuCmd(ctx, args):
if (len(args) < 2):
print "usage: plugcpu name cpuid"
return 0
mach = argsToMach(ctx, args)
if mach == None:
return 0
if str(mach.sessionState) != str(ctx['const'].SessionState_Locked):
if mach.CPUHotPlugEnabled:
cmdClosedVm(ctx, mach, plugcpu, [True, int(args[2])])
else:
cmdExistingVm(ctx, mach, 'plugcpu', args[2])
return 0
def unplugcpuCmd(ctx, args):
if (len(args) < 2):
print "usage: unplugcpu name cpuid"
return 0
mach = argsToMach(ctx, args)
if mach == None:
return 0
if str(mach.sessionState) != str(ctx['const'].SessionState_Locked):
if mach.CPUHotPlugEnabled:
cmdClosedVm(ctx, mach, plugcpu, [False, int(args[2])])
else:
cmdExistingVm(ctx, mach, 'unplugcpu', args[2])
return 0
def setvar(_ctx, _mach, args):
expr = 'mach.'+args[0]+' = '+args[1]
print "Executing", expr
exec expr
def setvarCmd(ctx, args):
if (len(args) < 4):
print "usage: setvar [vmname|uuid] expr value"
return 0
mach = argsToMach(ctx, args)
if mach == None:
return 0
cmdClosedVm(ctx, mach, setvar, args[2:])
return 0
def setvmextra(_ctx, mach, args):
key = args[0]
value = args[1]
print "%s: setting %s to %s" % (mach.name, key, value)
mach.setExtraData(key, value)
def setExtraDataCmd(ctx, args):
if (len(args) < 3):
print "usage: setextra [vmname|uuid|global] key <value>"
return 0
key = args[2]
if len(args) == 4:
value = args[3]
else:
value = None
if args[1] == 'global':
ctx['vb'].setExtraData(key, value)
return 0
mach = argsToMach(ctx, args)
if mach == None:
return 0
cmdClosedVm(ctx, mach, setvmextra, [key, value])
return 0
def printExtraKey(obj, key, value):
print "%s: '%s' = '%s'" % (obj, key, value)
def getExtraDataCmd(ctx, args):
if (len(args) < 2):
print "usage: getextra [vmname|uuid|global] <key>"
return 0
if len(args) == 3:
key = args[2]
else:
key = None
if args[1] == 'global':
obj = ctx['vb']
else:
obj = argsToMach(ctx, args)
if obj == None:
return 0
if key == None:
keys = obj.getExtraDataKeys()
else:
keys = [ key ]
for k in keys:
printExtraKey(args[1], k, obj.getExtraData(k))
return 0
def quitCmd(_ctx, _args):
return 1
def aliasCmd(ctx, args):
if (len(args) == 3):
aliases[args[1]] = args[2]
return 0
for (key, value) in aliases.items():
print "'%s' is an alias for '%s'" % (key, value)
return 0
def verboseCmd(ctx, args):
global g_fVerbose
if (len(args) > 1):
g_fVerbose = (args[1]=='on')
else:
g_fVerbose = not g_fVerbose
return 0
def colorsCmd(ctx, args):
global g_fHasColors
if (len(args) > 1):
g_fHasColors = (args[1] == 'on')
else:
g_fHasColors = not g_fHasColors
return 0
def hostCmd(ctx, args):
vbox = ctx['vb']
try:
print "VirtualBox version %s" % (colored(vbox.version, 'blue'))
except Exception, e:
printErr(ctx, e)
if g_fVerbose:
traceback.print_exc()
props = vbox.systemProperties
print "Machines: %s" % (colPath(ctx, props.defaultMachineFolder))
#print "Global shared folders:"
#for ud in ctx['global'].getArray(vbox, 'sharedFolders'):
# printSf(ctx, sf)
host = vbox.host
cnt = host.processorCount
print colCat(ctx, "Processors:")
print " available/online: %d/%d " % (cnt, host.processorOnlineCount)
for i in range(0, cnt):
print " processor #%d speed: %dMHz %s" % (i, host.getProcessorSpeed(i), host.getProcessorDescription(i))
print colCat(ctx, "RAM:")
print " %dM (free %dM)" % (host.memorySize, host.memoryAvailable)
print colCat(ctx, "OS:")
print " %s (%s)" % (host.operatingSystem, host.OSVersion)
if host.acceleration3DAvailable:
print colCat(ctx, "3D acceleration available")
else:
print colCat(ctx, "3D acceleration NOT available")
print colCat(ctx, "Network interfaces:")
for ni in ctx['global'].getArray(host, 'networkInterfaces'):
print " %s (%s)" % (ni.name, ni.IPAddress)
print colCat(ctx, "DVD drives:")
for dd in ctx['global'].getArray(host, 'DVDDrives'):
print " %s - %s" % (dd.name, dd.description)
print colCat(ctx, "Floppy drives:")
for dd in ctx['global'].getArray(host, 'floppyDrives'):
print " %s - %s" % (dd.name, dd.description)
print colCat(ctx, "USB devices:")
for ud in ctx['global'].getArray(host, 'USBDevices'):
printHostUsbDev(ctx, ud)
if ctx['perf']:
for metric in ctx['perf'].query(["*"], [host]):
print metric['name'], metric['values_as_string']
return 0
def monitorGuestCmd(ctx, args):
if (len(args) < 2):
print "usage: monitorGuest name (duration)"
return 0
mach = argsToMach(ctx, args)
if mach == None:
return 0
dur = 5
if len(args) > 2:
dur = float(args[2])
active = False
cmdExistingVm(ctx, mach, 'guestlambda', [lambda ctx, mach, console, args: monitorSource(ctx, console.eventSource, active, dur)])
return 0
def monitorGuestKbdCmd(ctx, args):
if (len(args) < 2):
print "usage: monitorGuestKbd name (duration)"
return 0
mach = argsToMach(ctx, args)
if mach == None:
return 0
dur = 5
if len(args) > 2:
dur = float(args[2])
active = False
cmdExistingVm(ctx, mach, 'guestlambda', [lambda ctx, mach, console, args: monitorSource(ctx, console.keyboard.eventSource, active, dur)])
return 0
def monitorGuestMouseCmd(ctx, args):
if (len(args) < 2):
print "usage: monitorGuestMouse name (duration)"
return 0
mach = argsToMach(ctx, args)
if mach == None:
return 0
dur = 5
if len(args) > 2:
dur = float(args[2])
active = False
cmdExistingVm(ctx, mach, 'guestlambda', [lambda ctx, mach, console, args: monitorSource(ctx, console.mouse.eventSource, active, dur)])
return 0
def monitorGuestMultiTouchCmd(ctx, args):
if (len(args) < 2):
print "usage: monitorGuestMultiTouch name (duration)"
return 0
mach = argsToMach(ctx, args)
if mach == None:
return 0
dur = 5
if len(args) > 2:
dur = float(args[2])
active = False
cmdExistingVm(ctx, mach, 'guestlambda', [lambda ctx, mach, console, args: monitorSource(ctx, console.mouse.eventSource, active, dur)])
return 0
def monitorVBoxCmd(ctx, args):
if (len(args) > 2):
print "usage: monitorVBox (duration)"
return 0
dur = 5
if len(args) > 1:
dur = float(args[1])
vbox = ctx['vb']
active = False
monitorSource(ctx, vbox.eventSource, active, dur)
return 0
def getAdapterType(ctx, natype):
if (natype == ctx['global'].constants.NetworkAdapterType_Am79C970A or
natype == ctx['global'].constants.NetworkAdapterType_Am79C973):
return "pcnet"
elif (natype == ctx['global'].constants.NetworkAdapterType_I82540EM or
natype == ctx['global'].constants.NetworkAdapterType_I82545EM or
natype == ctx['global'].constants.NetworkAdapterType_I82543GC):
return "e1000"
elif (natype == ctx['global'].constants.NetworkAdapterType_Virtio):
return "virtio"
elif (natype == ctx['global'].constants.NetworkAdapterType_Null):
return None
else:
raise Exception("Unknown adapter type: "+natype)
def portForwardCmd(ctx, args):
if (len(args) != 5):
print "usage: portForward <vm> <adapter> <hostPort> <guestPort>"
return 0
mach = argsToMach(ctx, args)
if mach == None:
return 0
adapterNum = int(args[2])
hostPort = int(args[3])
guestPort = int(args[4])
proto = "TCP"
session = ctx['global'].openMachineSession(mach)
mach = session.machine
adapter = mach.getNetworkAdapter(adapterNum)
adapterType = getAdapterType(ctx, adapter.adapterType)
profile_name = proto+"_"+str(hostPort)+"_"+str(guestPort)
config = "VBoxInternal/Devices/" + adapterType + "/"
config = config + str(adapter.slot) +"/LUN#0/Config/" + profile_name
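    # Illustration: for an e1000 adapter in slot 0 forwarding host port 2222
    # to guest port 22, the extra-data keys land under
    # VBoxInternal/Devices/e1000/0/LUN#0/Config/TCP_2222_22/{Protocol,HostPort,GuestPort}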
mach.setExtraData(config + "/Protocol", proto)
mach.setExtraData(config + "/HostPort", str(hostPort))
mach.setExtraData(config + "/GuestPort", str(guestPort))
mach.saveSettings()
session.unlockMachine()
return 0
def showLogCmd(ctx, args):
if (len(args) < 2):
print "usage: showLog vm <num>"
return 0
mach = argsToMach(ctx, args)
if mach == None:
return 0
    log = 0
    if (len(args) > 2):
        log = int(args[2])
uOffset = 0
while True:
data = mach.readLog(log, uOffset, 4096)
if (len(data) == 0):
break
# print adds either NL or space to chunks not ending with a NL
sys.stdout.write(str(data))
uOffset += len(data)
return 0
def findLogCmd(ctx, args):
if (len(args) < 3):
print "usage: findLog vm pattern <num>"
return 0
mach = argsToMach(ctx, args)
if mach == None:
return 0
    log = 0
    if (len(args) > 3):
        log = int(args[3])
pattern = args[2]
uOffset = 0
while True:
# to reduce line splits on buffer boundary
data = mach.readLog(log, uOffset, 512*1024)
if (len(data) == 0):
break
d = str(data).split("\n")
for s in d:
match = re.findall(pattern, s)
if len(match) > 0:
for mt in match:
s = s.replace(mt, colored(mt, 'red'))
print s
uOffset += len(data)
return 0
def findAssertCmd(ctx, args):
if (len(args) < 2):
print "usage: findAssert vm <num>"
return 0
mach = argsToMach(ctx, args)
if mach == None:
return 0
    log = 0
    if (len(args) > 2):
        log = int(args[2])
uOffset = 0
ere = re.compile(r'(Expression:|\!\!\!\!\!\!)')
active = False
context = 0
while True:
# to reduce line splits on buffer boundary
data = mach.readLog(log, uOffset, 512*1024)
if (len(data) == 0):
break
d = str(data).split("\n")
for s in d:
if active:
print s
if context == 0:
active = False
else:
context = context - 1
continue
match = ere.findall(s)
if len(match) > 0:
active = True
context = 50
print s
uOffset += len(data)
return 0
def evalCmd(ctx, args):
expr = ' '.join(args[1:])
try:
exec expr
except Exception, e:
printErr(ctx, e)
if g_fVerbose:
traceback.print_exc()
return 0
def reloadExtCmd(ctx, args):
# maybe will want more args smartness
checkUserExtensions(ctx, commands, getHomeFolder(ctx))
autoCompletion(commands, ctx)
return 0
def runScriptCmd(ctx, args):
if (len(args) != 2):
print "usage: runScript <script>"
return 0
try:
lf = open(args[1], 'r')
except IOError, e:
print "cannot open:", args[1], ":", e
return 0
try:
lines = lf.readlines()
ctx['scriptLine'] = 0
ctx['interrupt'] = False
while ctx['scriptLine'] < len(lines):
line = lines[ctx['scriptLine']]
ctx['scriptLine'] = ctx['scriptLine'] + 1
done = runCommand(ctx, line)
if done != 0 or ctx['interrupt']:
break
except Exception, e:
printErr(ctx, e)
if g_fVerbose:
traceback.print_exc()
lf.close()
return 0
def sleepCmd(ctx, args):
if (len(args) != 2):
print "usage: sleep <secs>"
return 0
try:
time.sleep(float(args[1]))
except:
# to allow sleep interrupt
pass
return 0
def shellCmd(ctx, args):
if (len(args) < 2):
print "usage: shell <commands>"
return 0
cmd = ' '.join(args[1:])
try:
os.system(cmd)
except KeyboardInterrupt:
# to allow shell command interruption
pass
return 0
def connectCmd(ctx, args):
if (len(args) > 4):
print "usage: connect url <username> <passwd>"
return 0
if ctx['vb'] is not None:
print "Already connected, disconnect first..."
return 0
if (len(args) > 1):
url = args[1]
else:
url = None
if (len(args) > 2):
user = args[2]
else:
user = ""
if (len(args) > 3):
passwd = args[3]
else:
passwd = ""
ctx['wsinfo'] = [url, user, passwd]
vbox = ctx['global'].platform.connect(url, user, passwd)
ctx['vb'] = vbox
try:
print "Running VirtualBox version %s" % (vbox.version)
except Exception, e:
printErr(ctx, e)
if g_fVerbose:
traceback.print_exc()
ctx['perf'] = ctx['global'].getPerfCollector(ctx['vb'])
return 0
def disconnectCmd(ctx, args):
if (len(args) != 1):
print "usage: disconnect"
return 0
if ctx['vb'] is None:
print "Not connected yet."
return 0
try:
ctx['global'].platform.disconnect()
except:
ctx['vb'] = None
raise
ctx['vb'] = None
return 0
def reconnectCmd(ctx, args):
if ctx['wsinfo'] is None:
print "Never connected..."
return 0
try:
ctx['global'].platform.disconnect()
except:
pass
[url, user, passwd] = ctx['wsinfo']
ctx['vb'] = ctx['global'].platform.connect(url, user, passwd)
try:
print "Running VirtualBox version %s" % (ctx['vb'].version)
except Exception, e:
printErr(ctx, e)
if g_fVerbose:
traceback.print_exc()
return 0
def exportVMCmd(ctx, args):
if len(args) < 3:
print "usage: exportVm <machine> <path> <format> <license>"
return 0
mach = argsToMach(ctx, args)
if mach is None:
return 0
path = args[2]
if (len(args) > 3):
fmt = args[3]
else:
fmt = "ovf-1.0"
if (len(args) > 4):
lic = args[4]
else:
lic = "GPL"
app = ctx['vb'].createAppliance()
desc = mach.export(app)
desc.addDescription(ctx['global'].constants.VirtualSystemDescriptionType_License, lic, "")
progress = app.write(fmt, path)
if (progressBar(ctx, progress) and int(progress.resultCode) == 0):
print "Exported to %s in format %s" % (path, fmt)
else:
reportError(ctx, progress)
return 0
# PC XT scancodes
scancodes = {
'a': 0x1e,
'b': 0x30,
'c': 0x2e,
'd': 0x20,
'e': 0x12,
'f': 0x21,
'g': 0x22,
'h': 0x23,
'i': 0x17,
'j': 0x24,
'k': 0x25,
'l': 0x26,
'm': 0x32,
'n': 0x31,
'o': 0x18,
'p': 0x19,
'q': 0x10,
'r': 0x13,
's': 0x1f,
't': 0x14,
'u': 0x16,
'v': 0x2f,
'w': 0x11,
'x': 0x2d,
'y': 0x15,
'z': 0x2c,
'0': 0x0b,
'1': 0x02,
'2': 0x03,
'3': 0x04,
'4': 0x05,
'5': 0x06,
'6': 0x07,
'7': 0x08,
'8': 0x09,
'9': 0x0a,
' ': 0x39,
'-': 0xc,
'=': 0xd,
'[': 0x1a,
']': 0x1b,
';': 0x27,
'\'': 0x28,
',': 0x33,
'.': 0x34,
'/': 0x35,
'\t': 0xf,
'\n': 0x1c,
'`': 0x29
}
extScancodes = {
'ESC' : [0x01],
'BKSP': [0xe],
'SPACE': [0x39],
'TAB': [0x0f],
'CAPS': [0x3a],
'ENTER': [0x1c],
'LSHIFT': [0x2a],
'RSHIFT': [0x36],
'INS': [0xe0, 0x52],
'DEL': [0xe0, 0x53],
'END': [0xe0, 0x4f],
'HOME': [0xe0, 0x47],
'PGUP': [0xe0, 0x49],
'PGDOWN': [0xe0, 0x51],
'LGUI': [0xe0, 0x5b], # GUI, aka Win, aka Apple key
'RGUI': [0xe0, 0x5c],
'LCTR': [0x1d],
'RCTR': [0xe0, 0x1d],
'LALT': [0x38],
'RALT': [0xe0, 0x38],
'APPS': [0xe0, 0x5d],
'F1': [0x3b],
'F2': [0x3c],
'F3': [0x3d],
'F4': [0x3e],
'F5': [0x3f],
'F6': [0x40],
'F7': [0x41],
'F8': [0x42],
'F9': [0x43],
'F10': [0x44 ],
'F11': [0x57],
'F12': [0x58],
'UP': [0xe0, 0x48],
'LEFT': [0xe0, 0x4b],
'DOWN': [0xe0, 0x50],
'RIGHT': [0xe0, 0x4d],
}
def keyDown(ch):
code = scancodes.get(ch, 0x0)
if code != 0:
return [code]
extCode = extScancodes.get(ch, [])
if len(extCode) == 0:
print "bad ext", ch
return extCode
def keyUp(ch):
codes = keyDown(ch)[:] # make a copy
if len(codes) > 0:
codes[len(codes)-1] += 0x80
return codes
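# Make/break encoding sketch: keyDown('a') -> [0x1e], keyUp('a') -> [0x9e]
# (the last scancode gets 0x80 added); for extended keys,
# keyDown('DEL') -> [0xe0, 0x53] and keyUp('DEL') -> [0xe0, 0xd3].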
def typeInGuest(console, text, delay):
pressed = []
group = False
modGroupEnd = True
i = 0
kbd = console.keyboard
while i < len(text):
ch = text[i]
i = i+1
if ch == '{':
# start group, all keys to be pressed at the same time
group = True
continue
if ch == '}':
# end group, release all keys
for c in pressed:
kbd.putScancodes(keyUp(c))
pressed = []
group = False
continue
if ch == 'W':
# just wait a bit
time.sleep(0.3)
continue
if ch == '^' or ch == '|' or ch == '$' or ch == '_':
if ch == '^':
ch = 'LCTR'
if ch == '|':
ch = 'LSHIFT'
if ch == '_':
ch = 'LALT'
if ch == '$':
ch = 'LGUI'
if not group:
modGroupEnd = False
else:
if ch == '\\':
if i < len(text):
ch = text[i]
i = i+1
if ch == 'n':
ch = '\n'
elif ch == '&':
combo = ""
while i < len(text):
ch = text[i]
i = i+1
if ch == ';':
break
combo += ch
ch = combo
modGroupEnd = True
kbd.putScancodes(keyDown(ch))
pressed.insert(0, ch)
if not group and modGroupEnd:
for c in pressed:
kbd.putScancodes(keyUp(c))
pressed = []
modGroupEnd = True
time.sleep(delay)
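# Mini-language recap: plain characters are typed one by one; '{...}' holds
# everything inside down as one chord; '^', '|', '_' and '$' stand for LCTR,
# LSHIFT, LALT and LGUI; 'W' inserts a 0.3s pause; a backslash escapes the
# next character ('\\n' becomes newline); '&NAME;' looks up an extended key
# from extScancodes. E.g. (sketch):
#   typeInGuest(console, "{^_&DEL;}", 0.05)
# sends Ctrl+Alt+Del as a single chord.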
def typeGuestCmd(ctx, args):
if len(args) < 3:
print "usage: typeGuest <machine> <text> <charDelay>"
return 0
mach = argsToMach(ctx, args)
if mach is None:
return 0
text = args[2]
if len(args) > 3:
delay = float(args[3])
else:
delay = 0.1
gargs = [lambda ctx, mach, console, args: typeInGuest(console, text, delay)]
cmdExistingVm(ctx, mach, 'guestlambda', gargs)
return 0
def optId(verbose, uuid):
if verbose:
return ": "+uuid
else:
return ""
def asSize(val, inBytes):
if inBytes:
return int(val)/(1024*1024)
else:
return int(val)
def listMediaCmd(ctx, args):
if len(args) > 1:
verbose = int(args[1])
else:
verbose = False
hdds = ctx['global'].getArray(ctx['vb'], 'hardDisks')
print colCat(ctx, "Hard disks:")
for hdd in hdds:
if hdd.state != ctx['global'].constants.MediumState_Created:
hdd.refreshState()
print " %s (%s)%s %s [logical %s]" % (colPath(ctx, hdd.location), hdd.format, optId(verbose, hdd.id), colSizeM(ctx, asSize(hdd.size, True)), colSizeM(ctx, asSize(hdd.logicalSize, True)))
dvds = ctx['global'].getArray(ctx['vb'], 'DVDImages')
print colCat(ctx, "CD/DVD disks:")
for dvd in dvds:
if dvd.state != ctx['global'].constants.MediumState_Created:
dvd.refreshState()
print " %s (%s)%s %s" % (colPath(ctx, dvd.location), dvd.format, optId(verbose, dvd.id), colSizeM(ctx, asSize(dvd.size, True)))
floppys = ctx['global'].getArray(ctx['vb'], 'floppyImages')
print colCat(ctx, "Floppy disks:")
for floppy in floppys:
if floppy.state != ctx['global'].constants.MediumState_Created:
floppy.refreshState()
print " %s (%s)%s %s" % (colPath(ctx, floppy.location), floppy.format, optId(verbose, floppy.id), colSizeM(ctx, asSize(floppy.size, True)))
return 0
def listUsbCmd(ctx, args):
if (len(args) > 1):
print "usage: listUsb"
return 0
host = ctx['vb'].host
for ud in ctx['global'].getArray(host, 'USBDevices'):
printHostUsbDev(ctx, ud)
return 0
def findDevOfType(ctx, mach, devtype):
atts = ctx['global'].getArray(mach, 'mediumAttachments')
for a in atts:
if a.type == devtype:
return [a.controller, a.port, a.device]
return [None, 0, 0]
def createHddCmd(ctx, args):
if (len(args) < 3):
print "usage: createHdd sizeM location type"
return 0
size = int(args[1])
loc = args[2]
if len(args) > 3:
fmt = args[3]
else:
fmt = "vdi"
    hdd = ctx['vb'].createHardDisk(fmt, loc)
progress = hdd.createBaseStorage(size, (ctx['global'].constants.MediumVariant_Standard, ))
if progressBar(ctx,progress) and hdd.id:
print "created HDD at %s as %s" % (colPath(ctx,hdd.location), hdd.id)
else:
print "cannot create disk (file %s exist?)" % (loc)
reportError(ctx,progress)
return 0
return 0
def registerHddCmd(ctx, args):
if (len(args) < 2):
print "usage: registerHdd location"
return 0
vbox = ctx['vb']
loc = args[1]
setImageId = False
imageId = ""
setParentId = False
parentId = ""
    hdd = vbox.openMedium(loc, ctx['global'].constants.DeviceType_HardDisk, ctx['global'].constants.AccessMode_ReadWrite, False)
print "registered HDD as %s" % (hdd.id)
return 0
def controldevice(ctx, mach, args):
[ctr, port, slot, devtype, uuid] = args
mach.attachDevice(ctr, port, slot, devtype, uuid)
def attachHddCmd(ctx, args):
if (len(args) < 3):
print "usage: attachHdd vm hdd controller port:slot"
return 0
mach = argsToMach(ctx, args)
if mach is None:
return 0
vbox = ctx['vb']
loc = args[2]
try:
        hdd = vbox.openMedium(loc, ctx['global'].constants.DeviceType_HardDisk, ctx['global'].constants.AccessMode_ReadWrite, False)
except:
print "no HDD with path %s registered" % (loc)
return 0
if len(args) > 3:
ctr = args[3]
(port, slot) = args[4].split(":")
else:
[ctr, port, slot] = findDevOfType(ctx, mach, ctx['global'].constants.DeviceType_HardDisk)
cmdClosedVm(ctx, mach, lambda ctx, mach, args: mach.attachDevice(ctr, port, slot, ctx['global'].constants.DeviceType_HardDisk, hdd.id))
return 0
def detachVmDevice(ctx, mach, args):
atts = ctx['global'].getArray(mach, 'mediumAttachments')
hid = args[0]
for a in atts:
if a.medium:
if hid == "ALL" or a.medium.id == hid:
mach.detachDevice(a.controller, a.port, a.device)
def detachMedium(ctx, mid, medium):
cmdClosedVm(ctx, machById(ctx, mid), detachVmDevice, [medium])
def detachHddCmd(ctx, args):
if (len(args) < 3):
print "usage: detachHdd vm hdd"
return 0
mach = argsToMach(ctx, args)
if mach is None:
return 0
vbox = ctx['vb']
loc = args[2]
try:
        hdd = vbox.openMedium(loc, ctx['global'].constants.DeviceType_HardDisk, ctx['global'].constants.AccessMode_ReadWrite, False)
except:
print "no HDD with path %s registered" % (loc)
return 0
detachMedium(ctx, mach.id, hdd)
return 0
def unregisterHddCmd(ctx, args):
if (len(args) < 2):
print "usage: unregisterHdd path <vmunreg>"
return 0
vbox = ctx['vb']
loc = args[1]
if (len(args) > 2):
vmunreg = int(args[2])
else:
vmunreg = 0
try:
        hdd = vbox.openMedium(loc, ctx['global'].constants.DeviceType_HardDisk, ctx['global'].constants.AccessMode_ReadWrite, False)
except:
print "no HDD with path %s registered" % (loc)
return 0
if vmunreg != 0:
machs = ctx['global'].getArray(hdd, 'machineIds')
try:
for mach in machs:
print "Trying to detach from %s" % (mach)
detachMedium(ctx, mach, hdd)
except Exception, e:
print 'failed: ', e
return 0
hdd.close()
return 0
def removeHddCmd(ctx, args):
if (len(args) != 2):
print "usage: removeHdd path"
return 0
vbox = ctx['vb']
loc = args[1]
try:
        hdd = vbox.openMedium(loc, ctx['global'].constants.DeviceType_HardDisk, ctx['global'].constants.AccessMode_ReadWrite, False)
except:
print "no HDD with path %s registered" % (loc)
return 0
progress = hdd.deleteStorage()
progressBar(ctx, progress)
return 0
def registerIsoCmd(ctx, args):
if (len(args) < 2):
print "usage: registerIso location"
return 0
vbox = ctx['vb']
loc = args[1]
    iso = vbox.openMedium(loc, ctx['global'].constants.DeviceType_DVD, ctx['global'].constants.AccessMode_ReadOnly, False)
print "registered ISO as %s" % (iso.id)
return 0
def unregisterIsoCmd(ctx, args):
if (len(args) != 2):
print "usage: unregisterIso path"
return 0
vbox = ctx['vb']
loc = args[1]
try:
        dvd = vbox.openMedium(loc, ctx['global'].constants.DeviceType_DVD, ctx['global'].constants.AccessMode_ReadOnly, False)
except:
print "no DVD with path %s registered" % (loc)
return 0
    dvd.close()
print "Unregistered ISO at %s" % (colPath(ctx, loc))
return 0
def removeIsoCmd(ctx, args):
if (len(args) != 2):
print "usage: removeIso path"
return 0
vbox = ctx['vb']
loc = args[1]
try:
        dvd = vbox.openMedium(loc, ctx['global'].constants.DeviceType_DVD, ctx['global'].constants.AccessMode_ReadOnly, False)
except:
print "no DVD with path %s registered" % (loc)
return 0
progress = dvd.deleteStorage()
if progressBar(ctx, progress):
print "Removed ISO at %s" % (colPath(ctx, dvd.location))
else:
reportError(ctx, progress)
return 0
def attachIsoCmd(ctx, args):
if (len(args) < 3):
print "usage: attachIso vm iso controller port:slot"
return 0
mach = argsToMach(ctx, args)
if mach is None:
return 0
vbox = ctx['vb']
loc = args[2]
try:
        dvd = vbox.openMedium(loc, ctx['global'].constants.DeviceType_DVD, ctx['global'].constants.AccessMode_ReadOnly, False)
except:
print "no DVD with path %s registered" % (loc)
return 0
if len(args) > 3:
ctr = args[3]
(port, slot) = args[4].split(":")
else:
[ctr, port, slot] = findDevOfType(ctx, mach, ctx['global'].constants.DeviceType_DVD)
cmdClosedVm(ctx, mach, lambda ctx, mach, args: mach.attachDevice(ctr, port, slot, ctx['global'].constants.DeviceType_DVD, dvd))
return 0
def detachIsoCmd(ctx, args):
if (len(args) < 3):
print "usage: detachIso vm iso"
return 0
mach = argsToMach(ctx, args)
if mach is None:
return 0
vbox = ctx['vb']
loc = args[2]
try:
        dvd = vbox.openMedium(loc, ctx['global'].constants.DeviceType_DVD, ctx['global'].constants.AccessMode_ReadOnly, False)
except:
print "no DVD with path %s registered" % (loc)
return 0
detachMedium(ctx, mach.id, dvd)
return 0
def mountIsoCmd(ctx, args):
if (len(args) < 3):
print "usage: mountIso vm iso controller port:slot"
return 0
mach = argsToMach(ctx, args)
if mach is None:
return 0
vbox = ctx['vb']
loc = args[2]
try:
        dvd = vbox.openMedium(loc, ctx['global'].constants.DeviceType_DVD, ctx['global'].constants.AccessMode_ReadOnly, False)
except:
print "no DVD with path %s registered" % (loc)
return 0
if len(args) > 3:
ctr = args[3]
(port, slot) = args[4].split(":")
else:
# autodetect controller and location, just find first controller with media == DVD
[ctr, port, slot] = findDevOfType(ctx, mach, ctx['global'].constants.DeviceType_DVD)
cmdExistingVm(ctx, mach, 'mountiso', [ctr, port, slot, dvd, True])
return 0
def unmountIsoCmd(ctx, args):
if (len(args) < 2):
print "usage: unmountIso vm controller port:slot"
return 0
mach = argsToMach(ctx, args)
if mach is None:
return 0
vbox = ctx['vb']
if len(args) > 3:
ctr = args[2]
(port, slot) = args[3].split(":")
else:
# autodetect controller and location, just find first controller with media == DVD
[ctr, port, slot] = findDevOfType(ctx, mach, ctx['global'].constants.DeviceType_DVD)
cmdExistingVm(ctx, mach, 'mountiso', [ctr, port, slot, None, True])
return 0
def attachCtr(ctx, mach, args):
[name, bus, ctrltype] = args
ctr = mach.addStorageController(name, bus)
if ctrltype != None:
ctr.controllerType = ctrltype
def attachCtrCmd(ctx, args):
if (len(args) < 4):
print "usage: attachCtr vm cname bus <type>"
return 0
if len(args) > 4:
ctrltype = enumFromString(ctx, 'StorageControllerType', args[4])
if ctrltype == None:
print "Controller type %s unknown" % (args[4])
return 0
else:
ctrltype = None
mach = argsToMach(ctx, args)
if mach is None:
return 0
bus = enumFromString(ctx, 'StorageBus', args[3])
if bus is None:
print "Bus type %s unknown" % (args[3])
return 0
name = args[2]
cmdClosedVm(ctx, mach, attachCtr, [name, bus, ctrltype])
return 0
def detachCtrCmd(ctx, args):
if (len(args) < 3):
print "usage: detachCtr vm name"
return 0
mach = argsToMach(ctx, args)
if mach is None:
return 0
ctr = args[2]
cmdClosedVm(ctx, mach, lambda ctx, mach, args: mach.removeStorageController(ctr))
return 0
def usbctr(ctx, mach, console, args):
if (args[0]):
console.attachUSBDevice(args[1])
else:
console.detachUSBDevice(args[1])
def attachUsbCmd(ctx, args):
if (len(args) < 3):
print "usage: attachUsb vm deviceuid"
return 0
mach = argsToMach(ctx, args)
if mach is None:
return 0
dev = args[2]
cmdExistingVm(ctx, mach, 'guestlambda', [usbctr, True, dev])
return 0
def detachUsbCmd(ctx, args):
if (len(args) < 3):
print "usage: detachUsb vm deviceuid"
return 0
mach = argsToMach(ctx, args)
if mach is None:
return 0
dev = args[2]
cmdExistingVm(ctx, mach, 'guestlambda', [usbctr, False, dev])
return 0
def guiCmd(ctx, args):
if (len(args) > 1):
print "usage: gui"
return 0
binDir = ctx['global'].getBinDir()
vbox = os.path.join(binDir, 'VirtualBox')
try:
os.system(vbox)
except KeyboardInterrupt:
# to allow interruption
pass
return 0
def shareFolderCmd(ctx, args):
if (len(args) < 4):
print "usage: shareFolder vm path name <writable> <persistent>"
return 0
mach = argsToMach(ctx, args)
if mach is None:
return 0
path = args[2]
name = args[3]
writable = False
persistent = False
if len(args) > 4:
for a in args[4:]:
if a == 'writable':
writable = True
if a == 'persistent':
persistent = True
if persistent:
cmdClosedVm(ctx, mach, lambda ctx, mach, args: mach.createSharedFolder(name, path, writable), [])
else:
cmdExistingVm(ctx, mach, 'guestlambda', [lambda ctx, mach, console, args: console.createSharedFolder(name, path, writable)])
return 0
def unshareFolderCmd(ctx, args):
if (len(args) < 3):
print "usage: unshareFolder vm name"
return 0
mach = argsToMach(ctx, args)
if mach is None:
return 0
name = args[2]
found = False
for sf in ctx['global'].getArray(mach, 'sharedFolders'):
if sf.name == name:
cmdClosedVm(ctx, mach, lambda ctx, mach, args: mach.removeSharedFolder(name), [])
found = True
break
if not found:
cmdExistingVm(ctx, mach, 'guestlambda', [lambda ctx, mach, console, args: console.removeSharedFolder(name)])
return 0
def snapshotCmd(ctx, args):
if (len(args) < 2 or args[1] == 'help'):
print "Take snapshot: snapshot vm take name <description>"
print "Restore snapshot: snapshot vm restore name"
print "Merge snapshot: snapshot vm merge name"
return 0
mach = argsToMach(ctx, args)
if mach is None:
return 0
cmd = args[2]
if cmd == 'take':
if (len(args) < 4):
print "usage: snapshot vm take name <description>"
return 0
name = args[3]
if (len(args) > 4):
desc = args[4]
else:
desc = ""
cmdAnyVm(ctx, mach, lambda ctx, mach, console, args: progressBar(ctx, console.takeSnapshot(name, desc)))
return 0
if cmd == 'restore':
if (len(args) < 4):
print "usage: snapshot vm restore name"
return 0
name = args[3]
snap = mach.findSnapshot(name)
cmdAnyVm(ctx, mach, lambda ctx, mach, console, args: progressBar(ctx, console.restoreSnapshot(snap)))
return 0
if cmd == 'restorecurrent':
        if (len(args) < 3):
print "usage: snapshot vm restorecurrent"
return 0
        snap = mach.currentSnapshot
cmdAnyVm(ctx, mach, lambda ctx, mach, console, args: progressBar(ctx, console.restoreSnapshot(snap)))
return 0
if cmd == 'delete':
if (len(args) < 4):
print "usage: snapshot vm delete name"
return 0
name = args[3]
snap = mach.findSnapshot(name)
cmdAnyVm(ctx, mach, lambda ctx, mach, console, args: progressBar(ctx, console.deleteSnapshot(snap.id)))
return 0
print "Command '%s' is unknown" % (cmd)
return 0
def natAlias(ctx, mach, nicnum, nat, args=[]):
"""This command shows/alters NAT's alias settings.
usage: nat <vm> <nicnum> alias [default|[log] [proxyonly] [sameports]]
default - set settings to default values
log - switch on alias logging
proxyonly - switch proxyonly mode on
sameports - enforces NAT using the same ports
"""
alias = {
'log': 0x1,
'proxyonly': 0x2,
'sameports': 0x4
}
if len(args) == 1:
first = 0
msg = ''
for aliasmode, aliaskey in alias.iteritems():
if first == 0:
first = 1
else:
msg += ', '
            if int(nat.aliasMode) & aliaskey:
                msg += '%s: %s' % (aliasmode, 'on')
            else:
                msg += '%s: %s' % (aliasmode, 'off')
return (0, [msg])
else:
nat.aliasMode = 0
if 'default' not in args:
for a in range(1, len(args)):
if not alias.has_key(args[a]):
print 'Invalid alias mode: ' + args[a]
print natAlias.__doc__
return (1, None)
nat.aliasMode = int(nat.aliasMode) | alias[args[a]]
return (0, None)
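# For example, 'nat Win32 0 alias log sameports' (VM name illustrative) sets
# aliasMode to 0x1 | 0x4 = 0x5, while 'nat Win32 0 alias' reports each flag
# as on/off.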
def natSettings(ctx, mach, nicnum, nat, args):
"""This command shows/alters NAT settings.
usage: nat <vm> <nicnum> settings [<mtu> [[<socsndbuf> <sockrcvbuf> [<tcpsndwnd> <tcprcvwnd>]]]]
mtu - set mtu <= 16000
socksndbuf/sockrcvbuf - sets amount of kb for socket sending/receiving buffer
tcpsndwnd/tcprcvwnd - sets size of initial tcp sending/receiving window
"""
if len(args) == 1:
(mtu, socksndbuf, sockrcvbuf, tcpsndwnd, tcprcvwnd) = nat.getNetworkSettings()
if mtu == 0: mtu = 1500
if socksndbuf == 0: socksndbuf = 64
if sockrcvbuf == 0: sockrcvbuf = 64
if tcpsndwnd == 0: tcpsndwnd = 64
if tcprcvwnd == 0: tcprcvwnd = 64
msg = 'mtu:%s socket(snd:%s, rcv:%s) tcpwnd(snd:%s, rcv:%s)' % (mtu, socksndbuf, sockrcvbuf, tcpsndwnd, tcprcvwnd)
return (0, [msg])
else:
        if not args[1].isdigit() or int(args[1]) < 65 or int(args[1]) > 16000:
            print 'invalid mtu value (%s not in range [65 - 16000])' % (args[1])
return (1, None)
for i in range(2, len(args)):
if not args[i].isdigit() or int(args[i]) < 8 or int(args[i]) > 1024:
                print 'invalid parameter %d (%s not in range [8-1024])' % (i, args[i])
return (1, None)
a = [args[1]]
if len(args) < 6:
for i in range(2, len(args)): a.append(args[i])
for i in range(len(args), 6): a.append(0)
else:
for i in range(2, len(args)): a.append(args[i])
#print a
nat.setNetworkSettings(int(a[0]), int(a[1]), int(a[2]), int(a[3]), int(a[4]))
return (0, None)
def natDns(ctx, mach, nicnum, nat, args):
"""This command shows/alters DNS's NAT settings
usage: nat <vm> <nicnum> dns [passdomain] [proxy] [usehostresolver]
passdomain - enforces builtin DHCP server to pass domain
proxy - switch on builtin NAT DNS proxying mechanism
usehostresolver - proxies all DNS requests to Host Resolver interface
"""
yesno = {0: 'off', 1: 'on'}
if len(args) == 1:
msg = 'passdomain:%s, proxy:%s, usehostresolver:%s' % (yesno[int(nat.DNSPassDomain)], yesno[int(nat.DNSProxy)], yesno[int(nat.DNSUseHostResolver)])
return (0, [msg])
else:
nat.DNSPassDomain = 'passdomain' in args
nat.DNSProxy = 'proxy' in args
nat.DNSUseHostResolver = 'usehostresolver' in args
return (0, None)
def natTftp(ctx, mach, nicnum, nat, args):
"""This command shows/alters TFTP settings
    usage: nat <vm> <nicnum> tftp [prefix <prefix>|bootfile <bootfile>|server <server>]
prefix - alters prefix TFTP settings
bootfile - alters bootfile TFTP settings
server - sets booting server
"""
if len(args) == 1:
server = nat.TFTPNextServer
if server is None:
server = nat.network
if server is None:
server = '10.0.%d/24' % (int(nicnum) + 2)
(server, mask) = server.split('/')
while server.count('.') != 3:
server += '.0'
(a, b, c, d) = server.split('.')
        server = '%s.%s.%s.4' % (a, b, c)
prefix = nat.TFTPPrefix
if prefix is None:
prefix = '%s/TFTP/' % (ctx['vb'].homeFolder)
bootfile = nat.TFTPBootFile
if bootfile is None:
bootfile = '%s.pxe' % (mach.name)
msg = 'server:%s, prefix:%s, bootfile:%s' % (server, prefix, bootfile)
return (0, [msg])
else:
cmd = args[1]
if len(args) != 3:
print 'invalid args:', args
print natTftp.__doc__
return (1, None)
if cmd == 'prefix': nat.TFTPPrefix = args[2]
elif cmd == 'bootfile': nat.TFTPBootFile = args[2]
elif cmd == 'server': nat.TFTPNextServer = args[2]
else:
print "invalid cmd:", cmd
return (1, None)
return (0, None)
def natPortForwarding(ctx, mach, nicnum, nat, args):
"""This command shows/manages port-forwarding settings
usage:
nat <vm> <nicnum> <pf> [ simple tcp|udp <hostport> <guestport>]
|[no_name tcp|udp <hostip> <hostport> <guestip> <guestport>]
|[ex tcp|udp <pf-name> <hostip> <hostport> <guestip> <guestport>]
|[delete <pf-name>]
"""
if len(args) == 1:
# note: keys/values are swapped in defining part of the function
proto = {0: 'udp', 1: 'tcp'}
msg = []
pfs = ctx['global'].getArray(nat, 'redirects')
for pf in pfs:
(pfnme, pfp, pfhip, pfhp, pfgip, pfgp) = str(pf).split(', ')
msg.append('%s: %s %s:%s => %s:%s' % (pfnme, proto[int(pfp)], pfhip, pfhp, pfgip, pfgp))
return (0, msg) # msg is array
else:
proto = {'udp': 0, 'tcp': 1}
pfcmd = {
'simple': {
'validate': lambda: args[1] in pfcmd.keys() and args[2] in proto.keys() and len(args) == 5,
'func':lambda: nat.addRedirect('', proto[args[2]], '', int(args[3]), '', int(args[4]))
},
'no_name': {
'validate': lambda: args[1] in pfcmd.keys() and args[2] in proto.keys() and len(args) == 7,
'func': lambda: nat.addRedirect('', proto[args[2]], args[3], int(args[4]), args[5], int(args[6]))
},
'ex': {
'validate': lambda: args[1] in pfcmd.keys() and args[2] in proto.keys() and len(args) == 8,
'func': lambda: nat.addRedirect(args[3], proto[args[2]], args[4], int(args[5]), args[6], int(args[7]))
},
'delete': {
'validate': lambda: len(args) == 3,
'func': lambda: nat.removeRedirect(args[2])
}
}
        if args[1] not in pfcmd or not pfcmd[args[1]]['validate']():
print 'invalid port-forwarding or args of sub command ', args[1]
print natPortForwarding.__doc__
return (1, None)
a = pfcmd[args[1]]['func']()
return (0, None)
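# For example, 'nat Win32 0 pf simple tcp 8080 80' (VM name illustrative)
# forwards host TCP port 8080 to guest port 80, and
# 'nat Win32 0 pf delete <pf-name>' removes a rule again.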
def natNetwork(ctx, mach, nicnum, nat, args):
"""This command shows/alters NAT network settings
usage: nat <vm> <nicnum> network [<network>]
"""
if len(args) == 1:
if nat.network is not None and len(str(nat.network)) != 0:
msg = '\'%s\'' % (nat.network)
else:
msg = '10.0.%d.0/24' % (int(nicnum) + 2)
return (0, [msg])
else:
(addr, mask) = args[1].split('/')
if addr.count('.') > 3 or int(mask) < 0 or int(mask) > 32:
print 'Invalid arguments'
return (1, None)
nat.network = args[1]
return (0, None)
def natCmd(ctx, args):
"""This command is entry point to NAT settins management
usage: nat <vm> <nicnum> <cmd> <cmd-args>
cmd - [alias|settings|tftp|dns|pf|network]
for more information about commands:
nat help <cmd>
"""
natcommands = {
'alias' : natAlias,
'settings' : natSettings,
'tftp': natTftp,
'dns': natDns,
'pf': natPortForwarding,
'network': natNetwork
}
if len(args) < 2 or args[1] == 'help':
if len(args) > 2:
print natcommands[args[2]].__doc__
else:
print natCmd.__doc__
return 0
if len(args) == 1 or len(args) < 4 or args[3] not in natcommands:
print natCmd.__doc__
return 0
mach = ctx['argsToMach'](args)
if mach == None:
print "please specify vm"
return 0
if len(args) < 3 or not args[2].isdigit() or int(args[2]) not in range(0, ctx['vb'].systemProperties.getMaxNetworkAdapters(mach.chipsetType)):
        print 'please specify an adapter number in range [0-%d]' % (ctx['vb'].systemProperties.getMaxNetworkAdapters(mach.chipsetType) - 1)
return 0
nicnum = int(args[2])
cmdargs = []
for i in range(3, len(args)):
cmdargs.append(args[i])
# @todo vvl if nicnum is missed but command is entered
# use NAT func for every adapter on machine.
func = args[3]
rosession = 1
session = None
if len(cmdargs) > 1:
rosession = 0
session = ctx['global'].openMachineSession(mach, False)
mach = session.machine
adapter = mach.getNetworkAdapter(nicnum)
natEngine = adapter.NATEngine
(rc, report) = natcommands[func](ctx, mach, nicnum, natEngine, cmdargs)
if rosession == 0:
if rc == 0:
mach.saveSettings()
session.unlockMachine()
elif report is not None:
for r in report:
msg ='%s nic%d %s: %s' % (mach.name, nicnum, func, r)
print msg
return 0
def nicSwitchOnOff(adapter, attr, args):
if len(args) == 1:
yesno = {0: 'off', 1: 'on'}
r = yesno[int(adapter.__getattr__(attr))]
return (0, r)
else:
yesno = {'off' : 0, 'on' : 1}
if args[1] not in yesno:
print '%s isn\'t acceptable, please choose %s' % (args[1], yesno.keys())
return (1, None)
adapter.__setattr__(attr, yesno[args[1]])
return (0, None)
def nicTraceSubCmd(ctx, vm, nicnum, adapter, args):
'''
usage: nic <vm> <nicnum> trace [on|off [file]]
'''
(rc, r) = nicSwitchOnOff(adapter, 'traceEnabled', args)
if len(args) == 1 and rc == 0:
r = '%s file:%s' % (r, adapter.traceFile)
return (0, r)
elif len(args) == 3 and rc == 0:
adapter.traceFile = args[2]
return (0, None)
def nicLineSpeedSubCmd(ctx, vm, nicnum, adapter, args):
if len(args) == 1:
r = '%d kbps'% (adapter.lineSpeed)
return (0, r)
else:
if not args[1].isdigit():
print '%s isn\'t a number' % (args[1])
            return (1, None)
adapter.lineSpeed = int(args[1])
return (0, None)
def nicCableSubCmd(ctx, vm, nicnum, adapter, args):
'''
usage: nic <vm> <nicnum> cable [on|off]
'''
return nicSwitchOnOff(adapter, 'cableConnected', args)
def nicEnableSubCmd(ctx, vm, nicnum, adapter, args):
'''
usage: nic <vm> <nicnum> enable [on|off]
'''
return nicSwitchOnOff(adapter, 'enabled', args)
def nicTypeSubCmd(ctx, vm, nicnum, adapter, args):
'''
    usage: nic <vm> <nicnum> type [Am79c970A|Am79c973|I82540EM|I82545EM|I82543GC|Virtio]
'''
if len(args) == 1:
nictypes = ctx['const'].all_values('NetworkAdapterType')
for key in nictypes.keys():
if str(adapter.adapterType) == str(nictypes[key]):
return (0, str(key))
return (1, None)
else:
nictypes = ctx['const'].all_values('NetworkAdapterType')
if args[1] not in nictypes.keys():
print '%s not in acceptable values (%s)' % (args[1], nictypes.keys())
return (1, None)
adapter.adapterType = nictypes[args[1]]
return (0, None)
def nicAttachmentSubCmd(ctx, vm, nicnum, adapter, args):
'''
    usage: nic <vm> <nicnum> attachment [Null|NAT|Bridged <interface>|Internal <name>|HostOnly <interface>]
'''
if len(args) == 1:
nicAttachmentType = {
ctx['global'].constants.NetworkAttachmentType_Null: ('Null', ''),
ctx['global'].constants.NetworkAttachmentType_NAT: ('NAT', ''),
ctx['global'].constants.NetworkAttachmentType_Bridged: ('Bridged', adapter.bridgedInterface),
ctx['global'].constants.NetworkAttachmentType_Internal: ('Internal', adapter.internalNetwork),
ctx['global'].constants.NetworkAttachmentType_HostOnly: ('HostOnly', adapter.hostOnlyInterface),
# @todo show details of the generic network attachment type
ctx['global'].constants.NetworkAttachmentType_Generic: ('Generic', ''),
}
import types
if type(adapter.attachmentType) != types.IntType:
t = str(adapter.attachmentType)
else:
t = adapter.attachmentType
(r, p) = nicAttachmentType[t]
return (0, 'attachment:%s, name:%s' % (r, p))
else:
nicAttachmentType = {
'Null': {
'v': lambda: len(args) == 2,
'p': lambda: 'do nothing',
'f': lambda: ctx['global'].constants.NetworkAttachmentType_Null},
'NAT': {
'v': lambda: len(args) == 2,
'p': lambda: 'do nothing',
'f': lambda: ctx['global'].constants.NetworkAttachmentType_NAT},
'Bridged': {
'v': lambda: len(args) == 3,
'p': lambda: adapter.__setattr__('bridgedInterface', args[2]),
'f': lambda: ctx['global'].constants.NetworkAttachmentType_Bridged},
'Internal': {
'v': lambda: len(args) == 3,
'p': lambda: adapter.__setattr__('internalNetwork', args[2]),
'f': lambda: ctx['global'].constants.NetworkAttachmentType_Internal},
'HostOnly': {
                'v': lambda: len(args) == 3,
'p': lambda: adapter.__setattr__('hostOnlyInterface', args[2]),
'f': lambda: ctx['global'].constants.NetworkAttachmentType_HostOnly},
# @todo implement setting the properties of a generic attachment
'Generic': {
'v': lambda: len(args) == 3,
'p': lambda: 'do nothing',
'f': lambda: ctx['global'].constants.NetworkAttachmentType_Generic}
}
if args[1] not in nicAttachmentType.keys():
print '%s not in acceptable values (%s)' % (args[1], nicAttachmentType.keys())
return (1, None)
if not nicAttachmentType[args[1]]['v']():
            print nicAttachmentSubCmd.__doc__
return (1, None)
nicAttachmentType[args[1]]['p']()
adapter.attachmentType = nicAttachmentType[args[1]]['f']()
return (0, None)
def nicCmd(ctx, args):
'''
    This command manages network adapters
usage: nic <vm> <nicnum> <cmd> <cmd-args>
where cmd : attachment, trace, linespeed, cable, enable, type
'''
    # maps sub-command name to its handler function
niccomand = {
'attachment': nicAttachmentSubCmd,
'trace': nicTraceSubCmd,
'linespeed': nicLineSpeedSubCmd,
'cable': nicCableSubCmd,
'enable': nicEnableSubCmd,
'type': nicTypeSubCmd
}
    if len(args) < 4 \
        or args[1] == 'help' \
        or args[3] not in niccomand:
if len(args) == 3 \
and args[2] in niccomand:
print niccomand[args[2]].__doc__
else:
print nicCmd.__doc__
return 0
vm = ctx['argsToMach'](args)
if vm is None:
print 'please specify vm'
return 0
    if len(args) < 3 or not args[2].isdigit() \
            or int(args[2]) not in range(0, ctx['vb'].systemProperties.getMaxNetworkAdapters(vm.chipsetType)):
        print 'please specify an adapter number in range [0-%d]' % (ctx['vb'].systemProperties.getMaxNetworkAdapters(vm.chipsetType) - 1)
return 0
nicnum = int(args[2])
cmdargs = args[3:]
func = args[3]
session = None
session = ctx['global'].openMachineSession(vm)
vm = session.machine
adapter = vm.getNetworkAdapter(nicnum)
(rc, report) = niccomand[func](ctx, vm, nicnum, adapter, cmdargs)
if rc == 0:
vm.saveSettings()
if report is not None:
print '%s nic %d %s: %s' % (vm.name, nicnum, args[3], report)
session.unlockMachine()
return 0
def promptCmd(ctx, args):
if len(args) < 2:
print "Current prompt: '%s'" % (ctx['prompt'])
return 0
ctx['prompt'] = args[1]
return 0
def foreachCmd(ctx, args):
if len(args) < 3:
print "usage: foreach scope command, where scope is XPath-like expression //vms/vm[@CPUCount='2']"
return 0
scope = args[1]
cmd = args[2]
elems = eval_xpath(ctx, scope)
try:
for e in elems:
e.apply(cmd)
except:
print "Error executing"
traceback.print_exc()
return 0
def foreachvmCmd(ctx, args):
if len(args) < 2:
print "foreachvm command <args>"
return 0
cmdargs = args[1:]
cmdargs.insert(1, '')
for mach in getMachines(ctx):
cmdargs[1] = mach.id
runCommandArgs(ctx, cmdargs)
return 0
def recordDemoCmd(ctx, args):
if (len(args) < 3):
print "usage: recordDemo vm filename (duration)"
return 0
mach = argsToMach(ctx, args)
if mach == None:
return 0
filename = args[2]
dur = 10000
if len(args) > 3:
dur = float(args[3])
cmdExistingVm(ctx, mach, 'guestlambda', [lambda ctx, mach, console, args: recordDemo(ctx, console, filename, dur)])
return 0
def playbackDemoCmd(ctx, args):
if (len(args) < 3):
print "usage: playbackDemo vm filename (duration)"
return 0
mach = argsToMach(ctx, args)
if mach == None:
return 0
filename = args[2]
dur = 10000
if len(args) > 3:
dur = float(args[3])
cmdExistingVm(ctx, mach, 'guestlambda', [lambda ctx, mach, console, args: playbackDemo(ctx, console, filename, dur)])
return 0
def pciAddr(ctx, addr):
strg = "%02x:%02x.%d" % (addr >> 8, (addr & 0xff) >> 3, addr & 7)
return colPci(ctx, strg)
def lspci(ctx, console):
assigned = ctx['global'].getArray(console.machine, 'PCIDeviceAssignments')
for a in assigned:
if a.isPhysicalDevice:
print "%s: assigned host device %s guest %s" % (colDev(ctx, a.name), pciAddr(ctx, a.hostAddress), pciAddr(ctx, a.guestAddress))
atts = ctx['global'].getArray(console, 'attachedPCIDevices')
for a in atts:
if a.isPhysicalDevice:
print "%s: physical, guest %s, host %s" % (colDev(ctx, a.name), pciAddr(ctx, a.guestAddress), pciAddr(ctx, a.hostAddress))
else:
print "%s: virtual, guest %s" % (colDev(ctx, a.name), pciAddr(ctx, a.guestAddress))
return
def parsePci(strg):
pcire = re.compile(r'(?P<b>[0-9a-fA-F]+):(?P<d>[0-9a-fA-F]+)\.(?P<f>\d)')
match = pcire.search(strg)
if match is None:
return -1
pdict = match.groupdict()
return ((int(pdict['b'], 16)) << 8) | ((int(pdict['d'], 16)) << 3) | int(pdict['f'])
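# Round-trip example: parsePci('01:02.3') -> (1 << 8) | (2 << 3) | 3 = 0x113
# (bus 1, device 2, function 3), and pciAddr(ctx, 0x113) formats it back as
# '01:02.3'.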
def lspciCmd(ctx, args):
if (len(args) < 2):
print "usage: lspci vm"
return 0
mach = argsToMach(ctx, args)
if mach == None:
return 0
cmdExistingVm(ctx, mach, 'guestlambda', [lambda ctx, mach, console, args: lspci(ctx, console)])
return 0
def attachpciCmd(ctx, args):
if (len(args) < 3):
print "usage: attachpci vm hostpci <guestpci>"
return 0
mach = argsToMach(ctx, args)
if mach == None:
return 0
hostaddr = parsePci(args[2])
if hostaddr == -1:
print "invalid host PCI %s, accepted format 01:02.3 for bus 1, device 2, function 3" % (args[2])
return 0
if (len(args) > 3):
guestaddr = parsePci(args[3])
if guestaddr == -1:
print "invalid guest PCI %s, accepted format 01:02.3 for bus 1, device 2, function 3" % (args[3])
return 0
else:
guestaddr = hostaddr
cmdClosedVm(ctx, mach, lambda ctx, mach, a: mach.attachHostPCIDevice(hostaddr, guestaddr, True))
return 0
def detachpciCmd(ctx, args):
if (len(args) < 3):
print "usage: detachpci vm hostpci"
return 0
mach = argsToMach(ctx, args)
if mach == None:
return 0
hostaddr = parsePci(args[2])
if hostaddr == -1:
print "invalid host PCI %s, accepted format 01:02.3 for bus 1, device 2, function 3" % (args[2])
return 0
cmdClosedVm(ctx, mach, lambda ctx, mach, a: mach.detachHostPCIDevice(hostaddr))
return 0
def gotoCmd(ctx, args):
if (len(args) < 2):
print "usage: goto line"
return 0
line = int(args[1])
ctx['scriptLine'] = line
return 0
aliases = {'s':'start',
'i':'info',
'l':'list',
'h':'help',
'a':'alias',
'q':'quit', 'exit':'quit',
'tg': 'typeGuest',
'v':'verbose'}
commands = {'help':['Prints help information', helpCmd, 0],
'start':['Start virtual machine by name or uuid: start Linux headless', startCmd, 0],
'createVm':['Create virtual machine: createVm macvm MacOS', createVmCmd, 0],
'removeVm':['Remove virtual machine', removeVmCmd, 0],
'pause':['Pause virtual machine', pauseCmd, 0],
'resume':['Resume virtual machine', resumeCmd, 0],
'save':['Save execution state of virtual machine', saveCmd, 0],
'stats':['Stats for virtual machine', statsCmd, 0],
'powerdown':['Power down virtual machine', powerdownCmd, 0],
'powerbutton':['Effectively press power button', powerbuttonCmd, 0],
'list':['Shows known virtual machines', listCmd, 0],
'info':['Shows info on machine', infoCmd, 0],
'ginfo':['Shows info on guest', ginfoCmd, 0],
'gexec':['Executes program in the guest', gexecCmd, 0],
'gcopy':['Copy file to the guest', gcopyCmd, 0],
'gpipe':['Pipe between host and guest', gpipeCmd, 0],
'alias':['Control aliases', aliasCmd, 0],
'verbose':['Toggle verbosity', verboseCmd, 0],
'setvar':['Set VMs variable: setvar Fedora BIOSSettings.ACPIEnabled True', setvarCmd, 0],
'eval':['Evaluate arbitrary Python construction: eval \'for m in getMachines(ctx): print m.name, "has", m.memorySize, "M"\'', evalCmd, 0],
'quit':['Exits', quitCmd, 0],
'host':['Show host information', hostCmd, 0],
'guest':['Execute command for guest: guest Win32 \'console.mouse.putMouseEvent(20, 20, 0, 0, 0)\'', guestCmd, 0],
'monitorGuest':['Monitor what happens with the guest for some time: monitorGuest Win32 10', monitorGuestCmd, 0],
'monitorGuestKbd':['Monitor guest keyboard for some time: monitorGuestKbd Win32 10', monitorGuestKbdCmd, 0],
'monitorGuestMouse':['Monitor guest mouse for some time: monitorGuestMouse Win32 10', monitorGuestMouseCmd, 0],
'monitorGuestMultiTouch':['Monitor guest touch screen for some time: monitorGuestMultiTouch Win32 10', monitorGuestMultiTouchCmd, 0],
'monitorVBox':['Monitor what happens with Virtual Box for some time: monitorVBox 10', monitorVBoxCmd, 0],
'portForward':['Setup permanent port forwarding for a VM, takes adapter number host port and guest port: portForward Win32 0 8080 80', portForwardCmd, 0],
'showLog':['Show log file of the VM, : showLog Win32', showLogCmd, 0],
'findLog':['Show entries matching pattern in log file of the VM, : findLog Win32 PDM|CPUM', findLogCmd, 0],
'findAssert':['Find assert in log file of the VM, : findAssert Win32', findAssertCmd, 0],
'reloadExt':['Reload custom extensions: reloadExt', reloadExtCmd, 0],
'runScript':['Run VBox script: runScript script.vbox', runScriptCmd, 0],
'sleep':['Sleep for specified number of seconds: sleep 3.14159', sleepCmd, 0],
'shell':['Execute external shell command: shell "ls /etc/rc*"', shellCmd, 0],
'exportVm':['Export VM in OVF format: exportVm Win /tmp/win.ovf', exportVMCmd, 0],
'screenshot':['Take VM screenshot to a file: screenshot Win /tmp/win.png 1024 768 0', screenshotCmd, 0],
'teleport':['Teleport VM to another box (see openportal): teleport Win anotherhost:8000 <passwd> <maxDowntime>', teleportCmd, 0],
'typeGuest':['Type arbitrary text in guest: typeGuest Linux "^lls\\n&UP;&BKSP;ess /etc/hosts\\nq^c" 0.7', typeGuestCmd, 0],
'openportal':['Open portal for teleportation of VM from another box (see teleport): openportal Win 8000 <passwd>', openportalCmd, 0],
'closeportal':['Close teleportation portal (see openportal, teleport): closeportal Win', closeportalCmd, 0],
'getextra':['Get extra data, empty key lists all: getextra <vm|global> <key>', getExtraDataCmd, 0],
'setextra':['Set extra data, empty value removes key: setextra <vm|global> <key> <value>', setExtraDataCmd, 0],
'gueststats':['Print available guest stats (only Windows guests with additions so far): gueststats Win32', gueststatsCmd, 0],
'plugcpu':['Add a CPU to a running VM: plugcpu Win 1', plugcpuCmd, 0],
'unplugcpu':['Remove a CPU from a running VM (additions required, Windows cannot unplug): unplugcpu Linux 1', unplugcpuCmd, 0],
'createHdd': ['Create virtual HDD: createHdd 1000 /disk.vdi ', createHddCmd, 0],
'removeHdd': ['Permanently remove virtual HDD: removeHdd /disk.vdi', removeHddCmd, 0],
'registerHdd': ['Register HDD image with VirtualBox instance: registerHdd /disk.vdi', registerHddCmd, 0],
'unregisterHdd': ['Unregister HDD image with VirtualBox instance: unregisterHdd /disk.vdi', unregisterHddCmd, 0],
'attachHdd': ['Attach HDD to the VM: attachHdd win /disk.vdi "IDE Controller" 0:1', attachHddCmd, 0],
'detachHdd': ['Detach HDD from the VM: detachHdd win /disk.vdi', detachHddCmd, 0],
'registerIso': ['Register CD/DVD image with VirtualBox instance: registerIso /os.iso', registerIsoCmd, 0],
'unregisterIso': ['Unregister CD/DVD image with VirtualBox instance: unregisterIso /os.iso', unregisterIsoCmd, 0],
'removeIso': ['Permanently remove CD/DVD image: removeIso /os.iso', removeIsoCmd, 0],
'attachIso': ['Attach CD/DVD to the VM: attachIso win /os.iso "IDE Controller" 0:1', attachIsoCmd, 0],
'detachIso': ['Detach CD/DVD from the VM: detachIso win /os.iso', detachIsoCmd, 0],
'mountIso': ['Mount CD/DVD to the running VM: mountIso win /os.iso "IDE Controller" 0:1', mountIsoCmd, 0],
'unmountIso': ['Unmount CD/DVD from running VM: unmountIso win "IDE Controller" 0:1', unmountIsoCmd, 0],
'attachCtr': ['Attach storage controller to the VM: attachCtr win Ctr0 IDE ICH6', attachCtrCmd, 0],
'detachCtr': ['Detach HDD from the VM: detachCtr win Ctr0', detachCtrCmd, 0],
'attachUsb': ['Attach USB device to the VM (use listUsb to show available devices): attachUsb win uuid', attachUsbCmd, 0],
'detachUsb': ['Detach USB device from the VM: detachUsb win uuid', detachUsbCmd, 0],
'listMedia': ['List media known to this VBox instance', listMediaCmd, 0],
'listUsb': ['List known USB devices', listUsbCmd, 0],
'shareFolder': ['Make host\'s folder visible to guest: shareFolder win /share share writable', shareFolderCmd, 0],
'unshareFolder': ['Remove folder sharing', unshareFolderCmd, 0],
'gui': ['Start GUI frontend', guiCmd, 0],
'colors':['Toggle colors', colorsCmd, 0],
'snapshot':['VM snapshot manipulation, snapshot help for more info', snapshotCmd, 0],
'nat':['NAT (network address translation engine) manipulation, nat help for more info', natCmd, 0],
'nic' : ['Network adapter management', nicCmd, 0],
'prompt' : ['Control shell prompt', promptCmd, 0],
'foreachvm' : ['Perform command for each VM', foreachvmCmd, 0],
'foreach' : ['Generic "for each" construction, using XPath-like notation: foreach //vms/vm[@OSTypeId=\'MacOS\'] "print obj.name"', foreachCmd, 0],
'recordDemo':['Record demo: recordDemo Win32 file.dmo 10', recordDemoCmd, 0],
'playbackDemo':['Playback demo: playbackDemo Win32 file.dmo 10', playbackDemoCmd, 0],
'lspci': ['List PCI devices attached to the VM: lspci Win32', lspciCmd, 0],
'attachpci': ['Attach host PCI device to the VM: attachpci Win32 01:00.0', attachpciCmd, 0],
'detachpci': ['Detach host PCI device from the VM: detachpci Win32 01:00.0', detachpciCmd, 0],
'goto': ['Go to line in script (script-only)', gotoCmd, 0]
}
def runCommandArgs(ctx, args):
c = args[0]
if aliases.get(c, None) != None:
c = aliases[c]
ci = commands.get(c, None)
if ci == None:
print "Unknown command: '%s', type 'help' for list of known commands" % (c)
return 0
if ctx['remote'] and ctx['vb'] is None:
if c not in ['connect', 'reconnect', 'help', 'quit']:
print "First connect to remote server with %s command." % (colored('connect', 'blue'))
return 0
return ci[1](ctx, args)
def runCommand(ctx, cmd):
if len(cmd) == 0: return 0
args = split_no_quotes(cmd)
if len(args) == 0: return 0
return runCommandArgs(ctx, args)
#
# To write your own custom commands to vboxshell, create
# file ~/.VirtualBox/shellext.py with content like
#
# def runTestCmd(ctx, args):
# print "Testy test", ctx['vb']
# return 0
#
# commands = {
# 'test': ['Test help', runTestCmd]
# }
# and issue reloadExt shell command.
# This file also will be read automatically on startup or 'reloadExt'.
#
# Also one can put shell extensions into ~/.VirtualBox/shexts and
# they will also be picked up, so this way one can exchange
# shell extensions easily.
def addExtsFromFile(ctx, cmds, filename):
if not os.path.isfile(filename):
return
d = {}
try:
execfile(filename, d, d)
for (k, v) in d['commands'].items():
if g_fVerbose:
print "customize: adding \"%s\" - %s" % (k, v[0])
cmds[k] = [v[0], v[1], filename]
except:
print "Error loading user extensions from %s" % (filename)
traceback.print_exc()
def checkUserExtensions(ctx, cmds, folder):
folder = str(folder)
name = os.path.join(folder, "shellext.py")
addExtsFromFile(ctx, cmds, name)
# also check 'exts' directory for all files
shextdir = os.path.join(folder, "shexts")
if not os.path.isdir(shextdir):
return
exts = os.listdir(shextdir)
for e in exts:
# not editor temporary files, please.
if e.endswith('.py'):
addExtsFromFile(ctx, cmds, os.path.join(shextdir, e))
def getHomeFolder(ctx):
if ctx['remote'] or ctx['vb'] is None:
if 'VBOX_USER_HOME' in os.environ:
return os.path.join(os.environ['VBOX_USER_HOME'])
return os.path.join(os.path.expanduser("~"), ".VirtualBox")
else:
return ctx['vb'].homeFolder
def interpret(ctx):
if ctx['remote']:
commands['connect'] = ["Connect to remote VBox instance: connect http://server:18083 user password", connectCmd, 0]
commands['disconnect'] = ["Disconnect from remote VBox instance", disconnectCmd, 0]
commands['reconnect'] = ["Reconnect to remote VBox instance", reconnectCmd, 0]
ctx['wsinfo'] = ["http://localhost:18083", "", ""]
vbox = ctx['vb']
if vbox is not None:
try:
print "Running VirtualBox version %s" % (vbox.version)
except Exception, e:
printErr(ctx, e)
if g_fVerbose:
traceback.print_exc()
ctx['perf'] = None # ctx['global'].getPerfCollector(vbox)
else:
ctx['perf'] = None
home = getHomeFolder(ctx)
checkUserExtensions(ctx, commands, home)
if platform.system() in ['Windows', 'Microsoft']:
global g_fHasColors
g_fHasColors = False
hist_file = os.path.join(home, ".vboxshellhistory")
autoCompletion(commands, ctx)
if g_fHasReadline and os.path.exists(hist_file):
readline.read_history_file(hist_file)
    # To be able to print actual host information, we collect data for the
    # last 150 secs at most (sample every 10 secs, keep up to 15 samples).
if ctx['perf']:
try:
ctx['perf'].setup(['*'], [vbox.host], 10, 15)
except:
pass
cmds = []
if g_sCmd is not None:
cmds = g_sCmd.split(';')
it = cmds.__iter__()
while True:
try:
if g_fBatchMode:
cmd = 'runScript %s'% (g_sScriptFile)
elif g_sCmd is not None:
cmd = it.next()
else:
cmd = raw_input(ctx['prompt'])
done = runCommand(ctx, cmd)
if done != 0: break
if g_fBatchMode:
break
except KeyboardInterrupt:
print '====== You can type quit or q to leave'
except StopIteration:
break
except EOFError:
break
except Exception, e:
printErr(ctx, e)
if g_fVerbose:
traceback.print_exc()
ctx['global'].waitForEvents(0)
try:
# There is no need to disable metric collection. This is just an example.
        if ctx['perf']:
ctx['perf'].disable(['*'], [vbox.host])
except:
pass
if g_fHasReadline:
readline.write_history_file(hist_file)
def runCommandCb(ctx, cmd, args):
args.insert(0, cmd)
return runCommandArgs(ctx, args)
def runGuestCommandCb(ctx, uuid, guestLambda, args):
mach = machById(ctx, uuid)
if mach == None:
return 0
args.insert(0, guestLambda)
cmdExistingVm(ctx, mach, 'guestlambda', args)
return 0
def main(argv):
#
# Parse command line arguments.
#
parse = OptionParser()
parse.add_option("-v", "--verbose", dest="verbose", action="store_true", default=False, help = "switch on verbose")
parse.add_option("-a", "--autopath", dest="autopath", action="store_true", default=False, help = "switch on autopath")
parse.add_option("-w", "--webservice", dest="style", action="store_const", const="WEBSERVICE", help = "connect to webservice")
parse.add_option("-b", "--batch", dest="batch_file", help = "script file to execute")
parse.add_option("-c", dest="command_line", help = "command sequence to execute")
parse.add_option("-o", dest="opt_line", help = "option line")
global g_fVerbose, g_sScriptFile, g_fBatchMode, g_fHasColors, g_fHasReadline, g_sCmd
(options, args) = parse.parse_args()
g_fVerbose = options.verbose
style = options.style
if options.batch_file is not None:
g_fBatchMode = True
g_fHasColors = False
g_fHasReadline = False
g_sScriptFile = options.batch_file
if options.command_line is not None:
g_fHasColors = False
g_fHasReadline = False
g_sCmd = options.command_line
params = None
if options.opt_line is not None:
params = {}
strparams = options.opt_line
strparamlist = strparams.split(',')
for strparam in strparamlist:
(key, value) = strparam.split('=')
params[key] = value
if options.autopath:
asLocations = [ os.getcwd(), ];
try: sScriptDir = os.path.dirname(os.path.abspath(__file__));
except: pass; # In case __file__ isn't there.
else:
if platform.system() in [ 'SunOS', ]:
asLocations.append(os.path.join(sScriptDir, 'amd64'));
asLocations.append(sScriptDir);
sPath = os.environ.get("VBOX_PROGRAM_PATH")
if sPath is None:
for sCurLoc in asLocations:
if os.path.isfile(os.path.join(sCurLoc, "VirtualBox")) \
or os.path.isfile(os.path.join(sCurLoc, "VirtualBox.exe")):
print "Autodetected VBOX_PROGRAM_PATH as", sCurLoc
os.environ["VBOX_PROGRAM_PATH"] = sCurLoc
sPath = sCurLoc
break;
if sPath:
sys.path.append(os.path.join(sPath, "sdk", "installer"))
sPath = os.environ.get("VBOX_SDK_PATH")
if sPath is None:
for sCurLoc in asLocations:
if os.path.isfile(os.path.join(sCurLoc, "sdk", "bindings", "VirtualBox.xidl")):
print "Autodetected VBOX_SDK_PATH as", sCurLoc
os.environ["VBOX_SDK_PATH"] = sCurLoc
sPath = sCurLoc;
break;
if sPath:
            sTmp = os.path.join(sPath, 'sdk', 'bindings', 'xpcom', 'python');
if os.path.isdir(sTmp):
sys.path.append(sTmp);
del sTmp;
del sPath, asLocations;
#
    # Set up the shell interpreter context and run the interpreter.
#
from vboxapi import VirtualBoxManager
oVBoxMgr = VirtualBoxManager(style, params)
ctx = {
'global': oVBoxMgr,
'vb': oVBoxMgr.vbox,
'const': oVBoxMgr.constants,
'remote': oVBoxMgr.remote,
'type': oVBoxMgr.type,
'run': lambda cmd, args: runCommandCb(ctx, cmd, args),
'guestlambda': lambda uuid, guestLambda, args: runGuestCommandCb(ctx, uuid, guestLambda, args),
'machById': lambda uuid: machById(ctx, uuid),
'argsToMach': lambda args: argsToMach(ctx, args),
'progressBar': lambda p: progressBar(ctx, p),
'typeInGuest': typeInGuest,
'_machlist': None,
'prompt': g_sPrompt,
'scriptLine': 0,
'interrupt': False,
}
interpret(ctx)
oVBoxMgr.deinit()
del oVBoxMgr
if __name__ == '__main__':
main(sys.argv)
| ruibarreira/linuxtrail | usr/lib/virtualbox/vboxshell.py | Python | gpl-3.0 | 120,819 |
from __future__ import absolute_import
import sys
from future.utils import PY2, PY26
__future_module__ = True
from collections import *
if PY2:
from UserDict import UserDict
from UserList import UserList
from UserString import UserString
if PY26:
from future.backports.misc import OrderedDict, Counter
if sys.version_info < (3, 3):
from future.backports.misc import ChainMap, _count_elements
| snakeleon/YouCompleteMe-x86 | third_party/ycmd/third_party/python-future/src/future/moves/collections.py | Python | gpl-3.0 | 417 |
import pytest
import os
import matplotlib
# Disable plotting
matplotlib.use("Template")
class ThresholdTestData:
def __init__(self):
"""Initialize simple variables."""
# Test data directory
self.datadir = os.path.join(os.path.dirname(os.path.abspath(__file__)), "..", "..", "testdata")
# RGB image
self.small_rgb_img = os.path.join(self.datadir, "setaria_small_plant_rgb.png")
# Gray image
self.small_gray_img = os.path.join(self.datadir, "setaria_small_plant_gray.png")
@pytest.fixture(scope="session")
def threshold_test_data():
"""Test data object for the PlantCV threshold submodule."""
return ThresholdTestData()
| danforthcenter/plantcv | tests/plantcv/threshold/conftest.py | Python | mit | 691 |
from JumpScale import j
import inspect
class CuisineBase:
def __init__(self, executor, cuisine):
self._p_cache = None
self.__classname = None
self._executor = executor
self._cuisine = cuisine
        # also expose cuisine without the underscore; easier to access directly
self.cuisine = cuisine
    def done(self, name, set=False, reset=False):
        """
        returns whether a certain step is already done; with set/reset the
        done-marker is stored or cleared instead
        """
        if j.core.db != None:
            if set == False and reset == False:
                res = j.core.db.hexists(self._donecat + "." + self._id, name)
                if res:
                    print("will not install:%s" % (self._donecat + "." + self._id + "." + name))
                return res
            elif set:
                j.core.db.hset(self._donecat + "." + self._id, name, "1")
            elif reset:
                j.core.db.delete(self._donecat + "." + self._id)
        else:
            return False
@property
def _classname(self):
if self.__classname is None:
self.__classname = str(self.__class__).split(".")[-1].strip("'>")
return self.__classname
def _reset(self):
j.data.cache.reset(self._id)
@property
def _id(self):
return self._executor.id
@property
def _cache(self):
if self._p_cache is None:
self._p_cache = j.data.cache.get(self._id, self._classname, keepInMem=False, reset=False)
return self._p_cache
def __str__(self):
return "cuisine:%s:%s" % (getattr(self._executor, 'addr', 'local'), getattr(self._executor, 'port', ''))
__repr__ = __str__
class CuisineApp(CuisineBase):
NAME = None
VERSION = None
def isInstalled(self):
"""
Checks if a package is installed or not
You can ovveride it to use another way for checking
"""
return self._cuisine.core.command_check(self.NAME)
def install(self):
if not self.isInstalled():
raise NotImplementedError()
class CuisineBaseLoader:
def __init__(self, executor, cuisine):
self._executor = executor
self._cuisine = cuisine
myClassName = str(self.__class__).split(".")[-1].split("'")[0]
localdir = j.sal.fs.getDirName(inspect.getsourcefile(self.__class__))
classes = [j.sal.fs.getBaseName(item)[7:-3] for item in j.sal.fs.listFilesInDir(localdir, filter="Cuisine*")]
for className in classes:
# import the class
exec("from JumpScale.tools.cuisine.%s.Cuisine%s import *" % (myClassName, className))
# attach the class to this class
do = "self.%s=Cuisine%s(self._executor,self._cuisine)" % (className.lower(), className)
# print(do)
exec(do)
class JSCuisineFactory:
def __init__(self):
self.__jslocation__ = "j.tools.cuisine"
self._local = None
self._cuisines_instance = {}
self.logger = j.logger.get("j.tools.cuisine")
def _getBaseClass(self):
return CuisineBase
def _getBaseAppClass(self):
return CuisineApp
def _getBaseClassLoader(self):
return CuisineBaseLoader
def reset(self, cuisine):
"""
reset remove the cuisine instance passed in argument from the cache.
"""
if cuisine.executor.id in self._cuisines_instance:
del self._cuisines_instance[cuisine.executor.id]
@property
def local(self):
if self._local is None:
from JumpScale.tools.cuisine.JSCuisine import JSCuisine
self._local = JSCuisine(j.tools.executor.getLocal())
return self._local
def _generate_pubkey(self):
if not j.do.checkSSHAgentAvailable():
j.do._loadSSHAgent()
rc, out = j.sal.process.execute("ssh-add -l")
keys = []
for line in out.split("\n"):
try:
# TODO: ugly needs to be done better
item = line.split(" ", 2)[2]
keyname = item.split("(", 1)[0].strip()
keys.append(keyname)
except:
pass
key = j.tools.console.askChoice(keys, "please select key")
# key = j.sal.fs.getBaseName(key)
return j.sal.fs.fileGetContents(key + ".pub")
def get_pubkey(self, keyname=''):
if keyname == '':
return self._generate_pubkey()
key = j.do.getSSHKeyPathFromAgent(keyname)
return j.sal.fs.fileGetContents(key + '.pub')
def _get_ssh_executor(self, addr, port, login, passphrase, passwd):
if not passwd and passphrase is not None:
return j.tools.executor.getSSHBased(addr=addr,
port=port,
login=login,
passphrase=passphrase)
else:
passwd = passwd if passwd else j.tools.console.askPassword("please specify root passwd", False)
return j.tools.executor.getSSHBased(addr=addr,
port=port,
login=login,
passwd=passwd)
# UNUSED METHOD
def authorizeKey(self, addr='localhost:22', login="root", passwd="", keyname="", pubkey="", passphrase=None):
"""
will try to login if not ok then will try to push key with passwd
will push local key to remote, if not specified will list & you can select
if passwd not specified will ask
@param pubkey is the pub key to use (is content of key), if this is specified then keyname not used & ssh-agent neither
"""
if addr.find(":") != -1:
addr, port = addr.split(":", 1)
addr = addr.strip()
port = int(port.strip())
else:
port = 22
j.clients.ssh.cache = {} # Empty the cache
_pubkey = pubkey if pubkey else self.get_pubkey(keyname=keyname)
executor = self._get_ssh_executor(addr, port, login, passphrase, passwd)
executor.cuisine.ssh.authorize(login, _pubkey)
executor.cuisine.core.run("chmod -R 700 /root/.ssh")
def get(self, executor=None, usecache=True):
"""
example:
executor=j.tools.executor.getSSHBased(addr='localhost', port=22,login="root",passwd="1234",pushkey="ovh_install")
cuisine=j.tools.cuisine.get(executor)
executor can also be a string like: 192.168.5.5:9022
        if no executor is given, the local one is used
"""
from JumpScale.tools.cuisine.JSCuisine import JSCuisine
executor = j.tools.executor.get(executor)
if usecache and executor.id in self._cuisines_instance:
return self._cuisines_instance[executor.id]
cuisine = JSCuisine(executor)
self._cuisines_instance[executor.id] = cuisine
return self._cuisines_instance[executor.id]
def getFromId(self, id):
executor = j.tools.executor.get(id)
return self.get(executor)
| Jumpscale/jumpscale_core8 | lib/JumpScale/tools/cuisine/CuisineFactory.py | Python | apache-2.0 | 7,076 |
from operator import itemgetter
from itertools import groupby
def groupby2(cols, lst, lev=0):
    """Recursively group `lst` by each column index in `cols` and render
    the hierarchy as an indented tree string."""
    if not cols:
        return str(list(lst))
    keyfun = itemgetter(cols[0])
    srted = sorted(list(lst), key=keyfun)
    output = ""
    for key, grp in groupby(srted, key=keyfun):
        output += "\n" + " " * lev + "%10s:" % key
        output += groupby2(cols[1:], grp, lev + 1)
    return output
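# Minimal usage sketch with made-up rows: group by column 0, then column 1.
if __name__ == '__main__':
    rows = [('a', 1, 'x'), ('a', 2, 'y'), ('b', 1, 'z')]
    print(groupby2([0, 1], rows))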
| ActiveState/code | recipes/Python/535129_Groupby_hierarchy_tree/recipe-535129.py | Python | mit | 396 |
# Authors: Gilles Louppe, Mathieu Blondel, Maheshakya Wijewardena
# License: BSD 3 clause
import numpy as np
from .base import SelectorMixin
from ..base import TransformerMixin, BaseEstimator, clone
from ..externals import six
from ..utils import safe_mask, check_array, deprecated
from ..utils.validation import check_is_fitted
from ..exceptions import NotFittedError
def _get_feature_importances(estimator):
"""Retrieve or aggregate feature importances from estimator"""
if hasattr(estimator, "feature_importances_"):
importances = estimator.feature_importances_
elif hasattr(estimator, "coef_"):
if estimator.coef_.ndim == 1:
importances = np.abs(estimator.coef_)
else:
importances = np.sum(np.abs(estimator.coef_), axis=0)
else:
raise ValueError(
"The underlying estimator %s has no `coef_` or "
"`feature_importances_` attribute. Either pass a fitted estimator"
" to SelectFromModel or call fit before calling transform."
% estimator.__class__.__name__)
return importances
def _calculate_threshold(estimator, importances, threshold):
"""Interpret the threshold value"""
if threshold is None:
# determine default from estimator
est_name = estimator.__class__.__name__
if ((hasattr(estimator, "penalty") and estimator.penalty == "l1") or
"Lasso" in est_name):
# the natural default threshold is 0 when l1 penalty was used
threshold = 1e-5
else:
threshold = "mean"
if isinstance(threshold, six.string_types):
if "*" in threshold:
scale, reference = threshold.split("*")
scale = float(scale.strip())
reference = reference.strip()
if reference == "median":
reference = np.median(importances)
elif reference == "mean":
reference = np.mean(importances)
else:
raise ValueError("Unknown reference: " + reference)
threshold = scale * reference
elif threshold == "median":
threshold = np.median(importances)
elif threshold == "mean":
threshold = np.mean(importances)
else:
raise ValueError("Expected threshold='mean' or threshold='median' "
"got %s" % threshold)
else:
threshold = float(threshold)
return threshold
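# For instance, with importances = [0.1, 0.2, 0.3], threshold="1.25*mean"
# resolves to 1.25 * 0.2 = 0.25, threshold="median" resolves to 0.2, and
# only features whose importance reaches the resolved value are kept.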
class _LearntSelectorMixin(TransformerMixin):
# Note because of the extra threshold parameter in transform, this does
# not naturally extend from SelectorMixin
"""Transformer mixin selecting features based on importance weights.
This implementation can be mixin on any estimator that exposes a
``feature_importances_`` or ``coef_`` attribute to evaluate the relative
importance of individual features for feature selection.
"""
@deprecated('Support to use estimators as feature selectors will be '
'removed in version 0.19. Use SelectFromModel instead.')
def transform(self, X, threshold=None):
"""Reduce X to its most important features.
Uses ``coef_`` or ``feature_importances_`` to determine the most
important features. For models with a ``coef_`` for each class, the
absolute sum over the classes is used.
Parameters
----------
X : array or scipy sparse matrix of shape [n_samples, n_features]
The input samples.
threshold : string, float or None, optional (default=None)
The threshold value to use for feature selection. Features whose
importance is greater or equal are kept while the others are
discarded. If "median" (resp. "mean"), then the threshold value is
the median (resp. the mean) of the feature importances. A scaling
factor (e.g., "1.25*mean") may also be used. If None and if
available, the object attribute ``threshold`` is used. Otherwise,
"mean" is used by default.
Returns
-------
X_r : array of shape [n_samples, n_selected_features]
The input samples with only the selected features.
"""
check_is_fitted(self, ('coef_', 'feature_importances_'),
all_or_any=any)
X = check_array(X, 'csc')
importances = _get_feature_importances(self)
if len(importances) != X.shape[1]:
raise ValueError("X has different number of features than"
" during model fitting.")
if threshold is None:
threshold = getattr(self, 'threshold', None)
threshold = _calculate_threshold(self, importances, threshold)
# Selection
try:
mask = importances >= threshold
except TypeError:
# Fails in Python 3.x when threshold is str;
# result is array of True
raise ValueError("Invalid threshold: all features are discarded.")
if np.any(mask):
mask = safe_mask(X, mask)
return X[:, mask]
else:
raise ValueError("Invalid threshold: all features are discarded.")
class SelectFromModel(BaseEstimator, SelectorMixin):
"""Meta-transformer for selecting features based on importance weights.
.. versionadded:: 0.17
Parameters
----------
estimator : object
The base estimator from which the transformer is built.
This can be both a fitted (if ``prefit`` is set to True)
or a non-fitted estimator.
threshold : string, float, optional default None
The threshold value to use for feature selection. Features whose
importance is greater or equal are kept while the others are
discarded. If "median" (resp. "mean"), then the ``threshold`` value is
the median (resp. the mean) of the feature importances. A scaling
factor (e.g., "1.25*mean") may also be used. If None and if the
estimator has a parameter penalty set to l1, either explicitly
        or implicitly (e.g., Lasso), the threshold used is 1e-5.
Otherwise, "mean" is used by default.
prefit : bool, default False
Whether a prefit model is expected to be passed into the constructor
directly or not. If True, ``transform`` must be called directly
and SelectFromModel cannot be used with ``cross_val_score``,
``GridSearchCV`` and similar utilities that clone the estimator.
Otherwise train the model using ``fit`` and then ``transform`` to do
feature selection.
Attributes
----------
`estimator_`: an estimator
The base estimator from which the transformer is built.
This is stored only when a non-fitted estimator is passed to the
``SelectFromModel``, i.e when prefit is False.
`threshold_`: float
The threshold value used for feature selection.
"""
def __init__(self, estimator, threshold=None, prefit=False):
self.estimator = estimator
self.threshold = threshold
self.prefit = prefit
def _get_support_mask(self):
# SelectFromModel can directly call on transform.
if self.prefit:
estimator = self.estimator
elif hasattr(self, 'estimator_'):
estimator = self.estimator_
else:
raise ValueError(
'Either fit the model before transform or set "prefit=True"'
' while passing the fitted estimator to the constructor.')
scores = _get_feature_importances(estimator)
self.threshold_ = _calculate_threshold(estimator, scores,
self.threshold)
return scores >= self.threshold_
def fit(self, X, y=None, **fit_params):
"""Fit the SelectFromModel meta-transformer.
Parameters
----------
X : array-like of shape (n_samples, n_features)
The training input samples.
y : array-like, shape (n_samples,)
The target values (integers that correspond to classes in
classification, real numbers in regression).
**fit_params : Other estimator specific parameters
Returns
-------
self : object
Returns self.
"""
if self.prefit:
raise NotFittedError(
"Since 'prefit=True', call transform directly")
if not hasattr(self, "estimator_"):
self.estimator_ = clone(self.estimator)
self.estimator_.fit(X, y, **fit_params)
return self
def partial_fit(self, X, y=None, **fit_params):
"""Fit the SelectFromModel meta-transformer only once.
Parameters
----------
X : array-like of shape (n_samples, n_features)
The training input samples.
y : array-like, shape (n_samples,)
The target values (integers that correspond to classes in
classification, real numbers in regression).
**fit_params : Other estimator specific parameters
Returns
-------
self : object
Returns self.
"""
if self.prefit:
raise NotFittedError(
"Since 'prefit=True', call transform directly")
if not hasattr(self, "estimator_"):
self.estimator_ = clone(self.estimator)
self.estimator_.partial_fit(X, y, **fit_params)
return self
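# A minimal usage sketch (illustrative, not part of the module): select
# features with an l1-penalized model. The dataset and estimator choices are
# assumptions; any linear model exposing coef_ would do.
#
#     from sklearn.datasets import load_iris
#     from sklearn.linear_model import LogisticRegression
#
#     iris = load_iris()
#     X, y = iris.data, iris.target
#     selector = SelectFromModel(LogisticRegression(penalty="l1"))
#     X_reduced = selector.fit(X, y).transform(X)
#     # threshold_ is computed lazily in _get_support_mask
#     print(selector.threshold_, X_reduced.shape)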
| DSLituiev/scikit-learn | sklearn/feature_selection/from_model.py | Python | bsd-3-clause | 9,544 |
# Create your views here.
from apps.fumoufeed.models import *
from django.views.generic import View
from django.http import HttpResponse
import re
from django.views.decorators.csrf import csrf_exempt
def JsonResponse(data, status=200):
import json
data = json.dumps(data)
    return HttpResponse(data, content_type="application/json", status=status)
class PeopleApi(View):
@csrf_exempt
def dispatch(self, *args, **kwargs):
return super(PeopleApi, self).dispatch(*args, **kwargs)
def get(self, *args, **kwargs):
fuid = self.request.path.split('/')[-1]
people = People.objects.get(fuid=fuid)
return JsonResponse({'fuid': people.fuid, 'title': people.title, 'name': people.name})
def post(self, *args, **kwargs):
fuid = self.request.REQUEST.get('fuid')
title = self.request.REQUEST.get('title')
name = self.request.REQUEST.get('name')
people = People(fuid=fuid, title=title, name=name)
people.save()
return JsonResponse({'success': True})
def put( self, *args, **kwargs):
fuid = self.request.path.split('/')[-1]
title = self.request.REQUEST.get('title')
name = self.request.REQUEST.get('name')
people = People.objects.get(fuid=fuid)
people.title = title
people.name = name
people.save()
return JsonResponse({'success': True})
class PostApi(View):
@csrf_exempt
def dispatch(self, *args, **kwargs):
return super(PostApi, self).dispatch(*args, **kwargs)
def post( self, *args, **kwargs):
fuid = self.request.REQUEST.get('fuid')
fpid = self.request.REQUEST.get('fpid')
        try:
            people = People.objects.get(fuid=fuid)
        except People.DoesNotExist:
            people = People(fuid=fuid)
            people.save()
        try:
            post = Post.objects.get(fpid=fpid)
        except Post.DoesNotExist:
            post = Post(fpid=fpid, author=people)
            post.save()
return JsonResponse({'success': True})
def delete( self, *args, **kwargs):
fpid = self.request.path.split('/')[-1]
post = Post.objects.get(fpid=fpid)
post.live = False
post.save()
return JsonResponse({'success': True})
    @classmethod
    def list(cls, request, *args, **kwargs):
ps = Post.objects.filter(live=True).order_by('priority', 'create_time')
data = ps.values('author__fuid', 'fpid', 'author__title', 'author__name')
return JsonResponse(list(data))
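# Hedged wiring sketch (names and URL patterns are illustrative, not taken
# from this project's urls.py): the class-based views above would typically
# be routed like so on the Django versions this code targets. Note the views
# parse the trailing id from request.path, so the capture group names are
# unused:
#
#     from django.conf.urls import url
#     from apps.fumoufeed.views import PeopleApi, PostApi
#
#     urlpatterns = [
#         url(r'^api/people/(?P<fuid>[^/]+)?$', PeopleApi.as_view()),
#         url(r'^api/posts/(?P<fpid>[^/]+)?$', PostApi.as_view()),
#     ]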
| littleq0903/fumoufeed | fumoufeed/apps/fumoufeed/views.py | Python | mit | 2,510 |
#!/usr/bin/env python2
# -*- coding: utf8 -*-
"""Pipeline for Goodman High Troughput Spectrograph spectra Extraction.
This program finds reduced images, i.e. trimmed, bias subtracted, flat fielded,
etc. that match the ``<pattern>`` in the source folder, then classify them in
two groups: Science or Lamps. For science images, finds the spectrum or spectra
and traces it doing some fit.
Simon Torres 2016-06-28
"""
# TODO (simon): Change all astropy.io.fits to astropy.CCDData.read
from __future__ import (absolute_import, division, print_function,
unicode_literals)
from .wavelength import WavelengthCalibration
from ..core import (classify_spectroscopic_data,
search_comp_group,
add_wcs_keys,
identify_targets,
trace_targets,
extraction,
record_trace_information,
save_extracted)
from ..core import (NoMatchFound,
NoTargetException,
ReferenceData)
import sys
import os
import textwrap
import argparse
import astropy.units as u
import logging
from ccdproc import CCDData
# import matplotlib
# matplotlib.use('Qt5Agg')
import matplotlib.pyplot as plt
import warnings
SHOW_PLOTS = True
warnings.filterwarnings('ignore')
__version__ = __import__('goodman_pipeline').__version__
def get_args(arguments=None):
"""Handles the argparse library and returns the arguments
The list of arguments can be found with running ``redspec -h`` or
``redspec --help``.
Notes:
        The full list of arguments is not listed here as they may evolve, in
        which case it is impossible to keep this docstring up to date.
Returns:
An object that contains all the variables parsed through the argument
system
"""
# getLogger without __name__ so that we get the root logger.
log = logging.getLogger()
parser = argparse.ArgumentParser(
description="Extracts goodman spectra and does automatic wavelength "
"calibration.\nPipeline Version: {:s}".format(__version__))
parser.add_argument('--data-path',
action='store',
default=os.getcwd(),
type=str,
metavar='<Source Path>',
dest='source',
help='Path for location of raw data. Default <./>')
parser.add_argument('--proc-path',
action='store',
default=os.getcwd(),
type=str,
metavar='<Destination Path>',
dest='destination',
help='Path for destination of processed data. Default '
'<./>')
parser.add_argument('--search-pattern',
action='store',
default='cfzsto',
type=str,
metavar='<Search Pattern>',
dest='pattern',
help="Pattern for matching the goodman's reduced data.")
parser.add_argument('--output-prefix',
action='store',
default='w',
metavar='<Out Prefix>',
dest='output_prefix',
help="Prefix to add to calibrated spectrum.")
parser.add_argument('--extraction',
action='store',
default='fractional',
type=str,
metavar='<Extraction Type>',
dest='extraction_type',
choices=['fractional', 'optimal'],
help="Method to perform extraction. 'fractional' or "
"'optimal'. Only fractional pixel extraction is "
"implemented. Default 'fractional'.")
parser.add_argument('--fit-targets-with',
action='store',
default='moffat',
type=str,
dest='target_fit_model',
choices=['moffat', 'gaussian'],
help="Model to fit peaks found on spatial profile "
"while searching for spectroscopic targets.")
parser.add_argument('--reference-files',
action='store',
default='data/ref_comp/',
metavar='<Reference Dir>',
dest='reference_dir',
help="Directory of Reference files location")
parser.add_argument('--debug',
action='store_true',
dest='debug_mode',
help="Debugging Mode")
parser.add_argument('--debug-plot',
action='store_true',
dest='debug_with_plots',
help="Debugging show debugging plots")
parser.add_argument('--max-targets',
action='store',
dest='max_n_targets',
metavar='<max targets>',
type=int,
default=3,
help="Maximum number of targets to be found in a "
"single image. Default 3")
parser.add_argument('--background-threshold',
action='store',
dest='background_threshold',
type=int,
default=3,
help="Multiplier for background level used to "
"discriminate usable targets. Default 3 times "
"background level")
parser.add_argument('--save-plots',
action='store_true',
dest='save_plots',
help="Save all plots in a directory")
# parser.add_argument('--combine',
# action='store_true',
# dest='combine',
# help="Combine compatible data")
parser.add_argument('--plot-results',
action='store_true',
dest='plot_results',
help="Show wavelength calibrated spectrum at the end.")
parser.add_argument('--version',
action='store_true',
dest='show_version',
help="Show current version of the Goodman Pipeline")
args = parser.parse_args(args=arguments)
try:
ref_full_path = os.path.join(
os.path.dirname(sys.modules['goodman.pipeline'].__file__),
args.reference_dir)
except KeyError as error:
log.debug("KeyError {:s}".format(str(error)))
ref_full_path = os.path.join(
os.path.dirname(sys.modules['goodman_pipeline'].__file__),
args.reference_dir)
if not os.path.isdir(ref_full_path):
log.info("Reference files directory doesn't exist.")
try:
os.path.os.makedirs(ref_full_path)
log.info('Reference Files Directory is: %s', ref_full_path)
args.reference_dir = ref_full_path
except OSError as err:
log.error(err)
else:
args.reference_dir = ref_full_path
if not os.path.isabs(args.source):
args.source = os.path.join(os.getcwd(), args.source)
if not os.path.isdir(args.source):
log.error("Source Directory {:s} doesn't exist.".format(args.source))
if 'test' not in parser.prog:
parser.print_help()
parser.exit(0, "Leaving the Program.")
if not os.path.isabs(args.destination):
args.destination = os.path.join(os.getcwd(), args.destination)
if not os.path.isdir(args.destination):
log.error("Destination folder doesn't exist.")
try:
os.path.os.makedirs(args.destination)
log.info('Destination folder created: %s', args.destination)
except OSError as err:
log.error(err)
parser.print_help()
parser.exit(0, "Leaving the Program.")
return args
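# Hedged usage sketch (the directories are illustrative and must exist,
# since get_args validates them): passing an explicit argument list avoids
# touching sys.argv, which is convenient in tests:
#
#     args = get_args(arguments=['--data-path', '/data/red',
#                                '--proc-path', '/data/red',
#                                '--debug'])
#     assert args.debug_mode is True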
class MainApp(object):
"""Defines and initialize all important variables for processing the data
The MainApp class controls the way the night is organized for further
processing. It also sets the appropriate parameters that will allow for a
smooth working in all the other modules.
"""
def __init__(self):
"""Init method for MainApp class
This method initializes the arguments for the class, if they are not
provided it will get them.
"""
self.log = logging.getLogger(__name__)
self.args = None
self.wavelength_solution_obj = None
self.wavelength_calibration = None
self.reference = None
self._pipeline_version = __version__
def __call__(self, args=None):
"""Call method for the MainApp class
        This method calls the higher-level functions in order to do the
spectroscopic data reduction.
Args:
args (object): argparse.Namespace instance that contains all the
arguments.
"""
if args is None:
self.args = get_args()
else:
self.args = args
if self.args.show_version:
print("Goodman HTS Pipeline {:s}".format(__version__))
sys.exit(0)
self.log.debug("Initializing reference data locator.")
self.reference = ReferenceData(reference_dir=self.args.reference_dir)
# data_container instance of NightDataContainer defined in core
self.log.debug("Calling data classification procedure.")
data_container = classify_spectroscopic_data(
path=self.args.source,
search_pattern=self.args.pattern)
if data_container.is_empty:
self.log.debug("Received empty data container.")
sys.exit("Unable to find or classify data.")
else:
self.log.debug("Received non-empty data container.")
self.log.debug("Calling _run method for MainApp")
self._run(data_container=data_container,
extraction_type=self.args.extraction_type,
target_fit_model=self.args.target_fit_model,
background_threshold=self.args.background_threshold)
self.log.info("END")
def _run(self,
data_container,
extraction_type,
target_fit_model,
background_threshold):
assert data_container.is_empty is False
assert any(extraction_type == option for option in ['fractional',
'optimal'])
# log = logging.getLogger(__name__)
full_path = data_container.full_path
for sub_container in [groups for groups in [
data_container.spec_groups,
data_container.object_groups]
if groups is not None]:
for group in sub_container:
# instantiate WavelengthCalibration here for each group.
self.wavelength_calibration = WavelengthCalibration()
# this will contain only obstype == OBJECT
object_group = group[((group.obstype == 'OBJECT') |
(group.obstype == 'SPECTRUM'))]
obj_groupby = object_group.groupby(['object']).size(
).reset_index().rename(columns={0: 'count'})
self.log.info("Processing Science Target: "
"{:s} with {:d} files."
"".format(obj_groupby.iloc[0]['object'],
obj_groupby.iloc[0]['count']))
# this has to be initialized here
comp_group = None
comp_ccd_list = []
if any([value in ['COMP', 'ARC'] for value in group.obstype.unique()]):
self.log.debug('Group has comparison lamps')
comp_group = group[((group.obstype == 'COMP') |
(group.obstype == 'ARC'))]
comp_group = self.reference.check_comp_group(comp_group)
if comp_group is None:
self.log.debug('Group does not have comparison lamps')
if data_container.comp_groups is not None:
self.log.debug('There are comparison lamp group '
'candidates')
try:
comp_group = search_comp_group(
object_group=object_group,
comp_groups=data_container.comp_groups,
reference_data=self.reference)
self.log.warning(
'This comparison lamp might not be optimal '
'if you are doing radial velocity studies')
except NoMatchFound:
self.log.error(
'It was not possible to find a comparison '
'group')
else:
self.log.warning('Data will be extracted but not '
'calibrated')
_combine = True
if len(object_group.file.tolist()) > 1 and _combine:
self.log.debug("This can be combined")
for spec_file in object_group.file.tolist():
self.log.info('Processing Science File: {:s}'.format(
spec_file))
file_path = os.path.join(full_path, spec_file)
ccd = CCDData.read(file_path, unit=u.adu)
ccd.header.set('GSP_PNAM', value=spec_file)
ccd = add_wcs_keys(ccd=ccd)
# ccd.header['GSP_FNAM'] = spec_file
if comp_group is not None and comp_ccd_list == []:
for comp_file in comp_group.file.tolist():
comp_path = os.path.join(full_path, comp_file)
comp_ccd = CCDData.read(comp_path, unit=u.adu)
comp_ccd = add_wcs_keys(ccd=comp_ccd)
comp_ccd.header.set('GSP_PNAM', value=comp_file)
comp_ccd_list.append(comp_ccd)
else:
self.log.debug(
                            'Comp Group is None or comp list already exists')
# identify
self.log.debug("Calling procedure for target "
"identification.")
target_list = identify_targets(
ccd=ccd,
fit_model=target_fit_model,
background_threshold=background_threshold,
nfind=self.args.max_n_targets,
plots=self.args.debug_with_plots)
# trace
if len(target_list) > 0:
self.log.debug("Calling procedure for tracing target.")
trace_list = trace_targets(ccd=ccd,
target_list=target_list,
sampling_step=5,
pol_deg=2,
plots=self.args.debug_with_plots)
else:
self.log.error("The list of identified targets is "
"empty for {}.".format(spec_file))
continue
# if len(trace_list) > 0:
extracted_target_and_lamps = []
for single_trace, single_profile, trace_info in trace_list:
if single_profile.__class__.name == 'Gaussian1D':
single_profile_center = single_profile.mean.value
elif single_profile.__class__.name == 'Moffat1D':
single_profile_center = single_profile.x_0.value
if len(trace_list) > 1:
target_number = trace_list.index(
[single_trace,
single_profile, trace_info]) + 1
else:
target_number = 0
try:
ccd = record_trace_information(ccd=ccd,
trace_info=trace_info)
# target extraction
extracted = extraction(
ccd=ccd,
target_trace=single_trace,
spatial_profile=single_profile,
extraction_name=extraction_type)
saved_ccd = save_extracted(
ccd=extracted,
destination=self.args.destination,
target_number=target_number)
# print(spec_file)
# lamp extraction
all_lamps = []
if comp_ccd_list:
for comp_lamp in comp_ccd_list:
comp_lamp.header.set(
'GSP_SCTR',
value=saved_ccd.header['GSP_FNAM'],
comment='Science target file the lamp '
'was extracted for.')
comp_lamp = record_trace_information(ccd=comp_lamp,
trace_info=trace_info)
extracted_lamp = extraction(
ccd=comp_lamp,
target_trace=single_trace,
spatial_profile=single_profile,
extraction_name=extraction_type)
save_extracted(
ccd=extracted_lamp,
destination=self.args.destination,
target_number=target_number)
all_lamps.append(extracted_lamp)
extracted_target_and_lamps.append([extracted,
all_lamps])
if self.args.debug_with_plots: # pragma: no cover
# print(plt.get_backend())
plt.close('all')
fig, ax = plt.subplots(1, 1)
ax.set_ylabel("Intensity (ADU)")
ax.set_xlabel("Dispersion Axis (Pixels)")
fig.canvas.set_window_title(
'Extracted Data: Target Center ~ '
'{:.2f}'.format(single_profile_center))
manager = plt.get_current_fig_manager()
if plt.get_backend() == u'GTK3Agg':
manager.window.maximize()
elif plt.get_backend() == u'Qt5Agg':
manager.window.showMaximized()
ax.set_title(
"{:s} Extraction centered near "
"{:.2f} \n File: {:s}".format(
extracted.header['OBJECT'],
single_profile_center,
extracted.header['GSP_FNAM'])
)
if all_lamps:
_alpha = 1.0 / len(all_lamps)
for comp in all_lamps:
ax.plot(comp.data,
label=comp.header['OBJECT'],
alpha=_alpha,
color='k')
ax.plot(extracted.data,
label=extracted.header['OBJECT'])
ax.legend(loc='best')
if plt.isinteractive():
plt.draw()
plt.pause(1)
else:
plt.show()
except NoTargetException:
self.log.error('No target was identified in file'
' {:s}'.format(spec_file))
continue
object_number = None
for sci_target, comp_list in extracted_target_and_lamps:
try:
self.wavelength_solution_obj = \
self.wavelength_calibration(
ccd=sci_target,
comp_list=comp_list,
save_data_to=self.args.destination,
reference_data=self.args.reference_dir,
object_number=object_number,
output_prefix=self.args.output_prefix,
plot_results=self.args.plot_results,
save_plots=self.args.save_plots,
plots=self.args.debug_with_plots)
except NoMatchFound as no_match_error:
self.log.error(no_match_error)
                    except NotImplementedError as error:
self.log.error(error)
if __name__ == '__main__': # pragma: no cover
MAIN_APP = MainApp()
try:
MAIN_APP()
except KeyboardInterrupt:
sys.exit(0)
| soar-telescope/goodman | goodman_pipeline/spectroscopy/redspec.py | Python | bsd-3-clause | 22,879 |
class Solution:
"""
@param A : An integer array
@return : An integer
"""
    def singleNumberII(self, A):
        # Every element of A appears exactly three times except for one;
        # count each of the 32 bit positions modulo 3 so the triples
        # cancel out and only the bits of the single element survive.
        if A is None or len(A) == 0:
            return -1
        res = 0
        n = len(A)
        bits = [0] * 32
        for i in xrange(32):
            for j in xrange(n):
                bits[i] += A[j] >> i & 1
            bits[i] %= 3
            res |= (bits[i] << i)
        return res
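    # Worked illustration of the counting trick above (hedged, using the
    # driver input [3, 3, 3, 4]): 3 is 0b011 and 4 is 0b100. Bit 2 sums to
    # 0+0+0+1 = 1 and 1 % 3 = 1, while bits 0 and 1 each sum to 3 and
    # 3 % 3 = 0, so only 0b100 = 4 survives.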
def baseConvertInv(self, n):
string = str(n)[::-1]
base = 1
res = 0
for word in string:
res += int(word) * base
base *= 3
return res
def baseConvert(self, n):
result = ''
while True:
tup = divmod(n, 2)
result += str(tup[1])
if tup[0] == 0:
return int(result[::-1])
else:
n = tup[0]
a = Solution()
print a.singleNumberII([3,3,3,4])
| quake0day/oj | singlenumber2.py | Python | mit | 1,020 |
# -*- coding: utf-8 -*-
import re
import unittest
from mock import patch
from mock import MagicMock
from django.core import mail
from django.core.management import call_command
from django.core.management.base import CommandError
from django.test import TestCase
from common.alert_utils import BatchedEmailErrors
from frontend.models import EmailMessage
from frontend.models import Measure
from frontend.models import PCN
from frontend.management.commands.send_monthly_alerts import Command
from frontend.views.bookmark_utils import BadAlertImageError
from frontend.tests.test_bookmark_utils import _makeContext
from frontend.tests.data_factory import DataFactory
from frontend.tests.test_api_spending import ApiTestBase
CMD_NAME = "send_monthly_alerts"
class ValidateOptionsTestCase(unittest.TestCase):
def _defaultOpts(self, **extra):
        default = {
            "url": None,
            "ccg": None,
            "practice": None,
            "recipient_email": None,
        }
for k, v in extra.items():
default[k] = v
return default
def test_options_depended_on_by_recipient_email(self):
opts = self._defaultOpts(url="thing")
with self.assertRaises(CommandError):
Command().validate_options(**opts)
opts = self._defaultOpts(ccg="thing")
with self.assertRaises(CommandError):
Command().validate_options(**opts)
opts = self._defaultOpts(practice="thing")
with self.assertRaises(CommandError):
Command().validate_options(**opts)
opts = self._defaultOpts(practice="thing", recipient_email="thing")
Command().validate_options(**opts)
def test_incompatibile_options(self):
opts = self._defaultOpts(url="thing", ccg="thing")
with self.assertRaises(CommandError):
Command().validate_options(**opts)
opts = self._defaultOpts(url="thing", practice="thing")
with self.assertRaises(CommandError):
Command().validate_options(**opts)
class GetBookmarksTestCase(TestCase):
fixtures = ["bookmark_alerts"]
def setUp(self):
self.now_month = "2014-11-01"
def test_get_org_bookmarks_without_options(self):
bookmarks = Command().get_org_bookmarks(
self.now_month,
recipient_email=None,
recipient_email_file=None,
skip_email_file=None,
)
active = all([x.user.is_active for x in bookmarks])
self.assertEqual(len(bookmarks), 2)
self.assertTrue(active)
def test_get_org_bookmarks_with_test_options(self):
bookmarks = Command().get_org_bookmarks(
self.now_month,
recipient_email="[email protected]",
ccg="03V",
practice="P87629",
pcn=None,
recipient_email_file=None,
skip_email_file=None,
)
self.assertEqual(len(bookmarks), 1)
self.assertEqual(bookmarks[0].user.email, "[email protected]")
self.assertTrue(bookmarks[0].user.profile.key)
self.assertTrue(bookmarks[0].user.id)
self.assertEqual(bookmarks[0].pct.code, "03V")
self.assertEqual(bookmarks[0].practice.code, "P87629")
def test_get_org_bookmarks_with_skip_file(self):
skip_file = "frontend/tests/fixtures/commands/" "skip_alerts_recipients.txt"
bookmarks = Command().get_org_bookmarks(
self.now_month,
skip_email_file=skip_file,
recipient_email=None,
recipient_email_file=None,
)
self.assertEqual(len(bookmarks), 0)
def test_get_search_bookmarks_without_options(self):
bookmarks = Command().get_search_bookmarks(self.now_month, recipient_email=None)
active = all([x.user.is_active for x in bookmarks])
self.assertEqual(len(bookmarks), 1)
self.assertEqual(bookmarks[0].url, "foo")
self.assertTrue(active)
def test_get_search_bookmarks_with_options(self):
bookmarks = Command().get_search_bookmarks(
self.now_month, recipient_email="[email protected]", url="frob", search_name="baz"
)
self.assertEqual(len(bookmarks), 1)
self.assertEqual(bookmarks[0].user.email, "[email protected]")
self.assertTrue(bookmarks[0].user.profile.key)
self.assertTrue(bookmarks[0].user.id)
self.assertEqual(bookmarks[0].url, "frob")
@patch("frontend.views.bookmark_utils.InterestingMeasureFinder")
@patch("frontend.views.bookmark_utils.attach_image")
class FailingEmailTestCase(TestCase):
"""Exercise the error batching mechanism that allowed a maximum number
of errors before failing the batch.
"""
fixtures = ["bookmark_alerts", "measures", "importlog"]
def test_successful_sends(self, attach_image, finder):
attach_image.side_effect = [Exception, None, None]
measure = MagicMock()
measure.id = "measureid"
test_context = _makeContext(worst=[measure])
self.assertEqual(EmailMessage.objects.count(), 1)
with self.assertRaises(BatchedEmailErrors):
call_mocked_command(test_context, finder, max_errors="4")
self.assertEqual(EmailMessage.objects.count(), 3)
self.assertEqual(len(mail.outbox), 2)
def test_bad_alert_image_error_not_sent_and_not_raised(self, attach_image, finder):
attach_image.side_effect = BadAlertImageError
measure = MagicMock()
measure.id = "measureid"
test_context = _makeContext(worst=[measure])
call_mocked_command(test_context, finder, max_errors="0")
self.assertEqual(len(mail.outbox), 0)
def test_max_errors(self, attach_image, finder):
attach_image.side_effect = [Exception, None, None]
measure = MagicMock()
measure.id = "measureid"
test_context = _makeContext(worst=[measure])
self.assertEqual(EmailMessage.objects.count(), 1)
with self.assertRaises(BatchedEmailErrors):
call_mocked_command(test_context, finder, max_errors="0")
self.assertEqual(EmailMessage.objects.count(), 1)
self.assertEqual(len(mail.outbox), 0)
@patch("frontend.views.bookmark_utils.InterestingMeasureFinder")
@patch("frontend.views.bookmark_utils.attach_image")
class OrgEmailTestCase(TestCase):
fixtures = ["bookmark_alerts", "measures", "importlog"]
def test_email_recipient(self, attach_image, finder):
test_context = _makeContext()
        self.assertEqual(EmailMessage.objects.count(), 1)  # a test fixture
call_mocked_command_with_defaults(test_context, finder)
self.assertEqual(EmailMessage.objects.count(), 2)
self.assertEqual(len(mail.outbox), 1)
email_message = EmailMessage.objects.last()
self.assertEqual(mail.outbox[-1].to, email_message.to)
self.assertEqual(mail.outbox[-1].to, ["[email protected]"])
def test_email_all_recipients(self, attach_image, finder):
test_context = _makeContext()
self.assertEqual(EmailMessage.objects.count(), 1)
call_mocked_command(test_context, finder)
self.assertEqual(EmailMessage.objects.count(), 4)
self.assertEqual(len(mail.outbox), 3)
def test_email_body_no_data(self, attach_image, finder):
test_context = _makeContext()
call_mocked_command_with_defaults(test_context, finder)
message = mail.outbox[-1].alternatives[0]
html = message[0]
# Name of the practice
self.assertIn("1/ST Andrews Medical Practice", html)
# Unsubscribe link
self.assertIn("/bookmarks/dummykey/", html)
self.assertIn("We've no new information", html)
def test_email_headers(self, attach_image, finder):
test_context = _makeContext()
call_mocked_command_with_defaults(test_context, finder)
message = mail.outbox[-1]
        self.assertEqual(
            message.extra_headers["list-unsubscribe"],
            "<http://localhost/bookmarks/dummykey/>",
        )
def test_email_body_text(self, attach_image, finder):
test_context = _makeContext()
call_mocked_command_with_defaults(test_context, finder)
message = mail.outbox[-1].body
self.assertIn("**Hello!**", message)
def test_email_body_has_ga_tracking(self, attach_image, finder):
measure = Measure.objects.get(pk="cerazette")
call_mocked_command_with_defaults(
_makeContext(declines=[{"measure": measure, "from": 99.92, "to": 0.12}]),
finder,
)
message = mail.outbox[-1].alternatives[0]
html = message[0]
self.assertRegex(html, '<a href=".*&utm_content=.*#cerazette".*>')
def test_email_body_declines(self, attach_image, finder):
attach_image.return_value = "unique-image-id"
measure = Measure.objects.get(pk="cerazette")
call_mocked_command_with_defaults(
_makeContext(declines=[{"measure": measure, "from": 99.92, "to": 0.12}]),
finder,
)
message = mail.outbox[-1].alternatives[0]
html = message[0]
self.assertIn("this practice slipped", html)
self.assertRegex(
html,
"slipped massively on "
'<a href=".*/practice/P87629/.*#cerazette".*>'
"Cerazette vs. Desogestrel</a>",
)
self.assertIn('<span class="worse"', html)
self.assertIn('<img src="cid:unique-image-id', html)
self.assertNotIn("Your best prescribing areas", html)
self.assertNotIn("Cost savings", html)
def test_email_body_two_declines(self, attach_image, finder):
measure = Measure.objects.get(pk="cerazette")
call_mocked_command_with_defaults(
_makeContext(
declines=[
{"measure": measure, "from": 99.92, "to": 0.12},
{"measure": measure, "from": 30, "to": 10},
]
),
finder,
)
message = mail.outbox[-1].alternatives[0]
html = message[0]
self.assertRegex(html, "It also slipped considerably")
def test_email_body_three_declines(self, attach_image, finder):
measure = Measure.objects.get(pk="cerazette")
call_mocked_command_with_defaults(
_makeContext(
declines=[
{"measure": measure, "from": 99.92, "to": 0.12},
{"measure": measure, "from": 30, "to": 10},
{"measure": measure, "from": 20, "to": 10},
]
),
finder,
)
message = mail.outbox[-1].alternatives[0]
html = message[0]
self.assertRegex(html, "It also slipped:")
self.assertRegex(
html,
re.compile(
"<ul.*<li>considerably on.*" "<li>moderately on.*</ul>", re.DOTALL
),
)
def test_email_body_worst(self, attach_image, finder):
measure = Measure.objects.get(pk="cerazette")
attach_image.return_value = "unique-image-id"
call_mocked_command_with_defaults(_makeContext(worst=[measure]), finder)
message = mail.outbox[-1].alternatives[0]
html = message[0]
self.assertIn("We've found", html)
self.assertRegex(
html,
re.compile(
'the worst 10% on.*<a href=".*/practice/P87629'
'/.*#cerazette".*>'
"Cerazette vs. Desogestrel</a>",
re.DOTALL,
),
)
self.assertIn('<img src="cid:unique-image-id', html)
def test_email_body_three_worst(self, attach_image, finder):
measure = Measure.objects.get(pk="cerazette")
call_mocked_command_with_defaults(
_makeContext(worst=[measure, measure, measure]), finder
)
message = mail.outbox[-1].alternatives[0]
html = message[0]
self.assertRegex(html, "It was also in the worst 10% on:")
self.assertRegex(
html,
re.compile(
"<ul.*<li>.*Desogestrel.*" "<li>.*Desogestrel.*</ul>", re.DOTALL
),
)
def test_email_body_two_savings(self, attach_image, finder):
measure = Measure.objects.get(pk="cerazette")
call_mocked_command_with_defaults(
_makeContext(possible_savings=[(measure, 9.9), (measure, 1.12)]), finder
)
message = mail.outbox[-1].alternatives[0]
html = message[0]
self.assertIn("These add up to around <b>£10</b> of " "potential savings", html)
self.assertRegex(
html,
'<li.*>\n<b>£10</b> on <a href=".*/practice/P87629'
'/.*#cerazette".*>'
"Cerazette vs. Desogestrel</a>",
)
def test_email_body_one_saving(self, attach_image, finder):
measure = Measure.objects.get(pk="cerazette")
call_mocked_command_with_defaults(
_makeContext(possible_savings=[(measure, 9.9)]), finder
)
message = mail.outbox[-1].alternatives[0]
html = message[0]
self.assertIn("if it had prescribed in line with the average practice", html)
self.assertRegex(
html,
"it could have saved about <b>£10</b> on "
'<a href=".*/practice/P87629/.*#cerazette".*>'
"Cerazette vs. Desogestrel</a>",
)
def test_email_body_achieved_saving(self, attach_image, finder):
measure = Measure.objects.get(pk="cerazette")
call_mocked_command_with_defaults(
_makeContext(achieved_savings=[(measure, 9.9)]), finder
)
message = mail.outbox[-1].alternatives[0]
html = message[0]
self.assertIn("this practice saved around <b>£10", html)
def test_email_body_two_achieved_savings(self, attach_image, finder):
measure = Measure.objects.get(pk="cerazette")
call_mocked_command_with_defaults(
_makeContext(achieved_savings=[(measure, 9.9), (measure, 12.0)]), finder
)
message = mail.outbox[-1].alternatives[0]
html = message[0]
self.assertIn("<li>\n<b>£10</b> on", html)
self.assertIn("<li>\n<b>£10</b> on", html)
def test_email_body_total_savings(self, attach_image, finder):
call_mocked_command_with_defaults(
_makeContext(possible_top_savings_total=9000.1), finder
)
message = mail.outbox[-1].alternatives[0]
html = message[0]
self.assertIn("it could save around <b>£9,000</b>", html)
@patch("frontend.views.bookmark_utils.attach_image")
class SearchEmailTestCase(TestCase):
fixtures = ["bookmark_alerts", "measures", "importlog"]
def test_all_recipients(self, attach_image):
self.assertEqual(EmailMessage.objects.count(), 1)
call_command(CMD_NAME)
mail_queue = mail.outbox
self.assertEqual(EmailMessage.objects.count(), 4)
self.assertEqual(len(mail_queue), 3)
def test_all_recipients_idempotent(self, attach_image):
self.assertEqual(EmailMessage.objects.count(), 1)
call_command(CMD_NAME)
call_command(CMD_NAME)
mail_queue = mail.outbox
self.assertEqual(EmailMessage.objects.count(), 4)
self.assertEqual(len(mail_queue), 3)
def test_email_recipient(self, attach_image):
opts = {
"recipient_email": "[email protected]",
"url": "something",
"search_name": "some name",
}
self.assertEqual(EmailMessage.objects.count(), 1) # a fixture
call_command(CMD_NAME, **opts)
self.assertEqual(EmailMessage.objects.count(), 2)
email_message = EmailMessage.objects.last()
self.assertEqual(email_message.send_count, 1)
mail_queue = mail.outbox[-1]
self.assertEqual(mail_queue.to, email_message.to)
self.assertEqual(mail_queue.to, [opts["recipient_email"]])
self.assertEqual(
mail_queue.extra_headers["message-id"], email_message.message_id
)
def test_email_headers(self, attach_image):
opts = {
"recipient_email": "[email protected]",
"url": "something",
"search_name": "some name",
}
call_command(CMD_NAME, **opts)
email_message = EmailMessage.objects.last()
mail_queue = mail.outbox[-1]
self.assertEqual(
mail_queue.extra_headers["message-id"], email_message.message_id
)
self.assertEqual(
mail_queue.extra_headers["list-unsubscribe"],
"<http://localhost/bookmarks/dummykey/>",
)
def test_email_body(self, attach_image):
opts = {
"recipient_email": "[email protected]",
"url": "something",
"search_name": "some name",
}
call_command(CMD_NAME, **opts)
message = mail.outbox[-1].alternatives[0]
html = message[0]
mime_type = message[1]
self.assertIn(opts["search_name"], html)
self.assertEqual(mime_type, "text/html")
self.assertIn("/bookmarks/dummykey/", html)
self.assertRegex(html, '<a href="http://localhost/analyse/.*#%s' % "something")
def test_email_body_text(self, attach_image):
opts = {
"recipient_email": "[email protected]",
"url": "something",
"search_name": "some name",
}
call_command(CMD_NAME, **opts)
text = mail.outbox[-1].body
self.assertIn("**Hello!**", text)
self.assertIn("/bookmarks/dummykey/", text)
self.assertRegex(text, "http://localhost/analyse/.*#%s" % "something")
class AllEnglandAlertTestCase(ApiTestBase):
fixtures = ApiTestBase.fixtures + ["functional-measures-dont-edit"]
def test_all_england_alerts_sent(self):
factory = DataFactory()
# Create an All England bookmark, send alerts, and make sure one email
# is sent to correct user
bookmark = factory.create_org_bookmark(None)
call_command(CMD_NAME)
self.assertEqual(len(mail.outbox), 1)
self.assertEqual(mail.outbox[0].to, [bookmark.user.email])
class PCNAlertTestCase(ApiTestBase):
fixtures = ApiTestBase.fixtures + ["functional-measures-dont-edit"]
def test_pcn_alerts_sent(self):
"""Create a PCN bookmark, send alerts, and make sure an email is sent
to correct user, and that its contents mention PCNs
"""
factory = DataFactory()
pcn = PCN.objects.get(pk="E00000011")
bookmark = factory.create_org_bookmark(pcn)
call_command(CMD_NAME)
self.assertEqual(len(mail.outbox), 1)
self.assertEqual(mail.outbox[0].to, [bookmark.user.email])
self.assertIn("PCN", mail.outbox[0].body)
def call_mocked_command(context, mock_finder, **opts):
mock_finder.return_value.context_for_org_email.return_value = context
call_command(CMD_NAME, **opts)
def call_mocked_command_with_defaults(context, mock_finder, **opts):
default_opts = {"recipient_email": "[email protected]", "ccg": "03V", "practice": "P87629"}
for k, v in opts.items():
default_opts[k] = v
call_mocked_command(context, mock_finder, **default_opts)
| ebmdatalab/openprescribing | openprescribing/frontend/tests/commands/test_send_monthly_alerts.py | Python | mit | 19,193 |
# -*- coding: utf-8 -*-
# Generated by Django 1.10.4 on 2017-01-30 01:35
from __future__ import unicode_literals
from django.db import migrations
class Migration(migrations.Migration):
dependencies = [
('api', '0015_auto_20170129_0129'),
]
operations = [
migrations.RemoveField(
model_name='mentorinfo',
name='availability',
),
]
| andrewsosa/hackfsu_com | api/api/migrations/0016_remove_mentorinfo_availability.py | Python | apache-2.0 | 399 |
#!/usr/bin/env python
"""
This is the installation script of the offtheshelf module, a very simple and
minimal NoSQL database that uses shelve as a backend. You can run it by typing:
python setup.py install
You can also run the test suite by running:
python setup.py test
"""
import sys
from distutils.core import setup
from offtheshelf.tests import TestCommand
__author__ = "Daniele Mazzocchio <[email protected]>"
__version__ = "0.0.1"
__date__ = "Jun 24, 2012"
# Python versions prior to 2.2.3 don't support 'classifiers' and 'download_url'
if sys.version < "2.2.3":
from distutils.dist import DistributionMetadata
DistributionMetadata.classifiers = None
DistributionMetadata.download_url = None
setup(name = "offtheshelf",
version = __version__,
author = "Daniele Mazzocchio",
author_email = "[email protected]",
packages = ["offtheshelf"],
cmdclass = {"test": TestCommand},
description = "Simple NoSQL database with shelve backend",
classifiers = ["Development status :: 2 - Pre-Alpha",
"Environment :: Console",
"Intended Audience :: Developers",
"License :: OSI-Approved Open Source :: BSD License",
"Natural Language :: English",
"Operating System :: OS Independent",
"Programming Language :: Python",
"Topic :: Database"])
| dotpy/offtheshelf | setup.py | Python | bsd-3-clause | 1,493 |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
test_libtool
----------------------------------
Tests for `libtool` module.
"""
import os
import unittest
import sys
import shutil
from libtool.folder_tool import get_year_month_dir
print sys.path
from libtool.package_utils import include_all_ex
class TestLibtool(unittest.TestCase):
def setUp(self):
pass
def test_something(self):
#include_all_ex("test")
folder = get_year_month_dir()
print folder
if os.path.exists(folder):
shutil.rmtree(folder)
def tearDown(self):
pass
if __name__ == '__main__':
unittest.main() | weijia/libtool | tests/test_libtool.py | Python | bsd-3-clause | 652 |
from django.contrib.auth.models import User
from django.views import View
from django.views.generic import DetailView
from django.views.generic.edit import CreateView, UpdateView, FormView
from django.contrib.auth.decorators import login_required
from django.contrib.admin.views.decorators import staff_member_required
from django.utils.decorators import method_decorator
from django.urls import reverse_lazy, reverse
from django.shortcuts import render, get_object_or_404, redirect
from django.contrib import messages
from della.email_service import send_email
from .models import UserProfile
from .forms import (SignupForm, UserProfileForm, RequestActivationCodeForm,
MassEmailForm)
from . import user_service
from . import draw_service
from . import activation_service
class SignupView(CreateView):
model = User
form_class = SignupForm
success_url = '/'
template_name = 'generic_crispy_form_template.html'
def form_valid(self, form):
user = form.save(commit=False)
user.is_active = False
user.save()
user_service.create_user_profile(user=user)
user_service.send_activation_email(request=self.request, user=user)
m = ('Hey {}! Your account has been created. Please check your '
'email for account activation link.').format(user.username)
messages.add_message(self.request, messages.SUCCESS, m)
return redirect('/')
class ActivateView(View):
def get(self, request, username, code):
user = get_object_or_404(User, username=username)
if not activation_service.validate_key(key=code, user=user):
m = 'Activation key expired, request a new one.'
messages.add_message(self.request, messages.ERROR, m)
return redirect(reverse('user_manager:activate-request'))
user_service.activate_user(user=user)
user_service.enable_for_exchange(user=user)
messages.add_message(self.request, messages.SUCCESS,
'Your email is confirmed. Please login.')
return redirect(reverse('user_manager:login'))
class RequestActivationEmailView(FormView):
form_class = RequestActivationCodeForm
template_name = 'generic_crispy_form_template.html'
def form_valid(self, form):
email = form.cleaned_data['email']
user = get_object_or_404(User, email=email)
if user.is_active:
messages.add_message(
self.request, messages.WARNING,
'Account already active. Please login')
return redirect(reverse('user_manager:login'))
user_service.send_activation_email(request=self.request, user=user)
messages.add_message(self.request, messages.INFO,
'Activation email has been sent.')
return redirect('/')
@method_decorator(login_required, name='dispatch')
class UserProfileUpdateView(UpdateView):
model = UserProfile
form_class = UserProfileForm
success_url = reverse_lazy('user_manager:account')
def get_object(self, queryset=None):
return self.request.user.userprofile
def get_initial(self):
form_data = super().get_initial()
form_data['first_name'] = self.object.user.first_name
form_data['last_name'] = self.object.user.last_name
return form_data
def form_valid(self, form):
if 'last_name' in form.changed_data or (
'first_name' in form.changed_data):
self.object.user.first_name = form.cleaned_data['first_name']
self.object.user.last_name = form.cleaned_data['last_name']
self.object.user.save()
response = super().form_valid(form)
return response
class UserProfileDetailView(DetailView):
model = UserProfile
template_name = 'user_manager/userprofile_detail.html'
template_name_santa = 'user_manager/userprofile_detail_santa.html'
def get_object(self, queryset=None):
username = self.kwargs.get('username')
return get_object_or_404(UserProfile, user__username=username)
def render_to_response(self, context, **response_kwargs):
if self.request.user.is_authenticated:
# check if the logged in user has a santee
santee = self.request.user.userprofile.santee
if santee:
if santee.id == self.object.user.id:
self.template_name = self.template_name_santa
return super().render_to_response(context, **response_kwargs)
@method_decorator(staff_member_required, name='dispatch')
class DrawNamesView(View):
template_draw_names = 'user_manager/draw_names.html'
template_draw_names_done = 'user_manager/draw_names_done.html'
def get(self, request):
draw_status = draw_service.get_draw_status()
users = User.objects.filter(userprofile__is_enabled_exchange=True)
context = {}
context['draw_status'] = draw_status
context['user_list'] = users
template = self.template_draw_names_done if (
draw_status) else self.template_draw_names
return render(
request=request, template_name=template, context=context)
def post(self, request):
eligible_users = User.objects.filter(
userprofile__is_enabled_exchange=True).count()
if eligible_users < 3:
            m = ('The number of eligible users for the exchange is less '
                 'than three.')
messages.add_message(self.request, messages.ERROR, m)
return redirect(reverse('user_manager:draw-names'))
if not draw_service.get_draw_status():
draw_service.draw_names()
users = User.objects.filter(userprofile__is_enabled_exchange=True)
context = {}
context['user_list'] = users
return render(
request=request, template_name=self.template_draw_names_done,
context=context)
@method_decorator(staff_member_required, name='dispatch')
class MassEmailView(FormView):
form_class = MassEmailForm
template_name = 'user_manager/mass_email.html'
def form_valid(self, form):
message = form.cleaned_data['message']
subject = form.cleaned_data['subject']
recipients = form.cleaned_data['recipients']
send_email(subject=subject, message=message, recipient_list=recipients)
messages.add_message(self.request, messages.SUCCESS,
'Emails have been sent!')
return redirect(reverse('user_manager:mass-email'))
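# Hedged wiring sketch (route names follow the reverse() calls used above;
# the exact patterns are assumptions, not copied from this app's urls.py):
#
#     from django.conf.urls import url
#     from . import views
#
#     app_name = 'user_manager'
#     urlpatterns = [
#         url(r'^signup/$', views.SignupView.as_view(), name='signup'),
#         url(r'^activate/(?P<username>[\w.@+-]+)/(?P<code>\w+)/$',
#             views.ActivateView.as_view(), name='activate'),
#         url(r'^draw-names/$', views.DrawNamesView.as_view(),
#             name='draw-names'),
#         url(r'^mass-email/$', views.MassEmailView.as_view(),
#             name='mass-email'),
#     ]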
| avinassh/della | della/user_manager/views.py | Python | mit | 6,524 |
# This file is part of Korman.
#
# Korman is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Korman is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Korman. If not, see <http://www.gnu.org/licenses/>.
import bpy
from bpy.props import *
from collections import OrderedDict
from PyHSPlasma import *
from .node_core import *
from ..properties.modifiers.physics import bounds_types, bounds_type_index, bounds_type_str
from .. import idprops
class PlasmaExcludeRegionNode(idprops.IDPropObjectMixin, PlasmaNodeBase, bpy.types.Node):
bl_category = "LOGIC"
bl_idname = "PlasmaExcludeRegionNode"
bl_label = "Exclude Region"
bl_width_default = 195
# ohey, this can be a Python attribute
pl_attrib = {"ptAttribExcludeRegion"}
def _get_bounds(self):
if self.region_object is not None:
return bounds_type_index(self.region_object.plasma_modifiers.collision.bounds)
return bounds_type_index("hull")
def _set_bounds(self, value):
if self.region_object is not None:
self.region_object.plasma_modifiers.collision.bounds = bounds_type_str(value)
region_object = PointerProperty(name="Region",
description="Region object's name",
type=bpy.types.Object,
poll=idprops.poll_mesh_objects)
bounds = EnumProperty(name="Bounds",
description="Region bounds",
items=bounds_types,
get=_get_bounds,
set=_set_bounds)
block_cameras = BoolProperty(name="Block Cameras",
description="The region blocks cameras when it has been cleared")
input_sockets = OrderedDict([
("safe_point", {
"type": "PlasmaExcludeSafePointSocket",
"text": "Safe Point",
"spawn_empty": True,
# This never links to anything...
"valid_link_sockets": frozenset(),
}),
("msg", {
"type": "PlasmaExcludeMessageSocket",
"text": "Message",
"spawn_empty": True,
}),
])
output_sockets = OrderedDict([
("keyref", {
"text": "References",
"type": "PlasmaPythonReferenceNodeSocket",
"valid_link_nodes": {"PlasmaPythonFileNode"},
}),
])
def draw_buttons(self, context, layout):
layout.prop(self, "region_object", icon="MESH_DATA")
layout.prop(self, "bounds")
layout.prop(self, "block_cameras")
def get_key(self, exporter, parent_so):
if self.region_object is None:
self.raise_error("Region must be set")
return self._find_create_key(plExcludeRegionModifier, exporter, bl=self.region_object)
    def harvest_actors(self):
        return (i.safepoint_object.name
                for i in self.find_input_sockets("safe_point")
                if i.safepoint_object is not None)
def export(self, exporter, bo, parent_so):
excludergn = self.get_key(exporter, parent_so).object
excludergn.setFlag(plExcludeRegionModifier.kBlockCameras, self.block_cameras)
region_so = exporter.mgr.find_create_object(plSceneObject, bl=self.region_object)
# Safe points
for i in self.find_input_sockets("safe_point"):
safept = i.safepoint_object
if safept:
excludergn.addSafePoint(exporter.mgr.find_create_key(plSceneObject, bl=safept))
# Ensure the region is exported
if exporter.mgr.getVer() <= pvPots:
member_group = "kGroupDetector"
collide_groups = ["kGroupDynamic"]
else:
member_group = "kGroupStatic"
collide_groups = []
exporter.physics.generate_physical(self.region_object, region_so, bounds=self.bounds,
properties=["kPinned"],
losdbs=["kLOSDBUIBlockers"],
member_group=member_group,
collide_groups=collide_groups)
@property
def export_once(self):
return True
@classmethod
def _idprop_mapping(cls):
return {"region_object": "region"}
class PlasmaExcludeSafePointSocket(idprops.IDPropObjectMixin, PlasmaNodeSocketBase, bpy.types.NodeSocket):
bl_color = (0.0, 0.0, 0.0, 0.0)
safepoint_object = PointerProperty(name="Safe Point",
description="A point outside of this exclude region to move the avatar to",
type=bpy.types.Object)
def draw(self, context, layout, node, text):
layout.prop(self, "safepoint_object", icon="EMPTY_DATA")
@classmethod
def _idprop_mapping(cls):
return {"safepoint_object": "safepoint_name"}
@property
def is_used(self):
return self.safepoint_object is not None
class PlasmaExcludeMessageSocket(PlasmaNodeSocketBase, bpy.types.NodeSocket):
bl_color = (0.467, 0.576, 0.424, 1.0)
| Hoikas/korman | korman/nodes/node_logic.py | Python | gpl-3.0 | 5,565 |
# coding=utf-8
# from cStringIO import StringIO
# from euphorie.client.report import HtmlToRtf
# from euphorie.client.report import IdentificationReport
# from euphorie.content.risk import Risk
from euphorie.client import model
from euphorie.client.adapters.session_traversal import TraversedSurveySession
from euphorie.client.interfaces import IClientSkinLayer
from euphorie.client.model import SurveySession
from euphorie.client.tests.utils import addAccount
from euphorie.testing import EuphorieIntegrationTestCase
from ExtensionClass import Base
from plone import api
from plone.app.testing.interfaces import SITE_OWNER_NAME
from six.moves.urllib.parse import quote
from z3c.saconfig import Session
from zope.interface import alsoProvides
import datetime
try:
from unittest import mock
except ImportError:
import mock
# import unittest
class ReportIntegrationTests(EuphorieIntegrationTestCase):
def create_session(self):
with api.env.adopt_user(SITE_OWNER_NAME):
api.content.create(
container=self.portal.sectors, type="euphorie.country", id="eu"
)
client_country = api.content.create(
container=self.portal.client, type="euphorie.clientcountry", id="eu"
)
client_sector = api.content.create(
container=client_country, type="euphorie.clientsector", id="sector"
)
api.content.create(
container=client_sector, type="euphorie.survey", id="survey"
)
sqlsession = Session()
account = model.Account(loginname=u"jane", password=u"secret")
sqlsession.add(account)
session = model.SurveySession(
title=u"Session", zodb_path="eu/sector/survey", account=account
)
sqlsession.add(session)
sqlsession.flush()
return session
def test_default_reports(self):
self.create_session()
traversed_session = self.portal.client.eu.sector.survey.restrictedTraverse(
"++session++1"
)
with self._get_view("report_view", traversed_session) as view:
# default default sections
self.assertEqual(
view.default_reports,
["report_full", "report_action_plan", "report_overview_risks"],
)
# customized default sections
view.webhelpers.content_country_obj.default_reports = [
"report_overview_measures",
]
with self._get_view("report_view", traversed_session) as view:
self.assertEqual(
view.default_reports,
["report_overview_measures"],
)
# XXX Change these tests to test client.docx.views.IdentificationReportDocxView instead
# class IdentificationReportTests(unittest.TestCase):
# def IdentificationReport(self, *a, **kw):
# return IdentificationReport(*a, **kw)
# def test_title_not_a_risk(self):
# node = mock.Mock()
# node.type = 'module'
# node.title = u'My title'
# view = self.IdentificationReport(None, None)
# self.assertEqual(view.title(node, None), u'My title')
# def test_title_unanswered_risk(self):
# node = mock.Mock()
# node.type = 'risk'
# node.identification = None
# node.title = u'My title'
# view = self.IdentificationReport(None, None)
# self.assertEqual(view.title(node, None), u'My title')
# def test_title_empty_problem_description(self):
# node = mock.Mock()
# node.type = 'risk'
# node.identification = u'no'
# node.title = u'My title'
# zodb_node = mock.Mock()
# zodb_node.problem_description = u' '
# view = self.IdentificationReport(None, None)
# self.assertEqual(view.title(node, zodb_node), u'My title')
# def test_title_risk_present_and_with_problem_description(self):
# node = mock.Mock()
# node.type = 'risk'
# node.identification = u'no'
# node.title = u'My title'
# zodb_node = mock.Mock()
# zodb_node.problem_description = u'Bad situation'
# view = self.IdentificationReport(None, None)
# self.assertEqual(view.title(node, zodb_node), u'Bad situation')
# class ShowNegateWarningTests(unittest.TestCase):
# def _call(self, node, zodbnode):
# report = IdentificationReport(None, None)
# return report.show_negate_warning(node, zodbnode)
# def test_show_Unanswered(self):
# # https//code.simplon.biz/tracker/tno-euphorie/ticket/75
# zodbnode = Risk()
# zodbnode.problem_description = None
# node = model.Risk(type="risk")
# self.assertEqual(self._call(node, zodbnode), False)
# def test_RiskNotPresent(self):
# zodbnode = Risk()
# zodbnode.problem_description = None
# node = model.Risk(type="risk", identification="yes")
# self.assertEqual(self._call(node, zodbnode), False)
# def test_RiskNotApplicable(self):
# zodbnode = Risk()
# zodbnode.problem_description = None
# node = model.Risk(type="risk", identification="n/a")
# self.assertEqual(self._call(node, zodbnode), False)
# def test_Present(self):
# zodbnode = Risk()
# zodbnode.problem_description = None
# node = model.Risk(type="risk", identification="no")
# self.assertEqual(self._call(node, zodbnode), True)
# def test_HasProblemDescription(self):
# zodbnode = Risk()
# zodbnode.problem_description = u"Negative"
# node = model.Risk(type="risk", identification="no")
# self.assertEqual(self._call(node, zodbnode), False)
# def test_HasEmptyProblemDescription(self):
# zodbnode = Risk()
# zodbnode.problem_description = u" "
# node = model.Risk(type="risk", identification="no")
# self.assertEqual(self._call(node, zodbnode), True)
# XXX Change these test to check client.docx.html._HtmlToWord instead
# class HtmlToRtfTests(unittest.TestCase):
# def HtmlToRtf(self, *a, **kw):
# return HtmlToRtf(*a, **kw)
# def render(self, output):
# document = Document()
# section = Section()
# for o in output:
# section.append(o)
# document.Sections.append(section)
# renderer = Renderer()
# renderer.Write(document, StringIO()) # Setup instance variables
# renderer._doc = document
# renderer._fout = StringIO()
# renderer._CurrentStyle = ""
# renderer._WriteSection(section, True, False)
# return renderer._fout.getvalue()
# def testEmptyInput(self):
# self.assertEqual(self.HtmlToRtf(u"", u"<stylesheet>"), [])
# def testInvalidHtmlFallback(self):
# self.assertTrue(
# "text\\par" in
# self.render(self.HtmlToRtf(u"<p>text</p>", u"<stylesheet>"))
# )
# def testBasicParagraph(self):
# self.assertTrue(
# "Simple text\\par" in self.render(
# self.HtmlToRtf(u"<p>Simple text</p>", u"<stylesheet>")
# ), []
# )
# def testItalicInText(self):
# self.assertTrue(
# "Simple {\\i text}\\par" in self.render(
# self.
# HtmlToRtf(u"<p>Simple <em>text</em></p>", u"<stylesheet>")
# )
# )
# def testBoldAndItalicText(self):
# self.assertTrue(
# "Very {\\i very }{\\b\\i bold}\\par" in self.render(
# self.HtmlToRtf(
# u"<p>Very <em>very <strong>bold</strong></em></p>",
# u"<stylesheet>"
# )
# )
# )
# def testEmphasisInText(self):
# self.assertTrue(
# "{\\i text}" in
# self.render(self.HtmlToRtf(u"<em>text</em>", u"<stylesheet>"))
# )
# def testInlineEntity(self):
# self.assertTrue(
# "Simple & clean\\par" in self.render(
# self.HtmlToRtf(u"<p>Simple & clean</p>", u"<stylesheet>")
# )
# )
# def testInlineEntityDigit(self):
# self.assertTrue(
# "Simple \r clean\\par" in self.render(
# self.HtmlToRtf(u"<p>Simple clean</p>", u"<stylesheet>")
# )
# )
# def test_link_in_text(self):
# # This demonstrates TNO Euphorie ticket 186
# html = '<p>Check the <a rel="nofollow">manual</a> for more info.</p>'
# rendering = self.render(self.HtmlToRtf(html, '<stylesheet>'))
# self.assertTrue('Check the manual for more info.' in rendering)
# self.assertEqual(rendering.count('more info'), 1)
class ActionPlanTimelineTests(EuphorieIntegrationTestCase):
def setUp(self):
super(ActionPlanTimelineTests, self).setUp()
self.account = addAccount(password="secret")
def _get_timeline(self, context=None, request=None):
"""Return the timeline view"""
class DummySurvey(mock.Mock, Base):
__new__ = object.__new__
def getPhysicalPath(self):
return ("test", "dummy-survey")
if request is None:
request = self.request.clone()
alsoProvides(request, IClientSkinLayer)
if context is None:
survey = DummySurvey()
session = self._create_session()
context = TraversedSurveySession(
survey,
session.id,
).__of__(survey)
return api.content.get_view("timeline", context, request)
def _create_session(self, dbsession=None):
if dbsession is None:
dbsession = Session()
session = SurveySession(account=self.account, zodb_path="survey")
dbsession.add(session)
dbsession.flush()
return session
def test_get_measures_with_correct_module(self):
view = self._get_timeline()
session = view.context.session
# This first module should be ignored, it doesn't contain any risks
session.addChild(
model.Module(
zodb_path="1",
module_id="1",
)
)
# Between the next two modules, the first one (root-level) must be
# returned.
module = session.addChild(
model.Module(
zodb_path="2",
module_id="2",
)
)
module = module.addChild(
model.Module(
zodb_path="2/3",
module_id="3",
)
)
module.addChild(model.Risk(zodb_path="2/3/4", risk_id="1", identification="no"))
survey = view.context.aq_parent
survey.restrictedTraverse = lambda x: object
survey.ProfileQuestions = lambda: []
measures = view.get_measures()
self.assertEqual(len(measures), 1)
self.assertEqual(measures[0][0].module_id, u"2")
def test_get_measures_return_risks_without_measures(self):
view = self._get_timeline()
session = view.context.session
module = session.addChild(
model.Module(
session=session,
zodb_path="1",
module_id="1",
)
)
module.addChild(
model.Risk(
session=session, zodb_path="1/2", risk_id="1", identification="no"
)
)
survey = view.context.aq_parent
survey.restrictedTraverse = lambda x: object
survey.ProfileQuestions = lambda: []
measures = view.get_measures()
self.assertEqual(len(measures), 1)
self.assertEqual(measures[0][2], None)
def test_get_measures_filter_on_session(self):
view = self._get_timeline()
sessions = [
view.context.session,
self._create_session(),
]
for session in sessions:
module = session.addChild(
model.Module(
session=session,
zodb_path="1",
module_id="1",
)
)
module.addChild(
model.Risk(
session=session,
zodb_path="1/2",
risk_id="1",
identification="no",
action_plans=[
model.ActionPlan(
action=u"Measure 1 for %s" % session.account.loginname
)
],
)
)
survey = view.context.aq_parent
survey.restrictedTraverse = lambda x: object
survey.ProfileQuestions = lambda: []
measures = view.get_measures()
self.assertEqual(len(measures), 1)
self.assertEqual(measures[0][2].action, "Measure 1 for [email protected]")
def test_get_measures_order_by_start_date(self):
view = self._get_timeline()
session = view.context.session
module = session.addChild(
model.Module(
session=session,
zodb_path="1",
module_id="1",
)
)
module.addChild(
model.Risk(
session=session,
zodb_path="1/2",
risk_id="1",
identification="no",
action_plans=[
model.ActionPlan(
action=u"Plan 2", planning_start=datetime.date(2011, 12, 15)
),
model.ActionPlan(
action=u"Plan 1", planning_start=datetime.date(2011, 11, 15)
),
],
)
)
survey = view.context.aq_parent
survey.restrictedTraverse = lambda x: object
survey.ProfileQuestions = lambda: []
measures = view.get_measures()
self.assertEqual(len(measures), 2)
self.assertEqual([row[2].action for row in measures], [u"Plan 1", u"Plan 2"])
def test_priority_name_known_priority(self):
view = self._get_timeline()
self.assertEqual(view.priority_name("high"), u"High")
    def test_priority_name_unknown_priority(self):
view = self._get_timeline()
self.assertEqual(view.priority_name("dummy"), "dummy")
def test_create_workbook_empty_session(self):
# If there are no risks only the header row should be generated.
view = self._get_timeline()
view.getModulePaths = lambda: []
book = view.create_workbook()
self.assertEqual(len(book.worksheets), 1)
sheet = book.worksheets[0]
self.assertEqual(len(tuple(sheet.rows)), 1)
def test_create_workbook_plan_information(self):
view = self._get_timeline()
module = model.Module(
zodb_path="1",
title=u"Top-level Module title",
)
risk = model.Risk(
zodb_path="1/2/3",
risk_id="1",
title=u"Risk title",
priority="high",
identification="no",
path="001002003",
comment=u"Risk comment",
)
plan = model.ActionPlan(
action=u"Plan 2", planning_start=datetime.date(2011, 12, 15), budget=500
)
survey = view.context.aq_parent
zodb_node = mock.Mock()
zodb_node.problem_description = u"This is wrong."
survey.restrictedTraverse.return_value = zodb_node
view.get_measures = lambda: [(module, risk, plan)]
wb = view.create_workbook()
sheet = wb.worksheets[0]
# planning start
self.assertEqual(sheet["A2"].value, datetime.date(2011, 12, 15))
# planning end
self.assertEqual(sheet["B2"].value, None)
# action plan
self.assertEqual(sheet["C2"].value, u"Plan 2")
# requirements
self.assertEqual(sheet["D2"].value, None)
# responsible
self.assertEqual(sheet["E2"].value, None)
# budget
self.assertEqual(sheet["F2"].value, 500)
# module title
self.assertEqual(sheet["G2"].value, u"Top-level Module title")
# risk number
self.assertEqual(sheet["H2"].value, u"1.2.3")
# risk title
self.assertEqual(sheet["I2"].value, u"This is wrong.")
# risk priority
self.assertEqual(sheet["J2"].value, u"High")
# risk comment
self.assertEqual(sheet["K2"].value, u"Risk comment")
def test_create_workbook_no_problem_description(self):
view = self._get_timeline()
module = model.Module(
zodb_path="1",
path="001",
title=u"Top-level Module title",
)
risk = model.Risk(
zodb_path="1/2/3",
risk_id="1",
title=u"Risk title",
priority="high",
identification="no",
path="001002003",
comment=u"Risk comment",
)
survey = view.context.aq_parent
survey.ProfileQuestions = lambda: []
zodb_node = mock.Mock()
zodb_node.title = u"Risk title."
zodb_node.problem_description = u" "
survey.restrictedTraverse.return_value = zodb_node
view.getRisks = lambda x: [(module, risk)]
sheet = view.create_workbook().worksheets[0]
self.assertEqual(sheet["I2"].value, u"Risk title")
def test_render_value(self):
with api.env.adopt_user(user=self.account):
view = self._get_timeline()
view.context.session.title = u"Acmè"
survey = view.context.aq_parent
survey.ProfileQuestions = lambda: []
view.__call__()
response = view.request.response
self.assertEqual(
response.headers["content-type"],
"application/vnd.openxmlformats-" "officedocument.spreadsheetml.sheet",
)
quoted_filename = quote(u"Timeline for Acmè.xlsx".encode("utf-8"))
self.assertEqual(quoted_filename, "Timeline%20for%20Acm%C3%A8.xlsx")
self.assertEqual(
response.headers["content-disposition"],
"attachment; filename*=UTF-8''{}".format(quoted_filename),
)
| euphorie/Euphorie | src/euphorie/client/tests/test_report.py | Python | gpl-2.0 | 18,350 |
#!/usr/bin/env python
"""
vivocourses.py -- tools for courses and course section in VIVO
See CHANGELOG.md for history
"""
# TODO write test functions
# TODO get rid of tempita
# TODO update for VIVO-ISF
# TODO replace make_x_rdf series with add_x series
# TODO get rid of count and i in dictionary functions. Iterate over results
# TODO get rid of pickle
__author__ = "Michael Conlon"
__copyright__ = "Copyright 2014, University of Florida"
__license__ = "BSD 3-Clause license"
__version__ = "0.1"
class NoSuchAcademicTermException(Exception):
"""
Academic terms in the OUR data are compared to VIVO. If the academic term
is not found in VIVO, this exception is thrown.
"""
pass
class NoSuchPersonException(Exception):
"""
Every UFID from the OUR is checked against VIVO. If the instructor can not
be found, this exception is thrown.
"""
pass
def make_course_rdf(taught_data):
"""
Given taught_data, generate the RDF for a course,
a teacher role and links between course, teacher role and instructor
"""
import tempita
from vivofoundation import get_vivo_uri
from datetime import datetime
course_rdf_template = tempita.Template("""
<rdf:Description rdf:about="{{course_uri}}">
<rdf:type rdf:resource="http://www.w3.org/2002/07/owl#Thing"/>
<rdf:type rdf:resource="http://vivo.ufl.edu/ontology/vivo-ufl/Course"/>
<rdf:type rdf:resource="http://vivo.ufl.edu/ontology/vivo-ufl/UFEntity"/>
<rdfs:label>{{course_name}}</rdfs:label>
<ufVivo:courseNum>{{course_number}}</ufVivo:courseNum>
<ufVivo:harvestedBy>Python Courses version 0.5</ufVivo:harvestedBy>
<ufVivo:dateHarvested>{{harvest_datetime}}</ufVivo:dateHarvested>
</rdf:Description>""")
course_uri = get_vivo_uri()
rdf = course_rdf_template.substitute(course_uri=course_uri,
course_name=taught_data['course_name'],
course_number=taught_data['course_number'],
harvest_datetime=datetime.now().isoformat(),
person_uri=taught_data['person_uri'])
return [rdf, course_uri]
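# Usage sketch for make_course_rdf (values are illustrative). Note that the
# template above only interpolates course_name and course_number; person_uri
# is passed to substitute() but not referenced by this template:
#   taught_data = {'course_name': 'ABC1234 Example Course',
#                  'course_number': 'ABC1234',
#                  'person_uri': 'http://vivo.example.edu/individual/n123'}
#   rdf, course_uri = make_course_rdf(taught_data)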
def make_section_rdf(taught_data):
"""
Given teaching data, make a section and a teaching role. Link
the section to its teaching role, to its course and term. Link the
role to the instructor.
"""
from vivofoundation import get_vivo_uri
import tempita
from datetime import datetime
section_rdf_template = tempita.Template("""
<rdf:Description rdf:about="{{section_uri}}">
<rdf:type rdf:resource="http://www.w3.org/2002/07/owl#Thing"/>
<rdf:type rdf:resource="http://vivo.ufl.edu/ontology/vivo-ufl/CourseSection"/>
<rdf:type rdf:resource="http://vivo.ufl.edu/ontology/vivo-ufl/UFEntity"/>
<rdfs:label>{{section_name}}</rdfs:label>
<ufVivo:sectionNum>{{section_number}}</ufVivo:sectionNum>
<vivo:dateTimeInterval rdf:resource="{{term_uri}}"/>
<ufVivo:sectionForCourse rdf:resource="{{course_uri}}"/>
<ufVivo:harvestedBy>Python Courses version 0.5</ufVivo:harvestedBy>
<ufVivo:dateHarvested>{{harvest_datetime}}</ufVivo:dateHarvested>
</rdf:Description>
<rdf:Description rdf:about="{{term_uri}}">
<ufVivo:dateTimeIntervalFor rdf:resource="{{section_uri}}"/>
</rdf:Description>
{{if course_new}}
<rdf:Description rdf:about="{{course_role_uri}}">
<rdf:type rdf:resource="http://www.w3.org/2002/07/owl#Thing"/>
<rdf:type rdf:resource="http://vivoweb.org/ontology/core#TeacherRole"/>
<rdfs:label>{{course_name}}</rdfs:label>
<ufVivo:courseRoleOf rdf:resource="{{person_uri}}"/>
<vivo:roleRealizedIn rdf:resource="{{course_uri}}"/>
</rdf:Description>
{{endif}}
<rdf:Description rdf:about="{{teacher_role_uri}}">
<rdf:type rdf:resource="http://www.w3.org/2002/07/owl#Thing"/>
<rdf:type rdf:resource="http://vivoweb.org/ontology/core#TeacherRole"/>
<vivo:teacherRoleOf rdf:resource="{{person_uri}}"/>
<vivo:roleRealizedIn rdf:resource="{{section_uri}}"/>
</rdf:Description>""")
section_uri = get_vivo_uri()
rdf = section_rdf_template.substitute(section_uri=section_uri,
section_name=taught_data['section_name'],
section_number=taught_data['section_number'],
term_uri=taught_data['term_uri'],
course_uri=taught_data['course_uri'],
course_name=taught_data['course_name'],
course_new=taught_data['course_new'],
teacher_role_uri=get_vivo_uri(),
course_role_uri=get_vivo_uri(),
person_uri=taught_data['person_uri'],
harvest_datetime=datetime.now().isoformat())
return [rdf, section_uri]
def prepare_teaching_data(filename="course_data.csv"):
"""
Read a CSV file with course data. Create a dictionary with one entry
per OUR record
"""
# TODO write test function for prepare_teaching_data
import os
import pickle
from vivofoundation import read_csv
if os.path.isfile('taught_data.pcl'):
taught_dictionary = pickle.load(open('taught_data.pcl', 'r'))
return taught_dictionary
taught_dictionary = read_csv(filename)
for row, taught_data in taught_dictionary.items():
print taught_data
taught_data['ufid'] = taught_data['UF_UFID'].ljust(8, '0')
taught_data['term_name'] = term_name(taught_data['UF_TERM'])
taught_data['course_number'] = taught_data['UF_COURSE_CD']
taught_data['course_name'] = taught_data['course_number'] +\
' ' + taught_data['UF_COURSE_NAME'].title()
taught_data['section_number'] = taught_data['UF_SECTION']
taught_data['section_name'] = taught_data['course_number'] + ' ' + \
taught_data['term_name'] + ' ' + \
taught_data['UF_SECTION']
taught_dictionary[row] = taught_data
pickle.dump(taught_dictionary, open('taught_data.pcl', 'w'))
return taught_dictionary
def term_name(term_number):
"""
Given a UF term number, return the UF term name
"""
    if term_number is not None and len(term_number) >= 5:
year = term_number[0:4]
term = term_number[4:5]
if term == "1":
term_name = "Spring "+str(year)
elif term == "5" or term == "6" or term == "7":
term_name = "Summer "+str(year)
elif term == "8":
term_name = "Fall "+str(year)
else:
raise NoSuchAcademicTermException(term_number)
return term_name
else:
raise NoSuchAcademicTermException(term_number)
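# Examples of the mapping implemented above, assuming OUR term numbers of the
# form YYYYT (e.g. "20118" is year 2011, term digit 8):
#   term_name("20111")  # -> "Spring 2011"
#   term_name("20115")  # -> "Summer 2011"
#   term_name("20118")  # -> "Fall 2011"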
def make_term_dictionary(debug=False):
"""
Make a term dictionary for academic terms. Key is term name such as
"Spring 2011". Value is URI.
"""
from vivofoundation import vivo_sparql_query
query = """
SELECT ?x ?label
WHERE {
?x a vivo:AcademicTerm .
?x rdfs:label ?label .
}"""
result = vivo_sparql_query(query)
try:
count = len(result["results"]["bindings"])
except KeyError:
count = 0
if debug:
print query, count, result["results"]["bindings"][0],\
result["results"]["bindings"][1]
term_dictionary = {}
i = 0
while i < count:
b = result["results"]["bindings"][i]
term = b['label']['value']
uri = b['x']['value']
term_dictionary[term] = uri
i += 1
return term_dictionary
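# Sketch of the result shape (URI is illustrative):
#   make_term_dictionary()  # -> {'Spring 2011': 'http://vivo.ufl.edu/individual/n456', ...}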
def make_course_dictionary(debug=False):
"""
Make a course dictionary from VIVO contents. Key is course number
such as ABF2010C. Value is URI.
"""
from vivofoundation import vivo_sparql_query
query = """
    SELECT ?x ?coursenum
WHERE {
?x a ufVivo:Course .
?x ufVivo:courseNum ?coursenum .
}"""
result = vivo_sparql_query(query)
try:
count = len(result["results"]["bindings"])
except KeyError:
count = 0
if debug:
print query, count, result["results"]["bindings"][0],\
result["results"]["bindings"][1]
course_dictionary = {}
i = 0
while i < count:
b = result["results"]["bindings"][i]
coursenum = b['coursenum']['value']
uri = b['x']['value']
course_dictionary[coursenum] = uri
i += 1
return course_dictionary
def make_section_dictionary(debug=False):
"""
Make a section dictionary from VIVO contents. Key is section number.
Value is URI.
"""
from vivofoundation import vivo_sparql_query
query = """
SELECT ?x ?label
WHERE {
?x a ufVivo:CourseSection .
?x rdfs:label ?label .
}"""
result = vivo_sparql_query(query)
try:
count = len(result["results"]["bindings"])
except KeyError:
count = 0
if debug:
print query, count, result["results"]["bindings"][0],\
result["results"]["bindings"][1]
section_dictionary = {}
i = 0
while i < count:
b = result["results"]["bindings"][i]
label = b['label']['value']
uri = b['x']['value']
section_dictionary[label] = uri
i += 1
if debug and i % 1000 == 0:
print i, label, uri
return section_dictionary
| mconlon17/vivo-course-lib | vivocourses.py | Python | bsd-3-clause | 9,737 |
__author__ = 'noe'
import numpy as np
def estimate_P(C, reversible = True, fixed_statdist=None):
    """
    Estimate a transition matrix P from count matrix C. Each connected set
    is estimated separately; states outside any connected set of size > 1
    keep their initial diagonal entry of 1.
    """
# import emma
import pyemma.msm.estimation as msmest
# output matrix. Initially eye
n = np.shape(C)[0]
P = np.eye((n), dtype=np.float64)
# treat each connected set separately
S = msmest.connected_sets(C)
for s in S:
if len(s) > 1: # if there's only one state, there's nothing to estimate and we leave it with diagonal 1
# compute transition sub-matrix on s
Cs = C[s,:][:,s]
Ps = msmest.transition_matrix(Cs, reversible = reversible, mu=fixed_statdist)
            # write back to matrix element-wise (P[s,:][:,s] = Ps would assign
            # into a temporary copy created by fancy indexing and so would not
            # modify P)
            for i,I in enumerate(s):
                for j,J in enumerate(s):
                    P[I,J] = Ps[i,j]
# done
return P
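# Minimal sketch of estimate_P on a hypothetical count matrix. States 0 and 1
# form one connected set; state 2 is disconnected from them and keeps its
# initial diagonal entry:
#   C = np.array([[10, 2, 0],
#                 [3, 8, 0],
#                 [0, 0, 5]])
#   P = estimate_P(C, reversible=True)   # P[2] == [0., 0., 1.]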
def sample_P(C, nsteps, reversible = True):
    """
    Sample a transition matrix from the posterior defined by count matrix C,
    running nsteps sampling steps per connected set. Only reversible
    sampling is implemented.
    """
if not reversible:
raise Exception('Non-reversible transition matrix sampling not yet implemented.')
# import emma
import pyemma.msm.estimation as msmest
from bhmm.msm.transition_matrix_sampling_rev import TransitionMatrixSamplerRev
# output matrix. Initially eye
n = np.shape(C)[0]
P = np.eye((n), dtype=np.float64)
# treat each connected set separately
S = msmest.connected_sets(C)
for s in S:
if len(s) > 1: # if there's only one state, there's nothing to sample and we leave it with diagonal 1
# compute transition sub-matrix on s
Cs = C[s,:][:,s]
sampler = TransitionMatrixSamplerRev(Cs)
Ps = sampler.sample(nsteps)
# write back to matrix
for i,I in enumerate(s):
for j,J in enumerate(s):
P[I,J] = Ps[i,j]
# done
return P
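# Usage sketch (hypothetical): draw a reversible transition matrix from the
# posterior implied by count matrix C, using 1000 sampling steps per
# connected set:
#   P = sample_P(C, nsteps=1000, reversible=True)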
def stationary_distribution(C, P):
    """
    Compute a stationary distribution of P, weighting each connected set by
    its share of the total counts in C.
    """
# import emma
import pyemma.msm.estimation as msmest
import pyemma.msm.analysis as msmana
# disconnected sets
n = np.shape(C)[0]
ctot = np.sum(C)
pi = np.zeros((n))
# treat each connected set separately
S = msmest.connected_sets(C)
for s in S:
# compute weight
w = np.sum(C[s,:]) / ctot
pi[s] = w * msmana.statdist(P[s,:][:,s])
# reinforce normalization
pi /= np.sum(pi)
return pi
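# Sketch combining the helpers above (hypothetical symmetric counts, so the
# stationary distribution is approximately uniform):
#   C = np.array([[90, 10], [10, 90]])
#   P = estimate_P(C)
#   pi = stationary_distribution(C, P)   # ~ [0.5, 0.5]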
| bhmm/legacy-bhmm-force-spectroscopy-manuscript | bhmm/msm/tmatrix_disconnected.py | Python | lgpl-3.0 | 2,267 |
from tyr.servers.server import Server
import zuun
import json
class MongoNode(Server):
SERVER_TYPE = 'mongo'
CHEF_RUNLIST = ['role[rolemongo]']
CHEF_MONGODB_TYPE = 'generic'
IAM_ROLE_POLICIES = ['allow-volume-control']
IAM_MANAGED_POLICIES = ['zuun-managed']
def __init__(self, group=None, server_type=None, instance_type=None,
environment=None, ami=None, region=None, role=None,
keypair=None, availability_zone=None,
security_groups=None, block_devices=None,
chef_path=None, subnet_id=None,
platform=None, use_latest_ami=False,
ingress_groups_to_add=None, ports_to_authorize=None,
classic_link=False, chef_server_url=None,
mongodb_version=None):
if server_type is None:
server_type = self.SERVER_TYPE
super(MongoNode, self).__init__(group, server_type, instance_type,
environment, ami, region, role,
keypair, availability_zone,
security_groups, block_devices,
chef_path, subnet_id,
platform, use_latest_ami,
ingress_groups_to_add, ports_to_authorize,
classic_link, chef_server_url)
self.mongodb_version = mongodb_version or '3.2.9'
self.zuun_deployment = '{env}-{group}'.format(
env=self.environment[0],
group=self.group
)
def set_chef_attributes(self):
super(MongoNode, self).set_chef_attributes()
self.CHEF_ATTRIBUTES['mongodb'] = {}
self.CHEF_ATTRIBUTES['mongodb']['cluster_name'] = self.group
self.log.info('Set the cluster name to "{group}"'.format(
group=self.group)
)
self.CHEF_ATTRIBUTES['mongodb']['node_type'] = self.CHEF_MONGODB_TYPE
self.log.info('Set the MongoDB node type to "{type_}"'.format(
type_=self.CHEF_MONGODB_TYPE)
)
self.CHEF_ATTRIBUTES['zuun'] = {
'deployment': self.zuun_deployment,
'role': self.CHEF_MONGODB_TYPE,
}
try:
if self.expanded_replica_set:
self.CHEF_ATTRIBUTES['zuun']['replica_set'] = self.expanded_replica_set
except AttributeError:
pass
def configure(self):
super(MongoNode, self).configure()
if self.environment == "prod":
self.IAM_ROLE_POLICIES.append('allow-mongo-backup-snapshot')
elif self.environment == "stage":
self.IAM_ROLE_POLICIES.append('allow-mongo-snapshot-cleanup')
self.IAM_MANAGED_POLICIES.append('allow-mongo-backup-restore')
def resolve_dependencies(self):
super(MongoNode, self).resolve_dependencies()
zuun.update_data_bag_item(self)
def run_mongo(self, command):
template = 'mongo --port 27018 --eval "JSON.stringify({command})"'
command = template.format(command=command)
r = self.run(command)
return json.loads(r['out'].split('\n')[2])
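# Usage sketch for run_mongo (hypothetical arguments; assumes a provisioned
# node with mongod listening on port 27018). Wrapping the command in
# JSON.stringify makes the shell output machine-parseable, so the caller gets
# back a decoded dict:
#   node = MongoNode(group='data', environment='stage')
#   node.run_mongo('rs.status()')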
| hudl/Tyr | tyr/servers/mongo/node.py | Python | unlicense | 3,253 |
class AutoParams(object):
"""
This base class is supposed to be used as a base class or mixin.
Is assigns all the arguments passed to the init method as instance
named attributes.
"""
def __init__(self, **kwargs):
self.__dict__.update({k: v for k, v in kwargs.items() if k != 'self'})
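# Minimal usage sketch (hypothetical subclass): constructor arguments
# forwarded through **kwargs become instance attributes:
#   class Point(AutoParams):
#       def __init__(self, x, y):
#           super(Point, self).__init__(x=x, y=y)
#   Point(x=3, y=4).x  # -> 3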
| mathiasbc/Pyswiss | pyswiss/classes.py | Python | gpl-2.0 | 320 |
#-------------------------------------------------------------------------------
# elftools example: dwarf_range_lists.py
#
# Examine DIE entries which have range list values, and decode these range
# lists.
#
# Eli Bendersky ([email protected])
# This code is in the public domain
#-------------------------------------------------------------------------------
from __future__ import print_function
import sys
# If pyelftools is not installed, the example can also run from the root or
# examples/ dir of the source distribution.
sys.path[0:0] = ['.', '..']
from elftools.common.py3compat import itervalues
from elftools.elf.elffile import ELFFile
from elftools.dwarf.descriptions import (
describe_DWARF_expr, set_global_machine_arch)
from elftools.dwarf.ranges import RangeEntry
def process_file(filename):
print('Processing file:', filename)
with open(filename, 'rb') as f:
elffile = ELFFile(f)
if not elffile.has_dwarf_info():
print(' file has no DWARF info')
return
# get_dwarf_info returns a DWARFInfo context object, which is the
# starting point for all DWARF-based processing in pyelftools.
dwarfinfo = elffile.get_dwarf_info()
# The range lists are extracted by DWARFInfo from the .debug_ranges
# section, and returned here as a RangeLists object.
range_lists = dwarfinfo.range_lists()
if range_lists is None:
print(' file has no .debug_ranges section')
return
for CU in dwarfinfo.iter_CUs():
# DWARFInfo allows to iterate over the compile units contained in
# the .debug_info section. CU is a CompileUnit object, with some
# computed attributes (such as its offset in the section) and
# a header which conforms to the DWARF standard. The access to
# header elements is, as usual, via item-lookup.
print(' Found a compile unit at offset %s, length %s' % (
CU.cu_offset, CU['unit_length']))
# A CU provides a simple API to iterate over all the DIEs in it.
for DIE in CU.iter_DIEs():
# Go over all attributes of the DIE. Each attribute is an
# AttributeValue object (from elftools.dwarf.die), which we
# can examine.
for attr in itervalues(DIE.attributes):
if attribute_has_range_list(attr):
# This is a range list. Its value is an offset into
# the .debug_ranges section, so we can use the range
# lists object to decode it.
rangelist = range_lists.get_range_list_at_offset(
attr.value)
print(' DIE %s. attr %s.\n%s' % (
DIE.tag,
attr.name,
rangelist))
def attribute_has_range_list(attr):
""" Only some attributes can have range list values, if they have the
required DW_FORM (rangelistptr "class" in DWARF spec v3)
"""
if attr.name == 'DW_AT_ranges':
if attr.form in ('DW_FORM_data4', 'DW_FORM_data8'):
return True
return False
if __name__ == '__main__':
if sys.argv[1] == '--test':
for filename in sys.argv[2:]:
process_file(filename)
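# Example invocation (path is hypothetical; the '--test' flag mirrors how the
# pyelftools test suite drives the example scripts):
#   python dwarf_range_lists.py --test ./some_binary.elf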
| dhxkgozj/DirEngine | lib/pyelftools/examples/dwarf_range_lists.py | Python | bsd-3-clause | 3,402 |
#!/usr/bin/env python
'''
Copyright (C) 2020, WAFW00F Developers.
See the LICENSE file for copying permission.
'''
NAME = 'SEnginx (Neusoft)'
def is_waf(self):
schemes = [
self.matchContent(r'SENGINX\-ROBOT\-MITIGATION')
]
if any(i for i in schemes):
return True
return False | EnableSecurity/wafw00f | wafw00f/plugins/senginx.py | Python | bsd-3-clause | 310 |
##############################################################################
# Copyright (c) 2013-2017, Lawrence Livermore National Security, LLC.
# Produced at the Lawrence Livermore National Laboratory.
#
# This file is part of Spack.
# Created by Todd Gamblin, [email protected], All rights reserved.
# LLNL-CODE-647188
#
# For details, see https://github.com/spack/spack
# Please also see the NOTICE and LICENSE files for our notice and the LGPL.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU Lesser General Public License (as
# published by the Free Software Foundation) version 2.1, February 1999.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the IMPLIED WARRANTY OF
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the terms and
# conditions of the GNU Lesser General Public License for more details.
#
# You should have received a copy of the GNU Lesser General Public
# License along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
##############################################################################
from spack import *
class PyPathos(PythonPackage):
"""Parallel graph management and execution in heterogeneous computing """
homepage = "https://github.com/uqfoundation/pathos"
url = "https://pypi.io/packages/source/p/pathos/pathos-0.2.0.zip"
version('0.2.0', '7a840ce6c3a67d71e6ad7339034ec53e')
depends_on('[email protected]:2.8,3.1:')
depends_on('[email protected]:', type='build')
depends_on('[email protected]:', type=('build', 'run'))
depends_on('[email protected]:', type=('build', 'run'))
depends_on('[email protected]:', type=('build', 'run'))
depends_on('[email protected]:', type=('build', 'run'))
| skosukhin/spack | var/spack/repos/builtin/packages/py-pathos/package.py | Python | lgpl-2.1 | 1,881 |
# -*- coding: utf-8 -*-
# Generated by Django 1.10.5 on 2017-02-27 17:03
from django.db import migrations
from django.db.backends.postgresql_psycopg2.schema import DatabaseSchemaEditor
from django.db.migrations.state import StateApps
from django.core.files.uploadedfile import SimpleUploadedFile
from django.conf import settings
from zerver.lib.avatar_hash import user_avatar_hash, user_avatar_path
from boto.s3.bucket import Bucket
from boto.s3.key import Key
from boto.s3.connection import S3Connection
from typing import Text
import requests
import os
def mkdirs(path: Text) -> None:
dirname = os.path.dirname(path)
if not os.path.isdir(dirname):
os.makedirs(dirname)
class MissingAvatarException(Exception):
pass
def move_local_file(type: Text, path_src: Text, path_dst: Text) -> None:
src_file_path = os.path.join(settings.LOCAL_UPLOADS_DIR, type, path_src)
dst_file_path = os.path.join(settings.LOCAL_UPLOADS_DIR, type, path_dst)
if os.path.exists(dst_file_path):
return
if not os.path.exists(src_file_path):
# This is likely caused by a user having previously changed their email
raise MissingAvatarException()
mkdirs(dst_file_path)
os.rename(src_file_path, dst_file_path)
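# Sketch (names illustrative): move_local_file('avatars', 'ab12.png', '42.png')
# renames <LOCAL_UPLOADS_DIR>/avatars/ab12.png to
# <LOCAL_UPLOADS_DIR>/avatars/42.png, creating parent directories as needed.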
def move_avatars_to_be_uid_based(apps: StateApps, schema_editor: DatabaseSchemaEditor) -> None:
user_profile_model = apps.get_model('zerver', 'UserProfile')
if settings.LOCAL_UPLOADS_DIR is not None:
for user_profile in user_profile_model.objects.filter(avatar_source=u"U"):
src_file_name = user_avatar_hash(user_profile.email)
dst_file_name = user_avatar_path(user_profile)
try:
move_local_file('avatars', src_file_name + '.original', dst_file_name + '.original')
move_local_file('avatars', src_file_name + '-medium.png', dst_file_name + '-medium.png')
move_local_file('avatars', src_file_name + '.png', dst_file_name + '.png')
except MissingAvatarException:
# If the user's avatar is missing, it's probably
# because they previously changed their email address.
# So set them to have a gravatar instead.
user_profile.avatar_source = u"G"
user_profile.save(update_fields=["avatar_source"])
else:
conn = S3Connection(settings.S3_KEY, settings.S3_SECRET_KEY)
bucket_name = settings.S3_AVATAR_BUCKET
bucket = conn.get_bucket(bucket_name, validate=False)
for user_profile in user_profile_model.objects.filter(avatar_source=u"U"):
uid_hash_path = user_avatar_path(user_profile)
email_hash_path = user_avatar_hash(user_profile.email)
if bucket.get_key(uid_hash_path):
continue
if not bucket.get_key(email_hash_path):
                # The source avatar is missing, probably because the user
                # previously changed their email address. So set them to have
                # a gravatar instead.
user_profile.avatar_source = u"G"
user_profile.save(update_fields=["avatar_source"])
continue
bucket.copy_key(uid_hash_path + ".original",
bucket_name,
email_hash_path + ".original")
bucket.copy_key(uid_hash_path + "-medium.png",
bucket_name,
email_hash_path + "-medium.png")
bucket.copy_key(uid_hash_path,
bucket_name,
email_hash_path)
# From an error handling sanity perspective, it's best to
# start deleting after everything is copied, so that recovery
# from failures is easy (just rerun one loop or the other).
for user_profile in user_profile_model.objects.filter(avatar_source=u"U"):
bucket.delete_key(user_avatar_hash(user_profile.email) + ".original")
bucket.delete_key(user_avatar_hash(user_profile.email) + "-medium.png")
bucket.delete_key(user_avatar_hash(user_profile.email))
class Migration(migrations.Migration):
dependencies = [
('zerver', '0059_userprofile_quota'),
]
operations = [
migrations.RunPython(move_avatars_to_be_uid_based)
]
| Galexrt/zulip | zerver/migrations/0060_move_avatars_to_be_uid_based.py | Python | apache-2.0 | 4,466 |
from pandas.compat import range
import re
import operator
import pytest
import warnings
from numpy import nan
import numpy as np
from pandas import _np_version_under1p8
from pandas.core.sparse.api import SparseArray, SparseSeries
from pandas._libs.sparse import IntIndex
from pandas.util.testing import assert_almost_equal
import pandas.util.testing as tm
class TestSparseArray(object):
def setup_method(self, method):
self.arr_data = np.array([nan, nan, 1, 2, 3, nan, 4, 5, nan, 6])
self.arr = SparseArray(self.arr_data)
self.zarr = SparseArray([0, 0, 1, 2, 3, 0, 4, 5, 0, 6], fill_value=0)
def test_constructor_dtype(self):
arr = SparseArray([np.nan, 1, 2, np.nan])
assert arr.dtype == np.float64
assert np.isnan(arr.fill_value)
arr = SparseArray([np.nan, 1, 2, np.nan], fill_value=0)
assert arr.dtype == np.float64
assert arr.fill_value == 0
arr = SparseArray([0, 1, 2, 4], dtype=np.float64)
assert arr.dtype == np.float64
assert np.isnan(arr.fill_value)
arr = SparseArray([0, 1, 2, 4], dtype=np.int64)
assert arr.dtype == np.int64
assert arr.fill_value == 0
arr = SparseArray([0, 1, 2, 4], fill_value=0, dtype=np.int64)
assert arr.dtype == np.int64
assert arr.fill_value == 0
arr = SparseArray([0, 1, 2, 4], dtype=None)
assert arr.dtype == np.int64
assert arr.fill_value == 0
arr = SparseArray([0, 1, 2, 4], fill_value=0, dtype=None)
assert arr.dtype == np.int64
assert arr.fill_value == 0
def test_constructor_object_dtype(self):
# GH 11856
arr = SparseArray(['A', 'A', np.nan, 'B'], dtype=np.object)
assert arr.dtype == np.object
assert np.isnan(arr.fill_value)
arr = SparseArray(['A', 'A', np.nan, 'B'], dtype=np.object,
fill_value='A')
assert arr.dtype == np.object
assert arr.fill_value == 'A'
def test_constructor_spindex_dtype(self):
arr = SparseArray(data=[1, 2], sparse_index=IntIndex(4, [1, 2]))
tm.assert_sp_array_equal(arr, SparseArray([np.nan, 1, 2, np.nan]))
assert arr.dtype == np.float64
assert np.isnan(arr.fill_value)
arr = SparseArray(data=[1, 2, 3],
sparse_index=IntIndex(4, [1, 2, 3]),
dtype=np.int64, fill_value=0)
exp = SparseArray([0, 1, 2, 3], dtype=np.int64, fill_value=0)
tm.assert_sp_array_equal(arr, exp)
assert arr.dtype == np.int64
assert arr.fill_value == 0
arr = SparseArray(data=[1, 2], sparse_index=IntIndex(4, [1, 2]),
fill_value=0, dtype=np.int64)
exp = SparseArray([0, 1, 2, 0], fill_value=0, dtype=np.int64)
tm.assert_sp_array_equal(arr, exp)
assert arr.dtype == np.int64
assert arr.fill_value == 0
arr = SparseArray(data=[1, 2, 3],
sparse_index=IntIndex(4, [1, 2, 3]),
dtype=None, fill_value=0)
exp = SparseArray([0, 1, 2, 3], dtype=None)
tm.assert_sp_array_equal(arr, exp)
assert arr.dtype == np.int64
assert arr.fill_value == 0
# scalar input
arr = SparseArray(data=1, sparse_index=IntIndex(1, [0]), dtype=None)
exp = SparseArray([1], dtype=None)
tm.assert_sp_array_equal(arr, exp)
assert arr.dtype == np.int64
assert arr.fill_value == 0
arr = SparseArray(data=[1, 2], sparse_index=IntIndex(4, [1, 2]),
fill_value=0, dtype=None)
exp = SparseArray([0, 1, 2, 0], fill_value=0, dtype=None)
tm.assert_sp_array_equal(arr, exp)
assert arr.dtype == np.int64
assert arr.fill_value == 0
def test_sparseseries_roundtrip(self):
# GH 13999
for kind in ['integer', 'block']:
for fill in [1, np.nan, 0]:
arr = SparseArray([np.nan, 1, np.nan, 2, 3], kind=kind,
fill_value=fill)
res = SparseArray(SparseSeries(arr))
tm.assert_sp_array_equal(arr, res)
arr = SparseArray([0, 0, 0, 1, 1, 2], dtype=np.int64,
kind=kind, fill_value=fill)
res = SparseArray(SparseSeries(arr), dtype=np.int64)
tm.assert_sp_array_equal(arr, res)
res = SparseArray(SparseSeries(arr))
tm.assert_sp_array_equal(arr, res)
for fill in [True, False, np.nan]:
arr = SparseArray([True, False, True, True], dtype=np.bool,
kind=kind, fill_value=fill)
res = SparseArray(SparseSeries(arr))
tm.assert_sp_array_equal(arr, res)
res = SparseArray(SparseSeries(arr))
tm.assert_sp_array_equal(arr, res)
def test_get_item(self):
assert np.isnan(self.arr[1])
assert self.arr[2] == 1
assert self.arr[7] == 5
assert self.zarr[0] == 0
assert self.zarr[2] == 1
assert self.zarr[7] == 5
errmsg = re.compile("bounds")
tm.assert_raises_regex(IndexError, errmsg, lambda: self.arr[11])
tm.assert_raises_regex(IndexError, errmsg, lambda: self.arr[-11])
assert self.arr[-1] == self.arr[len(self.arr) - 1]
def test_take(self):
assert np.isnan(self.arr.take(0))
assert np.isscalar(self.arr.take(2))
# np.take in < 1.8 doesn't support scalar indexing
if not _np_version_under1p8:
assert self.arr.take(2) == np.take(self.arr_data, 2)
assert self.arr.take(6) == np.take(self.arr_data, 6)
exp = SparseArray(np.take(self.arr_data, [2, 3]))
tm.assert_sp_array_equal(self.arr.take([2, 3]), exp)
exp = SparseArray(np.take(self.arr_data, [0, 1, 2]))
tm.assert_sp_array_equal(self.arr.take([0, 1, 2]), exp)
def test_take_fill_value(self):
data = np.array([1, np.nan, 0, 3, 0])
sparse = SparseArray(data, fill_value=0)
exp = SparseArray(np.take(data, [0]), fill_value=0)
tm.assert_sp_array_equal(sparse.take([0]), exp)
exp = SparseArray(np.take(data, [1, 3, 4]), fill_value=0)
tm.assert_sp_array_equal(sparse.take([1, 3, 4]), exp)
def test_take_negative(self):
exp = SparseArray(np.take(self.arr_data, [-1]))
tm.assert_sp_array_equal(self.arr.take([-1]), exp)
exp = SparseArray(np.take(self.arr_data, [-4, -3, -2]))
tm.assert_sp_array_equal(self.arr.take([-4, -3, -2]), exp)
def test_bad_take(self):
tm.assert_raises_regex(
IndexError, "bounds", lambda: self.arr.take(11))
pytest.raises(IndexError, lambda: self.arr.take(-11))
def test_take_invalid_kwargs(self):
msg = r"take\(\) got an unexpected keyword argument 'foo'"
tm.assert_raises_regex(TypeError, msg, self.arr.take,
[2, 3], foo=2)
msg = "the 'out' parameter is not supported"
tm.assert_raises_regex(ValueError, msg, self.arr.take,
[2, 3], out=self.arr)
msg = "the 'mode' parameter is not supported"
tm.assert_raises_regex(ValueError, msg, self.arr.take,
[2, 3], mode='clip')
def test_take_filling(self):
# similar tests as GH 12631
sparse = SparseArray([np.nan, np.nan, 1, np.nan, 4])
result = sparse.take(np.array([1, 0, -1]))
expected = SparseArray([np.nan, np.nan, 4])
tm.assert_sp_array_equal(result, expected)
# fill_value
result = sparse.take(np.array([1, 0, -1]), fill_value=True)
expected = SparseArray([np.nan, np.nan, np.nan])
tm.assert_sp_array_equal(result, expected)
# allow_fill=False
result = sparse.take(np.array([1, 0, -1]),
allow_fill=False, fill_value=True)
expected = SparseArray([np.nan, np.nan, 4])
tm.assert_sp_array_equal(result, expected)
msg = ('When allow_fill=True and fill_value is not None, '
'all indices must be >= -1')
with tm.assert_raises_regex(ValueError, msg):
sparse.take(np.array([1, 0, -2]), fill_value=True)
with tm.assert_raises_regex(ValueError, msg):
sparse.take(np.array([1, 0, -5]), fill_value=True)
with pytest.raises(IndexError):
sparse.take(np.array([1, -6]))
with pytest.raises(IndexError):
sparse.take(np.array([1, 5]))
with pytest.raises(IndexError):
sparse.take(np.array([1, 5]), fill_value=True)
def test_take_filling_fill_value(self):
# same tests as GH 12631
sparse = SparseArray([np.nan, 0, 1, 0, 4], fill_value=0)
result = sparse.take(np.array([1, 0, -1]))
expected = SparseArray([0, np.nan, 4], fill_value=0)
tm.assert_sp_array_equal(result, expected)
# fill_value
result = sparse.take(np.array([1, 0, -1]), fill_value=True)
expected = SparseArray([0, np.nan, 0], fill_value=0)
tm.assert_sp_array_equal(result, expected)
# allow_fill=False
result = sparse.take(np.array([1, 0, -1]),
allow_fill=False, fill_value=True)
expected = SparseArray([0, np.nan, 4], fill_value=0)
tm.assert_sp_array_equal(result, expected)
msg = ('When allow_fill=True and fill_value is not None, '
'all indices must be >= -1')
with tm.assert_raises_regex(ValueError, msg):
sparse.take(np.array([1, 0, -2]), fill_value=True)
with tm.assert_raises_regex(ValueError, msg):
sparse.take(np.array([1, 0, -5]), fill_value=True)
with pytest.raises(IndexError):
sparse.take(np.array([1, -6]))
with pytest.raises(IndexError):
sparse.take(np.array([1, 5]))
with pytest.raises(IndexError):
sparse.take(np.array([1, 5]), fill_value=True)
def test_take_filling_all_nan(self):
sparse = SparseArray([np.nan, np.nan, np.nan, np.nan, np.nan])
result = sparse.take(np.array([1, 0, -1]))
expected = SparseArray([np.nan, np.nan, np.nan])
tm.assert_sp_array_equal(result, expected)
result = sparse.take(np.array([1, 0, -1]), fill_value=True)
expected = SparseArray([np.nan, np.nan, np.nan])
tm.assert_sp_array_equal(result, expected)
with pytest.raises(IndexError):
sparse.take(np.array([1, -6]))
with pytest.raises(IndexError):
sparse.take(np.array([1, 5]))
with pytest.raises(IndexError):
sparse.take(np.array([1, 5]), fill_value=True)
def test_set_item(self):
def setitem():
self.arr[5] = 3
def setslice():
self.arr[1:5] = 2
tm.assert_raises_regex(TypeError, "item assignment", setitem)
tm.assert_raises_regex(TypeError, "item assignment", setslice)
def test_constructor_from_too_large_array(self):
tm.assert_raises_regex(TypeError, "expected dimension <= 1 data",
SparseArray, np.arange(10).reshape((2, 5)))
def test_constructor_from_sparse(self):
res = SparseArray(self.zarr)
assert res.fill_value == 0
assert_almost_equal(res.sp_values, self.zarr.sp_values)
def test_constructor_copy(self):
cp = SparseArray(self.arr, copy=True)
cp.sp_values[:3] = 0
assert not (self.arr.sp_values[:3] == 0).any()
not_copy = SparseArray(self.arr)
not_copy.sp_values[:3] = 0
assert (self.arr.sp_values[:3] == 0).all()
def test_constructor_bool(self):
# GH 10648
data = np.array([False, False, True, True, False, False])
arr = SparseArray(data, fill_value=False, dtype=bool)
assert arr.dtype == bool
tm.assert_numpy_array_equal(arr.sp_values, np.array([True, True]))
tm.assert_numpy_array_equal(arr.sp_values, np.asarray(arr))
tm.assert_numpy_array_equal(arr.sp_index.indices,
np.array([2, 3], np.int32))
for dense in [arr.to_dense(), arr.values]:
assert dense.dtype == bool
tm.assert_numpy_array_equal(dense, data)
def test_constructor_bool_fill_value(self):
arr = SparseArray([True, False, True], dtype=None)
assert arr.dtype == np.bool
assert not arr.fill_value
arr = SparseArray([True, False, True], dtype=np.bool)
assert arr.dtype == np.bool
assert not arr.fill_value
arr = SparseArray([True, False, True], dtype=np.bool, fill_value=True)
assert arr.dtype == np.bool
assert arr.fill_value
def test_constructor_float32(self):
# GH 10648
data = np.array([1., np.nan, 3], dtype=np.float32)
arr = SparseArray(data, dtype=np.float32)
assert arr.dtype == np.float32
tm.assert_numpy_array_equal(arr.sp_values,
np.array([1, 3], dtype=np.float32))
tm.assert_numpy_array_equal(arr.sp_values, np.asarray(arr))
tm.assert_numpy_array_equal(arr.sp_index.indices,
np.array([0, 2], dtype=np.int32))
for dense in [arr.to_dense(), arr.values]:
assert dense.dtype == np.float32
tm.assert_numpy_array_equal(dense, data)
def test_astype(self):
res = self.arr.astype('f8')
res.sp_values[:3] = 27
assert not (self.arr.sp_values[:3] == 27).any()
msg = "unable to coerce current fill_value nan to int64 dtype"
with tm.assert_raises_regex(ValueError, msg):
self.arr.astype('i8')
arr = SparseArray([0, np.nan, 0, 1])
with tm.assert_raises_regex(ValueError, msg):
arr.astype('i8')
arr = SparseArray([0, np.nan, 0, 1], fill_value=0)
msg = 'Cannot convert non-finite values \\(NA or inf\\) to integer'
with tm.assert_raises_regex(ValueError, msg):
arr.astype('i8')
def test_astype_all(self):
vals = np.array([1, 2, 3])
arr = SparseArray(vals, fill_value=1)
types = [np.float64, np.float32, np.int64,
np.int32, np.int16, np.int8]
for typ in types:
res = arr.astype(typ)
assert res.dtype == typ
assert res.sp_values.dtype == typ
tm.assert_numpy_array_equal(res.values, vals.astype(typ))
def test_set_fill_value(self):
arr = SparseArray([1., np.nan, 2.], fill_value=np.nan)
arr.fill_value = 2
assert arr.fill_value == 2
arr = SparseArray([1, 0, 2], fill_value=0, dtype=np.int64)
arr.fill_value = 2
assert arr.fill_value == 2
# coerces to int
msg = "unable to set fill_value 3\\.1 to int64 dtype"
with tm.assert_raises_regex(ValueError, msg):
arr.fill_value = 3.1
msg = "unable to set fill_value nan to int64 dtype"
with tm.assert_raises_regex(ValueError, msg):
arr.fill_value = np.nan
arr = SparseArray([True, False, True], fill_value=False, dtype=np.bool)
arr.fill_value = True
assert arr.fill_value
# coerces to bool
msg = "unable to set fill_value 0 to bool dtype"
with tm.assert_raises_regex(ValueError, msg):
arr.fill_value = 0
msg = "unable to set fill_value nan to bool dtype"
with tm.assert_raises_regex(ValueError, msg):
arr.fill_value = np.nan
# invalid
msg = "fill_value must be a scalar"
for val in [[1, 2, 3], np.array([1, 2]), (1, 2, 3)]:
with tm.assert_raises_regex(ValueError, msg):
arr.fill_value = val
def test_copy_shallow(self):
arr2 = self.arr.copy(deep=False)
def _get_base(values):
base = values.base
while base.base is not None:
base = base.base
return base
assert (_get_base(arr2) is _get_base(self.arr))
def test_values_asarray(self):
assert_almost_equal(self.arr.values, self.arr_data)
assert_almost_equal(self.arr.to_dense(), self.arr_data)
assert_almost_equal(self.arr.sp_values, np.asarray(self.arr))
def test_to_dense(self):
vals = np.array([1, np.nan, np.nan, 3, np.nan])
res = SparseArray(vals).to_dense()
tm.assert_numpy_array_equal(res, vals)
res = SparseArray(vals, fill_value=0).to_dense()
tm.assert_numpy_array_equal(res, vals)
vals = np.array([1, np.nan, 0, 3, 0])
res = SparseArray(vals).to_dense()
tm.assert_numpy_array_equal(res, vals)
res = SparseArray(vals, fill_value=0).to_dense()
tm.assert_numpy_array_equal(res, vals)
vals = np.array([np.nan, np.nan, np.nan, np.nan, np.nan])
res = SparseArray(vals).to_dense()
tm.assert_numpy_array_equal(res, vals)
res = SparseArray(vals, fill_value=0).to_dense()
tm.assert_numpy_array_equal(res, vals)
# see gh-14647
with tm.assert_produces_warning(FutureWarning,
check_stacklevel=False):
SparseArray(vals).to_dense(fill=2)
def test_getitem(self):
def _checkit(i):
assert_almost_equal(self.arr[i], self.arr.values[i])
for i in range(len(self.arr)):
_checkit(i)
_checkit(-i)
def test_getslice(self):
result = self.arr[:-3]
exp = SparseArray(self.arr.values[:-3])
tm.assert_sp_array_equal(result, exp)
result = self.arr[-4:]
exp = SparseArray(self.arr.values[-4:])
tm.assert_sp_array_equal(result, exp)
# two corner cases from Series
result = self.arr[-12:]
exp = SparseArray(self.arr)
tm.assert_sp_array_equal(result, exp)
result = self.arr[:-12]
exp = SparseArray(self.arr.values[:0])
tm.assert_sp_array_equal(result, exp)
def test_getslice_tuple(self):
dense = np.array([np.nan, 0, 3, 4, 0, 5, np.nan, np.nan, 0])
sparse = SparseArray(dense)
res = sparse[4:, ]
exp = SparseArray(dense[4:, ])
tm.assert_sp_array_equal(res, exp)
sparse = SparseArray(dense, fill_value=0)
res = sparse[4:, ]
exp = SparseArray(dense[4:, ], fill_value=0)
tm.assert_sp_array_equal(res, exp)
with pytest.raises(IndexError):
sparse[4:, :]
with pytest.raises(IndexError):
# check numpy compat
dense[4:, :]
def test_binary_operators(self):
data1 = np.random.randn(20)
data2 = np.random.randn(20)
data1[::2] = np.nan
data2[::3] = np.nan
arr1 = SparseArray(data1)
arr2 = SparseArray(data2)
data1[::2] = 3
data2[::3] = 3
farr1 = SparseArray(data1, fill_value=3)
farr2 = SparseArray(data2, fill_value=3)
def _check_op(op, first, second):
res = op(first, second)
exp = SparseArray(op(first.values, second.values),
fill_value=first.fill_value)
assert isinstance(res, SparseArray)
assert_almost_equal(res.values, exp.values)
res2 = op(first, second.values)
assert isinstance(res2, SparseArray)
tm.assert_sp_array_equal(res, res2)
res3 = op(first.values, second)
assert isinstance(res3, SparseArray)
tm.assert_sp_array_equal(res, res3)
res4 = op(first, 4)
assert isinstance(res4, SparseArray)
# ignore this if the actual op raises (e.g. pow)
try:
exp = op(first.values, 4)
exp_fv = op(first.fill_value, 4)
assert_almost_equal(res4.fill_value, exp_fv)
assert_almost_equal(res4.values, exp)
except ValueError:
pass
def _check_inplace_op(op):
tmp = arr1.copy()
pytest.raises(NotImplementedError, op, tmp, arr2)
with np.errstate(all='ignore'):
bin_ops = [operator.add, operator.sub, operator.mul,
operator.truediv, operator.floordiv, operator.pow]
for op in bin_ops:
_check_op(op, arr1, arr2)
_check_op(op, farr1, farr2)
inplace_ops = ['iadd', 'isub', 'imul', 'itruediv', 'ifloordiv',
'ipow']
for op in inplace_ops:
_check_inplace_op(getattr(operator, op))
def test_pickle(self):
def _check_roundtrip(obj):
unpickled = tm.round_trip_pickle(obj)
tm.assert_sp_array_equal(unpickled, obj)
_check_roundtrip(self.arr)
_check_roundtrip(self.zarr)
def test_generator_warnings(self):
sp_arr = SparseArray([1, 2, 3])
with warnings.catch_warnings(record=True) as w:
warnings.filterwarnings(action='always',
category=DeprecationWarning)
warnings.filterwarnings(action='always',
category=PendingDeprecationWarning)
for _ in sp_arr:
pass
assert len(w) == 0
def test_fillna(self):
s = SparseArray([1, np.nan, np.nan, 3, np.nan])
res = s.fillna(-1)
exp = SparseArray([1, -1, -1, 3, -1], fill_value=-1, dtype=np.float64)
tm.assert_sp_array_equal(res, exp)
s = SparseArray([1, np.nan, np.nan, 3, np.nan], fill_value=0)
res = s.fillna(-1)
exp = SparseArray([1, -1, -1, 3, -1], fill_value=0, dtype=np.float64)
tm.assert_sp_array_equal(res, exp)
s = SparseArray([1, np.nan, 0, 3, 0])
res = s.fillna(-1)
exp = SparseArray([1, -1, 0, 3, 0], fill_value=-1, dtype=np.float64)
tm.assert_sp_array_equal(res, exp)
s = SparseArray([1, np.nan, 0, 3, 0], fill_value=0)
res = s.fillna(-1)
exp = SparseArray([1, -1, 0, 3, 0], fill_value=0, dtype=np.float64)
tm.assert_sp_array_equal(res, exp)
s = SparseArray([np.nan, np.nan, np.nan, np.nan])
res = s.fillna(-1)
exp = SparseArray([-1, -1, -1, -1], fill_value=-1, dtype=np.float64)
tm.assert_sp_array_equal(res, exp)
s = SparseArray([np.nan, np.nan, np.nan, np.nan], fill_value=0)
res = s.fillna(-1)
exp = SparseArray([-1, -1, -1, -1], fill_value=0, dtype=np.float64)
tm.assert_sp_array_equal(res, exp)
# float dtype's fill_value is np.nan, replaced by -1
s = SparseArray([0., 0., 0., 0.])
res = s.fillna(-1)
exp = SparseArray([0., 0., 0., 0.], fill_value=-1)
tm.assert_sp_array_equal(res, exp)
# int dtype shouldn't have missing. No changes.
s = SparseArray([0, 0, 0, 0])
assert s.dtype == np.int64
assert s.fill_value == 0
res = s.fillna(-1)
tm.assert_sp_array_equal(res, s)
s = SparseArray([0, 0, 0, 0], fill_value=0)
assert s.dtype == np.int64
assert s.fill_value == 0
res = s.fillna(-1)
exp = SparseArray([0, 0, 0, 0], fill_value=0)
tm.assert_sp_array_equal(res, exp)
# fill_value can be nan if there is no missing hole.
# only fill_value will be changed
s = SparseArray([0, 0, 0, 0], fill_value=np.nan)
assert s.dtype == np.int64
assert np.isnan(s.fill_value)
res = s.fillna(-1)
exp = SparseArray([0, 0, 0, 0], fill_value=-1)
tm.assert_sp_array_equal(res, exp)
def test_fillna_overlap(self):
s = SparseArray([1, np.nan, np.nan, 3, np.nan])
# filling with existing value doesn't replace existing value with
# fill_value, i.e. existing 3 remains in sp_values
res = s.fillna(3)
exp = np.array([1, 3, 3, 3, 3], dtype=np.float64)
tm.assert_numpy_array_equal(res.to_dense(), exp)
s = SparseArray([1, np.nan, np.nan, 3, np.nan], fill_value=0)
res = s.fillna(3)
exp = SparseArray([1, 3, 3, 3, 3], fill_value=0, dtype=np.float64)
tm.assert_sp_array_equal(res, exp)
class TestSparseArrayAnalytics(object):
def test_sum(self):
data = np.arange(10).astype(float)
out = SparseArray(data).sum()
assert out == 45.0
data[5] = np.nan
out = SparseArray(data, fill_value=2).sum()
assert out == 40.0
out = SparseArray(data, fill_value=np.nan).sum()
assert out == 40.0
def test_numpy_sum(self):
data = np.arange(10).astype(float)
out = np.sum(SparseArray(data))
assert out == 45.0
data[5] = np.nan
out = np.sum(SparseArray(data, fill_value=2))
assert out == 40.0
out = np.sum(SparseArray(data, fill_value=np.nan))
assert out == 40.0
msg = "the 'dtype' parameter is not supported"
tm.assert_raises_regex(ValueError, msg, np.sum,
SparseArray(data), dtype=np.int64)
msg = "the 'out' parameter is not supported"
tm.assert_raises_regex(ValueError, msg, np.sum,
SparseArray(data), out=out)
def test_cumsum(self):
non_null_data = np.array([1, 2, 3, 4, 5], dtype=float)
non_null_expected = SparseArray(non_null_data.cumsum())
null_data = np.array([1, 2, np.nan, 4, 5], dtype=float)
null_expected = SparseArray(np.array([1.0, 3.0, np.nan, 7.0, 12.0]))
for data, expected in [
(null_data, null_expected),
(non_null_data, non_null_expected)
]:
out = SparseArray(data).cumsum()
tm.assert_sp_array_equal(out, expected)
out = SparseArray(data, fill_value=np.nan).cumsum()
tm.assert_sp_array_equal(out, expected)
out = SparseArray(data, fill_value=2).cumsum()
tm.assert_sp_array_equal(out, expected)
axis = 1 # SparseArray currently 1-D, so only axis = 0 is valid.
msg = "axis\\(={axis}\\) out of bounds".format(axis=axis)
with tm.assert_raises_regex(ValueError, msg):
SparseArray(data).cumsum(axis=axis)
def test_numpy_cumsum(self):
non_null_data = np.array([1, 2, 3, 4, 5], dtype=float)
non_null_expected = SparseArray(non_null_data.cumsum())
null_data = np.array([1, 2, np.nan, 4, 5], dtype=float)
null_expected = SparseArray(np.array([1.0, 3.0, np.nan, 7.0, 12.0]))
for data, expected in [
(null_data, null_expected),
(non_null_data, non_null_expected)
]:
out = np.cumsum(SparseArray(data))
tm.assert_sp_array_equal(out, expected)
out = np.cumsum(SparseArray(data, fill_value=np.nan))
tm.assert_sp_array_equal(out, expected)
out = np.cumsum(SparseArray(data, fill_value=2))
tm.assert_sp_array_equal(out, expected)
msg = "the 'dtype' parameter is not supported"
tm.assert_raises_regex(ValueError, msg, np.cumsum,
SparseArray(data), dtype=np.int64)
msg = "the 'out' parameter is not supported"
tm.assert_raises_regex(ValueError, msg, np.cumsum,
SparseArray(data), out=out)
def test_mean(self):
data = np.arange(10).astype(float)
out = SparseArray(data).mean()
assert out == 4.5
data[5] = np.nan
out = SparseArray(data).mean()
assert out == 40.0 / 9
def test_numpy_mean(self):
data = np.arange(10).astype(float)
out = np.mean(SparseArray(data))
assert out == 4.5
data[5] = np.nan
out = np.mean(SparseArray(data))
assert out == 40.0 / 9
msg = "the 'dtype' parameter is not supported"
tm.assert_raises_regex(ValueError, msg, np.mean,
SparseArray(data), dtype=np.int64)
msg = "the 'out' parameter is not supported"
tm.assert_raises_regex(ValueError, msg, np.mean,
SparseArray(data), out=out)
def test_ufunc(self):
# GH 13853 make sure ufunc is applied to fill_value
sparse = SparseArray([1, np.nan, 2, np.nan, -2])
result = SparseArray([1, np.nan, 2, np.nan, 2])
tm.assert_sp_array_equal(abs(sparse), result)
tm.assert_sp_array_equal(np.abs(sparse), result)
sparse = SparseArray([1, -1, 2, -2], fill_value=1)
result = SparseArray([1, 2, 2], sparse_index=sparse.sp_index,
fill_value=1)
tm.assert_sp_array_equal(abs(sparse), result)
tm.assert_sp_array_equal(np.abs(sparse), result)
sparse = SparseArray([1, -1, 2, -2], fill_value=-1)
result = SparseArray([1, 2, 2], sparse_index=sparse.sp_index,
fill_value=1)
tm.assert_sp_array_equal(abs(sparse), result)
tm.assert_sp_array_equal(np.abs(sparse), result)
sparse = SparseArray([1, np.nan, 2, np.nan, -2])
result = SparseArray(np.sin([1, np.nan, 2, np.nan, -2]))
tm.assert_sp_array_equal(np.sin(sparse), result)
sparse = SparseArray([1, -1, 2, -2], fill_value=1)
result = SparseArray(np.sin([1, -1, 2, -2]), fill_value=np.sin(1))
tm.assert_sp_array_equal(np.sin(sparse), result)
sparse = SparseArray([1, -1, 0, -2], fill_value=0)
result = SparseArray(np.sin([1, -1, 0, -2]), fill_value=np.sin(0))
tm.assert_sp_array_equal(np.sin(sparse), result)
def test_ufunc_args(self):
# GH 13853 make sure ufunc is applied to fill_value, including its arg
sparse = SparseArray([1, np.nan, 2, np.nan, -2])
result = SparseArray([2, np.nan, 3, np.nan, -1])
tm.assert_sp_array_equal(np.add(sparse, 1), result)
sparse = SparseArray([1, -1, 2, -2], fill_value=1)
result = SparseArray([2, 0, 3, -1], fill_value=2)
tm.assert_sp_array_equal(np.add(sparse, 1), result)
sparse = SparseArray([1, -1, 0, -2], fill_value=0)
result = SparseArray([2, 0, 1, -1], fill_value=1)
tm.assert_sp_array_equal(np.add(sparse, 1), result)
| mbayon/TFG-MachineLearning | venv/lib/python3.6/site-packages/pandas/tests/sparse/test_array.py | Python | mit | 30,556 |
#!/usr/bin/python
import base64

from lib.oath.hotpie import TOTP

# Base32-encoded shared secret; fill in before running.
b32Key = ''
secret = base64.b32decode(b32Key)
# TOTP returns the current time-based one-time password for the secret.
digits = TOTP(secret, digits=6)
print(digits)
| jdhall75/authenticator | authenticator.py | Python | gpl-3.0 | 220 |