| code (string, 3 to 1.05M chars) | repo_name (string, 5 to 104 chars) | path (string, 4 to 251 chars) | language (1 class) | license (15 classes) | size (int64, 3 to 1.05M) |
|---|---|---|---|---|---|
# Copyright 2013-2021 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
from spack import *
class Argobots(AutotoolsPackage):
"""Argobots, which was developed as a part of the Argo project, is
a lightweight runtime system that supports integrated computation
and data movement with massive concurrency. It will directly
leverage the lowest-level constructs in the hardware and OS:
lightweight notification mechanisms, data movement engines, memory
mapping, and data placement strategies. It consists of an
execution model and a memory model."""
homepage = "https://www.argobots.org/"
url = "https://github.com/pmodels/argobots/releases/download/v1.0b1/argobots-1.0b1.tar.gz"
git = "https://github.com/pmodels/argobots.git"
maintainers = ['shintaro-iwasaki']
tags = ['e4s']
version("main", branch="main")
version("1.1", sha256="f0f971196fc8354881681c2282a2f2adb6d48ff5e84cf820ca657daad1549005")
version("1.0.1", sha256="fa05a02d7f8f74d845647636609219ee02f6adf628ebcbf40393f829987d9036")
version("1.0", sha256="36a0815f7bf99900a9c9c1eef61ef9b3b76aa2cfc4594a304f6c8c3296da8def")
variant("perf", default=True, description="Add performance optimization flags")
variant("valgrind", default=False, description="Enable Valgrind")
variant("debug", default=False, description="Compiled with debugging symbols")
variant("stackunwind", default=False, description="Enable function stack unwinding")
variant("stackguard", default="none", description="Enable stack guard",
values=('none', 'canary-32', 'mprotect', 'mprotect-strict'), multi=False)
variant("tool", default=False, description="Enable ABT_tool interface")
variant("affinity", default=False, description="Enable affinity setting")
depends_on("m4", type=("build"), when="@main")
depends_on("autoconf", type=("build"), when="@main")
depends_on("automake", type=("build"), when="@main")
depends_on("libtool", type=("build"), when="@main")
depends_on("valgrind", when="+valgrind")
depends_on("libunwind", when="+stackunwind")
def configure_args(self):
args = []
if '+perf' in self.spec:
args.append('--enable-perf-opt')
if '+valgrind' in self.spec:
args.append('--enable-valgrind')
else:
args.append('--disable-valgrind')
if '+debug' in self.spec:
args.append('--enable-debug=yes')
else:
args.append('--disable-debug')
if '+stackunwind' in self.spec:
args.append('--enable-stack-unwind')
args.append('--with-libunwind={0}'.format(self.spec['libunwind'].prefix))
stackguard = self.spec.variants['stackguard'].value
if stackguard != 'none':
args.append('--enable-stack-overflow-check={0}'.format(stackguard))
if '+tool' in self.spec:
args.append('--enable-tool')
if '+affinity' in self.spec:
args.append('--enable-affinity')
return args
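# Illustrative example (hypothetical spec, not part of the recipe): for a spec such
# as "argobots@1.1 +perf +valgrind stackguard=mprotect", the list built above would
# be roughly ['--enable-perf-opt', '--enable-valgrind', '--disable-debug',
# '--enable-stack-overflow-check=mprotect'].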
| LLNL/spack | var/spack/repos/builtin/packages/argobots/package.py | Python | lgpl-2.1 | 3,177 |
from . import Model, CollectionModel
class Stat(Model):
"""
A Stat object model
"""
class MessagingStat(Stat):
"""
MessagingStat object model
.. attribute:: replyRate
.. attribute:: date
.. attribute:: deliveryRate
.. attribute:: costs
.. attribute:: messagesReceived
.. attribute:: messagesSentDelivered
.. attribute:: messagesSentAccepted
.. attribute:: messagesSentBuffered
.. attribute:: messagesSentFailed
.. attribute:: messagesSentRejected
.. attribute:: messagesSentParts
"""
class SpendingStat(Stat):
"""
SpendingStat object model
.. attribute:: id
.. attribute:: userId
.. attribute:: date
.. attribute:: balance
.. attribute:: delta
.. attribute:: type
.. attribute:: value
.. attribute:: comment
"""
class MessagingStats(CollectionModel):
name = "stats/messaging"
instance = MessagingStat
def list(self, **kwargs):
"""
Returns a list of :class:`MessagingStat` objects (messaging statistics).
:Example:
stats = client.stats_messaging.list()
:param str by: Group results by specified period: `off`, `day`, `month` or `year`. Default is `off`.
:param str start: Start date in unix timestamp format. Default is 7 days ago.
:param str end: End date in unix timestamp format. Default is now.
"""
response, instances = self.request("GET", self.uri, params=kwargs)
return [self.load_instance(r) for r in instances]
class SpendingStats(CollectionModel):
name = "stats/spending"
instance = SpendingStat
def list(self, **kwargs):
"""
Returns a list of :class:`SpendingStat` objects (account spending statistics) and a pager dict.
:Example:
stats, pager = client.stats_spending.list()
:param int page: Fetch specified results page. Default=1
:param int limit: How many results on page. Default=10
:param str start: Start date in unix timestamp format. Default is 7 days ago.
:param str end: End date in unix timestamp format. Default is now.
"""
kwargs["search"] = False
return self.get_instances(kwargs)
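# Usage sketch (assumes a configured TextMagic REST client exposing these
# collections, as in the docstrings above):
#   messaging_stats = client.stats_messaging.list(by="day")
#   spending_stats, pager = client.stats_spending.list(page=1, limit=10)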
| textmagic/textmagic-rest-python | textmagic/rest/models/stats.py | Python | mit | 2,255 |
# -*- coding: utf-8 -*-
"""
Unit tests for landlab.data_record.data_record.DataRecord
Dimension = time
Last updated 10/18/2018
"""
from landlab import RasterModelGrid
grid = RasterModelGrid((3, 3))
def test_dr_nodim_name(dr_nodim):
assert dr_nodim._name == "DataRecord"
| cmshobe/landlab | tests/data_record/test_data_record_nodim.py | Python | mit | 280 |
DEFAULT_DICT = \
[ "A", "ABE", "ACE", "ACT", "AD", "ADA", "ADD",
"AGO", "AID", "AIM", "AIR", "ALL", "ALP", "AM", "AMY",
"AN", "ANA", "AND", "ANN", "ANT", "ANY", "APE", "APS",
"APT", "ARC", "ARE", "ARK", "ARM", "ART", "AS", "ASH",
"ASK", "AT", "ATE", "AUG", "AUK", "AVE", "AWE", "AWK",
"AWL", "AWN", "AX", "AYE", "BAD", "BAG", "BAH", "BAM",
"BAN", "BAR", "BAT", "BAY", "BE", "BED", "BEE", "BEG",
"BEN", "BET", "BEY", "BIB", "BID", "BIG", "BIN", "BIT",
"BOB", "BOG", "BON", "BOO", "BOP", "BOW", "BOY", "BUB",
"BUD", "BUG", "BUM", "BUN", "BUS", "BUT", "BUY", "BY",
"BYE", "CAB", "CAL", "CAM", "CAN", "CAP", "CAR", "CAT",
"CAW", "COD", "COG", "COL", "CON", "COO", "COP", "COT",
"COW", "COY", "CRY", "CUB", "CUE", "CUP", "CUR", "CUT",
"DAB", "DAD", "DAM", "DAN", "DAR", "DAY", "DEE", "DEL",
"DEN", "DES", "DEW", "DID", "DIE", "DIG", "DIN", "DIP",
"DO", "DOE", "DOG", "DON", "DOT", "DOW", "DRY", "DUB",
"DUD", "DUE", "DUG", "DUN", "EAR", "EAT", "ED", "EEL",
"EGG", "EGO", "ELI", "ELK", "ELM", "ELY", "EM", "END",
"EST", "ETC", "EVA", "EVE", "EWE", "EYE", "FAD", "FAN",
"FAR", "FAT", "FAY", "FED", "FEE", "FEW", "FIB", "FIG",
"FIN", "FIR", "FIT", "FLO", "FLY", "FOE", "FOG", "FOR",
"FRY", "FUM", "FUN", "FUR", "GAB", "GAD", "GAG", "GAL",
"GAM", "GAP", "GAS", "GAY", "GEE", "GEL", "GEM", "GET",
"GIG", "GIL", "GIN", "GO", "GOT", "GUM", "GUN", "GUS",
"GUT", "GUY", "GYM", "GYP", "HA", "HAD", "HAL", "HAM",
"HAN", "HAP", "HAS", "HAT", "HAW", "HAY", "HE", "HEM",
"HEN", "HER", "HEW", "HEY", "HI", "HID", "HIM", "HIP",
"HIS", "HIT", "HO", "HOB", "HOC", "HOE", "HOG", "HOP",
"HOT", "HOW", "HUB", "HUE", "HUG", "HUH", "HUM", "HUT",
"I", "ICY", "IDA", "IF", "IKE", "ILL", "INK", "INN",
"IO", "ION", "IQ", "IRA", "IRE", "IRK", "IS", "IT",
"ITS", "IVY", "JAB", "JAG", "JAM", "JAN", "JAR", "JAW",
"JAY", "JET", "JIG", "JIM", "JO", "JOB", "JOE", "JOG",
"JOT", "JOY", "JUG", "JUT", "KAY", "KEG", "KEN", "KEY",
"KID", "KIM", "KIN", "KIT", "LA", "LAB", "LAC", "LAD",
"LAG", "LAM", "LAP", "LAW", "LAY", "LEA", "LED", "LEE",
"LEG", "LEN", "LEO", "LET", "LEW", "LID", "LIE", "LIN",
"LIP", "LIT", "LO", "LOB", "LOG", "LOP", "LOS", "LOT",
"LOU", "LOW", "LOY", "LUG", "LYE", "MA", "MAC", "MAD",
"MAE", "MAN", "MAO", "MAP", "MAT", "MAW", "MAY", "ME",
"MEG", "MEL", "MEN", "MET", "MEW", "MID", "MIN", "MIT",
"MOB", "MOD", "MOE", "MOO", "MOP", "MOS", "MOT", "MOW",
"MUD", "MUG", "MUM", "MY", "NAB", "NAG", "NAN", "NAP",
"NAT", "NAY", "NE", "NED", "NEE", "NET", "NEW", "NIB",
"NIL", "NIP", "NIT", "NO", "NOB", "NOD", "NON", "NOR",
"NOT", "NOV", "NOW", "NU", "NUN", "NUT", "O", "OAF",
"OAK", "OAR", "OAT", "ODD", "ODE", "OF", "OFF", "OFT",
"OH", "OIL", "OK", "OLD", "ON", "ONE", "OR", "ORB",
"ORE", "ORR", "OS", "OTT", "OUR", "OUT", "OVA", "OW",
"OWE", "OWL", "OWN", "OX", "PA", "PAD", "PAL", "PAM",
"PAN", "PAP", "PAR", "PAT", "PAW", "PAY", "PEA", "PEG",
"PEN", "PEP", "PER", "PET", "PEW", "PHI", "PI", "PIE",
"PIN", "PIT", "PLY", "PO", "POD", "POE", "POP", "POT",
"POW", "PRO", "PRY", "PUB", "PUG", "PUN", "PUP", "PUT",
"QUO", "RAG", "RAM", "RAN", "RAP", "RAT", "RAW", "RAY",
"REB", "RED", "REP", "RET", "RIB", "RID", "RIG", "RIM",
"RIO", "RIP", "ROB", "ROD", "ROE", "RON", "ROT", "ROW",
"ROY", "RUB", "RUE", "RUG", "RUM", "RUN", "RYE", "SAC",
"SAD", "SAG", "SAL", "SAM", "SAN", "SAP", "SAT", "SAW",
"SAY", "SEA", "SEC", "SEE", "SEN", "SET", "SEW", "SHE",
"SHY", "SIN", "SIP", "SIR", "SIS", "SIT", "SKI", "SKY",
"SLY", "SO", "SOB", "SOD", "SON", "SOP", "SOW", "SOY",
"SPA", "SPY", "SUB", "SUD", "SUE", "SUM", "SUN", "SUP",
"TAB", "TAD", "TAG", "TAN", "TAP", "TAR", "TEA", "TED",
"TEE", "TEN", "THE", "THY", "TIC", "TIE", "TIM", "TIN",
"TIP", "TO", "TOE", "TOG", "TOM", "TON", "TOO", "TOP",
"TOW", "TOY", "TRY", "TUB", "TUG", "TUM", "TUN", "TWO",
"UN", "UP", "US", "USE", "VAN", "VAT", "VET", "VIE",
"WAD", "WAG", "WAR", "WAS", "WAY", "WE", "WEB", "WED",
"WEE", "WET", "WHO", "WHY", "WIN", "WIT", "WOK", "WON",
"WOO", "WOW", "WRY", "WU", "YAM", "YAP", "YAW", "YE",
"YEA", "YES", "YET", "YOU", "ABED", "ABEL", "ABET", "ABLE",
"ABUT", "ACHE", "ACID", "ACME", "ACRE", "ACTA", "ACTS", "ADAM",
"ADDS", "ADEN", "AFAR", "AFRO", "AGEE", "AHEM", "AHOY", "AIDA",
"AIDE", "AIDS", "AIRY", "AJAR", "AKIN", "ALAN", "ALEC", "ALGA",
"ALIA", "ALLY", "ALMA", "ALOE", "ALSO", "ALTO", "ALUM", "ALVA",
"AMEN", "AMES", "AMID", "AMMO", "AMOK", "AMOS", "AMRA", "ANDY",
"ANEW", "ANNA", "ANNE", "ANTE", "ANTI", "AQUA", "ARAB", "ARCH",
"AREA", "ARGO", "ARID", "ARMY", "ARTS", "ARTY", "ASIA", "ASKS",
"ATOM", "AUNT", "AURA", "AUTO", "AVER", "AVID", "AVIS", "AVON",
"AVOW", "AWAY", "AWRY", "BABE", "BABY", "BACH", "BACK", "BADE",
"BAIL", "BAIT", "BAKE", "BALD", "BALE", "BALI", "BALK", "BALL",
"BALM", "BAND", "BANE", "BANG", "BANK", "BARB", "BARD", "BARE",
"BARK", "BARN", "BARR", "BASE", "BASH", "BASK", "BASS", "BATE",
"BATH", "BAWD", "BAWL", "BEAD", "BEAK", "BEAM", "BEAN", "BEAR",
"BEAT", "BEAU", "BECK", "BEEF", "BEEN", "BEER", "BEET", "BELA",
"BELL", "BELT", "BEND", "BENT", "BERG", "BERN", "BERT", "BESS",
"BEST", "BETA", "BETH", "BHOY", "BIAS", "BIDE", "BIEN", "BILE",
"BILK", "BILL", "BIND", "BING", "BIRD", "BITE", "BITS", "BLAB",
"BLAT", "BLED", "BLEW", "BLOB", "BLOC", "BLOT", "BLOW", "BLUE",
"BLUM", "BLUR", "BOAR", "BOAT", "BOCA", "BOCK", "BODE", "BODY",
"BOGY", "BOHR", "BOIL", "BOLD", "BOLO", "BOLT", "BOMB", "BONA",
"BOND", "BONE", "BONG", "BONN", "BONY", "BOOK", "BOOM", "BOON",
"BOOT", "BORE", "BORG", "BORN", "BOSE", "BOSS", "BOTH", "BOUT",
"BOWL", "BOYD", "BRAD", "BRAE", "BRAG", "BRAN", "BRAY", "BRED",
"BREW", "BRIG", "BRIM", "BROW", "BUCK", "BUDD", "BUFF", "BULB",
"BULK", "BULL", "BUNK", "BUNT", "BUOY", "BURG", "BURL", "BURN",
"BURR", "BURT", "BURY", "BUSH", "BUSS", "BUST", "BUSY", "BYTE",
"CADY", "CAFE", "CAGE", "CAIN", "CAKE", "CALF", "CALL", "CALM",
"CAME", "CANE", "CANT", "CARD", "CARE", "CARL", "CARR", "CART",
"CASE", "CASH", "CASK", "CAST", "CAVE", "CEIL", "CELL", "CENT",
"CERN", "CHAD", "CHAR", "CHAT", "CHAW", "CHEF", "CHEN", "CHEW",
"CHIC", "CHIN", "CHOU", "CHOW", "CHUB", "CHUG", "CHUM", "CITE",
"CITY", "CLAD", "CLAM", "CLAN", "CLAW", "CLAY", "CLOD", "CLOG",
"CLOT", "CLUB", "CLUE", "COAL", "COAT", "COCA", "COCK", "COCO",
"CODA", "CODE", "CODY", "COED", "COIL", "COIN", "COKE", "COLA",
"COLD", "COLT", "COMA", "COMB", "COME", "COOK", "COOL", "COON",
"COOT", "CORD", "CORE", "CORK", "CORN", "COST", "COVE", "COWL",
"CRAB", "CRAG", "CRAM", "CRAY", "CREW", "CRIB", "CROW", "CRUD",
"CUBA", "CUBE", "CUFF", "CULL", "CULT", "CUNY", "CURB", "CURD",
"CURE", "CURL", "CURT", "CUTS", "DADE", "DALE", "DAME", "DANA",
"DANE", "DANG", "DANK", "DARE", "DARK", "DARN", "DART", "DASH",
"DATA", "DATE", "DAVE", "DAVY", "DAWN", "DAYS", "DEAD", "DEAF",
"DEAL", "DEAN", "DEAR", "DEBT", "DECK", "DEED", "DEEM", "DEER",
"DEFT", "DEFY", "DELL", "DENT", "DENY", "DESK", "DIAL", "DICE",
"DIED", "DIET", "DIME", "DINE", "DING", "DINT", "DIRE", "DIRT",
"DISC", "DISH", "DISK", "DIVE", "DOCK", "DOES", "DOLE", "DOLL",
"DOLT", "DOME", "DONE", "DOOM", "DOOR", "DORA", "DOSE", "DOTE",
"DOUG", "DOUR", "DOVE", "DOWN", "DRAB", "DRAG", "DRAM", "DRAW",
"DREW", "DRUB", "DRUG", "DRUM", "DUAL", "DUCK", "DUCT", "DUEL",
"DUET", "DUKE", "DULL", "DUMB", "DUNE", "DUNK", "DUSK", "DUST",
"DUTY", "EACH", "EARL", "EARN", "EASE", "EAST", "EASY", "EBEN",
"ECHO", "EDDY", "EDEN", "EDGE", "EDGY", "EDIT", "EDNA", "EGAN",
"ELAN", "ELBA", "ELLA", "ELSE", "EMIL", "EMIT", "EMMA", "ENDS",
"ERIC", "EROS", "EVEN", "EVER", "EVIL", "EYED", "FACE", "FACT",
"FADE", "FAIL", "FAIN", "FAIR", "FAKE", "FALL", "FAME", "FANG",
"FARM", "FAST", "FATE", "FAWN", "FEAR", "FEAT", "FEED", "FEEL",
"FEET", "FELL", "FELT", "FEND", "FERN", "FEST", "FEUD", "FIEF",
"FIGS", "FILE", "FILL", "FILM", "FIND", "FINE", "FINK", "FIRE",
"FIRM", "FISH", "FISK", "FIST", "FITS", "FIVE", "FLAG", "FLAK",
"FLAM", "FLAT", "FLAW", "FLEA", "FLED", "FLEW", "FLIT", "FLOC",
"FLOG", "FLOW", "FLUB", "FLUE", "FOAL", "FOAM", "FOGY", "FOIL",
"FOLD", "FOLK", "FOND", "FONT", "FOOD", "FOOL", "FOOT", "FORD",
"FORE", "FORK", "FORM", "FORT", "FOSS", "FOUL", "FOUR", "FOWL",
"FRAU", "FRAY", "FRED", "FREE", "FRET", "FREY", "FROG", "FROM",
"FUEL", "FULL", "FUME", "FUND", "FUNK", "FURY", "FUSE", "FUSS",
"GAFF", "GAGE", "GAIL", "GAIN", "GAIT", "GALA", "GALE", "GALL",
"GALT", "GAME", "GANG", "GARB", "GARY", "GASH", "GATE", "GAUL",
"GAUR", "GAVE", "GAWK", "GEAR", "GELD", "GENE", "GENT", "GERM",
"GETS", "GIBE", "GIFT", "GILD", "GILL", "GILT", "GINA", "GIRD",
"GIRL", "GIST", "GIVE", "GLAD", "GLEE", "GLEN", "GLIB", "GLOB",
"GLOM", "GLOW", "GLUE", "GLUM", "GLUT", "GOAD", "GOAL", "GOAT",
"GOER", "GOES", "GOLD", "GOLF", "GONE", "GONG", "GOOD", "GOOF",
"GORE", "GORY", "GOSH", "GOUT", "GOWN", "GRAB", "GRAD", "GRAY",
"GREG", "GREW", "GREY", "GRID", "GRIM", "GRIN", "GRIT", "GROW",
"GRUB", "GULF", "GULL", "GUNK", "GURU", "GUSH", "GUST", "GWEN",
"GWYN", "HAAG", "HAAS", "HACK", "HAIL", "HAIR", "HALE", "HALF",
"HALL", "HALO", "HALT", "HAND", "HANG", "HANK", "HANS", "HARD",
"HARK", "HARM", "HART", "HASH", "HAST", "HATE", "HATH", "HAUL",
"HAVE", "HAWK", "HAYS", "HEAD", "HEAL", "HEAR", "HEAT", "HEBE",
"HECK", "HEED", "HEEL", "HEFT", "HELD", "HELL", "HELM", "HERB",
"HERD", "HERE", "HERO", "HERS", "HESS", "HEWN", "HICK", "HIDE",
"HIGH", "HIKE", "HILL", "HILT", "HIND", "HINT", "HIRE", "HISS",
"HIVE", "HOBO", "HOCK", "HOFF", "HOLD", "HOLE", "HOLM", "HOLT",
"HOME", "HONE", "HONK", "HOOD", "HOOF", "HOOK", "HOOT", "HORN",
"HOSE", "HOST", "HOUR", "HOVE", "HOWE", "HOWL", "HOYT", "HUCK",
"HUED", "HUFF", "HUGE", "HUGH", "HUGO", "HULK", "HULL", "HUNK",
"HUNT", "HURD", "HURL", "HURT", "HUSH", "HYDE", "HYMN", "IBIS",
"ICON", "IDEA", "IDLE", "IFFY", "INCA", "INCH", "INTO", "IONS",
"IOTA", "IOWA", "IRIS", "IRMA", "IRON", "ISLE", "ITCH", "ITEM",
"IVAN", "JACK", "JADE", "JAIL", "JAKE", "JANE", "JAVA", "JEAN",
"JEFF", "JERK", "JESS", "JEST", "JIBE", "JILL", "JILT", "JIVE",
"JOAN", "JOBS", "JOCK", "JOEL", "JOEY", "JOHN", "JOIN", "JOKE",
"JOLT", "JOVE", "JUDD", "JUDE", "JUDO", "JUDY", "JUJU", "JUKE",
"JULY", "JUNE", "JUNK", "JUNO", "JURY", "JUST", "JUTE", "KAHN",
"KALE", "KANE", "KANT", "KARL", "KATE", "KEEL", "KEEN", "KENO",
"KENT", "KERN", "KERR", "KEYS", "KICK", "KILL", "KIND", "KING",
"KIRK", "KISS", "KITE", "KLAN", "KNEE", "KNEW", "KNIT", "KNOB",
"KNOT", "KNOW", "KOCH", "KONG", "KUDO", "KURD", "KURT", "KYLE",
"LACE", "LACK", "LACY", "LADY", "LAID", "LAIN", "LAIR", "LAKE",
"LAMB", "LAME", "LAND", "LANE", "LANG", "LARD", "LARK", "LASS",
"LAST", "LATE", "LAUD", "LAVA", "LAWN", "LAWS", "LAYS", "LEAD",
"LEAF", "LEAK", "LEAN", "LEAR", "LEEK", "LEER", "LEFT", "LEND",
"LENS", "LENT", "LEON", "LESK", "LESS", "LEST", "LETS", "LIAR",
"LICE", "LICK", "LIED", "LIEN", "LIES", "LIEU", "LIFE", "LIFT",
"LIKE", "LILA", "LILT", "LILY", "LIMA", "LIMB", "LIME", "LIND",
"LINE", "LINK", "LINT", "LION", "LISA", "LIST", "LIVE", "LOAD",
"LOAF", "LOAM", "LOAN", "LOCK", "LOFT", "LOGE", "LOIS", "LOLA",
"LONE", "LONG", "LOOK", "LOON", "LOOT", "LORD", "LORE", "LOSE",
"LOSS", "LOST", "LOUD", "LOVE", "LOWE", "LUCK", "LUCY", "LUGE",
"LUKE", "LULU", "LUND", "LUNG", "LURA", "LURE", "LURK", "LUSH",
"LUST", "LYLE", "LYNN", "LYON", "LYRA", "MACE", "MADE", "MAGI",
"MAID", "MAIL", "MAIN", "MAKE", "MALE", "MALI", "MALL", "MALT",
"MANA", "MANN", "MANY", "MARC", "MARE", "MARK", "MARS", "MART",
"MARY", "MASH", "MASK", "MASS", "MAST", "MATE", "MATH", "MAUL",
"MAYO", "MEAD", "MEAL", "MEAN", "MEAT", "MEEK", "MEET", "MELD",
"MELT", "MEMO", "MEND", "MENU", "MERT", "MESH", "MESS", "MICE",
"MIKE", "MILD", "MILE", "MILK", "MILL", "MILT", "MIMI", "MIND",
"MINE", "MINI", "MINK", "MINT", "MIRE", "MISS", "MIST", "MITE",
"MITT", "MOAN", "MOAT", "MOCK", "MODE", "MOLD", "MOLE", "MOLL",
"MOLT", "MONA", "MONK", "MONT", "MOOD", "MOON", "MOOR", "MOOT",
"MORE", "MORN", "MORT", "MOSS", "MOST", "MOTH", "MOVE", "MUCH",
"MUCK", "MUDD", "MUFF", "MULE", "MULL", "MURK", "MUSH", "MUST",
"MUTE", "MUTT", "MYRA", "MYTH", "NAGY", "NAIL", "NAIR", "NAME",
"NARY", "NASH", "NAVE", "NAVY", "NEAL", "NEAR", "NEAT", "NECK",
"NEED", "NEIL", "NELL", "NEON", "NERO", "NESS", "NEST", "NEWS",
"NEWT", "NIBS", "NICE", "NICK", "NILE", "NINA", "NINE", "NOAH",
"NODE", "NOEL", "NOLL", "NONE", "NOOK", "NOON", "NORM", "NOSE",
"NOTE", "NOUN", "NOVA", "NUDE", "NULL", "NUMB", "OATH", "OBEY",
"OBOE", "ODIN", "OHIO", "OILY", "OINT", "OKAY", "OLAF", "OLDY",
"OLGA", "OLIN", "OMAN", "OMEN", "OMIT", "ONCE", "ONES", "ONLY",
"ONTO", "ONUS", "ORAL", "ORGY", "OSLO", "OTIS", "OTTO", "OUCH",
"OUST", "OUTS", "OVAL", "OVEN", "OVER", "OWLY", "OWNS", "QUAD",
"QUIT", "QUOD", "RACE", "RACK", "RACY", "RAFT", "RAGE", "RAID",
"RAIL", "RAIN", "RAKE", "RANK", "RANT", "RARE", "RASH", "RATE",
"RAVE", "RAYS", "READ", "REAL", "REAM", "REAR", "RECK", "REED",
"REEF", "REEK", "REEL", "REID", "REIN", "RENA", "REND", "RENT",
"REST", "RICE", "RICH", "RICK", "RIDE", "RIFT", "RILL", "RIME",
"RING", "RINK", "RISE", "RISK", "RITE", "ROAD", "ROAM", "ROAR",
"ROBE", "ROCK", "RODE", "ROIL", "ROLL", "ROME", "ROOD", "ROOF",
"ROOK", "ROOM", "ROOT", "ROSA", "ROSE", "ROSS", "ROSY", "ROTH",
"ROUT", "ROVE", "ROWE", "ROWS", "RUBE", "RUBY", "RUDE", "RUDY",
"RUIN", "RULE", "RUNG", "RUNS", "RUNT", "RUSE", "RUSH", "RUSK",
"RUSS", "RUST", "RUTH", "SACK", "SAFE", "SAGE", "SAID", "SAIL",
"SALE", "SALK", "SALT", "SAME", "SAND", "SANE", "SANG", "SANK",
"SARA", "SAUL", "SAVE", "SAYS", "SCAN", "SCAR", "SCAT", "SCOT",
"SEAL", "SEAM", "SEAR", "SEAT", "SEED", "SEEK", "SEEM", "SEEN",
"SEES", "SELF", "SELL", "SEND", "SENT", "SETS", "SEWN", "SHAG",
"SHAM", "SHAW", "SHAY", "SHED", "SHIM", "SHIN", "SHOD", "SHOE",
"SHOT", "SHOW", "SHUN", "SHUT", "SICK", "SIDE", "SIFT", "SIGH",
"SIGN", "SILK", "SILL", "SILO", "SILT", "SINE", "SING", "SINK",
"SIRE", "SITE", "SITS", "SITU", "SKAT", "SKEW", "SKID", "SKIM",
"SKIN", "SKIT", "SLAB", "SLAM", "SLAT", "SLAY", "SLED", "SLEW",
"SLID", "SLIM", "SLIT", "SLOB", "SLOG", "SLOT", "SLOW", "SLUG",
"SLUM", "SLUR", "SMOG", "SMUG", "SNAG", "SNOB", "SNOW", "SNUB",
"SNUG", "SOAK", "SOAR", "SOCK", "SODA", "SOFA", "SOFT", "SOIL",
"SOLD", "SOME", "SONG", "SOON", "SOOT", "SORE", "SORT", "SOUL",
"SOUR", "SOWN", "STAB", "STAG", "STAN", "STAR", "STAY", "STEM",
"STEW", "STIR", "STOW", "STUB", "STUN", "SUCH", "SUDS", "SUIT",
"SULK", "SUMS", "SUNG", "SUNK", "SURE", "SURF", "SWAB", "SWAG",
"SWAM", "SWAN", "SWAT", "SWAY", "SWIM", "SWUM", "TACK", "TACT",
"TAIL", "TAKE", "TALE", "TALK", "TALL", "TANK", "TASK", "TATE",
"TAUT", "TEAL", "TEAM", "TEAR", "TECH", "TEEM", "TEEN", "TEET",
"TELL", "TEND", "TENT", "TERM", "TERN", "TESS", "TEST", "THAN",
"THAT", "THEE", "THEM", "THEN", "THEY", "THIN", "THIS", "THUD",
"THUG", "TICK", "TIDE", "TIDY", "TIED", "TIER", "TILE", "TILL",
"TILT", "TIME", "TINA", "TINE", "TINT", "TINY", "TIRE", "TOAD",
"TOGO", "TOIL", "TOLD", "TOLL", "TONE", "TONG", "TONY", "TOOK",
"TOOL", "TOOT", "TORE", "TORN", "TOTE", "TOUR", "TOUT", "TOWN",
"TRAG", "TRAM", "TRAY", "TREE", "TREK", "TRIG", "TRIM", "TRIO",
"TROD", "TROT", "TROY", "TRUE", "TUBA", "TUBE", "TUCK", "TUFT",
"TUNA", "TUNE", "TUNG", "TURF", "TURN", "TUSK", "TWIG", "TWIN",
"TWIT", "ULAN", "UNIT", "URGE", "USED", "USER", "USES", "UTAH",
"VAIL", "VAIN", "VALE", "VARY", "VASE", "VAST", "VEAL", "VEDA",
"VEIL", "VEIN", "VEND", "VENT", "VERB", "VERY", "VETO", "VICE",
"VIEW", "VINE", "VISE", "VOID", "VOLT", "VOTE", "WACK", "WADE",
"WAGE", "WAIL", "WAIT", "WAKE", "WALE", "WALK", "WALL", "WALT",
"WAND", "WANE", "WANG", "WANT", "WARD", "WARM", "WARN", "WART",
"WASH", "WAST", "WATS", "WATT", "WAVE", "WAVY", "WAYS", "WEAK",
"WEAL", "WEAN", "WEAR", "WEED", "WEEK", "WEIR", "WELD", "WELL",
"WELT", "WENT", "WERE", "WERT", "WEST", "WHAM", "WHAT", "WHEE",
"WHEN", "WHET", "WHOA", "WHOM", "WICK", "WIFE", "WILD", "WILL",
"WIND", "WINE", "WING", "WINK", "WINO", "WIRE", "WISE", "WISH",
"WITH", "WOLF", "WONT", "WOOD", "WOOL", "WORD", "WORE", "WORK",
"WORM", "WORN", "WOVE", "WRIT", "WYNN", "YALE", "YANG", "YANK",
"YARD", "YARN", "YAWL", "YAWN", "YEAH", "YEAR", "YELL", "YOGA",
"YOKE"]
DEFAULT_RDICT = dict((word, i) for i, word in enumerate(DEFAULT_DICT))
| mzdaniel/oh-mainline | vendor/packages/python-otp/otp/dict.py | Python | agpl-3.0 | 18,237 |
#!/usr/bin/python
#
# examples/security.py -- demonstrate the SECURITY extension
#
# Copyright (C) 2011 Outpost Embedded, LLC
# Forest Bond <[email protected]>
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program; if not, write to the Free Software
# Foundation, Inc., 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
import sys, os
from optparse import OptionParser
# Change path so we find Xlib
sys.path.insert(1, os.path.join(sys.path[0], '..'))
from Xlib.display import Display
from Xlib.ext import security
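# Typical invocations (illustrative):
#   ./security.py --generate --trusted
#   ./security.py --revoke <authid> [<authid> ...]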
def main(argv):
parser = OptionParser()
parser.add_option('--generate', action='store_true', default=False)
parser.add_option('--proto', default='MIT-MAGIC-COOKIE-1')
parser.add_option('--trusted', action='store_true', default=False)
parser.add_option('--untrusted', action='store_true', default=False)
parser.add_option('--revoke', action='store_true', default=False)
opts, args = parser.parse_args(argv[1:])
if opts.trusted and opts.untrusted:
parser.error('--trusted and --untrusted cannot be combined')
if not any((opts.generate, opts.revoke)):
parser.error('specify --generate or --revoke')
display = Display()
if not display.has_extension('SECURITY'):
if display.query_extension('SECURITY') is None:
print >>sys.stderr, 'SECURITY extension not supported'
return 1
security_version = display.security_query_version()
print >>sys.stderr, 'Found SECURITY version %s.%s' % (
security_version.major_version,
security_version.minor_version,
)
if opts.generate:
kwargs = {}
if opts.trusted:
kwargs['trust_level'] = security.SecurityClientTrusted
elif opts.untrusted:
kwargs['trust_level'] = security.SecurityClientUntrusted
reply = display.security_generate_authorization(opts.proto, **kwargs)
print reply.authid
elif opts.revoke:
for arg in args:
authid = int(arg, 10)
display.security_revoke_authorization(authid)
if __name__ == '__main__':
sys.exit(main(sys.argv))
| alexer/python-xlib | examples/security.py | Python | gpl-2.0 | 2,717 |
import logging, os, marshal, json, cPickle, time, copy, datetime, re, urllib, httplib
from base64 import b64encode, b64decode
from lib.crypt import encrypt, decrypt
from uuid import uuid4
from node import Node, InvalidIdentity
class FriendNode(Node):
def __init__(self, *args, **kwargs):
if 'identityData' in kwargs:
identityData = kwargs['identityData']
else:
identityData = args[0]
kwargs['identityData'] = identityData
try:
newIdentity = args[1]
except IndexError:
newIdentity = None
if isinstance(kwargs['identityData'], basestring):
identityData = self.getManagedNode(kwargs['identityData'])
elif isinstance(kwargs['identityData'], dict):
identityData = kwargs['identityData']
else:
raise InvalidIdentity("A valid server Identity was not given nor was a public_key specified.")
super(FriendNode, self).__init__(*args, **kwargs)
self.set('routed_public_key', kwargs['acceptor']['public_key'], True)
self.set('source_indexer_key', kwargs['requester']['public_key'], True)
if 'connector' in kwargs:
self.set('public_key', kwargs['connector']['public_key'])
self.set('private_key', kwargs['connector']['private_key'])
self.setModifiedToNow()
def validIdentity(self, data):
try:
if 'public_key' in data \
and 'private_key' in data \
and 'modified' in data \
and 'data' in data \
and 'friends' in data['data'] \
and 'identity' in data['data'] \
and 'name' in data['data']['identity'] \
and 'avatar' in data['data']['identity']:
return True
else:
raise InvalidIdentity("invalid identity dictionary for identity")
except InvalidIdentity:
raise
class RoutedFriendNode(FriendNode):
def __init__(self, *args, **kwargs):
if 'identityData' in kwargs:
identityData = kwargs['identityData']
else:
identityData = args[0]
kwargs['identityData'] = identityData
try:
newIdentity = args[1]
except IndexError:
newIdentity = None
if isinstance(kwargs['identityData'], basestring):
identityData = self.getFriend(kwargs['identityData'])
elif isinstance(kwargs['identityData'], dict):
identityData = kwargs['identityData']
else:
raise InvalidIdentity("A valid server Identity was not given nor was a public_key specified.")
super(RoutedFriendNode, self).__init__(*args, **kwargs)
def validIdentity(self, data):
try:
if 'public_key' in data \
and 'private_key' in data \
and 'source_indexer_key' in data \
and 'routed_public_key' in data \
and 'modified' in data \
and 'data' in data \
and 'friends' in data['data'] \
and 'identity' in data['data'] \
and 'name' in data['data']['identity']:
return True
else:
raise InvalidIdentity("invalid identity dictionary for identity")
except InvalidIdentity:
raise
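# RoutedFriendNode identities must additionally carry 'source_indexer_key' and
# 'routed_public_key', which plain FriendNode identities do not require.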
| pdxwebdev/yadapy | yadapy/friendnode.py | Python | gpl-3.0 | 3,507 |
import threading
import wx
import logging
from StringIO import StringIO
import HTMLParser
import os
import time
import subprocess
import sys
import requests
from logger.SubmitDebugReportDialog import SubmitDebugReportDialog
class Logger():
def __init__(self, name):
self.name = name
self.transport_logger = None
self.loggerObject = None
self.loggerOutput = None
self.loggerFileHandler = None
self.configureLogger()
def setGlobalLauncherConfig(self, globalLauncherConfig):
self.globalLauncherConfig = globalLauncherConfig
def setGlobalLauncherPreferencesFilePath(self, globalLauncherPreferencesFilePath):
self.globalLauncherPreferencesFilePath = globalLauncherPreferencesFilePath
def sendLogMessagesToDebugWindowTextControl(self, logTextCtrl):
# Send all log messages to the debug window, which may or may not be visible.
log_window_handler = logging.StreamHandler(stream=logTextCtrl)
log_window_handler.setLevel(logging.DEBUG)
log_format_string = '%(asctime)s - %(name)s - %(module)s - %(funcName)s - %(lineno)d - %(levelname)s - %(message)s'
log_window_handler.setFormatter(logging.Formatter(log_format_string))
self.loggerObject = logging.getLogger(self.name)
self.loggerObject.addHandler(log_window_handler)
# Don't send ssh.transport log messages to
# the log window, because they won't be
# wrapped in wx.CallAfter, unless we provide
# our own customized version of the ssh module.
#transport_logger.addHandler(log_window_handler)
def configureLogger(self):
# print "defining global logger"
self.loggerObject = logging.getLogger(self.name)
# print self.logger
self.loggerObject.setLevel(logging.DEBUG)
self.transport_logger = logging.getLogger('ssh.transport')
self.transport_logger.setLevel(logging.DEBUG)
log_format_string = '%(asctime)s - %(name)s - %(module)s - %(funcName)s - %(lineno)d - %(levelname)s - %(message)s'
# Send all log messages to a string.
self.loggerOutput = StringIO()
string_handler = logging.StreamHandler(stream=self.loggerOutput)
string_handler.setLevel(logging.DEBUG)
string_handler.setFormatter(logging.Formatter(log_format_string))
self.loggerObject.addHandler(string_handler)
self.transport_logger.addHandler(string_handler)
# Finally, send all log messages to a log file.
from os.path import expanduser, join
self.loggerFileHandler = logging.FileHandler(join(expanduser("~"), '.MASSIVE_Launcher_debug_log.txt'))
self.loggerFileHandler.setLevel(logging.DEBUG)
self.loggerFileHandler.setFormatter(logging.Formatter(log_format_string))
self.loggerObject.addHandler(self.loggerFileHandler)
self.transport_logger.addHandler(self.loggerFileHandler)
def debug(self, message):
if threading.current_thread().name=="MainThread":
self.loggerObject.debug(message)
else:
wx.CallAfter(self.loggerObject.debug, message)
def error(self, message):
if threading.current_thread().name=="MainThread":
self.loggerObject.error(message)
else:
wx.CallAfter(self.loggerObject.error, message)
def warning(self, message):
if threading.current_thread().name=="MainThread":
self.loggerObject.warning(message)
else:
wx.CallAfter(self.loggerObject.warning, message)
def dump_log(self, launcherMainFrame, submit_log=False, jobParams = None, showFailedToOpenRemoteDesktopMessage=True):
# Commenting out logging.shutdown() for now,
# because of concerns that logging could be used
# after the call to logging.shutdown() which is
# not allowed.
# logging.shutdown()
logger.debug("Logger.dump_log: Flushing self.loggerObject.handlers[0], which is of class: " + self.loggerObject.handlers[0].__class__.__name__)
self.loggerObject.handlers[0].flush()
if launcherMainFrame==None:
logger.debug("Logger.dump_log: Bailing out early, because launcherMainFrame is None.")
return
def showSubmitDebugLogDialog():
dlg = SubmitDebugReportDialog(None,wx.ID_ANY,'MASSIVE/CVL Launcher',self.loggerOutput.getvalue(),self.globalLauncherConfig,self.globalLauncherPreferencesFilePath,showFailedToOpenRemoteDesktopMessage=showFailedToOpenRemoteDesktopMessage)
try:
try:
wx.EndBusyCursor()
stoppedBusyCursor = True
except:
stoppedBusyCursor = False
result = dlg.ShowModal()
if stoppedBusyCursor:
wx.BeginBusyCursor()
launcherMainFrame.submit_log = result == wx.ID_OK
if launcherMainFrame.submit_log:
self.name = dlg.getName()
self.email = dlg.getEmail()
self.comments = dlg.getComments()
self.pleaseContactMe = dlg.getPleaseContactMe()
finally:
dlg.Destroy()
launcherMainFrame.submitDebugLogDialogCompleted = True
launcherMainFrame.submitDebugLogDialogCompleted = False
if submit_log:
if threading.current_thread().name=="MainThread":
showSubmitDebugLogDialog()
else:
wx.CallAfter(showSubmitDebugLogDialog)
while not launcherMainFrame.submitDebugLogDialogCompleted:
time.sleep(0.1)
if submit_log and launcherMainFrame.submit_log:
self.debug('about to send debug log')
url = 'https://cvl.massive.org.au/cgi-bin/log_drop.py'
debugLog = "\n"
if jobParams is not None:
debugLog = debugLog + "Username: " + jobParams['username'] + "\n"
debugLog = debugLog + "Config: " + jobParams['configName'] + "\n"
debugLog = debugLog + "Name: " + self.name + "\n"
debugLog = debugLog + "Email: " + self.email + "\n"
debugLog = debugLog + "Contact me? "
if self.pleaseContactMe:
debugLog = debugLog + "Yes" + "\n"
else:
debugLog = debugLog + "No" + "\n"
debugLog = debugLog + "Comments:\n\n" + self.comments + "\n\n"
atLeastOneError = False
for line in self.loggerOutput.getvalue().splitlines(True):
if "ERROR" in line:
atLeastOneError = True
debugLog = debugLog + line
if atLeastOneError:
debugLog = debugLog + "\n"
debugLog = debugLog + self.loggerOutput.getvalue()
file_info = {'logfile': debugLog}
# If we are running in an installation then we have to use
# our packaged cacert.pem file:
if os.path.exists('cacert.pem'):
r = requests.post(url, files=file_info, verify='cacert.pem')
elif os.path.exists('/opt/MassiveLauncher/cacert.pem'):
r = requests.post(url, files=file_info, verify='/opt/MassiveLauncher/cacert.pem')
elif os.path.exists('c:/program files/massive launcher/cacert.pem'):
r = requests.post(url, files=file_info, verify='c:/program files/massive launcher/cacert.pem')
elif os.path.exists('c:/program files (x86)/massive launcher/cacert.pem'):
r = requests.post(url, files=file_info, verify='c:/program files (x86)/massive launcher/cacert.pem')
else:
r = requests.post(url, files=file_info)
logger = Logger("launcher")
| CVL-dev/cvl-fabric-launcher | logger/Logger.py | Python | gpl-3.0 | 7,832 |
import numpy as np
import pandas as pd
import pytest
from .const import RANDOM_SEED, TARGET_COL
N_CATEGORY = 50
N_OBS = 10000
N_CAT_FEATURE = 10
N_NUM_FEATURE = 5
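# The synthetic frame below has N_NUM_FEATURE numeric columns, N_CAT_FEATURE
# integer-coded categorical columns (each with N_CATEGORY levels) and a noisy
# nonlinear target; RANDOM_SEED keeps it reproducible across test runs.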
@pytest.fixture(scope="module")
def generate_data():
    cache = {}

    def _generate_data():
        # Build the synthetic regression frame once per module and reuse it on
        # subsequent calls; the fixed seed makes the data deterministic either way.
        if "df" not in cache:
            assert N_CAT_FEATURE > 1
            assert N_NUM_FEATURE > 3
            np.random.seed(RANDOM_SEED)
            X_num = np.random.normal(size=(N_OBS, N_NUM_FEATURE))
            X_cat = np.random.randint(0, N_CATEGORY, size=(N_OBS, N_CAT_FEATURE))
            df = pd.DataFrame(
                np.hstack((X_num, X_cat)),
                columns=["num_{}".format(x) for x in range(N_NUM_FEATURE)]
                + ["cat_{}".format(x) for x in range(N_CAT_FEATURE)],
            )
            df[TARGET_COL] = (
                1
                + X_num[:, 0] * X_num[:, 1]
                - np.log1p(np.exp(X_num[:, 1] + X_num[:, 2]))
                + 10 * (X_cat[:, 0] == 0).astype(int)
                + np.random.normal(scale=0.01, size=N_OBS)
            )
            cache["df"] = df
        return cache["df"]

    yield _generate_data
| jeongyoonlee/Kaggler | tests/conftest.py | Python | mit | 1,121 |
#
# Copyright 2016 The BigDL Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
import pytest
import numpy as np
import pandas as pd
import random
from bigdl.chronos.data import TSDataset
from bigdl.chronos.data.utils.roll_dataset import RollDataset
def get_ts_df():
sample_num = np.random.randint(100, 200)
train_df = pd.DataFrame({"datetime": pd.date_range('1/1/2019', periods=sample_num),
"value": np.random.randn(sample_num),
"id": np.array(['00']*sample_num),
"extra feature": np.random.randn(sample_num)})
return train_df
def get_multi_id_ts_df():
sample_num = 100
train_df = pd.DataFrame({"value": np.random.randn(sample_num),
"id": np.array(['00']*50 + ['01']*50),
"extra feature": np.random.randn(sample_num)})
train_df["datetime"] = pd.date_range('1/1/2019', periods=sample_num)
train_df.loc[50:100, "datetime"] = pd.date_range('1/1/2019', periods=50)
return train_df
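# Note: both ids ('00' and '01') are given the same 50-step datetime range, so
# rolling has to be performed per id rather than across the whole frame.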
class TestRollDataset:
@staticmethod
def assert_equal_with_tsdataset(df,
horizon,
lookback,
feature_num=1,
):
# get results rolled by tsdata.roll
extra_feature_col = None if feature_num == 0 else ["extra feature"]
tsdata = TSDataset.from_pandas(df, dt_col="datetime", target_col="value",
extra_feature_col=extra_feature_col, id_col="id")
tsdata.roll(lookback=lookback, horizon=horizon)
x, y = tsdata.to_numpy()
# get results rolled by RollDataset
roll_dataset = RollDataset(df=df,
lookback=lookback,
horizon=horizon,
feature_col=tsdata.feature_col,
target_col=tsdata.target_col,
id_col=tsdata.id_col)
assert len(roll_dataset) == len(x)
for i in range(len(x)):
if horizon != 0:
# for train and y is not None.
xi, yi = x[i], y[i]
roll_dataset_xi, roll_dataset_yi = roll_dataset[i]
np.testing.assert_array_almost_equal(xi, roll_dataset_xi.detach().numpy())
np.testing.assert_array_almost_equal(yi, roll_dataset_yi.detach().numpy())
else:
# for test, y is None.
xi = x[i]
roll_dataset_xi = roll_dataset[i]
np.testing.assert_array_almost_equal(xi, roll_dataset_xi.detach().numpy())
@staticmethod
def combination_tests_for_df(df):
lookback = random.randint(1, 20)
horizon_tests = [
random.randint(1, 10), # train & horizon is int
[1, 4, 16], # train & horizon is list of ints
0, # test
]
# todo: add tests for multiple targets and feature_num > 1
feature_num_tests = [0, 1]
for horizon in horizon_tests:
for feature_num in feature_num_tests:
TestRollDataset.assert_equal_with_tsdataset(df=df,
horizon=horizon,
lookback=lookback,
feature_num=feature_num)
def test_single_id(self):
df = get_ts_df()
TestRollDataset.combination_tests_for_df(df)
def test_multi_id(self):
df = get_multi_id_ts_df()
TestRollDataset.combination_tests_for_df(df)
def test_df_nan(self):
df = get_ts_df()
df["value"][0] = np.nan
with pytest.raises(AssertionError):
RollDataset(df=df,
lookback=2,
horizon=1,
feature_col=["extra feature"],
target_col=["value"],
id_col="id")
| intel-analytics/BigDL | python/chronos/test/bigdl/chronos/data/utils/test_roll_dataset.py | Python | apache-2.0 | 4,626 |
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import numpy as np
import pandas
from sklearn import metrics
import tensorflow as tf
from tensorflow.contrib.layers.python.layers import encoders
from tensorflow.python.ops import nn
import setting
learn = tf.contrib.learn
BOW_EMBEDING_DIM = 80 #50
#activation_fn = nn.relu #tf.nn.relu
#activation_fn = nn.sigmoid
#activation_fn = nn.tanh
ACTIVATIONS = {
"relu" : nn.relu,
"sigmod" : nn.sigmoid,
"tanh" : nn.tanh
}
activation_fn = ACTIVATIONS[setting.activation_fn]
def bag_of_words_model(features, target):
"""A bag-of-words model. Note it disregards the word order in the text."""
target = tf.one_hot(target, 15, 1, 0)
features = encoders.bow_encoder(
features, vocab_size=setting.n_words, embed_dim=BOW_EMBEDING_DIM)
logits = tf.contrib.layers.fully_connected(features, 15, activation_fn) #=None)
loss = tf.contrib.losses.softmax_cross_entropy(logits, target)
#loss = tf.losses.softmax_cross_entropy(logits, target)
train_op = tf.contrib.layers.optimize_loss(
loss,
tf.contrib.framework.get_global_step(),
optimizer='Adam',
learning_rate=0.01)
return ({
'class': tf.argmax(logits, 1),
'prob': tf.nn.softmax(logits)
}, loss, train_op)
def emb_bag_of_words_model(features, target):
"""A bag-of-words model. Note it disregards the word order in the text."""
target = tf.one_hot(target, 15, 1, 0)
## features = encoders.bow_encoder(
## features, vocab_size=setting.n_words, embed_dim=BOW_EMBEDING_DIM)
logits = tf.contrib.layers.fully_connected(features, 15, activation_fn) #=None)
loss = tf.contrib.losses.softmax_cross_entropy(logits, target)
#loss = tf.losses.softmax_cross_entropy(logits, target)
train_op = tf.contrib.layers.optimize_loss(
loss,
tf.contrib.framework.get_global_step(),
optimizer='Adam',
learning_rate=0.01)
return ({
'class': tf.argmax(logits, 1),
'prob': tf.nn.softmax(logits)
}, loss, train_op)
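# Unlike bag_of_words_model above, emb_bag_of_words_model expects `features` that
# are already dense (pre-embedded) vectors, so the bow_encoder step is left out.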
# test
if __name__ == '__main__':
with tf.Session() as sess:
docs = [[0, 1], [2, 3]]
enc = encoders.bow_encoder(docs, 4, 3)
sess.run(tf.global_variables_initializer())
#self.assertAllEqual([2, 3], enc.eval().shape)
print(enc.eval())
| sunmont/textclassifier | bow.py | Python | apache-2.0 | 2,357 |
#!/usr/bin/env python
# -*- coding: latin-1; py-indent-offset:4 -*-
################################################################################
#
# This file is part of Bfplusplus
#
# Bfplusplus is a graphical interface to the Betfair Betting Exchange
# Copyright (C) 2010 Daniel Rodriguez (aka Daniel Rodriksson)
# Copyright (C) 2011 Sensible Odds Ltd.
#
# You can learn more and contact the author at:
#
# http://code.google.com/p/bfplusplus/
#
# Bfplusplus is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# Bfplusplus is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with Bfplusplus. If not, see <http://www.gnu.org/licenses/>.
#
################################################################################
from bfobj import MarketTuple
from util import Message
if True:
def init(self):
showPnL = self.config.ReadBool('Show Profit and Loss', True)
self.config.WriteBool('Show Profit and Loss', showPnL)
self.m_checkBoxShowProfitAndLoss.SetValue(showPnL)
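# init() loads the persisted 'Show Profit and Loss' preference (defaulting to
# True), writes it back so the key always exists, and syncs the checkbox state.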
def GetMarketProfitAndLoss(self, marketTuple):
if self.thGetMarketPrices:
if marketTuple.marketId:
marketComp = self.marketCache[marketTuple]
marketComp.profitAndLossDirty()
# Range markets do not support "profit and loss"
if marketComp.marketType in ('R', 'L'):
marketTuple = MarketTuple(0, 0)
message = Message(marketTuple=marketTuple)
self.thGetMarketProfitAndLoss.passMessage(message)
def OnGetMarketProfitAndLoss(self, event):
message, response, exception = self.SplitEventResponse(event)
if not response:
if exception:
self.LogMessages(exception)
return
marketComp = self.marketCache[message.marketTuple]
compPerc = float(self.m_sliderCompensate.GetValue())/100.0
ticksAway = 0 if not self.compTicksAway else self.compTicksAwayCount
marketComp.updateMarketProfitAndLoss(response.annotations, compPerc, self.compIndex,
ticksAway=ticksAway)
if not self.saveCount:
if self.saveCountPNL:
self.saveCountPNL -= 1
else:
self.GetMarketProfitAndLoss(MarketTuple(0, 0))
self.UpdateMarketProfitAndLoss(message.marketTuple)
def OnCheckBoxShowProfitAndLoss(self, event):
showPnL = self.m_checkBoxShowProfitAndLoss.GetValue()
self.config.WriteBool('Show Profit and Loss', showPnL)
if self.thGetMarketPrices:
self.UpdateMarketProfitAndLoss(self.marketTuple, doShowPnL=showPnL)
def UpdateMarketProfitAndLoss(self, marketTuple, doShowPnL=False, whatIf=False):
if marketTuple is None:
return
if marketTuple != self.marketTuple:
# skip delayed prices from threads
return
marketComp = self.marketCache[marketTuple]
multiWinner = (marketComp.numberOfWinners != 1)
# Prescan the selectionIds in Asian Markets to see how much the
# first selection id repeats itself
if marketComp.marketType == 'A':
if marketComp.runners:
selId = marketComp.runners[0].selectionId
selIdCount = 0
for runner in marketComp.runners:
if runner.selectionId == selId:
selIdCount += 1
showPNL = self.m_checkBoxShowProfitAndLoss.GetValue()
seenAsianAnn = 0
for row, runner in enumerate(marketComp.runners):
pnlStr = ''
if showPNL:
if not doShowPnL and not whatIf and \
not marketComp.annotationsDirty[runner.selectionId]:
continue
try:
annotations = marketComp.annotations[runner.selectionId]
except KeyError:
continue
numAnn = len(annotations)
if marketComp.marketType == 'A':
if selIdCount == 1:
if seenAsianAnn < numAnn and annotations:
seenAsianAnn = numAnn
for annotation in annotations:
pnlStr += '\n%.2f' % annotation.ifWin
pnlStr += ' (%s/%s)' % (getattr(annotation, 'from'), annotation.to)
elif seenAsianAnn < numAnn:
if annotations:
annotation = annotations[seenAsianAnn]
seenAsianAnn += 1
pnlStr += '\n%.2f' % annotation.ifWin
pnlStr += ' (%s/%s)' % (getattr(annotation, 'from'), annotation.to)
elif annotations:
annotation = annotations[0]
pnlStr += '\n%.2f' % annotation.ifWin
if multiWinner:
pnlStr += '%s%.2f' % (self.annSep['MW'], annotation.ifLoss)
if self.curCompensation and not self.curCompensation.size < self.minStakes.minimumStake:
if multiWinner and self.curCompensation.selectionId != runner.selectionId:
pass
else:
pnlStr += '\n'
ifWin = annotation.ifWin
ifWin += self.curCompensation.getIfWinForSelectionId(runner.selectionId)
pnlStr += '(%.2f' % ifWin
if multiWinner:
ifLoss = annotation.ifLoss
ifLoss += self.curCompensation.ifLoss
pnlStr += '%s%.2f' % (self.annSep['MW'], ifLoss)
pnlStr += ')'
label = marketComp.getRunnerLabel(runner)
fullLabel = '%s%s' % (label, pnlStr)
self.m_gridMarket.SetRowLabelValue(row, fullLabel)
# Fill the "Balance column" with the best/worst compensations for each runner
for row, runner in enumerate(marketComp.runners):
runnerComps = marketComp.getCompensations(runner)
if not runnerComps:
self.m_gridMarket.SetCellValue(row, self.colGridBalance, '')
continue
label = ''
i = 0
for comp in runnerComps:
if i > 3:
break
if not comp.size < self.minStakes.minimumStake:
label += u'%.2f/%.2f\n' % (comp.compWin, comp.compLoss)
i += 1
self.m_gridMarket.SetCellValue(row, self.colGridBalance, label.rstrip())
| mementum/bfplusplus | bfplusplus/guimods/mainframe/profitandloss.py | Python | gpl-3.0 | 7,203 |
# -*- coding: utf-8 -*-
import numpy as np
import pytest
import pandas as pd
import pandas.util.testing as tm
from pandas.core.arrays import (DatetimeArrayMixin,
TimedeltaArrayMixin,
PeriodArray)
# TODO: more freq variants
@pytest.fixture(params=['D', 'B', 'W', 'M', 'Q', 'Y'])
def period_index(request):
"""
A fixture to provide PeriodIndex objects with different frequencies.
Most PeriodArray behavior is already tested in PeriodIndex tests,
so here we just test that the PeriodArray behavior matches
the PeriodIndex behavior.
"""
freqstr = request.param
# TODO: non-monotone indexes; NaTs, different start dates
pi = pd.period_range(start=pd.Timestamp('2000-01-01'),
periods=100,
freq=freqstr)
return pi
@pytest.fixture(params=['D', 'B', 'W', 'M', 'Q', 'Y'])
def datetime_index(request):
"""
A fixture to provide DatetimeIndex objects with different frequencies.
Most DatetimeArray behavior is already tested in DatetimeIndex tests,
so here we just test that the DatetimeArray behavior matches
the DatetimeIndex behavior.
"""
freqstr = request.param
# TODO: non-monotone indexes; NaTs, different start dates, timezones
pi = pd.date_range(start=pd.Timestamp('2000-01-01'),
periods=100,
freq=freqstr)
return pi
@pytest.fixture
def timedelta_index(request):
"""
A fixture to provide TimedeltaIndex objects with different frequencies.
Most TimedeltaArray behavior is already tested in TimedeltaIndex tests,
so here we just test that the TimedeltaArray behavior matches
the TimedeltaIndex behavior.
"""
# TODO: flesh this out
return pd.TimedeltaIndex(['1 Day', '3 Hours', 'NaT'])
class TestDatetimeArray(object):
def test_from_dti(self, tz_naive_fixture):
tz = tz_naive_fixture
dti = pd.date_range('2016-01-01', periods=3, tz=tz)
arr = DatetimeArrayMixin(dti)
assert list(dti) == list(arr)
# Check that Index.__new__ knows what to do with DatetimeArray
dti2 = pd.Index(arr)
assert isinstance(dti2, pd.DatetimeIndex)
assert list(dti2) == list(arr)
def test_astype_object(self, tz_naive_fixture):
tz = tz_naive_fixture
dti = pd.date_range('2016-01-01', periods=3, tz=tz)
arr = DatetimeArrayMixin(dti)
asobj = arr.astype('O')
assert isinstance(asobj, np.ndarray)
assert asobj.dtype == 'O'
assert list(asobj) == list(dti)
@pytest.mark.parametrize('freqstr', ['D', 'B', 'W', 'M', 'Q', 'Y'])
def test_to_perioddelta(self, datetime_index, freqstr):
# GH#23113
dti = datetime_index
arr = DatetimeArrayMixin(dti)
expected = dti.to_perioddelta(freq=freqstr)
result = arr.to_perioddelta(freq=freqstr)
assert isinstance(result, TimedeltaArrayMixin)
# placeholder until these become actual EA subclasses and we can use
# an EA-specific tm.assert_ function
tm.assert_index_equal(pd.Index(result), pd.Index(expected))
@pytest.mark.parametrize('freqstr', ['D', 'B', 'W', 'M', 'Q', 'Y'])
def test_to_period(self, datetime_index, freqstr):
dti = datetime_index
arr = DatetimeArrayMixin(dti)
expected = dti.to_period(freq=freqstr)
result = arr.to_period(freq=freqstr)
assert isinstance(result, PeriodArray)
# placeholder until these become actual EA subclasses and we can use
# an EA-specific tm.assert_ function
tm.assert_index_equal(pd.Index(result), pd.Index(expected))
@pytest.mark.parametrize('propname', pd.DatetimeIndex._bool_ops)
def test_bool_properties(self, datetime_index, propname):
# in this case _bool_ops is just `is_leap_year`
dti = datetime_index
arr = DatetimeArrayMixin(dti)
assert dti.freq == arr.freq
result = getattr(arr, propname)
expected = np.array(getattr(dti, propname), dtype=result.dtype)
tm.assert_numpy_array_equal(result, expected)
@pytest.mark.parametrize('propname', pd.DatetimeIndex._field_ops)
def test_int_properties(self, datetime_index, propname):
dti = datetime_index
arr = DatetimeArrayMixin(dti)
result = getattr(arr, propname)
expected = np.array(getattr(dti, propname), dtype=result.dtype)
tm.assert_numpy_array_equal(result, expected)
class TestTimedeltaArray(object):
def test_from_tdi(self):
tdi = pd.TimedeltaIndex(['1 Day', '3 Hours'])
arr = TimedeltaArrayMixin(tdi)
assert list(arr) == list(tdi)
# Check that Index.__new__ knows what to do with TimedeltaArray
tdi2 = pd.Index(arr)
assert isinstance(tdi2, pd.TimedeltaIndex)
assert list(tdi2) == list(arr)
def test_astype_object(self):
tdi = pd.TimedeltaIndex(['1 Day', '3 Hours'])
arr = TimedeltaArrayMixin(tdi)
asobj = arr.astype('O')
assert isinstance(asobj, np.ndarray)
assert asobj.dtype == 'O'
assert list(asobj) == list(tdi)
def test_to_pytimedelta(self, timedelta_index):
tdi = timedelta_index
arr = TimedeltaArrayMixin(tdi)
expected = tdi.to_pytimedelta()
result = arr.to_pytimedelta()
tm.assert_numpy_array_equal(result, expected)
def test_total_seconds(self, timedelta_index):
tdi = timedelta_index
arr = TimedeltaArrayMixin(tdi)
expected = tdi.total_seconds()
result = arr.total_seconds()
tm.assert_numpy_array_equal(result, expected.values)
@pytest.mark.parametrize('propname', pd.TimedeltaIndex._field_ops)
def test_int_properties(self, timedelta_index, propname):
tdi = timedelta_index
arr = TimedeltaArrayMixin(tdi)
result = getattr(arr, propname)
expected = np.array(getattr(tdi, propname), dtype=result.dtype)
tm.assert_numpy_array_equal(result, expected)
class TestPeriodArray(object):
def test_from_pi(self, period_index):
pi = period_index
arr = PeriodArray(pi)
assert list(arr) == list(pi)
# Check that Index.__new__ knows what to do with PeriodArray
pi2 = pd.Index(arr)
assert isinstance(pi2, pd.PeriodIndex)
assert list(pi2) == list(arr)
def test_astype_object(self, period_index):
pi = period_index
arr = PeriodArray(pi)
asobj = arr.astype('O')
assert isinstance(asobj, np.ndarray)
assert asobj.dtype == 'O'
assert list(asobj) == list(pi)
@pytest.mark.parametrize('how', ['S', 'E'])
def test_to_timestamp(self, how, period_index):
pi = period_index
arr = PeriodArray(pi)
expected = DatetimeArrayMixin(pi.to_timestamp(how=how))
result = arr.to_timestamp(how=how)
assert isinstance(result, DatetimeArrayMixin)
# placeholder until these become actual EA subclasses and we can use
# an EA-specific tm.assert_ function
tm.assert_index_equal(pd.Index(result), pd.Index(expected))
@pytest.mark.parametrize('propname', PeriodArray._bool_ops)
def test_bool_properties(self, period_index, propname):
# in this case _bool_ops is just `is_leap_year`
pi = period_index
arr = PeriodArray(pi)
result = getattr(arr, propname)
expected = np.array(getattr(pi, propname))
tm.assert_numpy_array_equal(result, expected)
@pytest.mark.parametrize('propname', PeriodArray._field_ops)
def test_int_properties(self, period_index, propname):
pi = period_index
arr = PeriodArray(pi)
result = getattr(arr, propname)
expected = np.array(getattr(pi, propname))
tm.assert_numpy_array_equal(result, expected)
| amolkahat/pandas | pandas/tests/arrays/test_datetimelike.py | Python | bsd-3-clause | 7,938 |
import sys
import urllib
import json
import requests
class BingApiResult():
def __init__(self, json_result):
self.response = json.loads(json_result)['SearchResponse']['Web']
self.total = self.response['Total']
def find_libraries(self):
return self.response['Results']
def eof(self):
try:
self.find_libraries()
return False
except KeyError as e:
return True
def bing_search(search_terms, base_url, offset):
base_url = base_url + "&Query=" + urllib.quote_plus(search_terms)
url = base_url + "&Web.Count=50&Web.Offset=" + str(offset)
print str(offset) + " " + url
r = requests.get(url)
search_result = BingApiResult(r.content)
if search_result.eof():
print "EOF"
return []
libraries = search_result.find_libraries()
print 'Total results ' + str(search_result.total)
print 'Current results ' + str(len(libraries))
return libraries
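# main() below pages through results by advancing the offset by however many
# results each call returned (Web.Count asks for 50 per request) until an empty
# page comes back.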
def main():
# for example, to search Bing for Horizon libraries: "inanchor:ipac20 account"
search_terms = "inanchor:ipac20 -site:si.edu"
if len(sys.argv) > 1:
search_terms = sys.argv[1]
base_url = "http://api.bing.net/json.aspx?AppId=91650C54158D791BE8B89E229B2190C53C83ABE8&Sources=Web&Version=2.0&Market=en-us&Adult=Moderate&Web.Options=DisableQueryAlterations"
offset = 0
libraries = []
new_libraries = bing_search(search_terms, base_url, offset)
while len(new_libraries) != 0:
libraries.extend(new_libraries)
offset += len(new_libraries)
new_libraries = bing_search(search_terms, base_url, offset)
for library in libraries:
print library['Title'] + ',' + library['Url']
if __name__ == '__main__':
main() | bluestemscott/librarygadget | librarygadget/librarybot/findlibraries/bing_finder.py | Python | mit | 1,859 |
#!/usr/bin/env python
# #
# Copyright 2009-2015 Ghent University
#
# This file is part of EasyBuild,
# originally created by the HPC team of Ghent University (http://ugent.be/hpc/en),
# with support of Ghent University (http://ugent.be/hpc),
# the Flemish Supercomputer Centre (VSC) (https://vscentrum.be/nl/en),
# the Hercules foundation (http://www.herculesstichting.be/in_English)
# and the Department of Economy, Science and Innovation (EWI) (http://www.ewi-vlaanderen.be/en).
#
# http://github.com/hpcugent/easybuild
#
# EasyBuild is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation v2.
#
# EasyBuild is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with EasyBuild. If not, see <http://www.gnu.org/licenses/>.
# #
"""
Main entry point for EasyBuild: build software from .eb input file
@author: Stijn De Weirdt (Ghent University)
@author: Dries Verdegem (Ghent University)
@author: Kenneth Hoste (Ghent University)
@author: Pieter De Baets (Ghent University)
@author: Jens Timmerman (Ghent University)
@author: Toon Willems (Ghent University)
@author: Ward Poelmans (Ghent University)
@author: Fotis Georgatos (Uni.Lu, NTUA)
"""
import copy
import os
import stat
import sys
import tempfile
import traceback
# IMPORTANT this has to be the first easybuild import as it customises the logging
# expect missing log output when this not the case!
from easybuild.tools.build_log import EasyBuildError, init_logging, print_msg, print_error, stop_logging
import easybuild.tools.config as config
import easybuild.tools.options as eboptions
from easybuild.framework.easyblock import EasyBlock, build_and_install_one
from easybuild.framework.easyconfig import EASYCONFIGS_PKG_SUBDIR
from easybuild.framework.easyconfig.tools import alt_easyconfig_paths, dep_graph, det_easyconfig_paths
from easybuild.framework.easyconfig.tools import get_paths_for, parse_easyconfigs, review_pr, skip_available
from easybuild.framework.easyconfig.tweak import obtain_ec_for, tweak
from easybuild.tools.config import get_repository, get_repositorypath
from easybuild.tools.filetools import adjust_permissions, cleanup, write_file
from easybuild.tools.options import process_software_build_specs
from easybuild.tools.robot import det_robot_path, dry_run, resolve_dependencies, search_easyconfigs
from easybuild.tools.package.utilities import check_pkg_support
from easybuild.tools.parallelbuild import submit_jobs
from easybuild.tools.repository.repository import init_repository
from easybuild.tools.testing import create_test_report, overall_test_report, regtest, session_module_list, session_state
from easybuild.tools.version import this_is_easybuild
_log = None
def log_start(eb_command_line, eb_tmpdir):
"""Log startup info."""
_log.info(this_is_easybuild())
# log used command line
_log.info("Command line: %s" % (' '.join(eb_command_line)))
_log.info("Using %s as temporary directory" % eb_tmpdir)
def find_easyconfigs_by_specs(build_specs, robot_path, try_to_generate, testing=False):
"""Find easyconfigs by build specifications."""
generated, ec_file = obtain_ec_for(build_specs, robot_path, None)
if generated:
if try_to_generate:
print_msg("Generated an easyconfig file %s, going to use it now..." % ec_file, silent=testing)
else:
# (try to) cleanup
try:
os.remove(ec_file)
except OSError, err:
_log.warning("Failed to remove generated easyconfig file %s: %s" % (ec_file, err))
# don't use a generated easyconfig unless generation was requested (using a --try-X option)
raise EasyBuildError("Unable to find an easyconfig for the given specifications: %s; "
"to make EasyBuild try to generate a matching easyconfig, "
"use the --try-X options ", build_specs)
return [(ec_file, generated)]
def build_and_install_software(ecs, init_session_state, exit_on_failure=True):
"""Build and install software for all provided parsed easyconfig files."""
# obtain a copy of the starting environment so each build can start afresh
# we shouldn't use the environment from init_session_state, since relevant env vars might have been set since
# e.g. via easyconfig.handle_allowed_system_deps
init_env = copy.deepcopy(os.environ)
res = []
for ec in ecs:
ec_res = {}
try:
(ec_res['success'], app_log, err) = build_and_install_one(ec, init_env)
ec_res['log_file'] = app_log
if not ec_res['success']:
ec_res['err'] = EasyBuildError(err)
except Exception, err:
# purposely catch all exceptions
ec_res['success'] = False
ec_res['err'] = err
ec_res['traceback'] = traceback.format_exc()
# keep track of success/total count
if ec_res['success']:
test_msg = "Successfully built %s" % ec['spec']
else:
test_msg = "Build of %s failed" % ec['spec']
if 'err' in ec_res:
test_msg += " (err: %s)" % ec_res['err']
# dump test report next to log file
test_report_txt = create_test_report(test_msg, [(ec, ec_res)], init_session_state)
if 'log_file' in ec_res:
test_report_fp = "%s_test_report.md" % '.'.join(ec_res['log_file'].split('.')[:-1])
parent_dir = os.path.dirname(test_report_fp)
# parent dir for test report may not be writable at this time, e.g. when --read-only-installdir is used
if os.stat(parent_dir).st_mode & 0200:
write_file(test_report_fp, test_report_txt)
else:
adjust_permissions(parent_dir, stat.S_IWUSR, add=True, recursive=False)
write_file(test_report_fp, test_report_txt)
adjust_permissions(parent_dir, stat.S_IWUSR, add=False, recursive=False)
if not ec_res['success'] and exit_on_failure:
if 'traceback' in ec_res:
raise EasyBuildError(ec_res['traceback'])
else:
raise EasyBuildError(test_msg)
res.append((ec, ec_res))
return res
def main(args=None, logfile=None, do_build=None, testing=False):
"""
Main function: parse command line options, and act accordingly.
@param args: command line arguments to use
@param logfile: log file to use
@param do_build: whether or not to actually perform the build
@param testing: enable testing mode
"""
    # purposely capture the session state very early, to avoid modules loaded by EasyBuild meddling in it
init_session_state = session_state()
# initialise options
eb_go = eboptions.parse_options(args=args)
options = eb_go.options
orig_paths = eb_go.args
# set umask (as early as possible)
if options.umask is not None:
new_umask = int(options.umask, 8)
old_umask = os.umask(new_umask)
# set by option parsers via set_tmpdir
eb_tmpdir = tempfile.gettempdir()
# initialise logging for main
global _log
_log, logfile = init_logging(logfile, logtostdout=options.logtostdout, testing=testing)
# disallow running EasyBuild as root
if os.getuid() == 0:
raise EasyBuildError("You seem to be running EasyBuild with root privileges which is not wise, "
"so let's end this here.")
# log startup info
eb_cmd_line = eb_go.generate_cmd_line() + eb_go.args
log_start(eb_cmd_line, eb_tmpdir)
if options.umask is not None:
_log.info("umask set to '%s' (used to be '%s')" % (oct(new_umask), oct(old_umask)))
# process software build specifications (if any), i.e.
# software name/version, toolchain name/version, extra patches, ...
(try_to_generate, build_specs) = process_software_build_specs(options)
# determine robot path
# --try-X, --dep-graph, --search use robot path for searching, so enable it with path of installed easyconfigs
tweaked_ecs = try_to_generate and build_specs
tweaked_ecs_path, pr_path = alt_easyconfig_paths(eb_tmpdir, tweaked_ecs=tweaked_ecs, from_pr=options.from_pr)
auto_robot = try_to_generate or options.dep_graph or options.search or options.search_short
robot_path = det_robot_path(options.robot_paths, tweaked_ecs_path, pr_path, auto_robot=auto_robot)
_log.debug("Full robot path: %s" % robot_path)
# configure & initialize build options
config_options_dict = eb_go.get_options_by_section('config')
build_options = {
'build_specs': build_specs,
'command_line': eb_cmd_line,
'pr_path': pr_path,
'robot_path': robot_path,
'silent': testing,
'try_to_generate': try_to_generate,
'valid_stops': [x[0] for x in EasyBlock.get_steps()],
}
# initialise the EasyBuild configuration & build options
config.init(options, config_options_dict)
config.init_build_options(build_options=build_options, cmdline_options=options)
# check whether packaging is supported when it's being used
if options.package:
check_pkg_support()
else:
_log.debug("Packaging not enabled, so not checking for packaging support.")
# update session state
eb_config = eb_go.generate_cmd_line(add_default=True)
modlist = session_module_list(testing=testing) # build options must be initialized first before 'module list' works
init_session_state.update({'easybuild_configuration': eb_config})
init_session_state.update({'module_list': modlist})
_log.debug("Initial session state: %s" % init_session_state)
# review specified PR
if options.review_pr:
print review_pr(options.review_pr, colored=options.color)
# search for easyconfigs, if a query is specified
query = options.search or options.search_short
if query:
search_easyconfigs(query, short=not options.search)
# determine easybuild-easyconfigs package install path
easyconfigs_pkg_paths = get_paths_for(subdir=EASYCONFIGS_PKG_SUBDIR)
if not easyconfigs_pkg_paths:
_log.warning("Failed to determine install path for easybuild-easyconfigs package.")
# command line options that do not require any easyconfigs to be specified
no_ec_opts = [options.aggregate_regtest, options.review_pr, options.search, options.search_short, options.regtest]
# determine paths to easyconfigs
paths = det_easyconfig_paths(orig_paths)
if paths:
# transform paths into tuples, use 'False' to indicate the corresponding easyconfig files were not generated
paths = [(p, False) for p in paths]
else:
if 'name' in build_specs:
# try to obtain or generate an easyconfig file via build specifications if a software name is provided
paths = find_easyconfigs_by_specs(build_specs, robot_path, try_to_generate, testing=testing)
elif not any(no_ec_opts):
print_error(("Please provide one or multiple easyconfig files, or use software build "
"options to make EasyBuild search for easyconfigs"),
log=_log, opt_parser=eb_go.parser, exit_on_error=not testing)
_log.debug("Paths: %s" % paths)
# run regtest
if options.regtest or options.aggregate_regtest:
_log.info("Running regression test")
# fallback: easybuild-easyconfigs install path
regtest_ok = regtest([path[0] for path in paths] or easyconfigs_pkg_paths)
if not regtest_ok:
_log.info("Regression test failed (partially)!")
sys.exit(31) # exit -> 3x1t -> 31
# read easyconfig files
easyconfigs, generated_ecs = parse_easyconfigs(paths)
# tweak obtained easyconfig files, if requested
# don't try and tweak anything if easyconfigs were generated, since building a full dep graph will fail
# if easyconfig files for the dependencies are not available
if try_to_generate and build_specs and not generated_ecs:
easyconfigs = tweak(easyconfigs, build_specs, targetdir=tweaked_ecs_path)
# dry_run: print all easyconfigs and dependencies, and whether they are already built
if options.dry_run or options.dry_run_short:
txt = dry_run(easyconfigs, short=not options.dry_run, build_specs=build_specs)
print_msg(txt, log=_log, silent=testing, prefix=False)
# cleanup and exit after dry run, searching easyconfigs or submitting regression test
if any(no_ec_opts + [options.dry_run, options.dry_run_short]):
cleanup(logfile, eb_tmpdir, testing)
sys.exit(0)
# skip modules that are already installed unless forced
if not options.force:
retained_ecs = skip_available(easyconfigs)
if not testing:
for skipped_ec in [ec for ec in easyconfigs if ec not in retained_ecs]:
print_msg("%s is already installed (module found), skipping" % skipped_ec['full_mod_name'])
easyconfigs = retained_ecs
# determine an order that will allow all specs in the set to build
if len(easyconfigs) > 0:
if options.robot:
print_msg("resolving dependencies ...", log=_log, silent=testing)
ordered_ecs = resolve_dependencies(easyconfigs, build_specs=build_specs)
else:
ordered_ecs = easyconfigs
else:
print_msg("No easyconfigs left to be built.", log=_log, silent=testing)
ordered_ecs = []
# create dependency graph and exit
if options.dep_graph:
_log.info("Creating dependency graph %s" % options.dep_graph)
dep_graph(options.dep_graph, ordered_ecs)
sys.exit(0)
# submit build as job(s), clean up and exit
if options.job:
submit_jobs(ordered_ecs, eb_go.generate_cmd_line(), testing=testing)
if not testing:
print_msg("Submitted parallel build jobs, exiting now")
cleanup(logfile, eb_tmpdir, testing)
sys.exit(0)
    # build software, will exit when errors occur (except when testing)
exit_on_failure = not options.dump_test_report and not options.upload_test_report
if not testing or (testing and do_build):
ecs_with_res = build_and_install_software(ordered_ecs, init_session_state, exit_on_failure=exit_on_failure)
else:
ecs_with_res = [(ec, {}) for ec in ordered_ecs]
correct_builds_cnt = len([ec_res for (_, ec_res) in ecs_with_res if ec_res.get('success', False)])
overall_success = correct_builds_cnt == len(ordered_ecs)
success_msg = "Build succeeded for %s out of %s" % (correct_builds_cnt, len(ordered_ecs))
repo = init_repository(get_repository(), get_repositorypath())
repo.cleanup()
# dump/upload overall test report
test_report_msg = overall_test_report(ecs_with_res, len(paths), overall_success, success_msg, init_session_state)
if test_report_msg is not None:
print_msg(test_report_msg)
print_msg(success_msg, log=_log, silent=testing)
    # clean up generated spec files (if any)
for ec in easyconfigs:
if 'original_spec' in ec and os.path.isfile(ec['spec']):
os.remove(ec['spec'])
# stop logging and cleanup tmp log file, unless one build failed (individual logs are located in eb_tmpdir)
stop_logging(logfile, logtostdout=options.logtostdout)
if overall_success:
cleanup(logfile, eb_tmpdir, testing)
if __name__ == "__main__":
try:
main()
except EasyBuildError, e:
print_error(e.msg)
| torbjoernk/easybuild-framework | easybuild/main.py | Python | gpl-2.0 | 15,854 |
# -*- coding: utf-8 -*-
__params__ = {'la': 33, 'lb': 33, 'dima': 10, 'dimb': 10}
def protocol(client, server, params):
la = params['la']
lb = params['lb']
client.a = Signed(bitlen=la).input(src=driver, desc="a")
server.b = Signed(bitlen=lb).input(src=driver, desc="b")
client.ga = Garbled(val=client.a)
client.cc = Signed(val=client.ga)
client.gb <<= Garbled(val=server.b)
client.sc = Signed(val=client.gb)
server.ga <<= client.ga
client.cc2 <<= Signed(val=server.ga)
server.gb = Garbled(val=server.b)
client.sc2 <<= Signed(val=server.gb)
client.sc.output(dest=driver, desc="sc")
client.cc.output(dest=driver, desc="cc")
client.sc2.output(dest=driver, desc="sc2")
client.cc2.output(dest=driver, desc="cc2")
| tastyproject/tasty | tasty/tests/functional/protocols/conversions/signed-garbled-signed-client/protocol.py | Python | gpl-3.0 | 781 |
#!/usr/bin/env python
import vtk
from vtk.test import Testing
from vtk.util.misc import vtkGetDataRoot
VTK_DATA_ROOT = vtkGetDataRoot()
ren1 = vtk.vtkRenderer()
renWin = vtk.vtkRenderWindow()
renWin.AddRenderer(ren1)
iren = vtk.vtkRenderWindowInteractor()
iren.SetRenderWindow(renWin)
# read data
#
reader = vtk.vtkGenericEnSightReader()
# Make sure all algorithms use the composite data pipeline
cdp = vtk.vtkCompositeDataPipeline()
reader.SetDefaultExecutivePrototype(cdp)
reader.SetCaseFileName("" + str(VTK_DATA_ROOT) + "/Data/EnSight/office_bin.case")
reader.Update()
outline = vtk.vtkStructuredGridOutlineFilter()
# outline SetInputConnection [reader GetOutputPort]
outline.SetInputData(reader.GetOutput().GetBlock(0))
mapOutline = vtk.vtkPolyDataMapper()
mapOutline.SetInputConnection(outline.GetOutputPort())
outlineActor = vtk.vtkActor()
outlineActor.SetMapper(mapOutline)
outlineActor.GetProperty().SetColor(0,0,0)
# Create source for streamtubes
streamer = vtk.vtkStreamTracer()
# streamer SetInputConnection [reader GetOutputPort]
streamer.SetInputData(reader.GetOutput().GetBlock(0))
streamer.SetStartPosition(0.1,2.1,0.5)
streamer.SetMaximumPropagation(500)
streamer.SetInitialIntegrationStep(0.1)
streamer.SetIntegrationDirectionToForward()
cone = vtk.vtkConeSource()
cone.SetResolution(8)
cones = vtk.vtkGlyph3D()
cones.SetInputConnection(streamer.GetOutputPort())
cones.SetSourceConnection(cone.GetOutputPort())
cones.SetScaleFactor(3)
cones.SetInputArrayToProcess(1, 0, 0, vtk.vtkDataObject.FIELD_ASSOCIATION_POINTS, "vectors")
cones.SetScaleModeToScaleByVector()
mapCones = vtk.vtkPolyDataMapper()
mapCones.SetInputConnection(cones.GetOutputPort())
# eval mapCones SetScalarRange [[reader GetOutput] GetScalarRange]
mapCones.SetScalarRange(reader.GetOutput().GetBlock(0).GetScalarRange())
conesActor = vtk.vtkActor()
conesActor.SetMapper(mapCones)
ren1.AddActor(outlineActor)
ren1.AddActor(conesActor)
ren1.SetBackground(0.4,0.4,0.5)
renWin.SetSize(300,300)
iren.Initialize()
# interact with data
reader.SetDefaultExecutivePrototype(None)
# --- end of script --
| hlzz/dotfiles | graphics/VTK-7.0.0/IO/EnSight/Testing/Python/EnSightOfficeBin.py | Python | bsd-3-clause | 2,148 |
from .fft_tools import zoom
import numpy as np
import matplotlib.pyplot as pl
def iterative_zoom(image, mindiff=1., zoomshape=[10,10],
return_zoomed=False, zoomstep=2, verbose=False,
minmax=np.min, ploteach=False, return_center=True):
"""
Iteratively zoom in on the *minimum* position in an image until the
delta-peak value is below `mindiff`
Parameters
----------
image : np.ndarray
Two-dimensional image with a *minimum* to zoom in on (or maximum, if
specified using `minmax`)
mindiff : float
Minimum difference that must be present in image before zooming is done
zoomshape : [int,int]
Shape of the "mini" image to create. Smaller is faster, but a bit less
accurate. [10,10] seems to work well in preliminary tests (though unit
tests have not been written)
return_zoomed : bool
Return the zoomed image in addition to the measured offset?
zoomstep : int
Amount to increase the zoom factor by on each iteration. Probably best to
stick with small integers (2-5ish).
verbose : bool
Print out information about zoom factor, offset at each iteration
minmax : np.min or np.max
Can zoom in on the minimum or maximum of the image
ploteach : bool
Primarily a debug tool, and to be used with extreme caution! Will open
a new figure at each iteration showing the next zoom level.
return_center : bool
Return the center position in original image coordinates? If False,
        will return the *offset from center* instead (but beware the
conventions associated with the concept of 'center' for even images).
Returns
-------
The y,x offsets (following numpy convention) of the center position of the
original image. If `return_zoomed`, returns (zoomed_image, zoom_factor,
offsets) because you can't interpret the zoomed image without the zoom
factor.
"""
image_zoom = image
argminmax = np.argmin if "min" in minmax.__name__ else np.argmax
zf = 1. # "zoom factor" initialized to 1 for the base shift measurement
offset = np.array([0]*image.ndim,dtype='float') # center offset
delta_image = (image_zoom - minmax(image_zoom))
xaxzoom = np.indices(image.shape)
if ploteach:
ii = 1
pl.figure(ii)
pl.clf()
pl.pcolor(np.arange(image.shape[0]+1)-0.5,np.arange(image.shape[1]+1)-0.5, image)
minpos = np.unravel_index(argminmax(image_zoom), image_zoom.shape)
pl.plot(minpos[1],minpos[0],'wx')
# check to make sure the smallest *nonzero* difference > mindiff
while np.abs(delta_image[np.abs(delta_image)>0]).min() > mindiff:
minpos = np.unravel_index(argminmax(image_zoom), image_zoom.shape)
center = xaxzoom[0][minpos],xaxzoom[1][minpos]
offset = xaxzoom[0][minpos]-(image.shape[0]-1)/2,xaxzoom[1][minpos]-(image.shape[1]-1)/2
zf *= zoomstep
xaxzoom, image_zoom = zoom.zoom_on_pixel(image, center, usfac=zf,
outshape=zoomshape, return_xouts=True)
delta_image = image_zoom-minmax(image_zoom)
# base case: in case you can't do any better...
# (at this point, you're all the way zoomed)
if np.all(delta_image == 0):
if verbose:
print("Can't zoom any further. zf=%i" % zf)
break
if verbose:
print(("Zoom factor %6i, center = %30s, offset=%30s, minpos=%30s, min|diff|=%15g" %
(zf, ",".join(["%15g" % c for c in center]),
",".join(["%15g" % c for c in offset]),
",".join(["%5i" % c for c in minpos]),
np.abs(delta_image[np.abs(delta_image)>0]).min()
)))
if ploteach:
ii += 1
pl.figure(ii)
pl.clf()
pl.pcolor(centers_to_edges(xaxzoom[1][0,:]),centers_to_edges(xaxzoom[0][:,0]),image_zoom)
pl.contour(xaxzoom[1],xaxzoom[0],image_zoom-image_zoom.min(),levels=[1,5,15],cmap=pl.cm.gray)
pl.plot(center[1],center[0],'wx')
minpos = np.unravel_index(argminmax(image_zoom), image_zoom.shape)
pl.plot(xaxzoom[1][minpos],
xaxzoom[0][minpos],
'w+')
pl.arrow(center[1],center[0],xaxzoom[1][minpos]-center[1],xaxzoom[0][minpos]-center[0],color='w',
head_width=0.1/zf, linewidth=1./zf, length_includes_head=True)
pl.figure(1)
#pl.contour(xaxzoom[1],xaxzoom[0],image_zoom-image_zoom.min(),levels=[1,5,15],cmap=pl.cm.gray)
pl.arrow(center[1],center[0],xaxzoom[1][minpos]-center[1],xaxzoom[0][minpos]-center[0],color='w',
head_width=0.1/zf, linewidth=1./zf, length_includes_head=True)
if return_center:
result = center
else:
result = offset
if return_zoomed:
return image_zoom,zf,result
else:
return result
def centers_to_edges(arr):
dx = arr[1]-arr[0]
newarr = np.linspace(arr.min()-dx/2,arr.max()+dx/2,arr.size+1)
return newarr
def iterative_zoom_1d(data, mindiff=1., zoomshape=(10,),
return_zoomed=False, zoomstep=2, verbose=False,
minmax=np.min, return_center=True):
"""
Iteratively zoom in on the *minimum* position in a spectrum or timestream
until the delta-peak value is below `mindiff`
Parameters
----------
data : np.ndarray
One-dimensional array with a *minimum* (or maximum, as specified by
minmax) to zoom in on
mindiff : float
Minimum difference that must be present in image before zooming is done
zoomshape : int
Shape of the "mini" image to create. Smaller is faster, but a bit less
accurate. 10 seems to work well in preliminary tests (though unit
tests have not been written)
return_zoomed : bool
Return the zoomed image in addition to the measured offset?
zoomstep : int
Amount to increase the zoom factor by on each iteration. Probably best to
stick with small integers (2-5ish).
verbose : bool
Print out information about zoom factor, offset at each iteration
minmax : np.min or np.max
Can zoom in on the minimum or maximum of the image
return_center : bool
Return the center position in original image coordinates? If False,
        will return the *offset from center* instead (but beware the
conventions associated with the concept of 'center' for even images).
Returns
-------
The x offsets of the center position of the original spectrum. If
`return_zoomed`, returns (zoomed_image, zoom_factor, offsets) because you
can't interpret the zoomed spectrum without the zoom factor.
"""
data_zoom = data
argminmax = np.argmin if "min" in minmax.__name__ else np.argmax
zf = 1. # "zoom factor" initialized to 1 for the base shift measurement
offset = 0.
delta_data = (data_zoom - minmax(data_zoom))
xaxzoom = np.arange(data.size)
# check to make sure the smallest *nonzero* difference > mindiff
while np.abs(delta_data[np.abs(delta_data)>0]).min() > mindiff:
minpos = argminmax(data_zoom)
center = xaxzoom.squeeze()[minpos],
offset = xaxzoom.squeeze()[minpos]-(data.size-1)/2,
zf *= zoomstep
xaxzoom, data_zoom = zoom.zoom_on_pixel(data, center, usfac=zf,
outshape=zoomshape, return_xouts=True)
delta_data = data_zoom-minmax(data_zoom)
# base case: in case you can't do any better...
# (at this point, you're all the way zoomed)
if np.all(delta_data == 0):
if verbose:
print("Can't zoom any further. zf=%i" % zf)
break
if verbose:
print(("Zoom factor %6i, center = %30s, offset=%30s, minpos=%30s, mindiff=%30s" %
(zf, "%15g" % center,
"%15g" % offset,
"%15g" % minpos,
"%15g" % np.abs(delta_data[np.abs(delta_data)>0]).min(),
)))
if return_center:
result = center
else:
result = offset
if return_zoomed:
return data_zoom,zf,result
else:
return result
def centers_to_edges(arr):
dx = arr[1]-arr[0]
newarr = np.linspace(arr.min()-dx/2,arr.max()+dx/2,arr.size+1)
return newarr
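# Illustrative usage sketch (not part of the original module): recover the
# sub-pixel position of the minimum of a smooth 2D surface, assuming the package
# (and its fft_tools.zoom helper) is importable. The 25x25 grid and the Gaussian
# dip centred near (y, x) = (7.6, 12.3) are made-up test values for this example.
if __name__ == "__main__":
    yy, xx = np.indices((25, 25))
    fake_image = -np.exp(-((xx - 12.3) ** 2 + (yy - 7.6) ** 2) / 4.)
    # mindiff is passed explicitly; the default of 1.0 is too coarse for a
    # unit-depth dip like this one
    yx_center = iterative_zoom(fake_image, mindiff=1e-3, verbose=True)
    print("Recovered (y, x) minimum position: {0}".format(yx_center))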
| keflavich/image_registration | image_registration/iterative_zoom.py | Python | mit | 8,555 |
# -*- coding: utf-8 -*-
"""
addonpr config module
Copyright (C) 2012-2013 Team XBMC
http://www.xbmc.org
This Program is free software; you can redistribute it and/or modify
it under the terms of the GNU General Public License as published by
the Free Software Foundation; either version 2, or (at your option)
any later version.
This Program is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
GNU General Public License for more details.
You should have received a copy of the GNU General Public License
along with this program; see the file LICENSE. If not, see
<http://www.gnu.org/licenses/>.
"""
BRANCHES = ['eden', 'frodo', 'gotham']
DEPENDENCIES = {
'eden': {'xbmc.python': '2.0'
},
'frodo': {'xbmc.addon': '12.0.0',
'xbmc.core': '0.1.0',
'xbmc.gui': '4.0.0',
'xbmc.json': '6.0.0',
'xbmc.metadata': '2.1.0',
'xbmc.python': '2.1.0'
},
'gotham': {'xbmc.addon': '12.0.0',
'xbmc.core': '0.1.0',
'xbmc.gui': '5.0.1',
'xbmc.json': '6.6.0',
'xbmc.metadata': '2.1.0',
'xbmc.python': '2.14.0'
}
}
STRINGS_ID = {
'plugin': (30000, 30999),
'skin': (31000, 31999),
'script': (32000, 32999),
'all': (30000, 33999),
}
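# Illustrative lookup sketch (not part of the original config module): how the
# tables above might be consulted by calling code. Both helpers below are
# hypothetical and are only included to show the intended shape of the data.
def min_dependency_version(branch, addon_id):
    """Return the minimum version of `addon_id` required by `branch`, or None."""
    return DEPENDENCIES.get(branch, {}).get(addon_id)
def strings_id_range(addon_type):
    """Return the (first, last) string id range allowed for `addon_type`."""
    return STRINGS_ID.get(addon_type, STRINGS_ID['all'])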
| beenje/addon-pr | addonpr/config.py | Python | gpl-2.0 | 1,507 |
# -*- coding: utf-8 -*-
# Generated by Django 1.10.4 on 2017-02-21 01:52
from __future__ import unicode_literals
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [("planner", "0013_auto_20170220_2045")]
operations = [
migrations.AddField(
model_name="school",
name="audit_notes",
field=models.TextField(
blank=True,
help_text="Notes to make performing audits easier",
null=True,
),
)
]
| mblayman/lcp | conductor/planner/migrations/0014_school_audit_notes.py | Python | bsd-2-clause | 561 |
"""
Course Outline page in Studio.
"""
import datetime
from bok_choy.page_object import PageObject
from bok_choy.promise import EmptyPromise
from selenium.webdriver.support.ui import Select
from selenium.webdriver.common.keys import Keys
from ..common.utils import click_css, confirm_prompt
from .course_page import CoursePage
from .container import ContainerPage
from .utils import set_input_value_and_save, set_input_value
class CourseOutlineItem(object):
"""
A mixin class for any :class:`PageObject` shown in a course outline.
"""
BODY_SELECTOR = None
EDIT_BUTTON_SELECTOR = '.xblock-field-value-edit'
NAME_SELECTOR = '.item-title'
NAME_INPUT_SELECTOR = '.xblock-field-input'
NAME_FIELD_WRAPPER_SELECTOR = '.xblock-title .wrapper-xblock-field'
STATUS_MESSAGE_SELECTOR = '> div[class$="status"] .status-message'
CONFIGURATION_BUTTON_SELECTOR = '.action-item .configure-button'
def __repr__(self):
# CourseOutlineItem is also used as a mixin for CourseOutlinePage, which doesn't have a locator
# Check for the existence of a locator so that errors when navigating to the course outline page don't show up
# as errors in the repr method instead.
try:
return "{}(<browser>, {!r})".format(self.__class__.__name__, self.locator)
except AttributeError:
return "{}(<browser>)".format(self.__class__.__name__)
def _bounded_selector(self, selector):
"""
Returns `selector`, but limited to this particular `CourseOutlineItem` context
"""
# If the item doesn't have a body selector or locator, then it can't be bounded
# This happens in the context of the CourseOutlinePage
if self.BODY_SELECTOR and hasattr(self, 'locator'):
return '{}[data-locator="{}"] {}'.format(
self.BODY_SELECTOR,
self.locator,
selector
)
else:
return selector
@property
def name(self):
"""
Returns the display name of this object.
"""
name_element = self.q(css=self._bounded_selector(self.NAME_SELECTOR)).first
if name_element:
return name_element.text[0]
else:
return None
@property
def has_status_message(self):
"""
Returns True if the item has a status message, False otherwise.
"""
return self.q(css=self._bounded_selector(self.STATUS_MESSAGE_SELECTOR)).first.visible
@property
def status_message(self):
"""
Returns the status message of this item.
"""
return self.q(css=self._bounded_selector(self.STATUS_MESSAGE_SELECTOR)).text[0]
@property
def has_staff_lock_warning(self):
""" Returns True if the 'Contains staff only content' message is visible """
return self.status_message == 'Contains staff only content' if self.has_status_message else False
@property
def is_staff_only(self):
""" Returns True if the visiblity state of this item is staff only (has a black sidebar) """
return "is-staff-only" in self.q(css=self._bounded_selector(''))[0].get_attribute("class")
def edit_name(self):
"""
Puts the item's name into editable form.
"""
self.q(css=self._bounded_selector(self.EDIT_BUTTON_SELECTOR)).first.click()
def enter_name(self, new_name):
"""
Enters new_name as the item's display name.
"""
set_input_value(self, self._bounded_selector(self.NAME_INPUT_SELECTOR), new_name)
def change_name(self, new_name):
"""
Changes the container's name.
"""
self.edit_name()
set_input_value_and_save(self, self._bounded_selector(self.NAME_INPUT_SELECTOR), new_name)
self.wait_for_ajax()
def finalize_name(self):
"""
Presses ENTER, saving the value of the display name for this item.
"""
self.q(css=self._bounded_selector(self.NAME_INPUT_SELECTOR)).results[0].send_keys(Keys.ENTER)
self.wait_for_ajax()
def set_staff_lock(self, is_locked):
"""
Sets the explicit staff lock of item on the container page to is_locked.
"""
modal = self.edit()
modal.is_explicitly_locked = is_locked
modal.save()
def in_editable_form(self):
"""
Return whether this outline item's display name is in its editable form.
"""
return "is-editing" in self.q(
css=self._bounded_selector(self.NAME_FIELD_WRAPPER_SELECTOR)
)[0].get_attribute("class")
def edit(self):
self.q(css=self._bounded_selector(self.CONFIGURATION_BUTTON_SELECTOR)).first.click()
modal = CourseOutlineModal(self)
        EmptyPromise(lambda: modal.is_shown(), 'Modal is shown.').fulfill()
return modal
@property
def release_date(self):
element = self.q(css=self._bounded_selector(".status-release-value"))
return element.first.text[0] if element.present else None
@property
def due_date(self):
element = self.q(css=self._bounded_selector(".status-grading-date"))
return element.first.text[0] if element.present else None
@property
def policy(self):
element = self.q(css=self._bounded_selector(".status-grading-value"))
return element.first.text[0] if element.present else None
def publish(self):
"""
Publish the unit.
"""
click_css(self, self._bounded_selector('.action-publish'), require_notification=False)
modal = CourseOutlineModal(self)
        EmptyPromise(lambda: modal.is_shown(), 'Modal is shown.').fulfill()
modal.publish()
@property
def publish_action(self):
"""
Returns the link for publishing a unit.
"""
return self.q(css=self._bounded_selector('.action-publish')).first
class CourseOutlineContainer(CourseOutlineItem):
"""
A mixin to a CourseOutline page object that adds the ability to load
a child page object by title or by index.
CHILD_CLASS must be a :class:`CourseOutlineChild` subclass.
"""
CHILD_CLASS = None
ADD_BUTTON_SELECTOR = '> .outline-content > .add-item a.button-new'
def child(self, title, child_class=None):
"""
:type self: object
"""
if not child_class:
child_class = self.CHILD_CLASS
return child_class(
self.browser,
self.q(css=child_class.BODY_SELECTOR).filter(
lambda el: title in [inner.text for inner in
el.find_elements_by_css_selector(child_class.NAME_SELECTOR)]
).attrs('data-locator')[0]
)
def children(self, child_class=None):
"""
Returns all the children page objects of class child_class.
"""
if not child_class:
child_class = self.CHILD_CLASS
return self.q(css=self._bounded_selector(child_class.BODY_SELECTOR)).map(
lambda el: child_class(self.browser, el.get_attribute('data-locator'))).results
def child_at(self, index, child_class=None):
"""
Returns the child at the specified index.
:type self: object
"""
if not child_class:
child_class = self.CHILD_CLASS
return self.children(child_class)[index]
def add_child(self, require_notification=True):
"""
Adds a child to this xblock, waiting for notifications.
"""
click_css(
self,
self._bounded_selector(self.ADD_BUTTON_SELECTOR),
require_notification=require_notification,
)
def expand_subsection(self):
"""
Toggle the expansion of this subsection.
"""
self.browser.execute_script("jQuery.fx.off = true;")
def subsection_expanded():
add_button = self.q(css=self._bounded_selector(self.ADD_BUTTON_SELECTOR)).first.results
return add_button and add_button[0].is_displayed()
currently_expanded = subsection_expanded()
self.q(css=self._bounded_selector('.ui-toggle-expansion i')).first.click()
self.wait_for_element_presence(self._bounded_selector(self.ADD_BUTTON_SELECTOR), 'Subsection is expanded')
EmptyPromise(
lambda: subsection_expanded() != currently_expanded,
"Check that the container {} has been toggled".format(self.locator)
).fulfill()
self.browser.execute_script("jQuery.fx.off = false;")
return self
@property
def is_collapsed(self):
"""
Return whether this outline item is currently collapsed.
"""
return "is-collapsed" in self.q(css=self._bounded_selector('')).first.attrs("class")[0]
class CourseOutlineChild(PageObject, CourseOutlineItem):
"""
A page object that will be used as a child of :class:`CourseOutlineContainer`.
"""
url = None
BODY_SELECTOR = '.outline-item'
def __init__(self, browser, locator):
super(CourseOutlineChild, self).__init__(browser)
self.locator = locator
def is_browser_on_page(self):
return self.q(css='{}[data-locator="{}"]'.format(self.BODY_SELECTOR, self.locator)).present
def delete(self, cancel=False):
"""
Clicks the delete button, then cancels at the confirmation prompt if cancel is True.
"""
click_css(self, self._bounded_selector('.delete-button'), require_notification=False)
confirm_prompt(self, cancel)
def _bounded_selector(self, selector):
"""
Return `selector`, but limited to this particular `CourseOutlineChild` context
"""
return '{}[data-locator="{}"] {}'.format(
self.BODY_SELECTOR,
self.locator,
selector
)
@property
def name(self):
titles = self.q(css=self._bounded_selector(self.NAME_SELECTOR)).text
if titles:
return titles[0]
else:
return None
@property
def children(self):
"""
Will return any first-generation descendant items of this item.
"""
descendants = self.q(css=self._bounded_selector(self.BODY_SELECTOR)).map(
lambda el: CourseOutlineChild(self.browser, el.get_attribute('data-locator'))).results
# Now remove any non-direct descendants.
grandkids = []
for descendant in descendants:
grandkids.extend(descendant.children)
grand_locators = [grandkid.locator for grandkid in grandkids]
return [descendant for descendant in descendants if descendant.locator not in grand_locators]
class CourseOutlineUnit(CourseOutlineChild):
"""
PageObject that wraps a unit link on the Studio Course Outline page.
"""
url = None
BODY_SELECTOR = '.outline-unit'
NAME_SELECTOR = '.unit-title a'
def go_to(self):
"""
Open the container page linked to by this unit link, and return
an initialized :class:`.ContainerPage` for that unit.
"""
return ContainerPage(self.browser, self.locator).visit()
def is_browser_on_page(self):
return self.q(css=self.BODY_SELECTOR).present
def children(self):
return self.q(css=self._bounded_selector(self.BODY_SELECTOR)).map(
lambda el: CourseOutlineUnit(self.browser, el.get_attribute('data-locator'))).results
class CourseOutlineSubsection(CourseOutlineContainer, CourseOutlineChild):
"""
:class`.PageObject` that wraps a subsection block on the Studio Course Outline page.
"""
url = None
BODY_SELECTOR = '.outline-subsection'
NAME_SELECTOR = '.subsection-title'
NAME_FIELD_WRAPPER_SELECTOR = '.subsection-header .wrapper-xblock-field'
CHILD_CLASS = CourseOutlineUnit
def unit(self, title):
"""
        Return the :class:`.CourseOutlineUnit` with the title `title`.
"""
return self.child(title)
def units(self):
"""
Returns the units in this subsection.
"""
return self.children()
def unit_at(self, index):
"""
Returns the CourseOutlineUnit at the specified index.
"""
return self.child_at(index)
def add_unit(self):
"""
Adds a unit to this subsection
"""
self.q(css=self._bounded_selector(self.ADD_BUTTON_SELECTOR)).click()
class CourseOutlineSection(CourseOutlineContainer, CourseOutlineChild):
"""
:class`.PageObject` that wraps a section block on the Studio Course Outline page.
"""
url = None
BODY_SELECTOR = '.outline-section'
NAME_SELECTOR = '.section-title'
NAME_FIELD_WRAPPER_SELECTOR = '.section-header .wrapper-xblock-field'
CHILD_CLASS = CourseOutlineSubsection
def subsection(self, title):
"""
Return the :class:`.CourseOutlineSubsection` with the title `title`.
"""
return self.child(title)
def subsections(self):
"""
Returns a list of the CourseOutlineSubsections of this section
"""
return self.children()
def subsection_at(self, index):
"""
Returns the CourseOutlineSubsection at the specified index.
"""
return self.child_at(index)
def add_subsection(self):
"""
Adds a subsection to this section
"""
self.add_child()
class ExpandCollapseLinkState(object):
"""
Represents the three states that the expand/collapse link can be in
"""
MISSING = 0
COLLAPSE = 1
EXPAND = 2
class CourseOutlinePage(CoursePage, CourseOutlineContainer):
"""
Course Outline page in Studio.
"""
url_path = "course"
CHILD_CLASS = CourseOutlineSection
EXPAND_COLLAPSE_CSS = '.button-toggle-expand-collapse'
BOTTOM_ADD_SECTION_BUTTON = '.outline > .add-section .button-new'
def is_browser_on_page(self):
return self.q(css='body.view-outline').present and self.q(css='div.ui-loading.is-hidden').present
def view_live(self):
"""
Clicks the "View Live" link and switches to the new tab
"""
click_css(self, '.view-live-button', require_notification=False)
self.browser.switch_to_window(self.browser.window_handles[-1])
def section(self, title):
"""
Return the :class:`.CourseOutlineSection` with the title `title`.
"""
return self.child(title)
def section_at(self, index):
"""
Returns the :class:`.CourseOutlineSection` at the specified index.
"""
return self.child_at(index)
def click_section_name(self, parent_css=''):
"""
Find and click on first section name in course outline
"""
self.q(css='{} .section-name'.format(parent_css)).first.click()
def get_section_name(self, parent_css='', page_refresh=False):
"""
Get the list of names of all sections present
"""
if page_refresh:
self.browser.refresh()
return self.q(css='{} .section-name'.format(parent_css)).text
def section_name_edit_form_present(self, parent_css=''):
"""
Check that section name edit form present
"""
return self.q(css='{} .section-name input'.format(parent_css)).present
def change_section_name(self, new_name, parent_css=''):
"""
Change section name of first section present in course outline
"""
self.click_section_name(parent_css)
self.q(css='{} .section-name input'.format(parent_css)).first.fill(new_name)
self.q(css='{} .section-name .save-button'.format(parent_css)).first.click()
self.wait_for_ajax()
def click_release_date(self):
"""
Open release date edit modal of first section in course outline
"""
self.q(css='div.section-published-date a.edit-release-date').first.click()
def sections(self):
"""
Returns the sections of this course outline page.
"""
return self.children()
def add_section_from_top_button(self):
"""
Clicks the button for adding a section which resides at the top of the screen.
"""
click_css(self, '.wrapper-mast nav.nav-actions .button-new')
def add_section_from_bottom_button(self, click_child_icon=False):
"""
Clicks the button for adding a section which resides at the bottom of the screen.
"""
element_css = self.BOTTOM_ADD_SECTION_BUTTON
if click_child_icon:
element_css += " .fa-plus"
click_css(self, element_css)
def toggle_expand_collapse(self):
"""
Toggles whether all sections are expanded or collapsed
"""
self.q(css=self.EXPAND_COLLAPSE_CSS).click()
def start_reindex(self):
"""
Starts course reindex by clicking reindex button
"""
self.reindex_button.click()
def open_exam_settings_dialog(self):
"""
clicks on the settings button of subsection.
"""
self.q(css=".subsection-header-actions .configure-button").first.click()
def change_problem_release_date_in_studio(self):
"""
Sets a new start date
"""
self.q(css=".subsection-header-actions .configure-button").first.click()
self.q(css="#start_date").fill("01/01/2030")
self.q(css=".action-save").first.click()
self.wait_for_ajax()
def make_exam_proctored(self):
"""
Makes a Proctored exam.
"""
self.q(css="#id_proctored_exam").first.click()
self.q(css=".action-save").first.click()
self.wait_for_ajax()
def make_exam_timed(self):
"""
Makes a timed exam.
"""
self.q(css="#id_timed_exam").first.click()
self.q(css=".action-save").first.click()
self.wait_for_ajax()
def select_none_exam(self):
"""
Choose "none" exam but do not press enter
"""
self.q(css="#id_not_timed").first.click()
def select_timed_exam(self):
"""
Choose a timed exam but do not press enter
"""
self.q(css="#id_timed_exam").first.click()
def select_proctored_exam(self):
"""
Choose a proctored exam but do not press enter
"""
self.q(css="#id_proctored_exam").first.click()
def select_practice_exam(self):
"""
Choose a practice exam but do not press enter
"""
self.q(css="#id_practice_exam").first.click()
def time_allotted_field_visible(self):
"""
returns whether the time allotted field is visible
"""
return self.q(css="#id_time_limit_div").visible
def proctoring_items_are_displayed(self):
"""
Returns True if all the items are found.
"""
# The None radio button
if not self.q(css="#id_not_timed").present:
return False
# The Timed exam radio button
if not self.q(css="#id_timed_exam").present:
return False
# The Proctored exam radio button
if not self.q(css="#id_proctored_exam").present:
return False
# The Practice exam radio button
if not self.q(css="#id_practice_exam").present:
return False
return True
@property
def bottom_add_section_button(self):
"""
Returns the query representing the bottom add section button.
"""
return self.q(css=self.BOTTOM_ADD_SECTION_BUTTON).first
@property
def has_no_content_message(self):
"""
Returns true if a message informing the user that the course has no content is visible
"""
return self.q(css='.outline .no-content').is_present()
@property
def has_rerun_notification(self):
"""
Returns true iff the rerun notification is present on the page.
"""
return self.q(css='.wrapper-alert.is-shown').is_present()
def dismiss_rerun_notification(self):
"""
Clicks the dismiss button in the rerun notification.
"""
self.q(css='.dismiss-button').click()
@property
def expand_collapse_link_state(self):
"""
Returns the current state of the expand/collapse link
"""
link = self.q(css=self.EXPAND_COLLAPSE_CSS)[0]
if not link.is_displayed():
return ExpandCollapseLinkState.MISSING
elif "collapse-all" in link.get_attribute("class"):
return ExpandCollapseLinkState.COLLAPSE
else:
return ExpandCollapseLinkState.EXPAND
@property
def reindex_button(self):
"""
Returns reindex button.
"""
return self.q(css=".button.button-reindex")[0]
def expand_all_subsections(self):
"""
Expands all the subsections in this course.
"""
for section in self.sections():
if section.is_collapsed:
section.expand_subsection()
for subsection in section.subsections():
if subsection.is_collapsed:
subsection.expand_subsection()
@property
def xblocks(self):
"""
Return a list of xblocks loaded on the outline page.
"""
return self.children(CourseOutlineChild)
@property
def license(self):
"""
Returns the course license text, if present. Else returns None.
"""
return self.q(css=".license-value").first.text[0]
@property
def deprecated_warning_visible(self):
"""
Returns true if the deprecated warning is visible.
"""
return self.q(css='.wrapper-alert-error.is-shown').is_present()
@property
def warning_heading_text(self):
"""
Returns deprecated warning heading text.
"""
return self.q(css='.warning-heading-text').text[0]
@property
def components_list_heading(self):
"""
Returns deprecated warning component list heading text.
"""
return self.q(css='.components-list-heading-text').text[0]
@property
def modules_remove_text_shown(self):
"""
Returns True if deprecated warning advance modules remove text is visible.
"""
return self.q(css='.advance-modules-remove-text').visible
@property
def modules_remove_text(self):
"""
Returns deprecated warning advance modules remove text.
"""
return self.q(css='.advance-modules-remove-text').text[0]
@property
def components_visible(self):
"""
Returns True if components list visible.
"""
return self.q(css='.components-list').visible
@property
def components_display_names(self):
"""
Returns deprecated warning components display name list.
"""
return self.q(css='.components-list li>a').text
@property
def deprecated_advance_modules(self):
"""
Returns deprecated advance modules list.
"""
return self.q(css='.advance-modules-list li').text
class CourseOutlineModal(object):
MODAL_SELECTOR = ".wrapper-modal-window"
def __init__(self, page):
self.page = page
def _bounded_selector(self, selector):
"""
Returns `selector`, but limited to this particular `CourseOutlineModal` context.
"""
return " ".join([self.MODAL_SELECTOR, selector])
def is_shown(self):
return self.page.q(css=self.MODAL_SELECTOR).present
def find_css(self, selector):
return self.page.q(css=self._bounded_selector(selector))
def click(self, selector, index=0):
self.find_css(selector).nth(index).click()
def save(self):
self.click(".action-save")
self.page.wait_for_ajax()
def publish(self):
self.click(".action-publish")
self.page.wait_for_ajax()
def cancel(self):
self.click(".action-cancel")
def has_release_date(self):
return self.find_css("#start_date").present
def has_due_date(self):
return self.find_css("#due_date").present
def has_policy(self):
return self.find_css("#grading_type").present
def set_date(self, property_name, input_selector, date):
"""
Set `date` value to input pointed by `selector` and `property_name`.
"""
month, day, year = map(int, date.split('/'))
self.click(input_selector)
if getattr(self, property_name):
current_month, current_year = map(int, getattr(self, property_name).split('/')[1:])
else: # Use default timepicker values, which are current month and year.
current_month, current_year = datetime.datetime.today().month, datetime.datetime.today().year
date_diff = 12 * (year - current_year) + month - current_month
selector = "a.ui-datepicker-{}".format('next' if date_diff > 0 else 'prev')
for i in xrange(abs(date_diff)):
self.page.q(css=selector).click()
self.page.q(css="a.ui-state-default").nth(day - 1).click() # set day
self.page.wait_for_element_invisibility("#ui-datepicker-div", "datepicker should be closed")
EmptyPromise(
lambda: getattr(self, property_name) == u'{m}/{d}/{y}'.format(m=month, d=day, y=year),
"{} is updated in modal.".format(property_name)
).fulfill()
@property
def release_date(self):
return self.find_css("#start_date").first.attrs('value')[0]
@release_date.setter
def release_date(self, date):
"""
Date is "mm/dd/yyyy" string.
"""
self.set_date('release_date', "#start_date", date)
@property
def due_date(self):
return self.find_css("#due_date").first.attrs('value')[0]
@due_date.setter
def due_date(self, date):
"""
Date is "mm/dd/yyyy" string.
"""
self.set_date('due_date', "#due_date", date)
@property
def policy(self):
"""
        Return the currently selected grading format from the drop-down list.
"""
element = self.find_css('#grading_type')[0]
return self.get_selected_option_text(element)
@policy.setter
def policy(self, grading_label):
"""
Select the grading format with `value` in the drop-down list.
"""
element = self.find_css('#grading_type')[0]
select = Select(element)
select.select_by_visible_text(grading_label)
EmptyPromise(
lambda: self.policy == grading_label,
"Grading label is updated.",
).fulfill()
@property
def is_explicitly_locked(self):
"""
        Returns true if the explicit staff lock checkbox is checked, false otherwise.
"""
return self.find_css('#staff_lock')[0].is_selected()
@is_explicitly_locked.setter
def is_explicitly_locked(self, value):
"""
Checks the explicit staff lock box if value is true, otherwise unchecks the box.
"""
if value != self.is_explicitly_locked:
self.find_css('label[for="staff_lock"]').click()
EmptyPromise(lambda: value == self.is_explicitly_locked, "Explicit staff lock is updated").fulfill()
def shows_staff_lock_warning(self):
"""
Returns true iff the staff lock warning is visible.
"""
return self.find_css('.staff-lock .tip-warning').visible
def get_selected_option_text(self, element):
"""
Returns the text of the first selected option for the element.
"""
if element:
select = Select(element)
return select.first_selected_option.text
else:
return None
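# Illustrative usage sketch (not part of the original page objects): how a
# bok-choy test might drive the classes above. The `outline` argument is assumed
# to be an already-visited CourseOutlinePage supplied by the test fixture; every
# call made below is defined in this module.
def _example_outline_walkthrough(outline):
    """Open the first subsection, set its release date, and visit its first unit."""
    outline.expand_all_subsections()
    subsection = outline.section_at(0).subsection_at(0)
    modal = subsection.edit()
    modal.release_date = "01/01/2030"
    modal.save()
    return subsection.unit_at(0).go_to()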
| iivic/BoiseStateX | common/test/acceptance/pages/studio/overview.py | Python | agpl-3.0 | 27,908 |
"""This is a backport of shutil.get_terminal_size from Python 3.3.
The original implementation is in C, but here we use the ctypes and
fcntl modules to create a pure Python version of os.get_terminal_size.
"""
import os
import struct
import sys
from collections import namedtuple
__all__ = ["get_terminal_size"]
terminal_size = namedtuple("terminal_size", "columns lines")
try:
from ctypes import windll, create_string_buffer
_handles = {
0: windll.kernel32.GetStdHandle(-10),
1: windll.kernel32.GetStdHandle(-11),
2: windll.kernel32.GetStdHandle(-12),
}
def _get_terminal_size(fd):
columns = lines = 0
try:
handle = _handles[fd]
csbi = create_string_buffer(22)
res = windll.kernel32.GetConsoleScreenBufferInfo(handle, csbi)
if res:
res = struct.unpack("hhhhHhhhhhh", csbi.raw)
left, top, right, bottom = res[5:9]
columns = right - left + 1
lines = bottom - top + 1
except Exception:
pass
return columns, lines
except ImportError:
import fcntl
import termios
def _get_terminal_size(fd):
try:
res = fcntl.ioctl(fd, termios.TIOCGWINSZ, b"\x00" * 4)
lines, columns = struct.unpack("hh", res)
except Exception:
columns = lines = 0
return columns, lines
def get_terminal_size(fallback=(80, 24)):
"""Get the size of the terminal window.
For each of the two dimensions, the environment variable, COLUMNS
and LINES respectively, is checked. If the variable is defined and
the value is a positive integer, it is used.
When COLUMNS or LINES is not defined, which is the common case,
the terminal connected to sys.__stdout__ is queried
by invoking os.get_terminal_size.
If the terminal size cannot be successfully queried, either because
the system doesn't support querying, or because we are not
connected to a terminal, the value given in fallback parameter
is used. Fallback defaults to (80, 24) which is the default
size used by many terminal emulators.
The value returned is a named tuple of type os.terminal_size.
"""
# Attempt to use the environment first
try:
columns = int(os.environ["COLUMNS"])
except (KeyError, ValueError):
columns = 0
try:
lines = int(os.environ["LINES"])
except (KeyError, ValueError):
lines = 0
# Only query if necessary
if columns <= 0 or lines <= 0:
try:
columns, lines = _get_terminal_size(sys.__stdout__.fileno())
except (NameError, OSError):
pass
# Use fallback as last resort
if columns <= 0 and lines <= 0:
columns, lines = fallback
return terminal_size(columns, lines)
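# Illustrative usage sketch (not part of the original backport): query the size of
# the attached terminal, falling back to 80x24 when no terminal is connected.
if __name__ == "__main__":
    size = get_terminal_size()
    print("columns=%d lines=%d" % (size.columns, size.lines))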
| vicky2135/lucious | oscar/lib/python2.7/site-packages/shutil_backports/get_terminal_size.py | Python | bsd-3-clause | 2,867 |
"""
Creating standalone Django apps is a PITA because you're not in a project, so
you don't have a settings.py file. I can never remember to define
DJANGO_SETTINGS_MODULE, so I run these commands which get the right env
automatically.
"""
import functools
import os
from fabric.api import local, cd, env
from fabric.contrib.project import rsync_project
NAME = os.path.basename(os.path.dirname(__file__))
ROOT = os.path.abspath(os.path.dirname(__file__))
os.environ['PYTHONPATH'] = os.pathsep.join([ROOT,
os.path.join(ROOT, 'examples')])
env.hosts = ['jbalogh.me']
local = functools.partial(local, capture=False)
def doc(kind='html'):
with cd('docs'):
local('make clean %s' % kind)
SETTINGS = ('locmem_settings',
'settings',
'memcache_byid',
'custom_backend')
try:
import redis
redis.Redis(host='localhost', port=6379).info()
SETTINGS += ('redis_settings', 'redis_byid')
except Exception:
print 'WARNING: Skipping redis tests.'
def test():
for settings in SETTINGS:
print settings
os.environ['DJANGO_SETTINGS_MODULE'] = 'cache_machine.%s' % settings
local('django-admin.py test')
def updoc():
doc('dirhtml')
rsync_project('p/%s' % NAME, 'docs/_build/dirhtml/', delete=True)
| Perkville/django-cache-machine | fabfile.py | Python | bsd-3-clause | 1,331 |
# This file is part of cldoc. cldoc is free software: you can
# redistribute it and/or modify it under the terms of the GNU General Public
# License as published by the Free Software Foundation, version 2.
#
# This program is distributed in the hope that it will be useful, but WITHOUT
# ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS
# FOR A PARTICULAR PURPOSE. See the GNU General Public License for more
# details.
#
# You should have received a copy of the GNU General Public License along with
# this program; if not, write to the Free Software Foundation, Inc., 51
# Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA.
import copy
class Struct(object):
def __init__(self, **kwargs):
for key in kwargs:
setattr(self, key, kwargs[key])
@staticmethod
def define(_name, **kwargs):
defaults = kwargs
class subclass(Struct):
def __init__(self, **kwargs):
defs = copy.deepcopy(defaults)
for key in kwargs:
if not key in defs:
raise AttributeError("'{0}' has no attribute '{1}'".format(_name, key))
else:
defs[key] = kwargs[key]
super(subclass, self).__init__(**defs)
subclass.__name__ = _name
return subclass
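# Illustrative usage sketch (not part of cldoc itself): Struct.define builds a
# record-like class with default field values, and passing an unknown keyword
# raises AttributeError. The 'Options' name and its fields are invented here.
if __name__ == "__main__":
    Options = Struct.define('Options', merge=False, output=None)
    opts = Options(merge=True)
    print(opts.merge)   # -> True
    print(opts.output)  # -> None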
| kostyll/cldoc | cldoc/struct.py | Python | gpl-2.0 | 1,364 |
from django.contrib import admin
from django.forms import ModelForm
from django.contrib.auth.admin import UserAdmin as BaseUserAdmin
from django.contrib.auth.models import User
from django.contrib.flatpages.models import FlatPage
from django.contrib.flatpages.admin import FlatPageAdmin
from pagedown.widgets import PagedownWidget
from modeltranslation.admin import TranslationAdmin
from models import Application, Component, Asset, Profile, License
import assets.translation
class ComponentInline(admin.StackedInline):
model = Component
class ComponentAdmin(TranslationAdmin):
pass
class ProfileInline(admin.StackedInline):
model = Profile
can_delete = False
class UserAdmin(BaseUserAdmin):
inlines = (ProfileInline, )
class ApplicationForm(ModelForm):
class Meta:
model = Application
fields = ['slug', 'title', 'url', 'notes', 'logo']
widgets = {'notes': PagedownWidget()}
class ApplicationAdmin(TranslationAdmin):
form = ApplicationForm
#inlines = [ComponentInline]
class LicenseForm(ModelForm):
class Meta:
model = License
fields = ['slug', 'title', 'notes', 'text']
widgets = {'notes': PagedownWidget(), 'text': PagedownWidget()}
class LicenseAdmin(admin.ModelAdmin):
form = LicenseForm
class TranslatedFlatPageAdmin(FlatPageAdmin, TranslationAdmin):
def formfield_for_dbfield(self, db_field, **kwargs):
field = super(TranslatedFlatPageAdmin, self).formfield_for_dbfield(db_field, **kwargs)
self.patch_translation_field(db_field, field, **kwargs)
return field
admin.site.unregister(User)
admin.site.register(User, UserAdmin)
admin.site.register(Application, ApplicationAdmin)
admin.site.register(Component, ComponentAdmin)
admin.site.register(Asset)
admin.site.register(License, LicenseAdmin)
admin.site.unregister(FlatPage)
admin.site.register(FlatPage, TranslatedFlatPageAdmin)
| portnov/assethub | assethub/assets/admin.py | Python | bsd-3-clause | 1,916 |
# Copyright 2016 the original author or authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
__author__ = "Julian Debatin"
__copyright__ = "The authors"
__license__ = "Apache 2"
__email__ = "[email protected]"
__status__ = "Production"
from ModuroModel.Sd.SdCdbPcdiInUa import SdCdbPcdiInUa
class SdCdbPcdiInDa(SdCdbPcdiInUa):
def __init__(self, sim, simthread):
SdCdbPcdiInUa.__init__(self, sim, simthread)
def _initModel(self):
self.name = "SdCdbPcdiInDa"
self.adhFactor = 0.25
self.cellTypes = self._createCellTypes()
self.energyMatrix = self._createEnergyMatrix()
self._run() # Must be the last statement.
def _createEnergyMatrix(self):
energyMatrix = [[0, 14, 14, 14, 14, 4],
[0, -1, 1, 3, 12, 12],
[0, 0, 6, 4, 8, 14],
[0, 0, 0, 5, 8, 12],
[0, 0, 0, 0, 6, 4],
[0, 0, 0, 0, 0, 2]]
        return energyMatrix
 | informatik-mannheim/Moduro-CC3D | Simulation/ModuroModel/Sd/SdCdbPcdiInDa.py | Python | apache-2.0 | 1,535
# coding: utf-8
#!/usr/bin/python2.4
#
# Small script to show PostgreSQL and Pyscopg together
#
# https://wiki.postgresql.org/wiki/Psycopg2_Tutorial
#
# Notes:
# transactions are not that simple. http://initd.org/psycopg/docs/connection.html#connection.autocommit
# http://stackoverflow.com/questions/5402805/error-when-creating-a-postgresql-database-using-python-sqlalchemy-and-psycopg2
# http://stackoverflow.com/questions/3413646/postgres-raises-a-active-sql-transaction-errcode-25001
# http://www.devx.com/opensource/Article/29071
import psycopg2
# PostgreSQL/C++/VS08/Win7/x64 - looks very hard
# http://qt-project.org/doc/qt-5/qsqldatabase.html
# http://qt-project.org/doc/qt-5/sql-driver.html#supported-databases
#
# http://www.youtube.com/watch?v=fBgJ9Azm_S0 - looks like the MinGW build is needed - WTF!? and what about the other libraries!
# maybe it will also build fine with Visual Studio?
# http://www.carnero.net/en/how-to-install-qt-mysql-driver-on-windows-7-x86-x64/
'''
Joel - 2008
// text
feature
task
note
// int
priority
// in hours
// FIXME: not clear how to calculate this - just a reserve for now
original_estimation
current_estimation
elapsed
remain
'''
cur = None
conn = None
table_name = "task_entity"
schedule_elem_entity = "schedule_elem_entity"
try:
conn = psycopg2.connect("dbname='postgres' user='postgres' host='localhost' password='123'")
conn.set_session(autocommit=True)
# Clear what done
cur = conn.cursor()
cur.execute("CREATE TABLE " +
"IF NOT EXISTS " + # v9.1 >=
schedule_elem_entity +
"(" + # // сделать чтобы было >0
"ID SERIAL PRIMARY KEY NOT NULL," +
"TASK_NAME TEXT NOT NULL, " +
"PRIORITY INT NOT NULL, " +
"DONE BOOLEAN DEFAULT FALSE);")
# Queries
cur.execute("SELECT * from " + table_name)
rows = cur.fetchall()
print "\nRows: \n"
for row in rows:
print " ", row[1]
# Remove
#conn.set_isolation_level(0) # Old
#drop_cur = conn.cursor()
cur.execute("DROP TABLE " + schedule_elem_entity) # не видит таблицу
#conn.set_isolation_level(1)
except psycopg2.OperationalError, e:
print "I am unable to connect to the database", e
except psycopg2.InternalError, e:
print "I can't drop our test database!", e
# except psycopg2.ProgrammingError, e:
#    The database was already created
# print e
finally:
if cur:
cur.close()
if conn:
conn.close()
| zaqwes8811/coordinator-tasks | projects/python-desk/python_pq_bridge/driver_test.py | Python | apache-2.0 | 2,680 |
import formencode
__author__ = 'Chirag Mistry (Tessella)'
class UpdateUserForm(formencode.Schema):
allow_extra_fields = True
filter_extra_fields = False
first_name = formencode.validators.String(not_empty=True, max=50)
last_name = formencode.validators.String(not_empty=True, max=50)
email = formencode.validators.Email(not_empty=True, max=255)
is_admin = formencode.validators.Bool()
class CreateUserForm(UpdateUserForm):
"""Used to validate data from the Create New User page"""
allow_extra_fields = False
filter_extra_fields = True
    user_name = formencode.validators.String(not_empty=True)
 | NERC-CEH/ecomaps | ecomaps/model/create_new_user_form.py | Python | gpl-2.0 | 639
# -*- coding: utf-8 -*-
#
# RERO ILS
# Copyright (C) 2021 RERO
# Copyright (C) 2020 UCLouvain
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, version 3 of the License.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
"""Click command-line utilities."""
from __future__ import absolute_import, print_function
import click
from .fixtures import fixtures
from .index import index
from .users import users
from .utils import utils
from ..apiharvester.cli import apiharvester
from ..contributions.cli import contribution
from ..ebooks.cli import oaiharvester
from ..monitoring.cli import monitoring
from ..notifications.cli import notifications
from ..stats.cli import stats
from ...schedulers import scheduler
@click.group()
def reroils():
"""Reroils management commands."""
reroils.add_command(apiharvester)
reroils.add_command(contribution)
reroils.add_command(fixtures)
reroils.add_command(index)
reroils.add_command(monitoring)
reroils.add_command(notifications)
reroils.add_command(oaiharvester)
reroils.add_command(scheduler)
reroils.add_command(stats)
reroils.add_command(users)
reroils.add_command(utils)
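
# Illustrative only (not part of the original module): the command group can be
# exercised without a running instance by using click's test runner, e.g.
#
#   from click.testing import CliRunner
#   print(CliRunner().invoke(reroils, ['--help']).output)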
| rero/reroils-app | rero_ils/modules/cli/reroils.py | Python | gpl-2.0 | 1,605 |
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from django.core.urlresolvers import reverse # noqa
from django import http
from mox import IsA # noqa
from openstack_dashboard import api
from openstack_dashboard.test import helpers as test
class FlavorExtrasTests(test.BaseAdminViewTests):
@test.create_stubs({api.nova: ('flavor_get_extras',
'flavor_get'), })
def test_list_extras_when_none_exists(self):
flavor = self.flavors.first()
extras = [api.nova.FlavorExtraSpec(flavor.id, 'k1', 'v1')]
# GET -- to determine correctness of output
api.nova.flavor_get(IsA(http.HttpRequest), flavor.id).AndReturn(flavor)
api.nova.flavor_get_extras(IsA(http.HttpRequest),
flavor.id).AndReturn(extras)
self.mox.ReplayAll()
url = reverse('horizon:admin:flavors:extras:index', args=[flavor.id])
resp = self.client.get(url)
self.assertEqual(resp.status_code, 200)
self.assertTemplateUsed(resp, "admin/flavors/extras/index.html")
@test.create_stubs({api.nova: ('flavor_extra_set', ), })
def test_extra_create_post(self):
flavor = self.flavors.first()
create_url = reverse('horizon:admin:flavors:extras:create',
args=[flavor.id])
index_url = reverse('horizon:admin:flavors:extras:index',
args=[flavor.id])
# GET to display the flavor_name
api.nova.flavor_extra_set(IsA(http.HttpRequest),
flavor.id,
{'k1': 'v1'})
self.mox.ReplayAll()
data = {'flavor_id': flavor.id,
'key': 'k1',
'value': 'v1'}
resp = self.client.post(create_url, data)
self.assertNoFormErrors(resp)
self.assertMessageCount(success=1)
self.assertRedirectsNoFollow(resp, index_url)
@test.create_stubs({api.nova: ('flavor_get', ), })
def test_extra_create_get(self):
flavor = self.flavors.first()
create_url = reverse('horizon:admin:flavors:extras:create',
args=[flavor.id])
api.nova.flavor_get(IsA(http.HttpRequest), flavor.id).AndReturn(flavor)
self.mox.ReplayAll()
resp = self.client.get(create_url)
self.assertEqual(resp.status_code, 200)
self.assertTemplateUsed(resp,
'admin/flavors/extras/create.html')
| ikargis/horizon_fod | openstack_dashboard/dashboards/admin/flavors/extras/tests.py | Python | apache-2.0 | 3,001 |
from io import BytesIO
import os
import zipfile
from django import forms
from django.contrib import messages
from django.db.models import Q
from django.http import HttpResponse, HttpResponseRedirect
from django.shortcuts import redirect
from django.utils.translation import ugettext_lazy, ugettext as _
from towel import modelview
from photos.models import Photo, determine_cover_photo
class PhotoUploadForm(forms.ModelForm):
title = forms.CharField(
max_length=100, label=ugettext_lazy('title'), required=False)
class Meta:
fields = ('photo', 'title')
model = Photo
class ModelView(modelview.ModelView):
"""
ModelView subclass holding behavior specific to the photos app.
"""
def editing_allowed(self, request, instance):
if request.user.is_staff or request.user == instance.created_by:
return True
return False
def deletion_allowed(self, request, instance):
if request.user.is_staff:
return True
if request.user == instance.created_by:
return self.deletion_allowed_if_only(
request, instance, [self.model])
return False
def get_form(self, request, instance=None, **kwargs):
return super(ModelView, self).get_form(
request, instance=instance, exclude=('created_by',))
def save_model(self, request, instance, form, change):
if not change:
instance.created_by = request.user
instance.save()
class AlbumModelView(ModelView):
def get_query_set(self, request, *args, **kwargs):
return super(AlbumModelView, self).get_query_set(
request, *args, **kwargs).transform(determine_cover_photo)
def additional_urls(self):
return [
(r'^%(detail)s/zip/$', self.crud_view_decorator(self.zip)),
]
def detail_view(self, request, *args, **kwargs):
instance = self.get_object_or_404(request, *args, **kwargs)
if request.method == 'POST':
form = PhotoUploadForm(request.POST, request.FILES)
if form.is_valid():
photo = form.save(commit=False)
photo.album = instance
photo.created_by = request.user
if not photo.title:
photo.title = photo.photo.name
photo.save()
messages.success(
request, _('The photo has been successfully uploaded.'))
return HttpResponseRedirect('.')
else:
form = PhotoUploadForm()
return self.render_detail(request, {
self.template_object_name: instance,
'editing_allowed': self.editing_allowed(request, instance),
'form': form,
})
def zip(self, request, *args, **kwargs):
instance = self.get_object_or_404(request, *args, **kwargs)
buf = BytesIO()
zf = zipfile.ZipFile(buf, 'w', zipfile.ZIP_DEFLATED)
for photo in instance.photos.all():
zf.writestr(os.path.basename(photo.photo.name), photo.photo.read())
zf.close()
buf.flush()
response = HttpResponse(buf.getvalue())
buf.close()
response['Content-Disposition'] = 'attachment; filename=album.zip'
return response
class PhotoModelView(ModelView):
def get_query_set(self, request, *args, **kwargs):
if request.user.is_staff:
return self.model.objects.all()
return self.model.objects.filter(
Q(is_flagged=False)
| Q(created_by=request.user))
def detail_view(self, request, *args, **kwargs):
instance = self.get_object_or_404(request, *args, **kwargs)
return redirect(instance.album)
| matthiask/feincms-photos | photos/views.py | Python | bsd-3-clause | 3,759 |
# -*- coding: utf-8 -*-
# Generated from the Telepathy spec
"""Copyright © 2008-2009 Collabora Ltd.
Copyright © 2008-2009 Nokia Corporation
This library is free software; you can redistribute it and/or
modify it under the terms of the GNU Lesser General Public
License as published by the Free Software Foundation; either
version 2.1 of the License, or (at your option) any later version.
This library is distributed in the hope that it will be useful,
but WITHOUT ANY WARRANTY; without even the implied warranty of
MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
Lesser General Public License for more details.
You should have received a copy of the GNU Lesser General Public
License along with this library; if not, write to the Free Software
Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301,
USA.
"""
import dbus.service
class ChannelDispatcher(dbus.service.Object):
"""\
The channel dispatcher is responsible for responding to new
channels and launching client processes to handle them. It also
provides functionality for client processes to request that new
channels are created.
If a channel dispatcher is running, it is responsible for dispatching
new channels on all
Connections
created by the
AccountManager.
Connections not created by the AccountManager are outside the scope
of the channel dispatcher.
Connections created by standalone Telepathy clients
that do not intend to interact with the channel dispatcher
should be ignored - otherwise, the channel dispatcher would try
to launch handlers for channels that the standalone client
was already handling internally.
The current channel dispatcher is defined to be the process that
owns the well-known bus name
org.freedesktop.Telepathy.ChannelDispatcher on
the session bus. This process MUST export an object with this
interface at the object path
/org/freedesktop/Telepathy/ChannelDispatcher.
Until a mechanism exists for making a reasonable automatic choice
of ChannelDispatcher implementation, implementations SHOULD NOT
register as an activatable service for the ChannelDispatcher's
well-known bus name. Instead, it is RECOMMENDED that some component
of the user's session will select and activate a particular
implementation, and that other Telepathy-enabled programs
can detect whether channel request/dispatch functionality is available
by checking whether the ChannelDispatcher's well-known name is in use
at runtime.
There are three categories of client process defined by this
specification:
Observer
Observers monitor the creation of new channels. This
functionality can be used for things like message logging.
All observers are notified simultaneously.
Approver
Approvers notify the user that new channels have been created,
and also select which channel handler will be used for the channel,
either by asking the user or by choosing the most appropriate
channel handler.
Handler
Each new channel or set of channels is passed to exactly one
handler as its final destination. A typical channel handler is a
user interface process handling channels of a particular type.
"""
@dbus.service.method('org.freedesktop.Telepathy.ChannelDispatcher', in_signature='oa{sv}xs', out_signature='o')
def CreateChannel(self, Account, Requested_Properties, User_Action_Time, Preferred_Handler):
"""
Start a request to create a channel. This initially just creates a
ChannelRequest
object, which can be used to continue the request and track its
success or failure.
The request can take a long time - in the worst case, the
channel dispatcher has to ask the account manager to put the
account online, the account manager has to ask the operating
system to obtain an Internet connection, and the operating
system has to ask the user whether to activate an Internet
connection using an on-demand mechanism like dialup.
This means that using a single D-Bus method call and response
to represent the whole request will tend to lead to that call
timing out, which is not the behaviour we want.
If this method is called for an Account that is disabled, invalid
or otherwise unusable, no error is signalled until
ChannelRequest.Proceed
is called, at which point
ChannelRequest.Failed
is emitted with an appropriate error.
This means there's only one code path for errors, apart from
InvalidArgument for "that request makes no sense".
It also means that the request will proceed if the account is
enabled after calling CreateChannel, but before calling
Proceed.
"""
raise NotImplementedError
@dbus.service.method('org.freedesktop.Telepathy.ChannelDispatcher', in_signature='oa{sv}xs', out_signature='o')
def EnsureChannel(self, Account, Requested_Properties, User_Action_Time, Preferred_Handler):
"""
Start a request to ensure that a channel exists, creating it if
necessary. This initially just creates a ChannelRequest
object, which can be used to continue the request and track its
success or failure.
If this method is called for an Account that is disabled, invalid
or otherwise unusable, no error is signalled until
ChannelRequest.Proceed
is called, at which point
ChannelRequest.Failed
is emitted with an appropriate error.
The rationale is as for CreateChannel.
"""
raise NotImplementedError
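
# Client-side sketch (not part of the generated service stub) of how a process
# could reach the dispatcher described in the docstrings above, using the
# well-known bus name and object path they define. It assumes a running
# session bus with a ChannelDispatcher implementation and dbus-python; the
# account path and requested properties below are placeholders.
#
#   import dbus
#   bus = dbus.SessionBus()
#   proxy = bus.get_object('org.freedesktop.Telepathy.ChannelDispatcher',
#                          '/org/freedesktop/Telepathy/ChannelDispatcher')
#   dispatcher = dbus.Interface(proxy,
#                               'org.freedesktop.Telepathy.ChannelDispatcher')
#   request = dispatcher.CreateChannel(account_path, requested_properties,
#                                      0, '')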
| epage/The-One-Ring | src/tp/_generated/Channel_Dispatcher.py | Python | lgpl-2.1 | 6,256 |
# uncompyle6 version 2.9.10
# Python bytecode 2.7 (62211)
# Decompiled from: Python 3.6.0b2 (default, Oct 11 2016, 05:27:10)
# [GCC 6.2.0 20161005]
# Embedded file name: mac_latin2.py
""" Python Character Mapping Codec generated from 'LATIN2.TXT' with gencodec.py.
Written by Marc-Andre Lemburg ([email protected]).
(c) Copyright CNRI, All Rights Reserved. NO WARRANTY.
(c) Copyright 2000 Guido van Rossum.
"""
import codecs
class Codec(codecs.Codec):
def encode(self, input, errors='strict'):
return codecs.charmap_encode(input, errors, encoding_map)
def decode(self, input, errors='strict'):
return codecs.charmap_decode(input, errors, decoding_map)
class IncrementalEncoder(codecs.IncrementalEncoder):
def encode(self, input, final=False):
return codecs.charmap_encode(input, self.errors, encoding_map)[0]
class IncrementalDecoder(codecs.IncrementalDecoder):
def decode(self, input, final=False):
return codecs.charmap_decode(input, self.errors, decoding_map)[0]
class StreamWriter(Codec, codecs.StreamWriter):
pass
class StreamReader(Codec, codecs.StreamReader):
pass
def getregentry():
return codecs.CodecInfo(name='mac-latin2', encode=Codec().encode, decode=Codec().decode, incrementalencoder=IncrementalEncoder, incrementaldecoder=IncrementalDecoder, streamreader=StreamReader, streamwriter=StreamWriter)
decoding_map = codecs.make_identity_dict(range(256))
decoding_map.update({128: 196,
129: 256,
130: 257,
131: 201,
132: 260,
133: 214,
134: 220,
135: 225,
136: 261,
137: 268,
138: 228,
139: 269,
140: 262,
141: 263,
142: 233,
143: 377,
144: 378,
145: 270,
146: 237,
147: 271,
148: 274,
149: 275,
150: 278,
151: 243,
152: 279,
153: 244,
154: 246,
155: 245,
156: 250,
157: 282,
158: 283,
159: 252,
160: 8224,
161: 176,
162: 280,
164: 167,
165: 8226,
166: 182,
167: 223,
168: 174,
170: 8482,
171: 281,
172: 168,
173: 8800,
174: 291,
175: 302,
176: 303,
177: 298,
178: 8804,
179: 8805,
180: 299,
181: 310,
182: 8706,
183: 8721,
184: 322,
185: 315,
186: 316,
187: 317,
188: 318,
189: 313,
190: 314,
191: 325,
192: 326,
193: 323,
194: 172,
195: 8730,
196: 324,
197: 327,
198: 8710,
199: 171,
200: 187,
201: 8230,
202: 160,
203: 328,
204: 336,
205: 213,
206: 337,
207: 332,
208: 8211,
209: 8212,
210: 8220,
211: 8221,
212: 8216,
213: 8217,
214: 247,
215: 9674,
216: 333,
217: 340,
218: 341,
219: 344,
220: 8249,
221: 8250,
222: 345,
223: 342,
224: 343,
225: 352,
226: 8218,
227: 8222,
228: 353,
229: 346,
230: 347,
231: 193,
232: 356,
233: 357,
234: 205,
235: 381,
236: 382,
237: 362,
238: 211,
239: 212,
240: 363,
241: 366,
242: 218,
243: 367,
244: 368,
245: 369,
246: 370,
247: 371,
248: 221,
249: 253,
250: 311,
251: 379,
252: 321,
253: 380,
254: 290,
255: 711
})
encoding_map = codecs.make_encoding_map(decoding_map) | DarthMaulware/EquationGroupLeaks | Leak #5 - Lost In Translation/windows/Resources/Python/Core/Lib/encodings/mac_latin2.py | Python | unlicense | 3,180 |
#!/usr/bin/env python
## ! DO NOT MANUALLY INVOKE THIS setup.py, USE CATKIN INSTEAD
from distutils.core import setup
from catkin_pkg.python_setup import generate_distutils_setup
# fetch values from package.xml
d = generate_distutils_setup(
packages = ['ros_qml'],
package_dir = {'': 'src'},
scripts = ['scripts/ros_qml'],
requires = ['roslib', 'rospkg', 'rospy_message_converter']
)
setup(**d)
| bgromov/ros_qml | setup.py | Python | bsd-3-clause | 414 |
from flowp.testing import Behavior, expect, only, skip
from flowp import testing
from unittest import mock
import flowp.testing.dummy
import tempfile
import os
expect_alias = expect
class Expect(Behavior):
class ToBeMethod(Behavior):
def it_should_do_true_assets(self):
expect(True).to_be(True)
expect([1]).to_be(True)
with expect.to_raise(AssertionError):
expect(False).to_be(True)
with expect.to_raise(AssertionError):
expect([]).to_be(True)
def it_should_do_false_asserts(self):
expect(False).to_be(False)
expect([]).to_be(False)
with expect.to_raise(AssertionError):
expect(True).to_be(False)
with expect.to_raise(AssertionError):
expect([1]).to_be(False)
def it_should_do_is_asserts(self):
a = object()
b = object()
expect(a).to_be(a)
with expect.to_raise(AssertionError):
expect(a).to_be(b)
class NotToBeMethod(Behavior):
def it_should_do_not_true_assets(self):
expect(False).not_to_be(True)
expect([]).not_to_be(True)
with expect.to_raise(AssertionError):
expect(True).not_to_be(True)
with expect.to_raise(AssertionError):
expect([1]).not_to_be(True)
def it_should_do_not_false_asserts(self):
expect(True).not_to_be(False)
expect([1]).not_to_be(False)
with expect.to_raise(AssertionError):
expect(False).not_to_be(False)
with expect.to_raise(AssertionError):
expect([]).not_to_be(False)
def it_should_do_is_asserts(self):
a = object()
b = object()
expect(a).not_to_be(b)
with expect.to_raise(AssertionError):
expect(a).not_to_be(a)
class ToRaiseMethod(Behavior):
def before_each(self):
class CustomException(Exception):
pass
self.CustomException = CustomException
def it_should_catch_expected_exceptions(self):
with expect.to_raise(AssertionError):
raise AssertionError()
with expect.to_raise(self.CustomException):
raise self.CustomException()
def it_should_raise_exception_if_none_exceptions_raised(self):
cought = False
try:
with expect.to_raise(AssertionError):
pass
except AssertionError:
cought = True
expect(cought).to_be(True)
def it_should_raise_exception_if_different_exception_raised(self):
cought = False
try:
with expect.to_raise(self.CustomException):
raise AssertionError()
except AssertionError:
cought = True
expect(cought).to_be(True)
def it_should_do_equality_asserts(self):
expect(1) == 1
expect(1) != 2
expect(2) < 3
expect(3) > 2
expect(2) <= 2
expect(3) >= 2
with expect.to_raise(AssertionError):
expect(1) == 2
with expect.to_raise(AssertionError):
expect(2) != 2
with expect.to_raise(AssertionError):
expect(3) < 2
with expect.to_raise(AssertionError):
expect(2) > 3
with expect.to_raise(AssertionError):
expect(3) <= 2
with expect.to_raise(AssertionError):
expect(2) >= 3
def it_should_do_instance_of_asserts(self):
class Test:
pass
obj = Test()
expect(1).to_be_instance_of(int)
expect(obj).to_be_instance_of(Test)
with expect.to_raise(AssertionError):
expect(1).to_be_instance_of(str)
with expect.to_raise(AssertionError):
expect(obj).to_be_instance_of(int)
def it_should_do_not_instance_of_asserts(self):
class Test:
pass
obj = Test()
expect(1).not_to_be_instance_of(str)
expect(obj).not_to_be_instance_of(int)
with expect.to_raise(AssertionError):
expect(1).not_to_be_instance_of(int)
with expect.to_raise(AssertionError):
expect(obj).not_to_be_instance_of(Test)
def it_should_do_in_asserts(self):
expect(1).to_be_in([1, 2, 3])
with expect.to_raise(AssertionError):
expect(4).to_be_in([1, 2, 3])
def it_should_do_not_in_asserts(self):
expect(4).not_to_be_in([1, 2, 3])
with expect.to_raise(AssertionError):
expect(1).not_to_be_in([1, 2, 3])
class MockExpectations(Behavior):
def before_each(self):
self.m = mock.Mock()
def it_should_do_called_assert(self):
self.m()
expect(self.m).to_have_been_called()
self.m.reset_mock()
with expect.to_raise(AssertionError):
expect(self.m).to_have_been_called()
def it_should_do_not_called_assert(self):
expect(self.m).not_to_have_been_called()
self.m()
with expect.to_raise(AssertionError):
expect(self.m).not_to_have_been_called()
def it_should_do_called_with_assert(self):
self.m(1, 2, 3)
expect(self.m).to_have_been_called_with(1, 2, 3)
with expect.to_raise(AssertionError):
expect(self.m).to_have_been_called_with(0, 2, 3)
def it_should_do_called_n_times_assert(self):
self.m()
self.m()
expect(self.m).to_have_been_called(2)
with expect.to_raise(AssertionError):
expect(self.m).to_have_been_called(1)
class BehaviorInstance(Behavior):
class MockMethod(Behavior):
class WhenTargetNotGiven(Behavior):
def it_creates_mocks(self):
m = self.mock()
expect(m).to_be_instance_of(mock.Mock)
def it_creates_mocks_with_attributes_specification(self):
m = self.mock(spec=['a'])
m.a
with expect.to_raise(AttributeError):
m.b
class WhenTargetGiven(Behavior):
def it_patch_concrete_places(self):
expect(flowp.testing.dummy.test_var) == 0
m = self.mock('flowp.testing.dummy.test_var')
expect(flowp.testing.dummy.test_var)\
.to_be_instance_of(mock.Mock)
expect(m).to_be(flowp.testing.dummy.test_var)
def it_patch_with_new_parameter(self):
expect(flowp.testing.dummy.test_var) == 0
self.mock('flowp.testing.dummy.test_var', new=1)
expect(flowp.testing.dummy.test_var) == 1
def it_patch_with_attributes_specification(self):
expect(flowp.testing.dummy.test_var) == 0
self.mock('flowp.testing.dummy.test_var', spec=['a'])
flowp.testing.dummy.test_var.a
with expect.to_raise(AttributeError):
flowp.testing.dummy.test_var.b
def it_raise_an_error_if_target_is_not_a_string(self):
o = object()
with expect.to_raise(TypeError):
self.mock(o)
class WhenTargetAndAttrGiven(Behavior):
def before_each(self):
class Object:
pass
self.o = Object()
self.o.a = 0
def it_patch_object_attributes(self):
m = self.mock(self.o, 'a')
expect(self.o.a).to_be_instance_of(mock.Mock)
expect(m).to_be(self.o.a)
def it_patch_with_new_parameter(self):
self.mock(self.o, 'a', new=1)
expect(self.o.a) == 1
def it_patch_with_attributes_specification(self):
expect(flowp.testing.dummy.test_var) == 0
self.mock(self.o, 'a', spec=['a'])
self.o.a
with expect.to_raise(AttributeError):
self.o.a.c
class RunMethod(Behavior):
def before_each(self):
class TestBehavior(Behavior):
executed = False
def it_is_test(self):
self.executed = True
def it_raise_exception(self):
raise AssertionError()
self.expect = expect
self.results = self.mock()
self.results.executed = 1
self.results.all = 2
self.pbehavior1 = self.mock(spec=['before_each', 'after_each'])
self.pbehavior2 = self.mock(spec=['before_each', 'after_each'])
self.behavior = TestBehavior('it_is_test', self.results)
self.behavior.parent_behaviors = (self.pbehavior1, self.pbehavior2)
self.behavior.after_each = self.mock()
def it_removes_mocks_patchers_after_each_test_method0(self):
expect(flowp.testing.dummy.test_var) == 0
expect(flowp.testing.dummy.test_obj.a) == 0
self.mock('flowp.testing.dummy.test_var', new=1)
self.mock(flowp.testing.dummy.test_obj, 'a', new=1)
expect(flowp.testing.dummy.test_var) == 1
expect(flowp.testing.dummy.test_obj.a) == 1
def it_removes_mocks_patchers_after_each_test_method(self):
expect(flowp.testing.dummy.test_var) == 0
expect(flowp.testing.dummy.test_obj.a) == 0
self.mock('flowp.testing.dummy.test_var', new=1)
self.mock(flowp.testing.dummy.test_obj, 'a', new=1)
expect(flowp.testing.dummy.test_var) == 1
expect(flowp.testing.dummy.test_obj.a) == 1
def it_should_always_call_after_each_methods(self):
self.behavior.method_name = 'it_raise_exception'
self.behavior.run()
expect(self.pbehavior1.after_each).to_have_been_called()
expect(self.pbehavior2.after_each).to_have_been_called()
expect(self.behavior.after_each).to_have_been_called()
class WhenOnlyMode(Behavior):
class AndMethodInMode(Behavior):
def it_should_execute_the_test(self):
self.behavior.__class__.it_is_test._only_mode = True
self.behavior.run(True)
expect(self.behavior.executed).to_be(True)
class AndMethodNotInMode(Behavior):
def it_should_skip_the_test(self):
self.behavior.run(True)
expect(self.behavior.executed).to_be(False)
expect(self.results.add_skipped).to_have_been_called(1)
class AndBehaviorInMode(Behavior):
def it_should_execute_the_test(self):
self.behavior._only_mode = True
self.behavior.run(True)
expect(self.behavior.executed).to_be(True)
class AndBehaviorNotInMode(Behavior):
def it_should_skip_the_test(self):
self.behavior.run(True)
expect(self.behavior.executed).to_be(False)
expect(self.results.add_skipped).to_have_been_called(1)
class AndParentBehaviorInMode(Behavior):
def it_should_execute_the_test(self):
self.pbehavior1.mock_add_spec([
'_only_mode', 'before_each', 'after_each'])
self.behavior.run(True)
expect(self.behavior.executed).to_be(True)
class WhenSkipped(Behavior):
class Method(Behavior):
def it_should_skip_the_test(self):
self.behavior.__class__.it_is_test._skipped = True
self.behavior.run(True)
expect(self.behavior.executed).to_be(False)
expect(self.results.add_skipped).to_have_been_called(1)
class Behavio(Behavior):
def it_should_skip_the_test(self):
self.behavior._skipped = True
self.behavior.run(True)
expect(self.behavior.executed).to_be(False)
expect(self.results.add_skipped).to_have_been_called(1)
class ParentBehavior(Behavior):
def it_should_skip_the_test(self):
self.pbehavior1.mock_add_spec([
'_skipped', 'before_each', 'after_each'])
self.behavior.run(True)
expect(self.behavior.executed).to_be(False)
expect(self.results.add_skipped).to_have_been_called(1)
class TemporaryDirectory(Behavior):
def before_each(self):
self.subject = testing.TemporaryDirectory()
def it_can_enter_and_exit_from_temporary_directory(self):
org_dir = os.getcwd()
self.subject.enter()
expect(os.path.samefile(org_dir, self.subject.name)).to_be(False)
expect(os.path.samefile(os.getcwd(), self.subject.name)).to_be(True)
self.subject.exit()
expect(os.path.samefile(os.getcwd(), org_dir)).to_be(True)
| localmed/flowp | spec/spec_testing.py | Python | bsd-3-clause | 13,328 |
# Licensed to the Apache Software Foundation (ASF) under one or more
# contributor license agreements. See the NOTICE file distributed with
# this work for additional information regarding copyright ownership.
# The ASF licenses this file to You under the Apache License, Version 2.0
# (the "License"); you may not use this file except in compliance with
# the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
Provides generic deployment steps for machines post boot.
"""
from __future__ import with_statement
from typing import Union
from typing import Optional
from typing import List
from typing import IO
from typing import cast
import os
import re
import binascii
from libcloud.utils.py3 import basestring, PY3
from libcloud.compute.base import Node
from libcloud.compute.ssh import BaseSSHClient
class Deployment(object):
"""
Base class for deployment tasks.
"""
def run(self, node, client):
# type: (Node, BaseSSHClient) -> Node
"""
Runs this deployment task on node using the client provided.
:type node: :class:`Node`
:keyword node: Node to operate one
:type client: :class:`BaseSSHClient`
:keyword client: Connected SSH client to use.
:return: :class:`Node`
"""
raise NotImplementedError(
'run not implemented for this deployment')
def _get_string_value(self, argument_name, argument_value):
if not isinstance(argument_value, basestring) and \
not hasattr(argument_value, 'read'):
raise TypeError('%s argument must be a string or a file-like '
'object' % (argument_name))
if hasattr(argument_value, 'read'):
argument_value = argument_value.read()
return argument_value
class SSHKeyDeployment(Deployment):
"""
Installs a public SSH Key onto a server.
"""
def __init__(self, key):
# type: (Union[str, IO]) -> None
"""
:type key: ``str`` or :class:`File` object
:keyword key: Contents of the public key write or a file object which
can be read.
"""
self.key = self._get_string_value(argument_name='key',
argument_value=key)
def run(self, node, client):
# type: (Node, BaseSSHClient) -> Node
"""
Installs SSH key into ``.ssh/authorized_keys``
See also :class:`Deployment.run`
"""
client.put(".ssh/authorized_keys", contents=self.key, mode='a')
return node
def __str__(self):
return self.__repr__()
def __repr__(self):
key = self.key[:100]
return ("<SSHKeyDeployment key=%s...>" % (key))
class FileDeployment(Deployment):
"""
Installs a file on the server.
"""
def __init__(self, source, target):
# type: (str, str) -> None
"""
:type source: ``str``
:keyword source: Local path of file to be installed
:type target: ``str``
:keyword target: Path to install file on node
"""
self.source = source
self.target = target
def run(self, node, client):
# type: (Node, BaseSSHClient) -> Node
"""
Upload the file, retaining permissions.
See also :class:`Deployment.run`
"""
perms = int(oct(os.stat(self.source).st_mode)[4:], 8)
with open(self.source, 'rb') as fp:
client.putfo(path=self.target, chmod=perms,
fo=fp)
return node
def __str__(self):
return self.__repr__()
def __repr__(self):
return ("<FileDeployment source=%s, target=%s>" % (
self.source, self.target))
class ScriptDeployment(Deployment):
"""
Runs an arbitrary shell script on the server.
This step works by first writing the content of the shell script (script
argument) in a *.sh file on a remote server and then running that file.
If you are running a non-shell script, make sure to put the appropriate
shebang to the top of the script. You are also advised to do that even if
    you are running a plain shell script.
"""
def __init__(self,
script, # type: str
args=None, # type: Optional[List[str]]
name=None, # type: Optional[str]
                 delete=False, # type: bool
timeout=None # type: Optional[float]
):
# type: (...) -> None
"""
:type script: ``str``
:keyword script: Contents of the script to run.
:type args: ``list``
:keyword args: Optional command line arguments which get passed to the
deployment script file.
:type name: ``str``
:keyword name: Name of the script to upload it as, if not specified,
a random name will be chosen.
:type delete: ``bool``
:keyword delete: Whether to delete the script on completion.
:param timeout: Optional run timeout for this command.
:type timeout: ``float``
"""
script = self._get_string_value(argument_name='script',
argument_value=script)
self.script = script
self.args = args or []
self.stdout = None # type: Optional[str]
self.stderr = None # type: Optional[str]
self.exit_status = None # type: Optional[int]
self.delete = delete
self.timeout = timeout
self.name = name # type: Optional[str]
if self.name is None:
# File is put under user's home directory
# (~/libcloud_deployment_<random_string>.sh)
random_string = '' # type: Union[str, bytes]
random_string = binascii.hexlify(os.urandom(4))
random_string = cast(bytes, random_string)
random_string = random_string.decode('ascii')
self.name = 'libcloud_deployment_%s.sh' % (random_string)
def run(self, node, client):
# type: (Node, BaseSSHClient) -> Node
"""
Uploads the shell script and then executes it.
See also :class:`Deployment.run`
"""
self.name = cast(str, self.name)
file_path = client.put(path=self.name, chmod=int('755', 8),
contents=self.script)
# Pre-pend cwd if user specified a relative path
if self.name and (self.name[0] not in ['/', '\\'] and
not re.match(r"^\w\:.*$", file_path)):
base_path = os.path.dirname(file_path)
name = os.path.join(base_path, self.name)
elif self.name and (self.name[0] == '\\' or
re.match(r"^\w\:.*$", file_path)):
# Absolute Windows path
name = file_path
else:
self.name = cast(str, self.name)
name = self.name
cmd = name
if self.args:
# Append arguments to the command
cmd = '%s %s' % (name, ' '.join(self.args))
else:
cmd = name
self.stdout, self.stderr, self.exit_status = \
client.run(cmd, timeout=self.timeout)
if self.delete:
client.delete(self.name)
return node
def __str__(self):
return self.__repr__()
def __repr__(self):
script = self.script[:15] + '...'
exit_status = self.exit_status
if exit_status is not None:
stdout = self.stdout[:30] + '...'
stderr = self.stderr[:30] + '...'
else:
exit_status = 'script didn\'t run yet'
stdout = None
stderr = None
return ("<ScriptDeployment script=%s, exit_status=%s, stdout=%s, "
"stderr=%s>" % (script, exit_status, stdout, stderr))
class ScriptFileDeployment(ScriptDeployment):
"""
Runs an arbitrary shell script from a local file on the server. Same as
ScriptDeployment, except that you can pass in a path to the file instead of
the script content.
"""
def __init__(self,
script_file, # type: str
args=None, # type: Optional[List[str]]
name=None, # type: Optional[str]
                 delete=False, # type: bool
timeout=None # type: Optional[float]
):
# type: (...) -> None
"""
:type script_file: ``str``
:keyword script_file: Path to a file containing the script to run.
:type args: ``list``
:keyword args: Optional command line arguments which get passed to the
deployment script file.
:type name: ``str``
:keyword name: Name of the script to upload it as, if not specified,
a random name will be chosen.
:type delete: ``bool``
:keyword delete: Whether to delete the script on completion.
:param timeout: Optional run timeout for this command.
:type timeout: ``float``
"""
with open(script_file, 'rb') as fp:
content = fp.read() # type: Union[bytes, str]
if PY3:
content = cast(bytes, content)
content = content.decode('utf-8')
super(ScriptFileDeployment, self).__init__(script=content,
args=args,
name=name,
delete=delete,
timeout=timeout)
class MultiStepDeployment(Deployment):
"""
Runs a chain of Deployment steps.
"""
def __init__(self, add=None):
# type: (Optional[Union[Deployment, List[Deployment]]]) -> None
"""
:type add: ``list``
:keyword add: Deployment steps to add.
"""
self.steps = [] # type: list
if add:
self.add(add)
def add(self, add):
# type: (Union[Deployment, List[Deployment]]) -> None
"""
Add a deployment to this chain.
:type add: Single :class:`Deployment` or a ``list`` of
:class:`Deployment`
:keyword add: Adds this deployment to the others already in this
object.
"""
if add is not None:
add = add if isinstance(add, (list, tuple)) else [add]
self.steps.extend(add)
def run(self, node, client):
# type: (Node, BaseSSHClient) -> Node
"""
Run each deployment that has been added.
See also :class:`Deployment.run`
"""
for s in self.steps:
node = s.run(node, client)
return node
def __str__(self):
return self.__repr__()
def __repr__(self):
steps = []
for step in self.steps:
steps.append(str(step))
steps = ', '.join(steps)
return ("<MultiStepDeployment steps=[%s]>" % (steps))
| andrewsomething/libcloud | libcloud/compute/deployment.py | Python | apache-2.0 | 11,416 |
from os.path import join, dirname
dest = join(dirname(__file__), '../_static/generated')
from pystacia import lena
image = lena(128)
image.gamma(0.1)
image.write(join(dest, 'lena_gamma0.1.jpg'))
image.close()
image = lena(128)
image.gamma(0.3)
image.write(join(dest, 'lena_gamma0.3.jpg'))
image.close()
image = lena(128)
image.gamma(0.6)
image.write(join(dest, 'lena_gamma0.6.jpg'))
image.close()
image = lena(128)
image.gamma(1.5)
image.write(join(dest, 'lena_gamma1.5.jpg'))
image.close()
image = lena(128)
image.gamma(2)
image.write(join(dest, 'lena_gamma2.jpg'))
image.close()
| squeaky-pl/pystacia | doc/source/image/gamma.py | Python | mit | 588 |
import numpy as np
import npp
from itertools import combinations, chain
def make_data(N_R, N_P, P_parts, M, true_variances, noise_variance, combs, Pnoise_models,
P_models, use_mixing=True, orthogonalize=True, noise_scale=1.0, **etc):
# Generate timecourses for each partition
X_parts = [np.random.randn(p, N_R + N_P) for p in P_parts]
#print "X_parts[0].shape", X_parts[0].shape
XX = np.corrcoef(np.vstack(X_parts))
# Orthogonalize timecourses across and within partitions?
if orthogonalize:
cat_orthog_X_parts, _, _ = np.linalg.svd(np.vstack(X_parts).T, full_matrices=False)
X_parts = np.vsplit(npp.zs(cat_orthog_X_parts).T, np.cumsum(P_parts)[:-1])
XX_orthog = np.corrcoef(np.vstack(X_parts))
# Generate "true" weights used to construct Y
Btrue_parts = [np.random.randn(p, M) for p in P_parts]
#print "Btrue_parts[0].shape", Btrue_parts[0].shape
# Generate output timecourses for each partition
Y_parts = [B.T.dot(X).T for X,B in zip(X_parts, Btrue_parts)]
#print "Y_parts[0].shape", Y_parts[0].shape
# Rescale timecourses for each partition to have appropriate variance
scaled_Y_parts = [Y / Y.std(0) * np.sqrt(tv) for Y,tv in zip(Y_parts, true_variances)]
#print "scaled_Y_parts[0].shape", scaled_Y_parts[0].shape
# Generate noise timecourses scaled to have appropriate variance
Y_noise = np.random.randn(N_R + N_P, M)
scaled_Y_noise = Y_noise / Y_noise.std(0) * np.sqrt(noise_variance)
#print "scaled_Y_noise.shape", scaled_Y_noise.shape
# Construct Y from combination of partition timecourses
Y_total = np.array(scaled_Y_parts).sum(0) + scaled_Y_noise
zY_total = npp.zs(Y_total)
#print "Y_total.shape", Y_total.shape
# Generate feature timecourses
# Stack together partition features to make "true" features for each feature space
Xtrue_feats = [np.vstack([X_parts[c] for c in comb]) for comb in combs]
#print "Xtrue_feats[0].shape", Xtrue_feats[0].shape
# Generate noise features to round out each feature space
Xnoise_feats = [noise_scale * np.random.randn(Pnoise, N_R + N_P) for Pnoise in Pnoise_models]
#print "Xnoise_feats[0].shape", Xnoise_feats[0].shape
# Generate matrices to mix real and noise features in each space
mixing_mats = [np.random.randn(P, P) for P in P_models]
#print "mixing_mats[0].shape", mixing_mats[0].shape
# Use mixing matrices to generate feature timecourses
if use_mixing:
X_feats = [m.dot(np.vstack([Xt, Xn])) for m,Xt,Xn in zip(mixing_mats, Xtrue_feats, Xnoise_feats)]
else:
X_feats = [np.vstack([Xt, Xn]) for m,Xt,Xn in zip(mixing_mats, Xtrue_feats, Xnoise_feats)]
#print "X_feats[0].shape", X_feats[0].shape
return X_feats, Y_total
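
if __name__ == '__main__':
    # Illustrative call only (not part of the original module). The sizes
    # below are made up but kept mutually consistent: each entry of P_models
    # must equal the number of true features taken via `combs` plus the
    # matching entry of Pnoise_models.
    P_parts = [2, 3]                # features per latent partition
    combs = [(0,), (0, 1)]          # partitions feeding each feature space
    Pnoise_models = [1, 2]          # extra noise features per feature space
    P_models = [2 + 1, 2 + 3 + 2]   # total features per feature space
    X_feats, Y_total = make_data(N_R=100, N_P=50, P_parts=P_parts, M=10,
                                 true_variances=[0.3, 0.4],
                                 noise_variance=0.3,
                                 combs=combs,
                                 Pnoise_models=Pnoise_models,
                                 P_models=P_models)
    print([X.shape for X in X_feats])
    print(Y_total.shape)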
| alexhuth/n4cs-fa2017 | homeworks/homework_2/homework_2_utils.py | Python | gpl-3.0 | 2,791 |
'''
(Double) Deep Q-Learning Algorithm Implementation
Supports double deep Q-learning with on either GPU and CPU
'''
import numpy as np
import pickle # used to save the nets
from copy import deepcopy
class DQNLearner(object):
def __init__(self, settings, backend):
"""
Functions that must be defined by the custom learner:
- forward_loss(obs, a, r, obsp, term) # computes scores and loss
- forward(obs) # computes scores
- update(obs, a, r, obsp) # update the params
- get_net() # returns the network object
- set_net(net) # sets the source and target nets and moves to gpu (if needed)
Fields owned by the learner:
- source_net: generates source Q-vals
- target_net: generates target Q-vals
"""
self.backend = backend
self.clip_reward = settings.get('clip_reward', False)
self.reward_rescale = settings.get('reward_rescale', False)
self.r_max = 1 # keep the default value at 1
def update(self, obs, a, r, obsp, term):
r = self.pre_process_reward(r)
return self.backend.update(obs, a, r, obsp, term)
def forward_loss(self, obs, a, r, obsp, term):
return self.backend.forward_loss(obs, a, r, obsp, term)
def forward(self, obs):
return self.backend.forward(obs)
def copy_net_to_target_net(self):
''' update target net with the current net '''
self.backend.target_net = deepcopy(self.backend.source_net)
def save(self,obj,name):
pickle.dump(obj, open(name, "wb"))
def load(self,name):
return pickle.load(open(name, "rb"))
def save_net(self,name):
''' save a net to a path '''
self.save(self.backend.source_net,name)
def load_net(self,net):
''' load in a net from path or a variable'''
if isinstance(net, str): # if it is a string, load the net from the path
net = self.load(net)
self.backend.set_net(net)
def save_training_history(self, path='.'):
''' save training history '''
train_hist = np.array([range(len(self.train_rewards)),self.train_losses,self.train_rewards, self.train_qval_avgs, self.train_episodes, self.train_times]).T
eval_hist = np.array([range(len(self.val_rewards)),self.val_losses,self.val_rewards, self.val_qval_avgs, self.val_episodes, self.val_times]).T
# TODO: why is this here and not in agent?
np.savetxt(path + '/training_hist.csv', train_hist, delimiter=',')
np.savetxt(path + '/evaluation_hist.csv', eval_hist, delimiter=',')
def params(self):
"""
        Returns an iterator over network parameters
Note: different back-ends will return different param containers
"""
# TODO: return a dictionary here?
        return self.backend.params()
def pre_process_reward(self, r):
"""
Clips and re-scales the rewards
"""
if self.clip_reward:
r = np.clip(r,-self.clip_reward,self.clip_reward)
if self.reward_rescale:
self.r_max = max(np.amax(np.absolute(r)),self.r_max)
r = r / self.r_max
return r
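
if __name__ == '__main__':
    # Illustrative check only (not part of the original module): exercise the
    # reward pre-processing on its own. The settings dict and the `None`
    # backend are placeholders; a real run needs a backend object that
    # implements update/forward/forward_loss as described in __init__.
    demo_settings = {'clip_reward': 2.0, 'reward_rescale': True}
    learner = DQNLearner(demo_settings, backend=None)
    rewards = np.array([-5.0, -0.5, 0.0, 1.0, 10.0])
    print(learner.pre_process_reward(rewards))  # clipped to [-2, 2], then rescaled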
| sisl/Chimp | chimp/learners/dqn_learner.py | Python | apache-2.0 | 3,187 |
#!/bin/python
import unittest
import logging
import os
from src.db_client import db_client
from src.channel import channel
from src.user_state import user_state
from src.message import message
from src.contact import contact
from src.config import config
class db_client_test(unittest.TestCase):
def test_init(self):
#Initialization
logging.basicConfig(level=logging.INFO,
format='%(asctime)s - %(name)s - %(levelname)s - %(message)s')
logger = logging.getLogger()
conf = config(os.path.abspath(__file__ + "/../../test/config_test.yaml"))
conf.mode = 'COLD'
obj = db_client(logger, conf)
if os.path.isfile(obj.db_file):
os.remove(obj.db_file)
self.assertTrue(obj.init())
########################################################################
# Channels
# Check if table is created
obj.cursor.execute('''delete from %s where 1 = 1''' % obj.channels_table_name)
channel_record = channel(channel_id=7,
source_id=10,
source_chat_id='123456',
target_id=15,
target_chat_id='654321',
public=1,
type=1)
obj.insert(obj.channels_table_name, channel_record.str())
# Check if the row is inserted
row = obj.selectone(obj.channels_table_name, "*")
channel_record_from_row = channel.from_channel_record(row, False)
self.assertEqual(channel_record_from_row.date, channel_record.date)
self.assertEqual(channel_record_from_row.time, channel_record.time)
self.assertEqual(channel_record_from_row.channel_id, channel_record.channel_id)
self.assertEqual(channel_record_from_row.source_id, channel_record.source_id)
self.assertEqual(channel_record_from_row.source_chat_id, channel_record.source_chat_id)
self.assertEqual(channel_record_from_row.target_id, channel_record.target_id)
self.assertEqual(channel_record_from_row.target_chat_id, channel_record.target_chat_id)
self.assertEqual(channel_record_from_row.public, channel_record.public)
self.assertEqual(channel_record_from_row.type, channel_record.type)
# Update the row
channel_record.public = 0
channel_record.type = 0
obj.insert_or_replace(obj.channels_table_name, channel_record.str())
# Check if the row is inserted
row = obj.selectone(obj.channels_table_name, "*")
channel_record_from_row = channel.from_channel_record(row, False)
self.assertEqual(channel_record_from_row.channel_id, channel_record.channel_id)
self.assertEqual(channel_record_from_row.source_id, channel_record.source_id)
self.assertEqual(channel_record_from_row.source_chat_id, channel_record.source_chat_id)
self.assertEqual(channel_record_from_row.target_id, channel_record.target_id)
self.assertEqual(channel_record_from_row.target_chat_id, channel_record.target_chat_id)
self.assertEqual(channel_record_from_row.public, channel_record.public)
self.assertEqual(channel_record_from_row.type, channel_record.type)
row = obj.selectone(obj.channels_table_name, "*", "targetchatid='654321' and sourceid=10")
channel_record_from_row = channel.from_channel_record(row, False)
self.assertEqual(channel_record_from_row.channel_id, channel_record.channel_id)
self.assertEqual(channel_record_from_row.source_id, channel_record.source_id)
self.assertEqual(channel_record_from_row.source_chat_id, channel_record.source_chat_id)
self.assertEqual(channel_record_from_row.target_id, channel_record.target_id)
self.assertEqual(channel_record_from_row.target_chat_id, channel_record.target_chat_id)
self.assertEqual(channel_record_from_row.public, channel_record.public)
self.assertEqual(channel_record_from_row.type, channel_record.type)
########################################################################
# User_states
# Check if table is created
obj.cursor.execute('''delete from %s where 1 = 1''' % obj.user_states_table_name)
user_state_record = user_state(chat_id='1234',
state=user_state.states.START,
last_target_id=123,
last_msg_id=456)
obj.insert(obj.user_states_table_name, user_state_record.str())
# Check if the row is inserted
row = obj.selectone(obj.user_states_table_name, "*")
user_state_record_from_row = user_state.from_user_state_record(row, False)
self.assertEqual(user_state_record_from_row.date, user_state_record.date)
self.assertEqual(user_state_record_from_row.time, user_state_record.time)
self.assertEqual(user_state_record_from_row.chat_id, user_state_record.chat_id)
self.assertEqual(user_state_record_from_row.state, user_state_record.state)
self.assertEqual(user_state_record_from_row.last_target_id, user_state_record.last_target_id)
self.assertEqual(user_state_record_from_row.last_msg_id, user_state_record.last_msg_id)
########################################################################
# Messages
# Check if table is created
obj.cursor.execute('''delete from %s where 1 = 1''' % obj.messages_table_name)
message_record = message(msg_id=50,
channel_id=30,
source_id=29,
source_chat_id='111111',
msg='Hello world')
obj.insert(obj.messages_table_name, message_record.str())
# Check if the row is inserted
row = obj.selectone(obj.messages_table_name, "*")
message_record_from_row = message.from_message_record(row, False)
self.assertEqual(message_record_from_row.date, message_record.date)
self.assertEqual(message_record_from_row.time, message_record.time)
self.assertEqual(message_record_from_row.msg_id, message_record.msg_id)
self.assertEqual(message_record_from_row.channel_id, message_record.channel_id)
self.assertEqual(message_record_from_row.source_id, message_record.source_id)
self.assertEqual(message_record_from_row.source_chat_id, message_record.source_chat_id)
self.assertEqual(message_record_from_row.msg, message_record.msg)
########################################################################
        # Contacts
# Check if table is created
obj.cursor.execute('''delete from %s where 1 = 1''' % obj.contacts_table_name)
contact_record = contact(chat_id='123456',
phone_number='21223422',
first_name='David',
last_name='Jones')
obj.insert(obj.contacts_table_name, contact_record.str())
# Check if the row is inserted
row = obj.selectone(obj.contacts_table_name, "*")
contact_record_from_row = contact.from_contact_record(row, False)
self.assertEqual(contact_record_from_row.date, contact_record.date)
self.assertEqual(contact_record_from_row.time, contact_record.time)
self.assertEqual(contact_record_from_row.chat_id, contact_record.chat_id)
self.assertEqual(contact_record_from_row.phone_number, contact_record.phone_number)
self.assertEqual(contact_record_from_row.first_name, contact_record.first_name)
self.assertEqual(contact_record_from_row.last_name, contact_record.last_name)
# Close the connection
obj.close()
# Remove the file
os.remove(obj.db_file)
if __name__ == '__main__':
unittest.main() | gavincyi/Telex | test/db_client_test.py | Python | apache-2.0 | 8,050 |
# Copyright (c) 2006-2007 Open Source Applications Foundation
# Copyright (c) 2008-2009 Mikeal Rogers <[email protected]>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import windmill
import logging
from time import sleep
import os, sys
from datetime import datetime
from threading import Thread
import shutil
import socket
from windmill.dep import functest
functest.configure()
def process_options(argv_list):
"""Process all the command line options"""
import admin_options
admin_options.process_module(admin_options)
argv_list.pop(0)
action = None
# This might be the hairiest code in windmill :)
# We have a very specific way we need to parse arguments
# because of the way different arguments interact with each other
# 8/27/2007 Gawd this is ugly, i would love to refactor this but I've
# forgotten what it does -Mikeal
# 12/15/2007 Oh man, I'm going to add a feature to this without refactoring it.
# The issue with this code remains the same and no standard arg parsing
# module can do what we need.
for arg in argv_list:
# Grab the test url if one is given
if arg.startswith('http://') or arg.startswith('https://'):
windmill.settings['TEST_URL'] = arg
functest.registry['url'] = arg
elif arg.startswith('-'):
# Take something like -efg and set the e, f, and g options
options = arg.replace('-', '')
for option in options:
admin_options.flags_dict[option]()
else:
# Any argument not starting with - is a regular named option
value = None
            if arg.find('=') != -1:
name, value = arg.split('=')
else:
name = arg
if name in admin_options.options_dict:
processor = admin_options.options_dict[name]
if value is None:
processor()
else:
processor(value)
elif name in action_mapping:
action = action_mapping[name]
else:
print name, 'is not a windmill argument. Sticking in functest registry.'
if value is None:
value = True
functest.registry[name] = value
if action is None:
# If an action is not defined we default to running the service in the foreground
return action_mapping['runserver']
else:
return action
def setup_servers(console_level=logging.INFO):
"""Setup the server and return httpd and loggers"""
windmill.is_active = True
windmill.ide_is_awake = False
if len(logging.getLogger().handlers) > 0:
console_handler = logging.getLogger().handlers[0]
console_handler.setLevel(console_level)
httpd = windmill.server.wsgi.make_windmill_server()
return httpd
def run_threaded(console_level=logging.INFO):
"""Run the server threaded."""
httpd = setup_servers(console_level)
httpd_thread = Thread(target=httpd.start)
getattr(httpd_thread, 'setDaemon', lambda x: x)(True)
httpd_thread.start()
while not httpd.ready:
sleep(.25)
return httpd, httpd_thread
def configure_global_settings(logging_on=True):
"""Configure that global settings for the current run"""
# This logging stuff probably shouldn't be here, it should probably be abstracted
if logging_on:
logging.getLogger().setLevel(0)
console = logging.StreamHandler()
console.setLevel(logging.INFO)
formatter = logging.Formatter('%(name)-12s: %(levelname)-8s %(message)s')
console.setFormatter(formatter)
logging.getLogger().addHandler(console)
if os.environ.has_key('WINDMILL_CONFIG_FILE'):
local_settings = os.environ['WINDMILL_CONFIG_FILE']
else:
local_settings = None
windmill.settings = windmill.conf.configure_settings(localSettings=local_settings)
port = windmill.settings['SERVER_HTTP_PORT']
while 1:
try:
s = socket.socket(socket.AF_INET, socket.SOCK_STREAM)
s.connect(('127.0.0.1', port))
s.close()
port += 1
except socket.error:
break
windmill.settings['SERVER_HTTP_PORT'] = port
return windmill.settings
on_ide_awake = []
def setup():
"""Setup server and shell objects"""
global shell_objects_dict
shell_objects_dict = {}
windmill.settings['shell_objects'] = shell_objects_dict
assert not windmill.settings.get('setup_has_run', False)
httpd, httpd_thread = run_threaded(windmill.settings['CONSOLE_LOG_LEVEL'])
shell_objects_dict['httpd'] = httpd
shell_objects_dict['httpd_thread'] = httpd_thread
from windmill.bin import shell_objects
if windmill.settings['CONTINUE_ON_FAILURE'] is not False:
shell_objects.jsonrpc_client.add_json_command('{"method": "commands.setOptions", "params": {"stopOnFailure" : false}}')
if windmill.settings['EXTENSIONS_DIR'] is not None:
shell_objects.load_extensions_dir(windmill.settings['EXTENSIONS_DIR'])
if windmill.settings['RUN_TEST'] is not None:
shell_objects.run_test(windmill.settings['RUN_TEST'])
if windmill.settings['LOAD_TEST'] is not None:
shell_objects.load_test(windmill.settings['LOAD_TEST'])
if windmill.settings['JAVASCRIPT_TEST_DIR']:
shell_objects.run_js_tests(windmill.settings['JAVASCRIPT_TEST_DIR'],
windmill.settings['JAVASCRIPT_TEST_FILTER'],
windmill.settings['JAVASCRIPT_TEST_PHASE'])
browser = [setting for setting in windmill.settings.keys() if setting.startswith('START_') and \
windmill.settings[setting] is True]
import shell_objects
    if len(browser) == 1:
shell_objects_dict['browser'] = getattr(shell_objects, browser[0].lower())()
for attribute in dir(shell_objects):
shell_objects_dict[attribute] = getattr(shell_objects, attribute)
shell_objects_dict['setup_has_run'] = True
return shell_objects_dict
def teardown(shell_objects):
"""Teardown the server, threads, and open browsers."""
if windmill.is_active:
windmill.is_active = False
shell_objects['clear_queue']()
for controller in windmill.settings['controllers']:
controller.stop()
del(controller)
if windmill.settings['START_FIREFOX'] and windmill.settings['MOZILLA_CREATE_NEW_PROFILE']:
shutil.rmtree(windmill.settings['MOZILLA_PROFILE'])
for directory in windmill.teardown_directories:
if os.path.isdir(directory):
shutil.rmtree(directory)
# while shell_objects['httpd_thread'].isAlive():
# try:
# shell_objects['httpd'].stop()
# except Exception, e:
# print "Exception occurred while shutting server down:"
# print e
#
# # Hacking workaround for port locking up on linux.
# if sys.platform == 'linux2':
# try:
# shell_objects['httpd'].socket.shutdown(socket.SHUT_RDWR)
# shell_objects['httpd'].socket.close()
# except: pass
shell_objects['httpd'].stop()
#shell_objects['httpd_thread'].join()
def runserver_action(shell_objects):
"""Run the server in the foreground with the options given to the command line"""
try:
        if 'runserver' in sys.argv or len(windmill.settings['controllers']) == 0:
print 'Server running...'
if windmill.settings['EXIT_ON_DONE'] and not windmill.settings['JAVASCRIPT_TEST_DIR']:
while windmill.block_exit or (
                    len(shell_objects['httpd'].controller_queue.queue) != 0 ) or (
                    len(shell_objects['httpd'].test_resolution_suite.unresolved) != 0 ):
sleep(.25)
elif ( windmill.settings['RUN_TEST'] ):
windmill.runserver_running = True
while windmill.runserver_running:
sleep(.25)
else:
windmill.runserver_running = True
while windmill.runserver_running:
sleep(.25)
teardown(shell_objects)
if windmill.test_has_failed:
sys.exit(1)
except KeyboardInterrupt:
teardown(shell_objects)
sys.exit(1)
def shell_action(shell_objects):
"""Start the windmill shell environment"""
windmill.in_shell = True
# If ipython is installed and we weren't given the usecode option
try:
assert not windmill.settings['USECODE']
from IPython.Shell import IPShellEmbed
ipshell = IPShellEmbed()
ipshell(local_ns=shell_objects)
except:
import code
code.interact(local=shell_objects)
teardown(shell_objects)
# def wxui_action(shell_objects):
# """Start the wxPython based service GUI"""
# try:
# import wxui
# app = wxui.App(shell_objects)
# shell_objects['wxui_app'] = app
# app.MainLoop()
# teardown(shell_objects)
# except ImportError:
# print 'Failed to import wx, defaulting to the shell'
# shell_action(shell_objects)
# def tinderbox_action(shell_objects):
# """Tinderbox action for continuous integration"""
# shell_objects['jsonrpc_client'].add_json_command('{"method": "commands.setOptions", "params": {"stopOnFailure" : false}}')
#
# class ResultsProcessor(object):
# passed = 0
# failed = 0
# def success(self, test, debug):
# self.passed += 1
# def failure(self, test, debug):
# self.failed += 1
#
# result_processor = ResultsProcessor()
# shell_objects['httpd'].test_resolution_suite.result_processor = result_processor
#
# starttime = datetime.now()
# result = None
#
# if windmill.settings['RUN_TEST']:
# try:
# while ( len(shell_objects['httpd'].controller_queue.queue) is not 0 ) or (
# len(shell_objects['httpd'].test_resolution_suite.unresolved) is not 0 ):
# sleep(1)
#
# print '#TINDERBOX# Testname = FullSuite'
# print '#TINDERBOX# Time elapsed = %s' % str (datetime.now() - starttime)
#
# if result_processor.failed > 0 or result_processor.passed is 0:
# result = "FAILED"
# else:
# result = "PASSED"
#
# print '#TINDERBOX# Status = %s' % result
# teardown(shell_objects)
# if result == "FAILED":
# sys.exit(1)
#
# except KeyboardInterrupt:
# teardown(shell_objects)
# if result == "FAILED":
# sys.exit(1)
# else:
# try:
# while not windmill.TESTS_COMPLETED:
# sleep(1)
# except KeyboardInterrupt:
# teardown(shell_objects)
# if result == "FAILED":
# sys.exit(1)
#
# print '#TINDERBOX# Testname = FullSuite'
# print '#TINDERBOX# Time elapsed = %s' % str (datetime.now() - starttime)
# if windmill.RESULTS['fail'] > 0 or windmill.RESULTS['pass'] is 0:
# result = "FAILED"
# else:
# result = "PASSED"
#
# print '#TINDERBOX# Status = %s' % result
# teardown(shell_objects)
# if result == "FAILED":
# sys.exit(1)
def start_windmill():
"""Start windmill and return shell_objects"""
configure_global_settings()
shell_objects = setup()
return shell_objects
def command_line_startup():
"""Command line startup"""
windmill.stdout, windmill.stdin = sys.stdout, sys.stdin
configure_global_settings()
action = process_options(sys.argv)
shell_objects = setup()
action(shell_objects)
action_mapping = {'shell':shell_action, 'runserver':runserver_action,
'run_service':runserver_action}
| yongshengwang/hue | build/env/lib/python2.7/site-packages/windmill-1.3-py2.7.egg/windmill/bin/admin_lib.py | Python | apache-2.0 | 13,002 |
import sounderinterface
import subprocess
@sounderinterface.register
class LinuxSounder(sounderinterface.SounderInterface):
osname='posix'
def __init__(self):
self.lastsound = None
def play(self,soundfile):
"""play a soundfile and return immediately"""
self.stop()
self.lastsound=subprocess.Popen(
['mplayer', soundfile],
stdout=open('/dev/null','w'),
stderr=open('/dev/null','w')
)
def stop(self):
"""clear any currently playing sound"""
if self.lastsound!=None:
self.lastsound.terminate()
self.lastsound=None
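# Illustrative usage sketch (not part of the original module). It assumes a
# POSIX machine with mplayer on PATH; the .wav path below is a placeholder,
# not a real project asset.
if __name__ == '__main__':
    import time
    sounder = LinuxSounder()
    sounder.play('/usr/share/sounds/alsa/Front_Center.wav')  # placeholder file
    time.sleep(2)   # let it play briefly
    sounder.stop()  # terminates the mplayer subprocess, if any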
| BackupTheBerlios/intertrain | lib/plat/posix_sounder.py | Python | gpl-3.0 | 642 |
#!/usr/bin/env vpython3
# Copyright 2016 The LUCI Authors. All rights reserved.
# Use of this source code is governed under the Apache License, Version 2.0
# that can be found in the LICENSE file.
import collections
import contextlib
import datetime
import json
import time
# Mutates sys.path.
import test_env
from utils import tools
tools.force_local_third_party()
# third_party/
import httplib2
from depot_tools import auto_stub
from utils import oauth
class LuciContextAuthTest(auto_stub.TestCase):
@contextlib.contextmanager
def lucicontext(self, token_response=None, response_code=200):
class MockLuciContextServer(object):
@staticmethod
def request(uri=None, method=None, body=None, headers=None):
self.assertEqual('POST', method)
self.assertEqual(
'http://127.0.0.1:0/rpc/LuciLocalAuthService.GetOAuthToken', uri)
self.assertEqual({'Content-Type': 'application/json'}, headers)
data = json.loads(body)
self.assertEqual('secret', data['secret'])
self.assertEqual(1, len(data['scopes']))
self.assertEqual('acc_a', data['account_id'])
self.assertEqual('https://www.googleapis.com/auth/userinfo.email',
data['scopes'][0])
response = collections.namedtuple('HttpResponse', ['status'])
response.status = response_code
content = json.dumps(token_response)
return response, content
self.mock_local_auth(MockLuciContextServer())
yield
def setUp(self):
super(LuciContextAuthTest, self).setUp()
self.mock_time(0)
def mock_local_auth(self, server):
def load_local_auth():
return oauth.LocalAuthParameters(
rpc_port=0,
secret='secret',
accounts=[oauth.LocalAuthAccount('acc_a')],
default_account_id='acc_a')
self.mock(oauth, '_load_local_auth', load_local_auth)
def http_server():
return server
self.mock(httplib2, 'Http', http_server)
def mock_time(self, delta):
self.mock(time, 'time', lambda: delta)
def _utc_datetime(self, delta):
return datetime.datetime.utcfromtimestamp(delta)
def test_get_access_token(self):
t_expire = 100
with self.lucicontext({'access_token': 'notasecret', 'expiry': t_expire}):
token = oauth._get_luci_context_access_token(oauth._load_local_auth())
self.assertEqual('notasecret', token.token)
self.assertEqual(self._utc_datetime(t_expire), token.expires_at)
def test_get_missing_token(self):
t_expire = 100
with self.lucicontext({'expiry': t_expire}):
token = oauth._get_luci_context_access_token(oauth._load_local_auth())
self.assertIsNone(token)
def test_get_missing_expiry(self):
with self.lucicontext({'access_token': 'notasecret'}):
token = oauth._get_luci_context_access_token(oauth._load_local_auth())
self.assertIsNone(token)
def test_get_access_token_with_errors(self):
with self.lucicontext({'error_code': 5, 'error_msg': 'fail'}):
token = oauth._get_luci_context_access_token(oauth._load_local_auth())
self.assertIsNone(token)
def test_validation(self):
t_expire = self._utc_datetime(120)
good_token = oauth.AccessToken("secret", t_expire)
self.assertEqual(
True,
oauth._validate_luci_context_access_token(good_token))
no_token = oauth.AccessToken("", t_expire)
self.assertEqual(
False,
oauth._validate_luci_context_access_token(no_token))
not_a_token = {'token': "secret", 'expires_at': t_expire}
self.assertEqual(
False,
oauth._validate_luci_context_access_token(not_a_token))
self.mock_time(50)
self.assertEqual(
True,
oauth._validate_luci_context_access_token(good_token))
self.mock_time(100)
self.assertEqual(
False,
oauth._validate_luci_context_access_token(good_token))
if __name__ == '__main__':
test_env.main()
| endlessm/chromium-browser | tools/swarming_client/tests/auth_test.py | Python | bsd-3-clause | 3,933 |
from django.test import TestCase, Client
from django.conf import settings
from django.db import IntegrityError
from django.contrib.auth.models import User
from api.models import Project, Task, Resource
from tokenauth.authbackends import TokenAuthBackend
from datetime import date
import requests
import responses
import json
def mock_auth_success(user=None):
url = '{0}/api/v1/users/me/' . format(settings.USERSERVICE_BASE_URL)
response_string = '{"username": "TEST"}'
if user is not None:
response_string = {
"username": user.username,
"id": user.pk
}
response_string = json.dumps(response_string)
responses.add(responses.GET, url,
body=response_string, status=200,
content_type='application/json')
def mock_auth_failure():
url = '{0}/api/v1/users/me/' . format(settings.USERSERVICE_BASE_URL)
responses.add(responses.GET, url,
body='', status=401,
content_type='application/json')
class ProjectModelTestCase(TestCase):
def test_project_unicode(self):
project = Project.quick_create(title="Test")
assert project.__unicode__() == 'Test'
def test_quick_create(self):
project = Project.quick_create()
assert isinstance(project, Project), 'Project instance is created'
class ResourceModelTestCase(TestCase):
def test_resource_quick_create(self):
resource = Resource.quick_create()
assert isinstance(resource, Resource)
def test_resource_quick_create_with_details(self):
project = Project.quick_create(title="TEST")
extra_data = {
"rate": 100
}
resource = Resource.quick_create(project=project, **extra_data)
assert resource.project.title == 'TEST', 'Expect project is explicitly set'
assert resource.rate == 100.00, 'Expect rate to be set by kwargs'
def test_project_user_unique_together(self):
project = Project.quick_create()
start_date = date.today()
Resource.objects.create(project=project, user=1, start_date=start_date)
Resource.objects.create(project=project, user=2, start_date=start_date)
try:
Resource.objects.create(project=project, user=2)
self.fail("Should not be able to add the same project and user twice")
except IntegrityError:
pass
class TaskModelTestCase(TestCase):
def test_quick_create(self):
task = Task.quick_create()
assert isinstance(task, Task), 'Task instance is created'
class ProjectEndpointTestCase(TestCase):
def setUp(self):
self.c = Client(Authorization='Token 123')
self.joe_admin = User.objects.create_superuser(username="admin", password="test", email="[email protected]")
self.joe_soap = User.objects.create_user(username="joe", password="test")
self.joe_soap.save()
## setup a bunch of Projects
p1 = Project.quick_create(title="P1", description="Search me", is_billable=True)
p2 = Project.quick_create(title="P2", is_billable=True)
p3 = Project.quick_create(title="P3", is_active=False)
p4 = Project.quick_create(title="P4", user=self.joe_soap.pk)
p5 = Project.quick_create(title="P5", user=self.joe_soap.pk)
p6 = Project.quick_create(title="P6")
Resource.quick_create(user=self.joe_soap.pk, project=p4)
Resource.quick_create(user=self.joe_soap.pk, project=p3)
Resource.quick_create(user=self.joe_admin.pk, project=p1)
Resource.quick_create(user=self.joe_admin.pk, project=p2)
@responses.activate
def test_get_projects_list_requires_auth(self):
mock_auth_failure()
response = self.c.get("/api/v1/projects/")
assert response.status_code == 403, 'Expect permission denied'
@responses.activate
def test_get_project_list(self):
mock_auth_success(self.joe_soap)
#self.c.logout()
#login_result = self.c.login(username="joe", password="test")
response = self.c.get("/api/v1/projects/")
assert response.status_code == 200, 'Expect 200 OK'
assert len(json.loads(response.content)) == 2, 'Expect 2 projects back'
@responses.activate
def test_get_project_list_admin_gets_all_projects(self):
mock_auth_success(self.joe_admin)
#self.c.logout()
#login_result = self.c.login(username="joe", password="test")
response = self.c.get("/api/v1/projects/")
assert response.status_code == 200, 'Expect 200 OK'
assert len(json.loads(response.content)) == 6, 'Expect 6 projects back'
@responses.activate
def test_get_project_list_filter_on_active(self):
mock_auth_success(self.joe_admin)
response = self.c.get("/api/v1/projects/?is_active=False")
titles = [project.get("title") for project in json.loads(response.content)]
expected_titles = ['P3']
assert titles == expected_titles, 'Expect results to be filtered on is_active=False'
@responses.activate
def test_get_project_list_filter_on_billable(self):
mock_auth_success(self.joe_admin)
response = self.c.get("/api/v1/projects/?is_billable=True")
titles = [project.get("title") for project in json.loads(response.content)]
expected_titles = ['P1', 'P2']
assert titles == expected_titles, 'Expect results to be filtered on is_billable=True'
@responses.activate
def test_get_project_list_search_title(self):
mock_auth_success(self.joe_admin)
response = self.c.get("/api/v1/projects/?search=P1")
titles = [project.get("title") for project in json.loads(response.content)]
expected_titles = ['P1']
assert titles == expected_titles, 'Expect search to return matching title'
@responses.activate
def test_get_project_list_search_description(self):
mock_auth_success(self.joe_admin)
response = self.c.get("/api/v1/projects/?search=Search")
titles = [project.get("title") for project in json.loads(response.content)]
expected_titles = ['P1']
assert titles == expected_titles, 'Expect search to return matching description'
@responses.activate
def test_get_project_orders_by_title(self):
mock_auth_success(self.joe_admin)
response = self.c.get("/api/v1/projects/?ordering=title")
titles = [project.get("title") for project in json.loads(response.content)]
expected_titles = ['P1', 'P2', 'P3', 'P4', 'P5', 'P6']
assert titles == expected_titles, 'Expect search results ordered by title'
@responses.activate
def test_get_project(self):
project = Project.quick_create()
mock_auth_success(self.joe_admin)
response = self.c.get("/api/v1/projects/{0}/" . format (project.pk))
expected_fields = ['pk', 'title', 'description', 'start_date', 'end_date', 'is_billable', 'is_active', 'task_set', 'resource_set']
for field in expected_fields:
assert response.data.get(field, "NOTSET") != "NOTSET", 'Assert field {0} is returned in the response' . format (field)
assert response.status_code == 200, 'Expect 200 OK'
class TaskEndpointTestCase(TestCase):
def setUp(self):
self.c = Client(Authorization='Token 123')
# create some Tasks
| TangentMicroServices/ProjectService | api/tests/tests.py | Python | gpl-2.0 | 6,753 |
from django.contrib.auth.models import User
from django.core.urlresolvers import reverse
from django.db import models
class BlogPost(models.Model):
author = models.ForeignKey(User)
title = models.CharField(max_length=50)
timestamp = models.DateTimeField()
body = models.TextField()
summary = models.CharField(max_length=100)
slug = models.SlugField()
url_fields = {
'slug': 'slug',
}
class Meta:
ordering = ['-timestamp']
def __unicode__(self):
return "%s (%s)" % (self.title, self.timestamp.strftime("%B %d, %Y"))
def get_absolute_url(self):
return reverse('news_view', args=[self.slug])
def get_num_comments(self):
return self.blogcomment_set.count()
class BlogComment(models.Model):
author = models.ForeignKey(User)
post = models.ForeignKey(BlogPost)
body = models.TextField()
| dellsystem/wikinotes | blog/models.py | Python | gpl-3.0 | 888 |
import os
# <ref>
| siosio/intellij-community | python/testData/resolve/multiFile/relativeAndSameDirectoryImports/python2OrdinaryPackageImportPrioritizeSameDirectoryModuleOverSdk/ordinaryPackage/script.py | Python | apache-2.0 | 24 |
# -*- coding: utf-8 -*-
import json
import os
import platform
import sys
def format_full_version(info):
version = "{0.major}.{0.minor}.{0.micro}".format(info)
kind = info.releaselevel
if kind != "final":
version += kind[0] + str(info.serial)
return version
# Support for 508's implementation_version.
if hasattr(sys, "implementation"):
implementation_version = format_full_version(sys.implementation.version)
else:
implementation_version = "0"
# Default to cpython for 2.7.
if hasattr(sys, "implementation"):
implementation_name = sys.implementation.name
else:
implementation_name = "cpython"
lookup = {
"os_name": os.name,
"sys_platform": sys.platform,
"platform_machine": platform.machine(),
"platform_python_implementation": platform.python_implementation(),
"platform_release": platform.release(),
"platform_system": platform.system(),
"platform_version": platform.version(),
"python_version": platform.python_version()[:3],
"python_full_version": platform.python_version(),
"implementation_name": implementation_name,
"implementation_version": implementation_version,
}
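# Illustrative helper (not part of the original module): the keys above follow
# PEP 508 environment-marker names, so a consumer that loads the printed JSON
# can answer simple equality markers against it. `marker_equals` is a
# hypothetical name used only to show how the table is meant to be consumed.
def marker_equals(table, marker, expected):
    """Return True if the captured value for `marker` equals `expected`,
    e.g. marker_equals(lookup, "os_name", "posix")."""
    return table.get(marker) == expected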
if __name__ == "__main__":
print(json.dumps(lookup))
| kennethreitz/pipenv | pipenv/pep508checker.py | Python | mit | 1,220 |
'''
Integration test for powering off mini hosts.
#1.operations & power off random hosts
#2.start hosts
#3.duplicated operation
@author: zhaohao.chen
'''
import apibinding.inventory as inventory
import zstackwoodpecker.test_util as test_util
import zstackwoodpecker.test_state as test_state
import zstackwoodpecker.test_lib as test_lib
import zstackwoodpecker.operations.resource_operations as res_ops
import zstackwoodpecker.zstack_test.zstack_test_vm as test_vm_header
import zstackwoodpecker.zstack_test.zstack_test_volume as test_volume_header
import zstackwoodpecker.operations.vm_operations as vm_ops
import zstackwoodpecker.operations.volume_operations as vol_ops
import zstackwoodpecker.operations.host_operations as host_ops
import zstackwoodpecker.operations.scenario_operations as sce_ops
import time
import os
import random
import threading
import hashlib
MN_IP = res_ops.query_resource(res_ops.MANAGEMENT_NODE)[0].hostName
admin_password = hashlib.sha512('password').hexdigest()
zstack_management_ip = os.environ.get('zstackManagementIp')
test_stub = test_lib.lib_get_test_stub()
test_obj_dict = test_state.TestStateDict()
def recover_hosts(host_uuids, host_ips, wait_time):
for ip in host_ips:
cond = res_ops.gen_query_conditions('vmNics.ip', '=', ip)
vm = sce_ops.query_resource(zstack_management_ip, res_ops.VM_INSTANCE, cond).inventories[0]
if vm.state != 'Stopped':
test_util.test_fail("Fail to power off host:{}".format(vm.uuid))
sce_ops.start_vm(zstack_management_ip, vm.uuid)
time.sleep(wait_time)#wait MN
for uuid in host_uuids:
host_ops.reconnect_host(uuid)
def operations_shutdown(shutdown_thread, host_uuids, host_ips, wait_time, operation_thread=None):
if operation_thread:
operation_thread.start()
shutdown_thread.start()
shutdown_thread.join()
time.sleep(180)
recover_hosts(host_uuids, host_ips, wait_time)
def test():
global test_obj_dict
wait_time = 120
round = 2
test_util.test_logger("@@:mnip:{}".format(zstack_management_ip))
cond = res_ops.gen_query_conditions('managementIp', '=', MN_IP)
MN_HOST = res_ops.query_resource(res_ops.HOST, cond)[0]
cluster_list = res_ops.get_resource(res_ops.CLUSTER)
vm = test_stub.create_vm()
test_obj_dict.add_vm(vm)
for i in range(round):
host_uuids = []
host_ips = []
        mn_flag = None  # whether the candidate hosts include the MN node
#operations & power off random hosts
test_util.test_logger("round {}".format(i))
cluster_uuid = random.choice(cluster_list).uuid
cond = res_ops.gen_query_conditions('cluster.uuid', '=', cluster_uuid)
cluster_hosts = res_ops.query_resource(res_ops.HOST, cond)
for host in cluster_hosts:
if host.uuid == MN_HOST.uuid:
mn_flag = 1
                wait_time = 900  # allow extra time for the MN to come back up
host_uuids.append(host.uuid)
host_ips.append(host.managementIp)
migrate_thread = threading.Thread(target=test_stub.migrate_vm_to_random_host, args=(vm,))
power_off_thread = threading.Thread(target=host_ops.poweroff_host, args=(host_uuids, admin_password, mn_flag))
operations_shutdown(power_off_thread, host_uuids, host_ips, wait_time, migrate_thread)
test_util.test_pass("pass")
def error_cleanup():
global test_obj_dict
test_lib.lib_error_cleanup(test_obj_dict)
def env_recover():
global test_obj_dict
test_lib.lib_error_cleanup(test_obj_dict)
| zstackio/zstack-woodpecker | integrationtest/vm/mini/poweroff/test_poweroff_host_migrate.py | Python | apache-2.0 | 3,539 |
#!/usr/bin/env python
"""
Calculate entropies of each leaf on each branch node of a tree for each column
Usage:
entropy.py -a ali.s9t100.fa -n gpcrdb_gapped_tm_numbering.csv -t ali.s9t100.ph > ali.s9t100.entropies
"""
import argparse
import collections
import logging
import math
from Bio import Phylo, AlignIO
import snooker
def calculate_entropies(tree_file, alignment_file, numbering_file,
min_node_size, max_node_size, number_format):
numberings = snooker.Numberings.from_file(numbering_file)
ali2gpcrdb = numberings.lookup(snooker.ALIGNMENT_POSITION, number_format)
alignment = AlignIO.read(alignment_file, 'fasta')
id2seq = {row.id: row.seq for row in alignment}
tree = Phylo.read(tree_file, 'newick')
all_leafs = set([leaf.name for leaf in tree.get_terminals()])
# for each column determine the aa distribution
all_counters = {}
for col in ali2gpcrdb:
all_counters[col] = collections.Counter([seq[col - 1] for seq in id2seq.values()])
print('{},{},{},{},{},{},{},{}'.format('node_id',
'alignment_pos',
number_format,
'entropy_inside',
'entropy_outside',
'score',
'variability_inside',
'variability_outside',
))
for node_id, node in enumerate(tree.get_nonterminals()):
leafs_of_node = set([leaf.name for leaf in node.get_terminals()])
if not (min_node_size <= len(leafs_of_node) <= max_node_size):
msg = '{} has {} leafs, skipping'.format(node, len(leafs_of_node))
logging.info(msg)
continue
leafs_outside_node = all_leafs - leafs_of_node
seqs_inside = [id2seq[v] for v in leafs_of_node]
nr_inside = float(len(leafs_of_node))
nr_outside = float(len(leafs_outside_node))
# loop over columns
for col in ali2gpcrdb:
aa_inside = collections.Counter([seq[col - 1] for seq in seqs_inside])
f_i_inside = 0
for count in aa_inside.values():
f_i_inside += count / nr_inside * math.log(count / nr_inside)
entropy_inside = -1 * f_i_inside
variability_inside = len(aa_inside)
aa_outside = all_counters[col] - aa_inside
f_i_outside = 0
for aa, count in aa_outside.items():
f_i_outside += count / nr_outside * math.log(count / nr_outside)
entropy_outside = -1 * f_i_outside
variability_outside = len(aa_outside)
distinct_aa = 21 # all amino acids and gap (-)
score = math.sqrt(pow(abs(math.log(1.0 / distinct_aa)) - entropy_outside, 2)
+ pow(entropy_inside, 2))
print('{},{},{},{},{},{},{},{}'.format(node_id,
col,
ali2gpcrdb[col],
entropy_inside,
entropy_outside,
score,
variability_inside,
variability_outside,
))
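# Illustrative restatement of the "score" computed above (defined here only for
# clarity; the loop computes it inline and nothing in this script calls it):
# the score is the Euclidean distance, in entropy space, from the observed
# (entropy_outside, entropy_inside) pair to the ideal point (log(distinct_aa), 0),
# i.e. a column that is maximally variable outside the node yet perfectly
# conserved inside it, so smaller scores flag candidate group-specific positions.
def distance_to_ideal(entropy_inside, entropy_outside, distinct_aa=21):
    return math.sqrt((abs(math.log(1.0 / distinct_aa)) - entropy_outside) ** 2
                     + entropy_inside ** 2)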
parser = argparse.ArgumentParser(description='Calculate entropies of each leaf on each branch node of a tree for each column')
parser.add_argument('-a', '--alignment', type=argparse.FileType('r'), required=True, help='Multiple sequence alignment (fasta format)')
parser.add_argument('-n', '--numbering', type=argparse.FileType('r'), required=True, help='Numbering file, translate sequence alignment position into generic numbering scheme')
parser.add_argument('-t', '--tree', type=argparse.FileType('r'), required=True, help='Tree of multiple sequence alignment (newick format)')
parser.add_argument('--min_node_size', type=int, default=20, help='Calculate entropies for nodes with a minimum number of leafs')
parser.add_argument('--max_node_size', type=int, default=20, help='Calculate entropies for nodes with a maximum number of leafs')
parser.add_argument('--number_format', default='gpcrdb_alignment', help='Column from numbering file to include in output')
args = parser.parse_args()
calculate_entropies(args.tree, args.alignment, args.numbering,
args.min_node_size, args.max_node_size, args.number_format)
| 3D-e-Chem/snooker-alignment | scripts/entropy.py | Python | apache-2.0 | 4,710 |
from prompt_toolkit.shortcuts import prompt
from prompt_toolkit.styles import style_from_dict
from prompt_toolkit.token import Token
from prompt_toolkit.validation import Validator, ValidationError
class _OptionValidator(Validator):
def __init__(self, options, default):
super().__init__()
self.options = [o.lower() for o in options]
self.defaultAllowed = default is not None
def validate(self, document):
text = document.text.lower().strip()
if self.defaultAllowed and not text:
return
elif text not in self.options:
raise ValidationError(message='Invalid response', cursor_position=len(document.text))
class _StringValidator(Validator):
def validate(self, document):
text = document.text.strip()
if not text:
raise ValidationError(message='Invalid response', cursor_position=len(document.text))
_prompt_styles = style_from_dict({
Token: '#dddddd',
Token.Sigil: '#00ff00',
Token.Prompt: '#dddddd',
Token.Symbol: '#777777',
Token.Option: '#00ffff',
Token.Default: '#ff77ff',
})
def promptOptions(msg, options, default=None):
tokens = [(Token.Sigil, "* "),
(Token.Prompt, msg),
(Token.Symbol, " ["),]
first = True
for option in options:
if first:
first = False
else:
tokens.append((Token.Symbol, ","))
if option == default:
tokens.append((Token.Default, option.upper()))
else:
tokens.append((Token.Option, option))
tokens.append((Token.Symbol, "] : "))
val = prompt(get_prompt_tokens=lambda x: tokens, style=_prompt_styles, validator=_OptionValidator(options, default))
if val:
return val.lower().strip()
return default
def promptString(msg):
tokens = [(Token.Sigil, "* "),
(Token.Prompt, msg),
(Token.Symbol, " : ")]
val = prompt(get_prompt_tokens=lambda x: tokens, style=_prompt_styles, validator=_StringValidator())
if val:
return val.strip()
return None
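# Tiny interactive demo of the two helpers above (illustrative only; it needs a
# real terminal, so it is guarded behind __main__ and never runs on import).
if __name__ == '__main__':
    answer = promptOptions("Overwrite existing file?", ["yes", "no"], default="no")
    name = promptString("Project name")
    print(answer, name)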
| tmearnest/sbd | pdfs/Prompt.py | Python | mit | 2,116 |
# -*- coding: utf-8 -*-
import re
from module.plugins.internal.SimpleHoster import SimpleHoster, create_getInfo
class UserscloudCom(SimpleHoster):
__name__ = "UserscloudCom"
__type__ = "hoster"
__version__ = "0.01"
__status__ = "testing"
__pattern__ = r'https?://(?:www\.)?userscloud\.com/\w{12}'
__description__ = """Userscloud.com hoster plugin"""
__license__ = "GPLv3"
__authors__ = [("GammaC0de", None)]
NAME_PATTERN = r'<h2 class="strong margin-none">(?P<N>.+?)<'
SIZE_PATTERN = r'<div class="ribbon">(?P<S>[\d.,]+) (?P<U>[\w^_]+)<'
OFFLINE_PATTERN = r'The file you are trying to download is no longer available'
def setup(self):
self.multiDL = True
self.resume_download = False
self.chunk_limit = 1
def handle_free(self, pyfile):
self.download(pyfile.url,
post=dict(re.findall(r'<input type="hidden" name="(.+?)" value="(.*?)">', self.html)))
getInfo = create_getInfo(UserscloudCom)
| ace02000/pyload | module/plugins/hoster/UserscloudCom.py | Python | gpl-3.0 | 1,042 |
# -*- coding: utf-8 -*-
# Copyright 2021 OpenSynergy Indonesia
# Copyright 2021 PT. Simetri Sinergi Indonesia
# License AGPL-3.0 or later (http://www.gnu.org/licenses/agpl).
from . import models
| open-synergy/opnsynid-partner-contact | partner_contact_age/__init__.py | Python | agpl-3.0 | 196 |
"""
SASNets main file. Contains the main neural network code used for training
networks.
SASNets uses Keras and Tensorflow for the networks. You can change the backend
to Theano or CNTK through the Keras config file.
"""
# System imports
import argparse
import logging
import os
import sys
import time
import random
import json
# Installed packages
import numpy as np
from sklearn.model_selection import train_test_split
from sklearn.preprocessing import LabelEncoder
from tensorflow import keras
from tensorflow.keras.callbacks import TensorBoard, EarlyStopping, ModelCheckpoint
from tensorflow.keras.layers import Conv1D, Dropout, Flatten, Dense, Embedding, \
MaxPooling1D, InputLayer
from tensorflow.keras.models import Sequential
from tensorflow.keras.utils import to_categorical
# SASNets packages
from . import sas_io
from .util.utils import inepath
# Define the argparser parameters
parser = argparse.ArgumentParser(
description="Use neural nets to classify scattering data.")
parser.add_argument(
"--train", type=str, default="train",
help="Training data table.")
parser.add_argument(
"--validation", type=str, default="30",
help="Validation data table, or percent of training data.")
parser.add_argument(
"--test", type=str, default="10",
help="Test data table, or percent of training data.")
parser.add_argument(
"--database", type=str, default=sas_io.DB_FILE,
help="Path to the sqlite database file.")
parser.add_argument(
"--steps", type=int, default=None,
help="Number of steps per epochs.")
parser.add_argument(
"--epochs", type=int, default=50,
help="Number of epochs.")
parser.add_argument(
"--batch", type=int, default=5,
help="Batch size.")
parser.add_argument(
"--tensorboard", type=str, default="tensorboard",
help="Tensorboard directory.")
parser.add_argument(
"-v", "--verbose", action="store_true",
help="Control output verbosity")
parser.add_argument(
"-r", "--resume", action='store_true', dest='resume',
help="resume fit stored in --save-path")
parser.add_argument(
"--noresume", action='store_false', dest='resume',
help="don't resume fit stored in --save-path")
parser.set_defaults(feature=True)
parser.add_argument(
"-s", "--save-path", default="./savenet/out",
help="Path to save model weights and info to")
class OnehotEncoder:
def __init__(self, categories):
self.categories = sorted(categories)
self.label_encoder = LabelEncoder().fit(self.categories)
self.hotlen = len(categories)
def __call__(self, y):
return to_categorical(self.label_encoder.transform(y), self.hotlen)
def index(self, y):
return self.label_encoder.transform(y)
def label(self, index):
return self.label_encoder.inverse_transform(index)
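# Usage sketch for OnehotEncoder (illustrative; the labels are made up):
#   enc = OnehotEncoder(["cylinder", "ellipsoid", "sphere"])
#   enc(["sphere", "cylinder"])  # -> [[0., 0., 1.], [1., 0., 0.]] (labels sorted alphabetically)
#   enc.index(["sphere"])        # -> array([2])
#   enc.label([2])               # -> array(['sphere'], ...)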
def fix_dims(*args):
"""
Insert extra dimension on inputs for image channel.
Keras seems to require an extra dimension on the inputs, which is
either at the start or the end of the input item, depending on the
backend in use. Usual is at the end?
"""
order = keras.backend.image_data_format()
if order == 'channels_last':
fixer = lambda x: np.asarray(x)[..., None]
elif order == 'channels_first':
fixer = lambda x: np.asarray(x)[:, None, ...]
else:
raise ValueError(f"unknown image data format {order}")
if len(args) == 0:
return fixer
if len(args) == 1:
return fixer(args[0])
return (fixer(v) for v in args)
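# Shape sketch for fix_dims (illustrative): with Keras' default 'channels_last'
# ordering, a batch of curves shaped (n_samples, n_q) comes back as
# (n_samples, n_q, 1), which matches the Conv1D input_shape=[nq, 1] used below;
# with 'channels_first' it becomes (n_samples, 1, n_q) instead.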
def reload_net(path):
"""
Loads a classifier saved by sasnets from *path*.
:param path: Relative or absolute path to the .h5 model file
:return: The loaded classifier.
"""
return keras.models.load_model(os.path.normpath(path))
def save_output(save_path, model, encoder, history, seed, score):
# Create output directory.
basename = inepath(save_path)
if basename is not None:
model.save(basename + ".h5")
out = {
'categories': encoder.categories,
'history': history.history,
# seed is used to split training and evaluation data.
# for sqlite it does nothing.
'seed': seed,
'score': score,
}
with open(basename + ".history.json", 'w') as fd:
json.dump(out, fd, cls=sas_io.NpEncoder)
plot_history(history, basename=basename)
def sql_net(opts):
"""
A 1D convnet that uses a generator reading from a SQL database
instead of loading all files into memory at once.
"""
verbose = 1 if opts.verbose else 0
db = sas_io.sql_connect(opts.database)
counts = model_counts(db, tag=opts.train)
encoder = OnehotEncoder(counts.keys())
train_seq = sas_io.iread_sql(
db, opts.train, encoder=encoder, batch_size=opts.batch)
validation_seq = sas_io.iread_sql(
db, opts.validation, encoder=encoder, batch_size=opts.batch)
# Grab some training data so we can see how big it is
x, y = next(train_seq)
tb = TensorBoard(log_dir=opts.tensorboard, histogram_freq=1)
es = EarlyStopping(min_delta=0.001, patience=15, verbose=verbose)
# Begin model definitions
nq = len(x[0])
model = Sequential()
model.add(Conv1D(nq, kernel_size=8, activation='relu', input_shape=[nq, 1]))
model.add(MaxPooling1D(pool_size=4))
model.add(Dropout(.17676))
model.add(Conv1D(nq//2, kernel_size=6, activation='relu'))
model.add(MaxPooling1D(pool_size=3))
model.add(Dropout(.20782))
model.add(Flatten())
model.add(Dense(nq//4, activation='tanh'))
model.add(Dropout(.20582))
model.add(Dense(nq//4, activation='softmax'))
model.compile(loss="categorical_crossentropy",
optimizer=keras.optimizers.Adadelta(),
metrics=['accuracy'])
# Model Run
if verbose > 0:
print(model.summary())
history = model.fit_generator(
train_seq, steps_per_epoch=opts.steps, epochs=opts.epochs,
workers=1, verbose=verbose, validation_data=validation_seq,
max_queue_size=1, callbacks=[tb, es])
    # The SQL path streams batches straight from the database and never builds
    # a held-out (xval, yval) array pair, so no final evaluation is run here.
    score = None
save_output(
save_path=opts.save_path,
model=model,
encoder=encoder,
history=history,
seed=None,
score=score)
logging.info("Complete.")
def oned_convnet(opts, x, y, test=None, seed=235):
"""
Runs a 1D convolutional classification neural net on the input data x and y.
    :param opts: Parsed command-line options (epochs, batch size, save path, verbosity, etc.).
    :param x: List of training data x.
    :param y: List of corresponding categories for each vector in x.
    :param test: Optional (x, y) tuple of held-out evaluation data, scored after training.
    :param seed: Random seed. Defaults to 235 for reproducibility purposes, but should be set randomly in an actual run.
:return: None.
"""
verbose = 1 if opts.verbose else 0
# 1-hot encoding.
categories = sorted(set(y))
encoder = OnehotEncoder(categories)
# Split data into train and validation.
test_size = float(opts.validation)/100
xtrain, xval, ytrain, yval = train_test_split(
x, encoder(y), test_size=test_size, random_state=seed)
    # Conv1D needs an explicit channel axis on the inputs, so add one here.
xtrain, xval = fix_dims(xtrain, xval)
nq, nlabels = x.shape[1], len(categories)
# Check that the validation data covers all the categories
#if categories != sorted(set(ytrain)):
# raise ValueError("Training data is missing categories.")
#if categories != sorted(set(yval)):
# raise ValueError("Test data is missing categories.")
tb = TensorBoard(log_dir=opts.tensorboard, histogram_freq=1)
#es = EarlyStopping(min_delta=0.005, patience=5, verbose=verbose)
basename = inepath(opts.save_path)
checkpoint = ModelCheckpoint(
filepath=basename+"-check.h5", # or "-check{epoch:03d}.h5",
## To keep best loss, and not overwrite every epoch.
#monitor='loss', save_best_only=True, mode='auto',
)
if opts.resume:
model = reload_net(inepath(opts.save_path)+'.h5')
else:
# Begin model definitions
model = Sequential()
#model.add(Embedding(4000, 128, input_length=x.shape[1]))
model.add(InputLayer(input_shape=(nq,1)))
model.add(Conv1D(nq, kernel_size=6, activation='relu'))
model.add(MaxPooling1D(pool_size=4))
model.add(Dropout(.17676))
model.add(Conv1D(nq//2, kernel_size=6, activation='relu'))
model.add(MaxPooling1D(pool_size=4))
model.add(Dropout(.20782))
model.add(Flatten())
model.add(Dense(nq//4, activation='tanh'))
model.add(Dropout(.20582))
model.add(Dense(nlabels, activation='softmax'))
loss = ('binary_crossentropy' if nlabels == 2
else 'categorical_crossentropy')
model.compile(loss=loss, optimizer=keras.optimizers.Adadelta(),
metrics=['accuracy'])
if verbose > 0:
print(model.summary())
# Model Run
history = model.fit(
xtrain, ytrain, batch_size=opts.batch,
steps_per_epoch=opts.steps, epochs=opts.epochs,
verbose=verbose, validation_data=(xval, yval),
#callbacks=[tb, es, checkpoint],
callbacks=[tb, checkpoint],
)
# Check the results against the validation data.
score = None
if test is not None:
if categories != sorted(set(test[1])):
raise ValueError("Validation data has missing categories.")
score = model.evaluate(test[0], encoder(test[1]), verbose=verbose)
print('\nTest loss: ', score[0])
print('Test accuracy:', score[1])
save_output(
save_path=opts.save_path,
model=model,
encoder=encoder,
history=history,
seed=seed,
score=score)
logging.info("Complete.")
def trad_nn(x, y, xtest=None, ytest=None, seed=235):
"""
Runs a traditional MLP categorisation neural net on the input data x and y.
:param x: List of training data x.
:param y: List of corresponding categories for each vector in x.
:param seed: Random seed. Defaults to 235 for reproducibility purposes, but should be set randomly in an actual run.
    :param xtest: Optional evaluation data for the model.
    :param ytest: Optional evaluation categories for the model.
:return: None
"""
verbose = 1
categories = sorted(set(y))
encoder = OnehotEncoder(categories)
xtrain, xval, ytrain, yval= train_test_split(
x, encoder(y), test_size=.25, random_state=seed)
model = Sequential()
model.add(Dense(128, activation='relu', input_dim=x.shape[1]))
model.add(Dropout(0.25))
model.add(Dense(256, activation='relu'))
model.add(Dropout(0.5))
model.add(Dense(256, activation='relu'))
model.add(Dropout(0.25))
model.add(Dense(512, activation='relu'))
model.add(Dropout(0.5))
model.add(Dense(len(set(y)), activation='softmax'))
model.compile(loss='categorical_crossentropy', optimizer="adam",
metrics=['accuracy'])
print(model.summary())
history = model.fit(xtrain, ytrain, batch_size=10, epochs=10,
verbose=verbose, validation_data=(xval, yval))
if xtest and ytest:
score = model.evaluate(xtest, ytest, verbose=verbose)
print('Test loss: ', score[0])
print('Test accuracy:', score[1])
#plot_history(history, basename=basename)
def plot_history(history, basename=None):
import matplotlib.pyplot as plt
plt.plot(history.history['accuracy'])
plt.plot(history.history['val_accuracy'])
plt.title('model accuracy')
plt.ylabel('accuracy')
plt.xlabel('epoch')
plt.legend(['train', 'test'], loc='upper left')
with open(basename + ".svg", 'w') as fd:
plt.savefig(fd, format='svg', bbox_inches='tight')
def read_data(opts):
time_start = time.perf_counter()
#q, iq, label, n = sas_io.read_1d_seq(opts.path, tag=opts.train, verbose=verbose)
db = sas_io.sql_connect(opts.database)
iq, label = sas_io.read_sql(db, opts.train)
db.close()
time_end = time.perf_counter()
logging.info(f"File I/O Took {time_end-time_start} seconds for {len(label)} points of data.")
return np.asarray(iq), label
def main(args):
"""
Main method. Takes in arguments from command line and runs a model.
:param args: Command line args.
:return: None.
"""
opts = parser.parse_args(args)
data, label = read_data(opts)
#print(data.shape)
seed = random.randint(0, 2 ** 32 - 1)
logging.info(f"Random seed for this iter is {seed}")
oned_convnet(opts, data, label, seed=seed)
if __name__ == '__main__':
logging.basicConfig(level=logging.INFO)
main(sys.argv[1:])
| scattering/sasnets | sasnets/sasnet.py | Python | bsd-3-clause | 13,191 |
import collections
import ipaddress
from visidata import VisiData, vd, Sheet, options, Column, asyncthread, Progress, TsvSheet, getattrdeep, ColumnAttr, date, vlen, filesize
vd.option('pcap_internet', 'n', '(y/s/n) if save_dot includes all internet hosts separately (y), combined (s), or does not include the internet (n)')
protocols = collections.defaultdict(dict) # ['ethernet'] = {[6] -> 'IP'}
_flags = collections.defaultdict(dict) # ['tcp'] = {[4] -> 'FIN'}
url_oui = 'https://visidata.org/plugins/pcap/wireshark-oui.tsv'
url_iana = 'https://visidata.org/plugins/pcap/iana-ports.tsv'
oui = {} # [macprefix (like '01:02:dd:0')] -> 'manufacturer'
services = {} # [('tcp', 25)] -> 'smtp'
@VisiData.api
def open_pcap(vd, p):
return PcapSheet(p.name, source=p)
open_cap = open_pcap
open_pcapng = open_pcap
open_ntar = open_pcap
def manuf(mac):
return oui.get(mac[:13]) or oui.get(mac[:10]) or oui.get(mac[:8])
def macaddr(addrbytes):
mac = ':'.join('%02x' % b for b in addrbytes)
return mac
def macmanuf(mac):
manuf = oui.get(mac[:13])
if manuf:
return manuf + mac[13:]
manuf = oui.get(mac[:10])
if manuf:
return manuf + mac[10:]
manuf = oui.get(mac[:8])
if manuf:
return manuf + mac[8:]
return mac
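# Illustrative example of the prefix lookups above (address made up): for
# mac = "00:50:c2:12:34:56", mac[:13] is "00:50:c2:12:3" (a /36 block),
# mac[:10] is "00:50:c2:1" (/28) and mac[:8] is "00:50:c2" (/24); macmanuf()
# tries the most specific registered block first and falls back to returning
# the raw address when no manufacturer prefix is known.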
def norm_host(host):
if not host:
return None
srcmac = str(host.macaddr)
if srcmac == 'ff:ff:ff:ff:ff:ff': return None
srcip = str(host.ipaddr)
if srcip == '0.0.0.0' or srcip == '::': return None
if srcip == '255.255.255.255': return None
if host.ipaddr:
if host.ipaddr.is_global:
opt = options.pcap_internet
if opt == 'n':
return None
elif opt == 's':
return "internet"
if host.ipaddr.is_multicast:
# include in multicast (minus dns?)
return 'multicast'
names = [host.hostname, host.ipaddr, macmanuf(host.macaddr)]
return '\\n'.join(str(x) for x in names if x)
def FlagGetter(flagfield):
def flags_func(fl):
return ' '.join([flagname for f, flagname in _flags[flagfield].items() if fl & f])
return flags_func
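# Illustrative example (bit values from dpkt's TH_* constants): a TCP flags
# byte of 0x12 has both SYN (0x02) and ACK (0x10) set, so FlagGetter('tcp')
# renders it as a space-separated list such as "ACK SYN"; the name order
# follows the order the constants were loaded into _flags['tcp'].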
def init_pcap():
if protocols: # already init'ed
return
global dpkt, dnslib
import dpkt
import dnslib
load_consts(protocols['ethernet'], dpkt.ethernet, 'ETH_TYPE_')
load_consts(protocols['ip'], dpkt.ip, 'IP_PROTO_')
load_consts(_flags['ip_tos'], dpkt.ip, 'IP_TOS_')
load_consts(protocols['icmp'], dpkt.icmp, 'ICMP_')
load_consts(_flags['tcp'], dpkt.tcp, 'TH_')
load_oui(url_oui)
load_iana(url_iana)
def read_pcap(f):
try:
return dpkt.pcapng.Reader(f.open_bytes())
except ValueError:
return dpkt.pcap.Reader(f.open_bytes())
@asyncthread
def load_oui(url):
vsoui = TsvSheet('vsoui', source=vd.urlcache(url, days=30))
vsoui.reload.__wrapped__(vsoui)
for r in vsoui.rows:
if r.prefix.endswith('/36'): prefix = r.prefix[:13]
elif r.prefix.endswith('/28'): prefix = r.prefix[:10]
else: prefix = r.prefix[:8]
try:
oui[prefix.lower()] = r.shortname
except Exception as e:
vd.exceptionCaught(e)
@asyncthread
def load_iana(url):
ports_tsv = TsvSheet('ports_tsv', source=vd.urlcache(url, days=30))
ports_tsv.reload.__wrapped__(ports_tsv)
for r in ports_tsv.rows:
try:
services[(r.transport, int(r.port))] = r.service
except Exception as e:
vd.exceptionCaught(e)
class Host:
dns = {} # [ipstr] -> dnsname
hosts = {} # [macaddr] -> { [ipaddr] -> Host }
@classmethod
def get_host(cls, pkt, field='src'):
mac = macaddr(getattr(pkt, field))
machosts = cls.hosts.get(mac, None)
if not machosts:
machosts = cls.hosts[mac] = {}
ipraw = getattrdeep(pkt, 'ip.'+field, None)
if ipraw is not None:
ip = ipaddress.ip_address(ipraw)
if ip not in machosts:
machosts[ip] = Host(mac, ip)
return machosts[ip]
else:
if machosts:
return list(machosts.values())[0]
return Host(mac, None)
@classmethod
def get_by_ip(cls, ip):
'Returns Host instance for the given ip address.'
ret = cls.hosts_by_ip.get(ip)
if ret is None:
ret = cls.hosts_by_ip[ip] = [Host(ip)]
return ret
def __init__(self, mac, ip):
self.ipaddr = ip
self.macaddr = mac
self.mac_manuf = None
def __str__(self):
return str(self.hostname or self.ipaddr or macmanuf(self.macaddr))
def __lt__(self, x):
if isinstance(x, Host):
return str(self.ipaddr) < str(x.ipaddr)
return True
@property
def hostname(self):
return Host.dns.get(str(self.ipaddr))
def load_consts(outdict, module, attrprefix):
for k in dir(module):
if k.startswith(attrprefix):
v = getattr(module, k)
outdict[v] = k[len(attrprefix):]
def getTuple(pkt):
if getattrdeep(pkt, 'ip.tcp', None):
tup = ('tcp', Host.get_host(pkt, 'src'), pkt.ip.tcp.sport, Host.get_host(pkt, 'dst'), pkt.ip.tcp.dport)
elif getattrdeep(pkt, 'ip.udp', None):
tup = ('udp', Host.get_host(pkt, 'src'), pkt.ip.udp.sport, Host.get_host(pkt, 'dst'), pkt.ip.udp.dport)
else:
return None
a,b,c,d,e = tup
if b > d:
return a,d,e,b,c # swap src/sport and dst/dport
else:
return tup
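# Illustrative note on the swap above (hosts/ports made up): both directions of
# a conversation normalise to one key, e.g. ('tcp', host_a, 51000, host_b, 80)
# and ('tcp', host_b, 80, host_a, 51000) yield the same tuple (the "smaller"
# Host always comes first), so packets can be grouped into flows regardless of
# which side sent them.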
def getService(tup):
if not tup: return
transport, _, sport, _, dport = tup
if (transport, dport) in services:
return services.get((transport, dport))
if (transport, sport) in services:
return services.get((transport, sport))
def get_transport(pkt):
ret = 'ether'
if getattr(pkt, 'arp', None):
return 'arp'
if getattr(pkt, 'ip', None):
ret = 'ip'
if getattr(pkt.ip, 'tcp', None):
ret = 'tcp'
elif getattr(pkt.ip, 'udp', None):
ret = 'udp'
elif getattr(pkt.ip, 'icmp', None):
ret = 'icmp'
if getattr(pkt, 'ip6', None):
ret = 'ipv6'
if getattr(pkt.ip6, 'tcp', None):
ret = 'tcp'
elif getattr(pkt.ip6, 'udp', None):
ret = 'udp'
elif getattr(pkt.ip6, 'icmp6', None):
ret = 'icmpv6'
return ret
def get_port(pkt, field='sport'):
return getattrdeep(pkt, 'ip.tcp.'+field, None) or getattrdeep(pkt, 'ip.udp.'+field, None)
class EtherSheet(Sheet):
'Layer 2 (ethernet) packets'
rowtype = 'packets'
columns = [
ColumnAttr('timestamp', type=date, fmtstr="%H:%M:%S.%f"),
        Column('ether_manuf', type=str, getter=lambda col,row: manuf(macaddr(row.src))),
Column('ether_src', type=str, getter=lambda col,row: macaddr(row.src), width=6),
Column('ether_dst', type=str, getter=lambda col,row: macaddr(row.dst), width=6),
ColumnAttr('ether_data', 'data', type=vlen, width=0),
]
@VisiData.api
class IPSheet(Sheet):
rowtype = 'packets'
columns = [
ColumnAttr('timestamp', type=date, fmtstr="%H:%M:%S.%f"),
ColumnAttr('ip', type=str, width=0),
Column('ip_src', type=str, width=14, getter=lambda col,row: ipaddress.ip_address(row.ip.src)),
Column('ip_dst', type=str, width=14, getter=lambda col,row: ipaddress.ip_address(row.ip.dst)),
ColumnAttr('ip_hdrlen', 'ip.hl', type=int, width=0, helpstr="IPv4 Header Length"),
ColumnAttr('ip_proto', 'ip.p', type=lambda v: protocols['ip'].get(v), width=8, helpstr="IPv4 Protocol"),
ColumnAttr('ip_id', 'ip.id', type=int, width=10, helpstr="IPv4 Identification"),
ColumnAttr('ip_rf', 'ip.rf', type=int, width=10, helpstr="IPv4 Reserved Flag (Evil Bit)"),
ColumnAttr('ip_df', 'ip.df', type=int, width=10, helpstr="IPv4 Don't Fragment flag"),
ColumnAttr('ip_mf', 'ip.mf', type=int, width=10, helpstr="IPv4 More Fragments flag"),
ColumnAttr('ip_tos', 'ip.tos', width=10, type=FlagGetter('ip_tos'), helpstr="IPv4 Type of Service"),
ColumnAttr('ip_ttl', 'ip.ttl', type=int, width=10, helpstr="IPv4 Time To Live"),
ColumnAttr('ip_ver', 'ip.v', type=int, width=10, helpstr="IPv4 Version"),
]
def iterload(self):
for pkt in Progress(self.source.rows):
if getattr(pkt, 'ip', None):
yield pkt
@VisiData.api
class TCPSheet(IPSheet):
columns = IPSheet.columns + [
ColumnAttr('tcp_srcport', 'ip.tcp.sport', type=int, width=8, helpstr="TCP Source Port"),
ColumnAttr('tcp_dstport', 'ip.tcp.dport', type=int, width=8, helpstr="TCP Dest Port"),
ColumnAttr('tcp_opts', 'ip.tcp.opts', width=0),
ColumnAttr('tcp_flags', 'ip.tcp.flags', type=FlagGetter('tcp'), helpstr="TCP Flags"),
]
def iterload(self):
for pkt in Progress(self.source.rows):
if getattrdeep(pkt, 'ip.tcp', None):
yield pkt
class UDPSheet(IPSheet):
columns = IPSheet.columns + [
ColumnAttr('udp_srcport', 'ip.udp.sport', type=int, width=8, helpstr="UDP Source Port"),
ColumnAttr('udp_dstport', 'ip.udp.dport', type=int, width=8, helpstr="UDP Dest Port"),
ColumnAttr('ip.udp.data', type=vlen, width=0),
ColumnAttr('ip.udp.ulen', type=int, width=0),
]
def iterload(self):
for pkt in Progress(self.source.rows):
if getattrdeep(pkt, 'ip.udp', None):
yield pkt
class PcapSheet(Sheet):
rowtype = 'packets'
columns = [
ColumnAttr('timestamp', type=date, fmtstr="%H:%M:%S.%f"),
Column('transport', type=get_transport, width=5),
Column('srcmanuf', type=str, getter=lambda col,row: manuf(macaddr(row.src))),
Column('srchost', type=str, getter=lambda col,row: row.srchost),
Column('srcport', type=int, getter=lambda col,row: get_port(row, 'sport')),
Column('dstmanuf', type=str, getter=lambda col,row: manuf(macaddr(row.dst))),
Column('dsthost', type=str, getter=lambda col,row: row.dsthost),
Column('dstport', type=int, getter=lambda col,row: get_port(row, 'dport')),
ColumnAttr('ether_proto', 'type', type=lambda v: protocols['ethernet'].get(v), width=0),
ColumnAttr('tcp_flags', 'ip.tcp.flags', type=FlagGetter('tcp'), helpstr="TCP Flags"),
Column('service', type=str, getter=lambda col,row: getService(getTuple(row))),
ColumnAttr('data', type=vlen),
ColumnAttr('ip_len', 'ip.len', type=int),
ColumnAttr('tcp', 'ip.tcp', width=4, type=vlen),
ColumnAttr('udp', 'ip.udp', width=4, type=vlen),
ColumnAttr('icmp', 'ip.icmp', width=4, type=vlen),
ColumnAttr('dns', type=str, width=4),
]
def iterload(self):
init_pcap()
self.pcap = read_pcap(self.source)
self.rows = []
with Progress(total=filesize(self.source)) as prog:
for ts, buf in self.pcap:
eth = dpkt.ethernet.Ethernet(buf)
yield eth
prog.addProgress(len(buf))
eth.timestamp = ts
if not getattr(eth, 'ip', None):
eth.ip = getattr(eth, 'ip6', None)
eth.dns = try_apply(lambda eth: dnslib.DNSRecord.parse(eth.ip.udp.data), eth)
if eth.dns:
for rr in eth.dns.rr:
Host.dns[str(rr.rdata)] = str(rr.rname)
eth.srchost = Host.get_host(eth, 'src')
eth.dsthost = Host.get_host(eth, 'dst')
flowtype = collections.namedtuple('flow', 'packets transport src sport dst dport'.split())
@VisiData.api
class PcapFlowsSheet(Sheet):
rowtype = 'netflows' # rowdef: flowtype
_rowtype = flowtype
columns = [
ColumnAttr('transport', type=str),
Column('src', type=str, getter=lambda col,row: row.src),
ColumnAttr('sport', type=int),
Column('dst', type=str, getter=lambda col,row: row.dst),
ColumnAttr('dport', type=int),
Column('service', type=str, width=8, getter=lambda col,row: getService(getTuple(row.packets[0]))),
ColumnAttr('packets', type=vlen),
Column('connect_latency_ms', type=float, getter=lambda col,row: col.sheet.latency[getTuple(row.packets[0])]),
]
def iterload(self):
self.flows = {}
self.latency = {} # [flowtuple] -> float ms of latency
self.syntimes = {} # [flowtuple] -> timestamp of SYN
flags = FlagGetter('tcp')
for pkt in Progress(self.source.rows):
tup = getTuple(pkt)
if tup:
flowpkts = self.flows.get(tup)
if flowpkts is None:
flowpkts = self.flows[tup] = []
yield flowtype(flowpkts, *tup)
flowpkts.append(pkt)
if not getattr(pkt.ip, 'tcp', None):
continue
tcpfl = flags(pkt.ip.tcp.flags)
if 'SYN' in tcpfl:
if 'ACK' in tcpfl:
if tup in self.syntimes:
self.latency[tup] = (pkt.timestamp - self.syntimes[tup])*1000
else:
self.syntimes[tup] = pkt.timestamp
def openRow(self, row):
return PcapSheet("%s_packets"%flowname(row), rows=row.packets)
def flowname(flow):
return '%s_%s:%s-%s:%s' % (flow.transport, flow.src, flow.sport, flow.dst, flow.dport)
def try_apply(func, *args, **kwargs):
try:
return func(*args, **kwargs)
except Exception as e:
pass
PcapSheet.addCommand('W', 'flows', 'vd.push(PcapFlowsSheet(sheet.name+"_flows", source=sheet))')
PcapSheet.addCommand('2', 'l2-packet', 'vd.push(IPSheet("L2packets", source=sheet))')
PcapSheet.addCommand('3', 'l3-packet', 'vd.push(TCPSheet("L3packets", source=sheet))')
vd.addMenuItem('View', 'Packet capture', 'flows', 'flows')
vd.addMenuItem('View', 'Packet capture', 'IP (L2)', 'l2-packet')
vd.addMenuItem('View', 'Packet capture', 'TCP (L3)', 'l3-packet')
| saulpw/visidata | visidata/loaders/pcap.py | Python | gpl-3.0 | 14,150 |
AR = '/usr/bin/ar'
ARFLAGS = 'rcs'
CCFLAGS = ['-g']
CCFLAGS_MACBUNDLE = ['-fPIC']
CCFLAGS_NODE = ['-D_LARGEFILE_SOURCE', '-D_FILE_OFFSET_BITS=64']
CC_VERSION = ('4', '6', '1')
COMPILER_CXX = 'g++'
CPP = '/usr/bin/cpp'
CPPFLAGS_NODE = ['-D_GNU_SOURCE']
CPPPATH_NODE = '/usr/local/include/node'
CPPPATH_ST = '-I%s'
CXX = ['/usr/bin/g++']
CXXDEFINES_ST = '-D%s'
CXXFLAGS = ['-g', '-Wall', '-O3']
CXXFLAGS_DEBUG = ['-g']
CXXFLAGS_NODE = ['-D_LARGEFILE_SOURCE', '-D_FILE_OFFSET_BITS=64']
CXXFLAGS_RELEASE = ['-O2']
CXXLNK_SRC_F = ''
CXXLNK_TGT_F = ['-o', '']
CXX_NAME = 'gcc'
CXX_SRC_F = ''
CXX_TGT_F = ['-c', '-o', '']
DEST_BINFMT = 'elf'
DEST_CPU = 'x86_64'
DEST_OS = 'linux'
FULLSTATIC_MARKER = '-static'
LIBDIR = '/home/robd/.node_libraries'
LIBPATH_HIREDIS = '../deps/hiredis'
LIBPATH_NODE = '/usr/local/lib'
LIBPATH_ST = '-L%s'
LIB_HIREDIS = 'hiredis'
LIB_ST = '-l%s'
LINKFLAGS_MACBUNDLE = ['-bundle', '-undefined', 'dynamic_lookup']
LINK_CXX = ['/usr/bin/g++']
NODE_PATH = '/home/robd/.node_libraries'
PREFIX = '/usr/local'
PREFIX_NODE = '/usr/local'
RANLIB = '/usr/bin/ranlib'
RPATH_ST = '-Wl,-rpath,%s'
SHLIB_MARKER = '-Wl,-Bdynamic'
SONAME_ST = '-Wl,-h,%s'
STATICLIBPATH_ST = '-L%s'
STATICLIB_MARKER = '-Wl,-Bstatic'
STATICLIB_ST = '-l%s'
macbundle_PATTERN = '%s.bundle'
program_PATTERN = '%s'
shlib_CXXFLAGS = ['-fPIC', '-DPIC']
shlib_LINKFLAGS = ['-shared']
shlib_PATTERN = 'lib%s.so'
staticlib_LINKFLAGS = ['-Wl,-Bstatic']
staticlib_PATTERN = 'lib%s.a'
| tparisi/Skybox | service/node_modules/faye/node_modules/hiredis/build/c4che/Release.cache.py | Python | mit | 1,461 |
# -*- coding: utf-8 -*-
# Copyright(C) 2014 Romain Bignon
#
# This file is part of weboob.
#
# weboob is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# weboob is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with weboob. If not, see <http://www.gnu.org/licenses/>.
import datetime
from dateutil.relativedelta import relativedelta
from decimal import Decimal
from weboob.deprecated.browser import Page
from weboob.deprecated.browser.parsers.csvparser import CsvParser
from weboob.capabilities.bank import Account, AccountNotFound
from .accounthistory import Transaction, AccountHistory
class RedirectPage(Page):
pass
class HistoryParser(CsvParser):
FMTPARAMS = {'delimiter': ';'}
class ProAccountsList(Page):
ACCOUNT_TYPES = {u'Comptes épargne': Account.TYPE_SAVINGS,
u'Comptes courants': Account.TYPE_CHECKING,
}
def get_accounts_list(self):
for table in self.document.xpath('//div[@class="comptestabl"]/table'):
try:
account_type = self.ACCOUNT_TYPES[table.xpath('./caption/text()')[0].strip()]
except (IndexError,KeyError):
account_type = Account.TYPE_UNKNOWN
for tr in table.xpath('./tbody/tr'):
cols = tr.findall('td')
link = cols[0].find('a')
if link is None:
continue
a = Account()
a.type = account_type
a.id, a.label = map(unicode, link.attrib['title'].split(' ', 1))
tmp_balance = self.parser.tocleanstring(cols[1])
a.currency = a.get_currency(tmp_balance)
a.balance = Decimal(Transaction.clean_amount(tmp_balance))
a._card_links = []
a._link_id = link.attrib['href']
yield a
def get_account(self, id):
for account in self.get_accounts_list():
if account.id == id:
return account
raise AccountNotFound('Unable to find account: %s' % id)
class ProAccountHistory(Page):
def on_loaded(self):
link = self.document.xpath('//a[contains(@href, "telechargercomptes.ea")]/@href')[0]
self.browser.location(link)
class ProAccountHistoryDownload(Page):
def on_loaded(self):
self.browser.select_form(name='telechargement')
self.browser['dateDebutPeriode'] = (datetime.date.today() - relativedelta(months=11)).strftime('%d/%m/%Y')
self.browser.submit()
class ProAccountHistoryCSV(AccountHistory):
def get_next_link(self):
return False
def get_history(self, deferred=False):
for line in self.document.rows:
if len(line) < 4 or line[0] == 'Date':
continue
t = Transaction()
t.parse(raw=line[1], date=line[0])
t.set_amount(line[2])
t._coming = False
yield t
| sputnick-dev/weboob | modules/bp/pages/pro.py | Python | agpl-3.0 | 3,390 |
#!/usr/bin/env python
testdat = '../data/fm_train_real.dat'
traindat = '../data/fm_test_real.dat'
parameter_list=[[traindat,testdat,1.7],[traindat,testdat,1.8]]
def kernel_distance (train_fname=traindat,test_fname=testdat,width=1.7):
from shogun import RealFeatures, DistanceKernel, EuclideanDistance, CSVFile
feats_train=RealFeatures(CSVFile(train_fname))
feats_test=RealFeatures(CSVFile(test_fname))
distance=EuclideanDistance()
kernel=DistanceKernel(feats_train, feats_test, width, distance)
km_train=kernel.get_kernel_matrix()
kernel.init(feats_train, feats_test)
km_test=kernel.get_kernel_matrix()
return km_train,km_test,kernel
if __name__=='__main__':
print('Distance')
kernel_distance(*parameter_list[0])
| OXPHOS/shogun | examples/undocumented/python/kernel_distance.py | Python | gpl-3.0 | 730 |
"""training_feedback URL Configuration
The `urlpatterns` list routes URLs to views. For more information please see:
https://docs.djangoproject.com/en/2.0/topics/http/urls/
Examples:
Function views
1. Add an import: from my_app import views
2. Add a URL to urlpatterns: path('', views.home, name='home')
Class-based views
1. Add an import: from other_app.views import Home
2. Add a URL to urlpatterns: path('', Home.as_view(), name='home')
Including another URLconf
1. Import the include() function: from django.urls import include, path
2. Add a URL to urlpatterns: path('blog/', include('blog.urls'))
"""
from django.urls import include, path
from django.contrib import admin
urlpatterns = [
path('intview/', include('interview.urls')),
path('polls/', include('polls.urls')),
path('admin/', admin.site.urls),
]
| mayankjohri/LetsExplorePython | Section 2 - Advance Python/Chapter S2.06 - Web Development/code/django/training/training/urls.py | Python | gpl-3.0 | 860 |
# vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright 2011 OpenStack LLC.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import datetime
import urlparse
from nova.api.openstack import extensions
from nova.api.openstack import wsgi
from nova.api.openstack import xmlutil
from nova.compute import api
from nova import exception
from nova.openstack.common import timeutils
authorize_show = extensions.extension_authorizer('compute',
'simple_tenant_usage:show')
authorize_list = extensions.extension_authorizer('compute',
'simple_tenant_usage:list')
def make_usage(elem):
for subelem_tag in ('tenant_id', 'total_local_gb_usage',
'total_vcpus_usage', 'total_memory_mb_usage',
'total_hours', 'start', 'stop'):
subelem = xmlutil.SubTemplateElement(elem, subelem_tag)
subelem.text = subelem_tag
server_usages = xmlutil.SubTemplateElement(elem, 'server_usages')
server_usage = xmlutil.SubTemplateElement(server_usages, 'server_usage',
selector='server_usages')
for subelem_tag in ('instance_id', 'name', 'hours', 'memory_mb',
'local_gb', 'vcpus', 'tenant_id', 'flavor',
'started_at', 'ended_at', 'state', 'uptime'):
subelem = xmlutil.SubTemplateElement(server_usage, subelem_tag)
subelem.text = subelem_tag
class SimpleTenantUsageTemplate(xmlutil.TemplateBuilder):
def construct(self):
root = xmlutil.TemplateElement('tenant_usage', selector='tenant_usage')
make_usage(root)
return xmlutil.MasterTemplate(root, 1)
class SimpleTenantUsagesTemplate(xmlutil.TemplateBuilder):
def construct(self):
root = xmlutil.TemplateElement('tenant_usages')
elem = xmlutil.SubTemplateElement(root, 'tenant_usage',
selector='tenant_usages')
make_usage(elem)
return xmlutil.MasterTemplate(root, 1)
class SimpleTenantUsageController(object):
def _hours_for(self, instance, period_start, period_stop):
launched_at = instance['launched_at']
terminated_at = instance['terminated_at']
if terminated_at is not None:
if not isinstance(terminated_at, datetime.datetime):
terminated_at = timeutils.parse_strtime(terminated_at,
"%Y-%m-%d %H:%M:%S.%f")
if launched_at is not None:
if not isinstance(launched_at, datetime.datetime):
launched_at = timeutils.parse_strtime(launched_at,
"%Y-%m-%d %H:%M:%S.%f")
if terminated_at and terminated_at < period_start:
return 0
# nothing if it started after the usage report ended
if launched_at and launched_at > period_stop:
return 0
if launched_at:
            # charge from the launch time or the period start, whichever is later
start = max(launched_at, period_start)
if terminated_at:
# if instance stopped before period_stop, don't charge after
stop = min(period_stop, terminated_at)
else:
# instance is still running, so charge them up to current time
stop = period_stop
dt = stop - start
seconds = (dt.days * 3600 * 24 + dt.seconds +
                       dt.microseconds / 1000000.0)  # 1e6 microseconds per second
return seconds / 3600.0
else:
# instance hasn't launched, so no charge
return 0
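    # Worked example for _hours_for (illustrative, dates made up): with a
    # reporting window of Jan 1 00:00 to Jan 2 00:00, an instance launched at
    # Jan 1 23:30 and still running is charged from 23:30 to the window end,
    # i.e. 0.5 hours; an instance terminated before the window, or launched
    # after it, contributes 0 hours.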
def _tenant_usages_for_period(self, context, period_start,
period_stop, tenant_id=None, detailed=True):
compute_api = api.API()
instances = compute_api.get_active_by_window(context,
period_start,
period_stop,
tenant_id)
rval = {}
flavors = {}
for instance in instances:
info = {}
info['hours'] = self._hours_for(instance,
period_start,
period_stop)
flavor_type = instance['instance_type_id']
if not flavors.get(flavor_type):
try:
it_ref = compute_api.get_instance_type(context,
flavor_type)
flavors[flavor_type] = it_ref
except exception.InstanceTypeNotFound:
# can't bill if there is no instance type
continue
flavor = flavors[flavor_type]
info['instance_id'] = instance['uuid']
info['name'] = instance['display_name']
info['memory_mb'] = flavor['memory_mb']
info['local_gb'] = flavor['root_gb'] + flavor['ephemeral_gb']
info['vcpus'] = flavor['vcpus']
info['tenant_id'] = instance['project_id']
info['flavor'] = flavor['name']
info['started_at'] = instance['launched_at']
info['ended_at'] = instance['terminated_at']
if info['ended_at']:
info['state'] = 'terminated'
else:
info['state'] = instance['vm_state']
now = timeutils.utcnow()
if info['state'] == 'terminated':
delta = info['ended_at'] - info['started_at']
else:
delta = now - info['started_at']
info['uptime'] = delta.days * 24 * 3600 + delta.seconds
if not info['tenant_id'] in rval:
summary = {}
summary['tenant_id'] = info['tenant_id']
if detailed:
summary['server_usages'] = []
summary['total_local_gb_usage'] = 0
summary['total_vcpus_usage'] = 0
summary['total_memory_mb_usage'] = 0
summary['total_hours'] = 0
summary['start'] = period_start
summary['stop'] = period_stop
rval[info['tenant_id']] = summary
summary = rval[info['tenant_id']]
summary['total_local_gb_usage'] += info['local_gb'] * info['hours']
summary['total_vcpus_usage'] += info['vcpus'] * info['hours']
summary['total_memory_mb_usage'] += (info['memory_mb'] *
info['hours'])
summary['total_hours'] += info['hours']
if detailed:
summary['server_usages'].append(info)
return rval.values()
def _parse_datetime(self, dtstr):
if not dtstr:
return timeutils.utcnow()
elif isinstance(dtstr, datetime.datetime):
return dtstr
try:
return timeutils.parse_strtime(dtstr, "%Y-%m-%dT%H:%M:%S")
except Exception:
try:
return timeutils.parse_strtime(dtstr, "%Y-%m-%dT%H:%M:%S.%f")
except Exception:
return timeutils.parse_strtime(dtstr, "%Y-%m-%d %H:%M:%S.%f")
def _get_datetime_range(self, req):
qs = req.environ.get('QUERY_STRING', '')
env = urlparse.parse_qs(qs)
# NOTE(lzyeval): env.get() always returns a list
period_start = self._parse_datetime(env.get('start', [None])[0])
period_stop = self._parse_datetime(env.get('end', [None])[0])
detailed = env.get('detailed', ['0'])[0] == '1'
return (period_start, period_stop, detailed)
@wsgi.serializers(xml=SimpleTenantUsagesTemplate)
def index(self, req):
"""Retrieve tenant_usage for all tenants."""
context = req.environ['nova.context']
authorize_list(context)
(period_start, period_stop, detailed) = self._get_datetime_range(req)
now = timeutils.utcnow()
if period_stop > now:
period_stop = now
usages = self._tenant_usages_for_period(context,
period_start,
period_stop,
detailed=detailed)
return {'tenant_usages': usages}
@wsgi.serializers(xml=SimpleTenantUsageTemplate)
def show(self, req, id):
"""Retrieve tenant_usage for a specified tenant."""
tenant_id = id
context = req.environ['nova.context']
authorize_show(context, {'project_id': tenant_id})
(period_start, period_stop, ignore) = self._get_datetime_range(req)
now = timeutils.utcnow()
if period_stop > now:
period_stop = now
usage = self._tenant_usages_for_period(context,
period_start,
period_stop,
tenant_id=tenant_id,
detailed=True)
if len(usage):
usage = usage[0]
else:
usage = {}
return {'tenant_usage': usage}
class Simple_tenant_usage(extensions.ExtensionDescriptor):
"""Simple tenant usage extension."""
name = "SimpleTenantUsage"
alias = "os-simple-tenant-usage"
namespace = ("http://docs.openstack.org/compute/ext/"
"os-simple-tenant-usage/api/v1.1")
updated = "2011-08-19T00:00:00+00:00"
def get_resources(self):
resources = []
res = extensions.ResourceExtension('os-simple-tenant-usage',
SimpleTenantUsageController())
resources.append(res)
return resources
| maoy/zknova | nova/api/openstack/compute/contrib/simple_tenant_usage.py | Python | apache-2.0 | 10,487 |
from . import repo
from . import issues
from . import comments
from . import hooks
from . import wiki
| alphagov/ghtools | ghtools/migrators/__init__.py | Python | mit | 102 |
import os
import subprocess
import pandas as pd
import numpy as np
from pathlib import Path
from subprocess import call, STDOUT
from .. import utils
from .. import printer
def extractLineFromROutput(output, key):
i = output.rfind(key)
return (output[i + len(key) + 1:].split("\n")[0]).split(" ")[1].strip()
def extractLinesFromROutput(output, key, numLines):
i = output.rfind(key)
return "\n".join(output[i + len(key) + 1:].split("\n")[0:numLines+1]).strip()
def pandasTableFromROutput(output, key, numLines, tmpCsv="tmp.csv"):
output_extr = extractLinesFromROutput(output, key, numLines=numLines)
utils.save_to_file(tmpCsv, output_extr)
res = pd.read_csv(tmpCsv, header=0, index_col=0, delimiter=r"\s+")
# call(["rm", "-f", tmpCsv])
return res
class FriedmanResult:
"""Stores results of the Friedman test."""
def __init__(self, output, p_value, ranks, cmp_matrix=None, cmp_method="", binary_cmp_matrix=False):
"""
:param output: raw output as returned by the R script.
:param p_value: p value returned by R script.
:param ranks: pandas DataFrame containing ranks for particular approaches.
:param cmp_matrix: pandas DataFrame approaches x approaches, where 1 means that approach is significantly
better, and -1 that it is significantly worse.
:param cmp_method: method of the post-hoc test.
"""
assert p_value is None or isinstance(p_value, float)
self.output = output
self.p_value = p_value
self.ranks = ranks
self.cmp_matrix = cmp_matrix
self.cmp_method = cmp_method
self.binary_cmp_matrix = binary_cmp_matrix
def getSignificantPairs(self, p_threshold=0.05):
"""Returns a list of tuples, where the first element is significantly better than the second."""
if self.cmp_matrix is None:
return []
else:
res = []
for i in range(self.cmp_matrix.shape[0]):
for j in range(self.cmp_matrix.shape[1]):
if (self.binary_cmp_matrix and self.cmp_matrix.iat[i,j] == 1) or\
(not self.binary_cmp_matrix and self.cmp_matrix.iat[i,j] <= p_threshold):
L = self.cmp_matrix.index.values[i]
R = self.cmp_matrix.columns.values[j]
res.append((L, R))
res.sort(key=lambda t: t[0])
return res
def getSignificantPairsText(self):
"""Returns a formatted text for significant pairs."""
return "\n".join(["{0}\t>\t{1}".format(L, R) for L, R in self.getSignificantPairs()])
def __str__(self):
return self.output
def runFriedmanPython(table):
"""Runs a Friedman statistical test with Nemenyi posthoc test using implementation in scikit_posthocs package."""
assert isinstance(table, printer.Table)
try:
data = np.array(table.content.cells, dtype=np.float32)
return runFriedmanPython_array(data)
except ValueError:
return None
def runFriedmanPython_array(data):
import scipy.stats as ss
import scikit_posthocs as sp
p_statistic, p_value = ss.friedmanchisquare(*data.T)
# https://scikit-posthocs.readthedocs.io/en/latest/generated/scikit_posthocs.posthoc_nemenyi_friedman/#id2
# P. Nemenyi (1963) Distribution-free Multiple Comparisons. Ph.D. thesis, Princeton University.
pc = sp.posthoc_nemenyi_friedman(data)
return FriedmanResult("", p_value, None, cmp_matrix=pc, binary_cmp_matrix=False, cmp_method="nemenyi")
def runFriedmanKK(table):
"""Runs a Friedman statistical test using a mysterious script provided to me by KK.
Input is a Table."""
assert isinstance(table, printer.Table)
return runFriedmanKK_csv(table.renderCsv(delim=";"))
def runFriedmanKK_csv(text):
"""Runs a Friedman statistical test using a mysterious script provided to me by KK.
Input is a CSV-formatted text."""
csvFile = "tmp.csv"
thisScriptPath = Path(os.path.abspath(__file__))
print("thisScriptPath.parent", thisScriptPath.parent)
cwd = os.getcwd()
os.chdir(str(thisScriptPath.parent))
utils.save_to_file(csvFile, text)
# Running command
pathFriedmanScript = "friedman_kk.r"
# pathFriedmanScript = "friedman_penn.r"
try:
output = subprocess.check_output(["Rscript", pathFriedmanScript, csvFile, "FALSE"], stderr=STDOUT,
universal_newlines=True)
output = output[output.rfind("$p.value"):]
print(output)
print('\n\n')
p_value = float(extractLineFromROutput(output, "$p.value"))
cmp_method = extractLineFromROutput(output, "$cmp.method").replace("\"", "")
# print("p_value: '{0}'".format(p_value))
# print("cmp_method: '{0}'".format(cmp_method))
ranks = pandasTableFromROutput(output, "$ranks", numLines=2, tmpCsv="tmpRanks.csv")
# print("ranks:", ranks)
i = output.rfind("$cmp.matrix")
if i == -1:
cmp_matrix = None
else:
cmp_matrix = pandasTableFromROutput(output, "$cmp.matrix", numLines=ranks.shape[1]+1, tmpCsv="tmpCmpMatrix.csv")
# print("cmp_matrix:", cmp_matrix)
friedmanResult = FriedmanResult(output, p_value, ranks, cmp_matrix, cmp_method=cmp_method, binary_cmp_matrix=True)
except subprocess.CalledProcessError as exc:
output = exc.output #.decode("utf-8")
output = output.replace("\\n", "\n")
print("Status: FAIL, return code: {0}, msg: {1}".format(exc.returncode, output))
friedmanResult = FriedmanResult(output, None, None, None)
call(["rm", "-f", csvFile])
os.chdir(cwd)
return friedmanResult
def runWilcoxonSignedRank(x, y):
"""Runs a Wilcoxon signed-rank test. x and y are the data vectors to be compared."""
import scipy.stats as ss
w, p_value = ss.wilcoxon(x, y)
return p_value | iwob/evoplotter | evoplotter/stats/nonparametric.py | Python | mit | 5,964 |
# Imports environment-specific settings.
import os
import sys
try:
from colorama import init as colorama_init
except ImportError:
def colorama_init(autoreset=False, convert=None, strip=None, wrap=True):
"""
Fallback function that initializes colorama.
"""
pass
try:
from termcolor import colored
except ImportError:
def colored(text, color=None, on_color=None, attrs=None):
"""
Fallback function to colorize text when termcolor is not installed.
"""
return text
# Use production settings by default as it is the secure setup. To use local
# settings: $ export PRODUCTION=0
production = 'PRODUCTION' not in os.environ or os.environ['PRODUCTION'].lower() in [True, 'y', 'yes', '1',]
local = not production
platform = sys.platform
linux = platform == 'linux2'
os_x = platform == 'darwin'
win32 = platform == 'win32'
# Don't initialize colorama when on Windows and running the shell because the
# ipython colors get confused.
if not win32 or not 'shell' in sys.argv:
colorama_init()
current_settings = []
if production:
current_settings.append(colored('Production', 'green', attrs=['bold']))
from production_settings import *
if local:
current_settings.append(colored('Local', 'yellow', attrs=['bold']))
from local_settings import *
if linux:
current_settings.append(colored('Linux', 'blue', attrs=['bold']))
from linux_settings import *
if os_x:
current_settings.append(colored('OS X', 'blue', attrs=['bold']))
from os_x_settings import *
if win32:
current_settings.append(colored('Windows', 'blue', attrs=['bold']))
from win32_settings import *
if 'runserver' in sys.argv:
print '-' * 80
print ' :: '.join(current_settings)
print '-' * 80
color = '[1;93m' # Bold High Intensity Yellow + Underline
version = 'Development'
if production:
color = '[1;92m' # Bold High Intensity Green + Underline
version = 'Production'
print '\n{star} \x1b{color}{version}\x1b[0m {star}\n'.format(color=color,
star='\xE2\x98\x85',
version=version)
| django-settings/django-settings | myproject/myproject/user_settings.py | Python | unlicense | 2,239 |
# $Id: benchmark.py 2923 2006-11-19 08:05:45Z fredrik $
# simple elementtree benchmark program
from elementtree import ElementTree, XMLTreeBuilder
try:
import cElementTree
except ImportError:
try:
from xml.etree import cElementTree
except ImportError:
cElementTree = None
try:
from elementtree import SimpleXMLTreeBuilder # xmllib
except ImportError:
SimpleXMLTreeBuilder = None
try:
from elementtree import SgmlopXMLTreeBuilder # sgmlop
except ImportError:
SgmlopXMLTreeBuilder = None
try:
from xml.dom import minidom # pyexpat+minidom
except ImportError:
minidom = None
import sys, time
try:
file = sys.argv[1]
except IndexError:
file = "hamlet.xml"
def benchmark(file, builder_module):
source = open(file, "rb")
t0 = time.time()
parser = builder_module.TreeBuilder()
while 1:
data = source.read(32768)
if not data:
break
parser.feed(data)
tree = parser.close()
t1 = time.time()
print "%s: %d nodes read in %.3f seconds" % (
builder_module.__name__, len(tree.getiterator()), t1-t0
)
raw_input("press return to continue...")
del tree
def benchmark_parse(file, driver):
t0 = time.time()
tree = driver.parse(file)
t1 = time.time()
print driver.__name__ + ".parse done in %.3f seconds" % (t1-t0)
raw_input("press return to continue...")
del tree
def benchmark_minidom(file):
t0 = time.time()
dom = minidom.parse(file)
t1 = time.time()
print "minidom tree read in %.3f seconds" % (t1-t0)
raw_input("press return to continue...")
del dom
benchmark_parse(file, ElementTree)
if cElementTree:
benchmark_parse(file, cElementTree)
if sys.platform != "cli":
benchmark(file, XMLTreeBuilder)
benchmark(file, SimpleXMLTreeBuilder) # use xmllib
try:
benchmark(file, SgmlopXMLTreeBuilder) # use sgmlop
except RuntimeError, v:
print "=== SgmlopXMLTreeBuilder not available (%s)" % v
if minidom:
benchmark_minidom(file)
else:
print "=== minidom not available"
| prats226/python-amazon-product-api-0.2.8 | tests/build/elementtree/benchmark.py | Python | bsd-3-clause | 2,170 |
# Copyright 2022 UW-IT, University of Washington
# SPDX-License-Identifier: Apache-2.0
"""
This module provides affiliations of the current user
"""
import logging
import traceback
from myuw.dao import log_err
from myuw.dao.exceptions import IndeterminateCampusException
from myuw.dao.enrollment import (
get_main_campus, get_cur_class_level, get_latest_class_level)
from myuw.dao.gws import (
is_clinician, is_staff_employee, is_student_employee,
is_alum_asso, is_student, is_grad_student, is_undergrad_student,
is_pce_student, is_seattle_student, is_bothell_student, is_tacoma_student,
is_applicant, is_grad_c2, is_undergrad_c2, in_hxtoolkit_group)
from myuw.dao.instructor import is_instructor
from myuw.dao.pws import (
get_employee_campus, is_employee, is_faculty, is_prior_employee,
is_prior_student, is_retiree, is_alumni)
from myuw.dao.uwnetid import is_2fa_permitted
from myuw.dao.student_profile import get_profile_of_current_user
logger = logging.getLogger(__name__)
def get_all_affiliations(request):
"""
return a dictionary of affiliation indicators.
The first class affiliations:
["all_employee"]: employee or clinician (include student employee)
["employee"]: True if is current employee (not student employee, clinician)
["clinician"]: True if in uw affiliation clinical groups
["faculty"]: True if the user is currently faculty.
["instructor"]: True if is instructor in the past 6 years
["staff_employee"]: True if the user is currently staff.
["student"]: True if the user is currently an UW student.
["stud_employee"]: True if the user is currently a student employee.
["grad"]: True if the user is currently an UW graduate student.
["undergrad"]: True if the user is currently an UW undergraduate student.
["applicant"]: True if the user is currently a UW applicant
["pce"]: True if the user is an UW PCE student.
["grad_c2"]: True if the user takes UW PCE grad courses
["undergrad_c2"]: True if the user takes UW PCE undergrad courses
["seattle"]: True if the user is an UW Seattle student
["bothell"]: True if the user is an UW Bothell student
["tacoma"]: True if the user is an UW Tacoma student
["official_seattle"]: True if the user is Seattle employee
["official_bothell"]: True if the user is Bothell employee
["official_tacoma"]: True if the user is Tacoma employee
["official_pce"]: waiting on sws to add a field in Enrollment.
["class_level"]: class level in current term enrollment.
["latest_class_level"]: the class level in the latest enrollment.
["F1"]: F1 international student
["J1"]: J1 international student
["intl_stud"]: F1 or J1 international student
["hxt_viewer"]: Husky Experience Toolkit viewer
["no_1st_class_affi"]: not applicant, current employee,
clinician, student, instructor
The following are secondary affiliations (without 1st_class_aff):
["alumni"]: True if the user is currently an UW alumni and NOT
current student, employee, applicant
["alum_asso"]: alumni association member
["retiree"]: True if the user is a retired staff and NOT
current applicant, student, employee
["past_employee"]: True if the user is a former employee and NOT
current student, applicant
["past_stud"]: True if the user is a former student and NOT
current employee, applicant
"""
if hasattr(request, 'myuw_user_affiliations'):
return request.myuw_user_affiliations
not_major_affi = (not is_applicant(request) and
not is_employee(request) and
not is_clinician(request) and
not is_instructor(request) and
not is_student(request))
(is_sea_stud, is_undergrad, is_hxt_viewer) = get_is_hxt_viewer(request)
data = {"class_level": None,
"latest_class_level": get_latest_class_level(request),
"grad": is_grad_student(request),
"undergrad": is_undergrad,
"applicant": is_applicant(request),
"student": is_student(request),
"pce": is_pce_student(request),
"grad_c2": is_grad_c2(request),
"undergrad_c2": is_undergrad_c2(request),
"F1": False,
"J1": False,
"intl_stud": False,
"2fa_permitted": is_2fa_permitted(request),
"all_employee": is_employee(request) or is_clinician(request),
"clinician": is_clinician(request),
"employee": (is_employee(request) and
not is_student_employee(request)),
"faculty": is_faculty(request),
"instructor": is_instructor(request),
"staff_employee": is_staff_employee(request),
"stud_employee": is_student_employee(request),
"seattle": is_sea_stud,
"bothell": is_bothell_student(request),
"tacoma": is_tacoma_student(request),
"official_seattle": False,
"official_bothell": False,
"official_tacoma": False,
"hxt_viewer": is_hxt_viewer,
"alum_asso": is_alum_asso(request),
"alumni": is_alumni(request) and not_major_affi,
"retiree": is_retiree(request) and not_major_affi,
"past_employee": is_prior_employee(request) and not_major_affi,
"past_stud": is_prior_student(request) and not_major_affi,
"no_1st_class_affi": not_major_affi,
}
campuses = []
if data["student"]:
data["class_level"] = get_cur_class_level(request)
try:
sws_person = get_profile_of_current_user(request)
data["F1"] = sws_person.is_F1()
data["J1"] = sws_person.is_J1()
data["intl_stud"] = data["F1"] or data["J1"]
except Exception:
log_err(logger, "get_profile_of_current_user", traceback, request)
# enhance student campus with current and future enrollments
campuses = get_main_campus(request)
if len(campuses) > 0:
data["enrolled_stud"] = True
data['seattle'] = data['seattle'] or ('Seattle' in campuses)
data['bothell'] = data['bothell'] or ('Bothell' in campuses)
data['tacoma'] = data['tacoma'] or ('Tacoma' in campuses)
if data['seattle']:
data["hxt_viewer"] = (data["hxt_viewer"] or
data['seattle'] and data["undergrad"])
if is_employee(request):
# determine employee primary campus based on their mailstop
try:
employee_campus = get_employee_campus(request)
data['official_seattle'] = ('Seattle' == employee_campus)
data['official_bothell'] = ('Bothell' == employee_campus)
data['official_tacoma'] = ('Tacoma' == employee_campus)
except IndeterminateCampusException:
pass
request.myuw_user_affiliations = data
return data
def get_is_hxt_viewer(request):
is_sea_stud = is_seattle_student(request)
is_undergrad = is_undergrad_student(request)
# MUWM-4798
is_viewer = is_sea_stud and is_undergrad or in_hxtoolkit_group(request)
return (is_sea_stud, is_undergrad, is_viewer)
| uw-it-aca/myuw | myuw/dao/affiliation.py | Python | apache-2.0 | 7,365 |
# Python 3 Spelling Corrector
#
# Copyright 2014 Jonas McCallum.
# Updated for Python 3, based on Peter Norvig's
# 2007 version: http://norvig.com/spell-correct.html
#
# Open source, MIT license
# http://www.opensource.org/licenses/mit-license.php
"""
File reader, concat function and dict wrapper
Author: Jonas McCallum
https://github.com/foobarmus/autocorrect
"""
import re, os, tarfile
from contextlib import closing
from itertools import chain
PATH = os.path.abspath(os.path.dirname(__file__))
BZ2 = 'words.bz2'
RE = '[A-Za-z]+'
def words_from_archive(filename, include_dups=False, map_case=False):
"""extract words from a text file in the archive"""
bz2 = os.path.join(PATH, BZ2)
tar_path = '{}/{}'.format('words', filename)
with closing(tarfile.open(bz2, 'r:bz2')) as t:
with closing(t.extractfile(tar_path)) as f:
words = re.findall(RE, f.read().decode(encoding='utf-8'))
if include_dups:
return words
elif map_case:
return {w.lower():w for w in words}
else:
return set(words)
def concat(*args):
"""reversed('th'), 'e' => 'hte'"""
try:
return ''.join(args)
except TypeError:
return ''.join(chain.from_iterable(args))
class Zero(dict):
"""dict with a zero default"""
def __getitem__(self, key):
return self.get(key)
def get(self, key):
try:
return super(Zero, self).__getitem__(key)
except KeyError:
return 0
zero_default_dict = Zero
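# Illustrative usage sketch (not part of the original module).
if __name__ == "__main__":
    counts = Zero({'the': 3})
    print(counts['the'], counts['missing'])   # -> 3 0
    print(concat(reversed('th'), 'e'))        # -> hte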
| phatpiglet/autocorrect | autocorrect/utils.py | Python | mit | 1,509 |
# vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright 2012 Nebula, Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
from __future__ import absolute_import
from django import template
from django.utils.datastructures import SortedDict # noqa
from django.utils.encoding import force_unicode # noqa
from django.utils.translation import ugettext_lazy as _ # noqa
from horizon.base import Horizon # noqa
from horizon import conf
register = template.Library()
@register.filter
def has_permissions(user, component):
"""
Checks if the given user meets the permissions requirements for
the component.
"""
return user.has_perms(getattr(component, 'permissions', set()))
@register.filter
def has_permissions_on_list(components, user):
return [component for component
in components if has_permissions(user, component)]
@register.inclusion_tag('horizon/_nav_list.html', takes_context=True)
def horizon_main_nav(context):
""" Generates top-level dashboard navigation entries. """
if 'request' not in context:
return {}
current_dashboard = context['request'].horizon.get('dashboard', None)
dashboards = []
for dash in Horizon.get_dashboards():
if callable(dash.nav) and dash.nav(context):
dashboards.append(dash)
elif dash.nav:
dashboards.append(dash)
return {'components': dashboards,
'user': context['request'].user,
'current': current_dashboard,
'request': context['request']}
@register.inclusion_tag('horizon/_subnav_list.html', takes_context=True)
def horizon_dashboard_nav(context):
""" Generates sub-navigation entries for the current dashboard. """
if 'request' not in context:
return {}
dashboard = context['request'].horizon['dashboard']
panel_groups = dashboard.get_panel_groups()
non_empty_groups = []
for group in panel_groups.values():
allowed_panels = []
for panel in group:
if callable(panel.nav) and panel.nav(context):
allowed_panels.append(panel)
elif not callable(panel.nav) and panel.nav:
allowed_panels.append(panel)
if allowed_panels:
non_empty_groups.append((group.name, allowed_panels))
return {'components': SortedDict(non_empty_groups),
'user': context['request'].user,
'current': context['request'].horizon['panel'].slug,
'request': context['request']}
@register.filter
def quota(val, units=None):
if val == float("inf"):
return _("No Limit")
elif units is not None:
return "%s %s %s" % (val, units, force_unicode(_("Available")))
else:
return "%s %s" % (val, force_unicode(_("Available")))
class JSTemplateNode(template.Node):
""" Helper node for the ``jstemplate`` template tag. """
def __init__(self, nodelist):
self.nodelist = nodelist
def render(self, context, ):
output = self.nodelist.render(context)
output = output.replace('[[[', '{{{').replace(']]]', '}}}')
output = output.replace('[[', '{{').replace(']]', '}}')
output = output.replace('[%', '{%').replace('%]', '%}')
return output
@register.tag
def jstemplate(parser, token):
"""
Replaces ``[[[`` and ``]]]`` with ``{{{`` and ``}}}``,
``[[`` and ``]]`` with ``{{`` and ``}}`` and
``[%`` and ``%]`` with ``{%`` and ``%}`` to avoid conflicts
with Django's template engine when using any of the Mustache-based
templating libraries.
"""
nodelist = parser.parse(('endjstemplate',))
parser.delete_first_token()
return JSTemplateNode(nodelist)
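# For example, markup written inside {% jstemplate %} ... {% endjstemplate %}
# as [[ server.name ]] or [% if done %] is emitted as {{ server.name }} and
# {% if done %}, so Mustache-style client templates survive Django rendering.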
@register.assignment_tag
def load_config():
return conf.HORIZON_CONFIG
| neumerance/deploy | horizon/templatetags/horizon.py | Python | apache-2.0 | 4,298 |
from pandas import Series, DataFrame
import os, json, common
"""
Iterate through the json data files, saving the results and times for each prover.
Add these to the goal syntax metrics and save as a csv file for later.
Andrew Healy, Aug. 2016
"""
provers = common.PROVERS
here = os.getcwd()
folders = sorted([f for f in os.listdir(os.path.join(here,'data'))
if os.path.isdir(os.path.join(here,'data', f))
])
bigd = {}
for folder in folders:
with open(os.path.join('data',folder, folder+'.json')) as jfile:
timings_json = json.load(jfile)
timings = timings_json[folder+'.mlw']
with open(os.path.join('data',folder,'stats.json')) as jfile:
st = json.load(jfile)
for g in st:
theory = g['theory']
goal = g['goal']
g['file'] = folder+'.mlw'
for goalname in timings[theory]:
if goalname.endswith(goal):
#per-prover scores
for p in provers:
try:
g[p+' result'] = timings[theory][goalname][p]['10secs']
except KeyError:
print 'Couldn\'t find '+folder+':'+theory+':'+goalname+':'+p+':10secs'
try:
g[p+' time'] = timings[theory][goalname][p]['time10']
except KeyError:
print 'Couldn\'t find '+folder+':'+theory+':'+goalname+':'+p+':time'
bigd[folder+':'+theory+':'+goal] = Series(g)
df = DataFrame(bigd).transpose()
df.to_csv('whygoal_stats.csv')
| ahealy19/F-IDE-2016 | create_stats_df.py | Python | apache-2.0 | 1,333 |
from django.shortcuts import render
### Rest API setup
import rest_framework.routers
import rest_framework.viewsets
import rest_framework.filters
from voxel_globe.websockets.serializers import LogMessageSerializer
from voxel_globe.websockets.models import LogMessage
router = rest_framework.routers.DefaultRouter()
class LogMessageViewSet(rest_framework.viewsets.ModelViewSet):
queryset = LogMessage.objects.all()
serializer_class = LogMessageSerializer
filter_backends = (rest_framework.filters.DjangoFilterBackend,)
filter_fields = ['message_text', 'message_type', 'task_id']
def get_queryset(self):
return super(LogMessageViewSet, self).get_queryset().filter(owner=self.request.user)
| ngageoint/voxel-globe | voxel_globe/websockets/views.py | Python | mit | 705 |
"""Definition of metrics we care about.
"""
from collections import defaultdict
from statistics import mean, median
from datetime import timedelta, time, datetime
from django.utils import timezone
from monitoring.models import Metric
from learn.models import Action, Task, TaskSession
def get_last_measured_date():
last_metric = Metric.objects.last()
return last_metric.time if last_metric else None
def get_first_unmeasured_date():
last_measured_date = get_last_measured_date()
if last_measured_date:
return last_measured_date + timedelta(days=1)
else:
return Action.objects.first().time.date()
def get_yesterday():
return timezone.now().date() - timedelta(days=1)
def to_timezone_aware(date, last_second=False):
time_part = time.max if last_second else time.min
naive_datetime = datetime.combine(date, time_part)
aware_datetime = timezone.make_aware(naive_datetime, timezone=timezone.utc)
return aware_datetime
def dates_range(first_date, last_date):
n_days = (last_date - first_date).days + 1
dates = [first_date + timedelta(days=d) for d in range(n_days)]
return dates
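# For example, dates_range(date(2021, 3, 1), date(2021, 3, 3)) returns the
# three dates 2021-03-01, 2021-03-02 and 2021-03-03 (both endpoints included).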
def group_by_date(task_sessions):
groups = defaultdict(list)
for ts in task_sessions:
groups[ts.date].append(ts)
return groups
def group_by_task(task_sessions):
groups = defaultdict(list)
for ts in task_sessions:
groups[ts.task_id].append(ts)
return groups
def generate_active_students_metric(task_sessions, dates):
"""Yield active-students metric for each date in dates.
    Active student = has solved at least 1 task (on the given day).
"""
solved_task_sessions = [ts for ts in task_sessions if ts.solved]
groups = group_by_date(solved_task_sessions)
for date in dates:
n_active_students = len({ts.student_id for ts in groups[date]})
yield Metric(name='active-students', time=date, value=n_active_students)
def generate_solved_count_metric(task_sessions, dates):
"""Yield solved-count metric for each date in dates.
"""
solved_task_sessions = [ts for ts in task_sessions if ts.solved]
groups = group_by_date(solved_task_sessions)
for date in dates:
n_solved_tasks = len(groups[date])
yield Metric(name='solved-count', time=date, value=n_solved_tasks)
def generate_solved_count_for_task_metric(task_sessions, date, tasks):
"""Yield solved-count metric for each task and given date.
"""
recent_solved_task_sessions = [
ts for ts in task_sessions
if ts.solved and ts.date > date - timedelta(days=30)]
groups = group_by_task(recent_solved_task_sessions)
for task in tasks:
solved_count = len(groups[task.id])
group_name = 'task.' + task.name
yield Metric(
name='solved-count', group=group_name, time=date,
value=solved_count)
def compute_success_ratio(task_sessions):
return mean(ts.solved for ts in task_sessions) if task_sessions else 0
def generate_success_ratio_metric(task_sessions, dates):
"""Yield success-ratio metric for each date in dates.
"""
groups = group_by_date(task_sessions)
for date in dates:
success_ratio = compute_success_ratio(groups[date])
yield Metric(name='success-ratio', time=date, value=success_ratio)
def generate_success_ratio_for_task_metric(task_sessions, date, tasks):
"""Yield success ratio metric for each task and given date.
"""
recent_task_sessions = [
ts for ts in task_sessions
if ts.date > date - timedelta(days=30)]
groups = group_by_task(recent_task_sessions)
for task in tasks:
success_ratio = compute_success_ratio(groups[task.id])
group_name = 'task.' + task.name
yield Metric(
name='success-ratio', group=group_name, time=date,
value=success_ratio)
def generate_solving_hours_metric(task_sessions, dates):
"""Yield solving-time metric for each date in dates.
"""
solved_task_sessions = [ts for ts in task_sessions if ts.solved]
groups = group_by_date(solved_task_sessions)
hour = 3600
for date in dates:
total_solving_hours = sum(ts.time_spent for ts in groups[date]) / hour
yield Metric(name='solving-hours', time=date, value=total_solving_hours)
def generate_median_time_for_task_metric(task_sessions, date, tasks):
"""Yield median-time metric for each task and given date.
"""
recent_solved_task_sessions = [
ts for ts in task_sessions
if ts.solved and ts.date > date - timedelta(days=30)]
groups = group_by_task(recent_solved_task_sessions)
for task in tasks:
times = [ts.time_spent for ts in groups[task.id]]
median_time = median(times) if times else 0
group_name = 'task.' + task.name
yield Metric(name='median-time', group=group_name, time=date, value=median_time)
def generate_metrics(dates):
# Select all task sessions which might be possibly needed.
time_range = (
to_timezone_aware(min(dates[0], dates[-1] - timedelta(days=30))),
to_timezone_aware(dates[-1], last_second=True))
    # NOTE: If you add a new metric, make sure to prefetch required data (such
    # as task_session.snapshots or task_session.task) to avoid excess SQL
    # queries. Currently, separate lists of TaskSessions and Tasks are
    # enough for all computations.
# TODO: test that no SQL queries are generated in metrics generators
task_sessions = list(TaskSession.objects.filter(end__date__range=time_range))
# global metrics
yield from generate_active_students_metric(task_sessions, dates)
yield from generate_solved_count_metric(task_sessions, dates)
yield from generate_success_ratio_metric(task_sessions, dates)
yield from generate_solving_hours_metric(task_sessions, dates)
# task-specific metrics
tasks = list(Task.objects.all())
yield from generate_solved_count_for_task_metric(task_sessions, dates[-1], tasks)
yield from generate_success_ratio_for_task_metric(task_sessions, dates[-1], tasks)
yield from generate_median_time_for_task_metric(task_sessions, dates[-1], tasks)
def make_metrics_generator(first_date=None):
"""Return metrics generator and dates for which will be metrics computed.
"""
first_date = first_date or get_first_unmeasured_date()
last_date = get_yesterday()
dates = dates_range(first_date, last_date)
def generate_and_save_metrics():
# If the first_date was set manually, it's necessary to delete
# previously computed metrics before they are replaced by the new ones.
Metric.objects.filter(time__gte=first_date).delete()
# Delete recent group-specific metrics - it's not necessary to store
# them for every day.
group_metrics = Metric.objects.filter(group__isnull=False)
recent_group_metrics = group_metrics.filter(time__gt=last_date-timedelta(days=10))
recent_group_metrics.delete()
new_metrics = []
for metric in generate_metrics(dates):
new_metrics.append(metric)
yield metric
# All generated metris are stored to DB in a single SQL query.
Metric.objects.bulk_create(new_metrics)
return generate_and_save_metrics, dates
| adaptive-learning/robomission | backend/monitoring/metrics.py | Python | gpl-3.0 | 7,312 |
import six
from six import StringIO
from six.moves import configparser
from bamp.config.bumpversion import config_dump
def make_config(content):
config = configparser.ConfigParser()
input_file = StringIO(content)
if six.PY2:
config.readfp(input_file)
else:
config.read_file(input_file)
return config
def test_empty_config():
config = make_config("")
assert {} == config_dump(config)
def test_one_section_no_values():
config = make_config("[bumpversion]")
assert {} == config_dump(config)
def test_one_section_simple_value():
config = make_config("[bumpversion]\n" "cheese = cheddar")
assert {"cheese": "cheddar"} == config_dump(config)
def test_one_file_section():
config = make_config("[bumpversion:files:./file_one.php]")
assert {"files": ("./file_one.php",)} == config_dump(config)
def test_two_file_sections():
config = make_config(
"[bumpversion:files:./file_one.php]\n" "[bumpversion:files:./file_two.php]"
)
assert {"files": ("./file_one.php", "./file_two.php")} == config_dump(config)
def test_current_version_renamed():
config = make_config("[bumpversion]\n" "current_version = 1.0.42")
assert {"version": "1.0.42"} == config_dump(config)
| inirudebwoy/bamp | tests/test_bumpversion_config.py | Python | mit | 1,259 |
""" BasePlugin definitions. """
import logging
import threading
import traceback
import os
import queue
from sjutils import threadpool
class BasePluginError(Exception):
"""Raised by BasePlugin."""
def __init__(self, error):
"""Init method."""
Exception.__init__(self, error)
class BasePlugin(threading.Thread):
"""Base class for job implementation in spvd."""
name = ""
require = {}
optional = {
"debug": bool,
"max_parallel_checks": int,
"max_checks_queue": int,
"check_poll": int,
"check_timeout": int,
"result_threshold": int,
"limit_group": str,
"limit_check": str,
"limit_commit": int,
}
def __init__(self, options, event, params=None):
"""Init method.
@params is a dictionary of optional parameters among:
max_parallel_checks: maximum number of threads for this plugin.
max_checks_queue: maximum number of checks to get from
the DB and queue for execution.
check_poll: interval between two get_checks call.
check_timeout: maximum wait time for get_checks calls.
debug: enable debugging information.
result_threshold: number of results waiting for a commit that
will trigger a main-loop wake up.
"""
threading.Thread.__init__(self)
self.setDaemon(True)
self.dismiss = event
self.resqueue = {}
self.checks = {}
self.rescommit = threading.Event()
self.params = {
"max_parallel_checks": 3,
"max_checks_queue": 9,
"check_poll": 60,
"check_timeout": None,
"debug": False,
"result_threshold": 5,
"limit_group": None,
"limit_check": None,
"limit_commit": 40,
}
if params:
self.params.update(params)
# Limiting groups
self.limit_group = None
if self.params["limit_group"]:
self.limit_group = [
group.strip()
for group in self.params["limit_group"].split(",")
if group.strip()
]
if len(self.limit_group) == 1:
self.limit_group = self.limit_group[0]
# Limiting checks
self.limit_check = None
if self.params["limit_check"]:
self.limit_check = [
check.strip()
for check in self.params["limit_check"].split(",")
if check.strip()
]
if len(self.limit_check) == 1:
self.limit_check = self.limit_check[0]
self.options = options
self.log = logging.getLogger("spvd.plugins." + self.name)
# Set up logging
if not self.options.nodaemon:
log_dir = options.logdir + "/" + self.name
if not os.path.exists(log_dir):
os.mkdir(log_dir)
log_handler = logging.FileHandler(log_dir + "/" + self.name + ".log")
log_format = logging.Formatter("%(asctime)s %(levelname)s %(message)s")
log_handler.setFormatter(log_format)
self.log.addHandler(log_handler)
if self.params.get("debug", False):
self.log.setLevel(logging.DEBUG)
else:
self.log.setLevel(logging.INFO)
self.log.propagate = False
# Finalize init
self.job_pool = threadpool.ThreadPool(int(self.params["max_parallel_checks"]))
for widx, worker in enumerate(self.job_pool.workers):
worker.setName("%s-#%d" % (self.name, widx))
# Plugins or Subclasses must start Thread by themselves
# self.start()
# self.log.info(self)
def __str__(self):
return "<BasePlugin>"
@staticmethod
def __prepare_status_update(check):
"""Prepare a structure for status update."""
status = {
"status_id": check["status"]["status_id"],
"sequence_id": check["status"]["seq_id"],
"status": check["status"]["check_status"],
"message": check["status"]["check_message"],
"status_infos": check["status"]["status_infos"],
}
if "status_infos" in check:
status["status_infos"] = check["status_infos"]
return status
def job_start(self, check):
"""Starts a job."""
job = self.create_new_job(check)
job.log.debug("check started")
self.log.debug("Work request #%s started." % check["status"]["status_id"])
self.checks[check["status"]["status_id"]] = job
return job.run()
def job_stop(self, request, result):
"""Stops a job."""
self.checks[request.request_id].log.info(
"check result is %s : (%s)"
% (result["status"]["check_status"], result["status"]["check_message"])
)
self.log.debug("Work request #%s finished." % request.request_id)
update = self.__prepare_status_update(result)
self.resqueue.update({result["status"]["status_id"]: update})
if len(self.resqueue) > self.params["result_threshold"]:
self.rescommit.set()
del self.checks[request.request_id]
def handle_exception(self, request, exc_info):
"""Handle exception in a job."""
if not isinstance(exc_info, tuple):
# Something is seriously wrong...
self.log.critical("*** Worker thread raised an exception ***")
self.log.critical(request)
self.log.critical(exc_info)
raise SystemExit
self.log.error(
"Exception occured in request #%s: %s" % (request.request_id, exc_info)
)
for line in traceback.format_exception(exc_info[0], exc_info[1], exc_info[2]):
self.log.error(line)
def run(self):
"""Run method."""
self.log.info("plugin started")
first = True
while not self.dismiss.isSet():
try:
if not first:
self.rescommit.wait(self.params["check_poll"])
first = False
self.log.debug(
"number of threads alive %d/%d"
% (
len(
[
thread
for thread in self.job_pool.workers
if thread.isAlive()
]
),
int(self.params["max_parallel_checks"]),
)
)
self.log.debug("jobs waiting to be reported: %d" % len(self.resqueue))
self.log.debug(
"jobs waiting to be executed: %d (approx)"
% self.job_pool._requests_queue.qsize()
)
try:
self.job_pool.poll()
except threadpool.NoResultsPending:
self.log.debug("there was no result to poll")
# Commit pending results
if self.resqueue:
self.log.debug("%d results to commit" % len(self.resqueue))
self.commit_checks()
# Determine maximum number of checks to get
# Queue.qsize is unreliable, try to mitigate its weirdness
limit_fetch = (
self.params["max_checks_queue"]
- self.job_pool._requests_queue.qsize()
)
limit_fetch = min(abs(limit_fetch), self.params["max_checks_queue"])
# Determine if we need to fetch more work
if self.job_pool._requests_queue.full() or limit_fetch == 0:
self.log.info("queue estimated full")
continue
            # Nonsensical value or no check to fetch
if limit_fetch > self.params["max_checks_queue"] or limit_fetch < 0:
self.log.info(
"*** Runtime inconsistency, trying to fetch %d checks ***"
% limit_fetch
)
continue
# Get checks for the current plugin
self.log.debug("*** fetching %s checks" % limit_fetch)
checks = self.fetch_checks(limit_fetch)
if not checks:
continue
if checks.get("status", None) is None:
self.log.error("remote module did not return any work")
continue
if len(checks["status"]) > 0:
self.log.debug("got %s checks" % len(checks["status"]))
# Queue checks
try:
for status in checks["status"]:
req = threadpool.WorkRequest(
self.job_start,
[
{
"check": checks["checks"][status["chk_id"]],
"group": checks["groups"][status["grp_id"]],
"object": checks["objects"][str(status["obj_id"])],
"status": status,
}
],
None,
request_id=status["status_id"],
callback=self.job_stop,
exc_callback=self.handle_exception,
)
self.job_pool.queue_request(req, self.params["check_poll"])
self.log.debug("Work request #%s added." % req.request_id)
except queue.Full:
self.log.error("queue is full")
continue
except Exception as error:
self.log.error("caught unknown exception:")
self.log.exception(error)
continue
self.log.info("dismissing workers")
self.job_pool.dismiss_workers(int(self.params["max_parallel_checks"]))
# Do not join, takes time and results will not be written to database anyway
self.log.info("plugin stopped")
def create_new_job(self, _job):
"""Dummy method. To be overridden in plugins."""
raise BasePluginError(
"Plugin %s does not implement <create_new_job>" % self.name
)
def fetch_checks(self, _limit_fetch):
"""Dummy method. To be overridden in plugins."""
raise BasePluginError("Plugin %s does not implement <fetch_checks>" % self.name)
def commit_checks(self):
"""Dummy method. To be overridden in plugins."""
raise BasePluginError(
"Plugin %s does not implement <commit_checks>" % self.name
)
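# Minimal sketch of a concrete plugin (illustrative only, not part of spvd;
# DummyJob and the rpc() helper are placeholders for whatever backend the
# real plugin talks to):
#
#     class DummyPlugin(BasePlugin):
#         name = "dummy"
#
#         def create_new_job(self, check):
#             # must return an object exposing .log and .run()
#             return DummyJob(self.options, check)
#
#         def fetch_checks(self, limit_fetch):
#             # must return a dict with 'status', 'checks', 'groups' and 'objects'
#             return self.rpc("get_checks", limit=limit_fetch)
#
#         def commit_checks(self):
#             self.rpc("set_checks_status", list(self.resqueue.values()))
#             self.resqueue = {}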
| SmartJog/spvd | share/baseplugin.py | Python | lgpl-2.1 | 11,049 |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
'''
This plugin fix problem described in
http://forum.ispsystem.com/ru/showthread.php?t=24014
There is a bug in ISPmanager with scheme
where nginx and apache+itk is used.
Private directories are not accesible
because config files passwd and .htaccess are
created with wrong permissions. This plugin fixes the problem.
Author: Andrey Scopenco <[email protected]>
'''
PLUGIN_NAME = 'diraccess_fix_perms'
LOG_FILE = '/usr/local/ispmgr/var/ispmgr.log'
from xml.dom import minidom
from pwd import getpwuid, getpwnam
from os import chdir, getpid, environ, access, R_OK, chown, listdir
from sys import exit, stderr
from cgi import FieldStorage
from traceback import format_exc
class ExitOk(Exception):
pass
class Log(object):
'''Class used for add debug to ispmgr.log'''
def __init__(self, plugin=None, output=LOG_FILE):
import time
timenow = time.localtime(time.time())
self.timef = time.strftime("%b %d %H:%M:%S", timenow)
self.log = output
self.plugin_name = plugin
self.fsock = open(self.log, 'a+')
self.pid = getpid()
self.script_name = __file__
def write(self, desc):
if not (desc == "\n"):
if (desc[-1:] == "\n"):
self.fsock.write(
'%s [%s] ./%s \033[36;40mPLUGIN %s :: %s\033[0m' % (
self.timef, self.pid, self.script_name,
self.plugin_name, desc))
else:
self.fsock.write(
'%s [%s] ./%s \033[36;40mPLUGIN %s :: %s\033[0m\n' % (
self.timef, self.pid, self.script_name,
self.plugin_name, desc))
def close(self):
self.fsock.close()
def xml_doc(elem=None, text=None):
xmldoc = minidom.Document()
doc = xmldoc.createElement('doc')
xmldoc.appendChild(doc)
if elem:
emp = xmldoc.createElement(elem)
doc.appendChild(emp)
if text:
msg_text = xmldoc.createTextNode(text)
emp.appendChild(msg_text)
return xmldoc.toxml('UTF-8')
def xml_error(text, code_num=None):
xmldoc = minidom.Document()
doc = xmldoc.createElement('doc')
xmldoc.appendChild(doc)
error = xmldoc.createElement('error')
doc.appendChild(error)
if code_num:
code = xmldoc.createAttribute('code')
error.setAttributeNode(code)
error.setAttribute('code', str(code_num))
if code_num in [2, 3, 6]:
obj = xmldoc.createAttribute('obj')
error.setAttributeNode(obj)
error.setAttribute('obj', str(text))
return xmldoc.toxml('UTF-8')
elif code_num in [4, 5]:
val = xmldoc.createAttribute('val')
error.setAttributeNode(val)
error.setAttribute('val', str(text))
return xmldoc.toxml('UTF-8')
error_text = xmldoc.createTextNode(text.decode('utf-8'))
error.appendChild(error_text)
return xmldoc.toxml('UTF-8')
def domain_to_idna(dom):
''' convert domain to idna format'''
dom_u = unicode(dom, 'utf-8')
return dom_u.encode("idna")
if __name__ == "__main__":
chdir('/usr/local/ispmgr/')
# activate logging
# stderr ==> ispmgr.log
log = Log(plugin=PLUGIN_NAME)
stderr = log
try:
# get cgi vars
req = FieldStorage(keep_blank_values=True)
func = req.getvalue('func')
elid = req.getvalue('elid')
sok = req.getvalue('sok')
dir = req.getvalue('name')
log.write('func %s, elid %s, sok %s' % (func, elid, sok))
if func != 'diraccess.edit' or not sok:
print xml_doc()
raise ExitOk('no action')
user = req.getvalue('owner')
if not user:
user = environ.get('REMOTE_USER')
if not user:
raise Exception('cant set user')
try:
pw_user = getpwnam(user)
except KeyError:
print xml_doc()
raise ExitOk('user not found')
pw_apache = getpwnam('apache')
log.write('user %s has uid %s' % (user, pw_user.pw_uid))
chgrp = []
passwd_dir = '%s/etc' % pw_user.pw_dir
for passwd in listdir(passwd_dir):
if 'passwd' in passwd:
chgrp.append('%s/%s' % (passwd_dir, passwd))
chgrp.append('%s/%s/.htaccess' % (pw_user.pw_dir, dir))
for conf in chgrp:
if access(conf, R_OK):
log.write(
'chown %s:%s %s' % (
pw_user.pw_uid, pw_apache.pw_gid, conf))
chown(conf, pw_user.pw_uid, pw_apache.pw_gid)
print xml_doc('ok')
raise ExitOk('done')
except ExitOk, e:
log.write(e)
except:
print xml_error('please contact support team', code_num='1')
log.write(format_exc())
exit(0)
| scopenco/isp-plugin-diraccess | addon/diraccess_fix_perms.py | Python | bsd-3-clause | 4,929 |
# cython: linetrace=True
import struct
import typing
rtphdr = struct.Struct('!HHII')
rtpevent = struct.Struct('!BBH')
class RTP(typing.NamedTuple):
version: int = 2
padding: bool = 0
ext: int = 0
csrc_items: int = 0
marker: bool = 0
p_type: int = 0
seq: int = 0
timestamp: int = 0
ssrc: int = 0
payload: bytes = b''
@classmethod
def parse(cls, data):
rtp = rtphdr.unpack(data[:rtphdr.size])
return cls(
version=(rtp[0] >> 14) & 0x3,
padding=(rtp[0] >> 13) & 0x1,
ext=(rtp[0] >> 12) & 0x1,
csrc_items=(rtp[0] >> 8) & 0xF,
marker=(rtp[0] >> 7) & 0x1,
p_type=rtp[0] & 0x7f,
seq=rtp[1],
timestamp=rtp[2],
ssrc=rtp[3],
payload=data[rtphdr.size:]
)
def __bytes__(self):
header = rtphdr.pack(
(self.version & 0x3) << 14
| (self.padding & 0x1) << 13
| (self.ext & 0x1) << 12
| (self.csrc_items & 0xF) << 8
| (self.marker & 0x1) << 7
| (self.p_type & 0x7f),
self.seq,
self.timestamp,
self.ssrc
)
return b''.join([header, bytes(self.payload)])
class RTPEvent(typing.NamedTuple):
event_id: int
end_of_event: bool
reserved: bool
volume: int
duration: int
@classmethod
def parse(cls, data):
event = rtpevent.unpack(data[:rtpevent.size])
return cls(
event_id=event[0],
end_of_event=(event[1] >> 7) & 0x01,
reserved=(event[1] >> 6) & 0x01,
volume=event[1] & 0x3f,
duration=event[2]
)
def __bytes__(self):
return rtpevent.pack(
self.event_id,
(self.end_of_event & 0x01) << 7
| (self.reserved & 0x01) << 6
| (self.volume & 0x3f),
self.duration
)
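# Illustrative round-trip sketch (not part of the original module).
if __name__ == "__main__":
    dtmf = RTPEvent(event_id=1, end_of_event=1, reserved=0, volume=10, duration=160)
    pkt = RTP(p_type=101, marker=1, seq=1, timestamp=160, ssrc=0x1234,
              payload=bytes(dtmf))
    assert RTP.parse(bytes(pkt)) == pkt
    assert RTPEvent.parse(RTP.parse(bytes(pkt)).payload) == dtmf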
| vodik/aiortp | aiortp/packet.py | Python | apache-2.0 | 1,959 |
#!/usr/bin/python
# -*- coding: utf-8 -*-
# Copyright 2010 British Broadcasting Corporation and Kamaelia Contributors(1)
#
# (1) Kamaelia Contributors are listed in the AUTHORS file and at
# http://www.kamaelia.org/AUTHORS - please extend this file,
# not this notice.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import socket
from Axon.LikeFile import likefile, background
from Kamaelia.Protocol.HTTP.HTTPClient import SimpleHTTPClient
background().start()
import Axon
from Kamaelia.Chassis.ConnectedServer import MoreComplexServer
from Kamaelia.Protocol.HTTP.HTTPServer import HTTPServer
from Kamaelia.Protocol.HTTP.Handlers.Minimal import Minimal
import Kamaelia.Protocol.HTTP.ErrorPages as ErrorPages
from Kamaelia.Chassis.Pipeline import Pipeline
# Our configuration
homedirectory = "/srv/www/htdocs/"
indexfilename = "index.html"
def requestHandlers(URLHandlers):
def createRequestHandler(request):
if request.get("bad"):
return ErrorPages.websiteErrorPage(400, request.get("errormsg",""))
else:
for (prefix, handler) in URLHandlers:
if request["raw-uri"][:len(prefix)] == prefix:
request["uri-prefix-trigger"] = prefix
request["uri-suffix"] = request["raw-uri"][len(prefix):]
return handler(request)
return ErrorPages.websiteErrorPage(404, "No resource handlers could be found for the requested URL")
return createRequestHandler
def servePage(request):
return Minimal(request=request, homedirectory=homedirectory, indexfilename=indexfilename)
class PeerProxy(Axon.ThreadedComponent.threadedcomponent):
def __init__(self, request):
super(PeerProxy, self).__init__()
self.request = request
def main(self):
uri = self.request.get('uri-suffix',"/")
p = likefile( SimpleHTTPClient() )
p.put("http://kamaelia.sourceforge.net/%s" % uri)
pagedata= p.get()
p.shutdown()
print pagedata
resource = {
"type" : "text/html",
"statuscode" : "200",
}
self.send(resource, "outbox")
page = { "data" : pagedata }
self.send(page, "outbox")
self.send(Axon.Ipc.producerFinished(self), "signal")
def HTTPProtocol(*args,**argd):
return HTTPServer(requestHandlers([
["/peer", PeerProxy ],
["/", servePage ],
]))
MoreComplexServer(protocol=HTTPProtocol,
port=8082,
socketOptions=(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1) ).activate()
import time
while 1:
time.sleep(1)
| sparkslabs/kamaelia | Sketches/MPS/HTTP/demo_http.py | Python | apache-2.0 | 3,182 |
# -*- coding: utf-8 -*-
from pyload.plugin.OCR import OCR
class ShareonlineBiz(OCR):
__name = "ShareonlineBiz"
__type = "ocr"
__version = "0.11"
__description = """Shareonline.biz ocr plugin"""
__license = "GPLv3"
__authors = [("RaNaN", "[email protected]")]
def __init__(self):
OCR.__init__(self)
def get_captcha(self, image):
self.load_image(image)
self.to_greyscale()
self.image = self.image.resize((160, 50))
self.pixels = self.image.load()
self.threshold(1.85)
# self.eval_black_white(240)
# self.derotate_by_average()
letters = self.split_captcha_letters()
final = ""
for letter in letters:
self.image = letter
self.run_tesser(True, True, False, False)
final += self.result_captcha
return final
# tesseract at 60%
| ardi69/pyload-0.4.10 | pyload/plugin/ocr/ShareonlineBiz.py | Python | gpl-3.0 | 917 |
'''
Generic message-based protocol used by Bitcoin and P2Pool for P2P communication
'''
import hashlib
import struct
from twisted.internet import protocol
from twisted.python import log
import p2pool
from p2pool.bitcoin import data as bitcoin_data
from p2pool.util import datachunker, variable
class TooLong(Exception):
pass
class Protocol(protocol.Protocol):
def __init__(self, message_prefix, max_payload_length, traffic_happened=variable.Event(), ignore_trailing_payload=False):
self._message_prefix = message_prefix
self._max_payload_length = max_payload_length
self.dataReceived2 = datachunker.DataChunker(self.dataReceiver())
self.traffic_happened = traffic_happened
self.ignore_trailing_payload = ignore_trailing_payload
def dataReceived(self, data):
self.traffic_happened.happened('p2p/in', len(data))
self.dataReceived2(data)
def dataReceiver(self):
while True:
start = ''
while start != self._message_prefix:
start = (start + (yield 1))[-len(self._message_prefix):]
command = (yield 12).rstrip('\0')
length, = struct.unpack('<I', (yield 4))
if length > self._max_payload_length:
print 'length too large'
continue
checksum = yield 4
payload = yield length
if bitcoin_data.grshash(payload)[:4] != checksum:
print 'invalid hash for', self.transport.getPeer().host, repr(command), length, checksum.encode('hex')
if p2pool.DEBUG:
print __import__('groestlcoin_hash').getHash(payload, len(payload))[:4].encode('hex'), payload.encode('hex')
self.badPeerHappened()
continue
type_ = getattr(self, 'message_' + command, None)
if type_ is None:
if p2pool.DEBUG:
print 'no type for', repr(command)
continue
try:
self.packetReceived(command, type_.unpack(payload, self.ignore_trailing_payload))
except:
print 'RECV', command, payload[:100].encode('hex') + ('...' if len(payload) > 100 else '')
log.err(None, 'Error handling message: (see RECV line)')
self.disconnect()
def packetReceived(self, command, payload2):
handler = getattr(self, 'handle_' + command, None)
if handler is None:
if p2pool.DEBUG:
print 'no handler for', repr(command)
return
if getattr(self, 'connected', True) and not getattr(self, 'disconnecting', False):
handler(**payload2)
def disconnect(self):
if hasattr(self.transport, 'abortConnection'):
# Available since Twisted 11.1
self.transport.abortConnection()
else:
# This doesn't always close timed out connections! warned about in main
self.transport.loseConnection()
def badPeerHappened(self):
self.disconnect()
def sendPacket(self, command, payload2):
if len(command) >= 12:
raise ValueError('command too long')
type_ = getattr(self, 'message_' + command, None)
if type_ is None:
raise ValueError('invalid command')
#print 'SEND', command, repr(payload2)[:500]
payload = type_.pack(payload2)
if len(payload) > self._max_payload_length:
raise TooLong('payload too long')
data = self._message_prefix + struct.pack('<12sI', command, len(payload)) + bitcoin_data.grshash(payload)[:4] + payload
self.traffic_happened.happened('p2p/out', len(data))
self.transport.write(data)
def __getattr__(self, attr):
prefix = 'send_'
if attr.startswith(prefix):
command = attr[len(prefix):]
return lambda **payload2: self.sendPacket(command, payload2)
#return protocol.Protocol.__getattr__(self, attr)
raise AttributeError(attr)
| GroestlCoin/p2pool-grs | p2pool/util/p2protocol.py | Python | gpl-3.0 | 4,140 |
#!/usr/bin/env python
# -*- coding: UTF-8 -*-
#######################################################
'''
Originally created on Mar 15, 2011
'''
import tags as NMhtml
tagOld=NMhtml.tag
cssConditional="""
<!-- paulirish.com/2008/conditional-stylesheets-vs-css-hacks-answer-neither/ -->
<!--[if lt IE 7 ]> <html class="no-js ie6" lang="en"> <![endif]-->
<!--[if IE 7 ]> <html class="no-js ie7" lang="en"> <![endif]-->
<!--[if IE 8 ]> <html class="no-js ie8" lang="en"> <![endif]-->
<!--[if (gte IE 9)|!(IE)]><!--> <html class="no-js" lang="en"> <!--<![endif]-->
"""
class tag(tagOld):
def getByAttr(self,attr,val,repl=None,insertIfNF=False):
        #TODO: works only for the 1st level; make it recursive
for ix,item in enumerate(self.contents):
if isinstance(item,tag) and item.attrGet(attr) == val:
if repl is not None:
self.contents[ix]=repl
return item
if insertIfNF and repl is not None:self.insertContents(repl)
return None
#return [item for item in self.contents if isinstance(item,tag) and item.attrGet(attr) == val ]
#class tagBlank(tag):
# __slots__ = []
# _nonclosing=True
# def __init__(self,contents='', attributes=''): tag.__init__(self,contents,attributes, name=u"")
class script_ganalytics(NMhtml.script):
def __init__(self,account):
contents="""var _gaq = _gaq || [];
_gaq.push(['_setAccount', '$account$']);
_gaq.push(['_trackPageview']);
(function() {
var ga = document.createElement('script'); ga.type = 'text/javascript'; ga.async = true;
ga.src = ('https:' == document.location.protocol ? 'https://ssl' : 'http://www') + '.google-analytics.com/ga.js';
(document.getElementsByTagName('head')[0] || document.getElementsByTagName('body')[0]).appendChild(ga);
})();"""
super(script_ganalytics, self).__init__(contents.replace('$account$',account))
self._name='script'
class html(tag):
doctype=""
preHtml="" #whatever we want after doc declaration and before html tag
class head(tag):
class meta(tag):
__slots__ = []
_nonclosing=True
def __init__(self,contents="",attributes="",name="",description=""):
tag.__init__(self,contents,attributes)
if name !="":self.attrSetName(name)
if description!="":self.attrSetDescription(description)
def attrSetDescription(self, val):
self.attrSet('name','description')
return self.attrSet('content', val)
class link(tag):
__slots__ = []
_nonclosing=True
def __init__(self,contents='',rel='',href=''):
tag.__init__(self,contents)
if rel!='':self.attrSet('rel',rel)
if href!='':self.attrSet('href',href)
class title(tag) :
            __slots__ = []
def __init__(self,title=''): tag.__init__(self,title)
class body(tag):
__slots__ = []
def __init__(self): tag.__init__(self,name=u"body")
def __init__(self,doctype=5):
if doctype==5:self.doctype="<!doctype html>"
elif doctype==4:self.doctype= '<!DOCTYPE HTML PUBLIC "-//W3C//DTD HTML 4.01//EN" "http://www.w3.org/TR/html4/strict.dtd">'
self.head=self.head()
self.body=self.body()
tag.__init__(self, [self.head, self.body], name='html')
def _toStrOpen(self):
return self.doctype +'\n' + self.preHtml + tag._toStrOpen(self)
class html5bp(html):
#TODO:Scripts,media handheld
_relFavicon="shortcut icon"
_relTouchIcon="apple-touch-icon"
def __init__(self,title='',description='',author='',stylesheets=[]):
html.__init__(self,5)
self.preHtml=cssConditional
self.head.insertContents(self.head.meta("",'charset=\"utf-8\"'))
self.head.insertContents(self.head.meta("",'content-language=\"en\"'))
self.head.insertContents(self.head.meta("",'X-UA-Compatible=\"IE=edge,chrome=1\"'))
if title!="":self.head.insertContents(self.head.title(title))
self.head.insertContents(self.head.meta("","","description",description))
self.head.insertContents(self.head.meta("","","author",author))
self.head.insertContents(self.head.meta("",'viewport=\"width=device-width, initial-scale=1.0\"'))
self.docSetFavicon("/favicon.ico")
self.docSetTouchIcon("/apple-touch-icon.png")
for item in stylesheets:self.head.insertContents(self.head.link("","stylesheet",item))
self.container=self.body.insertContents(NMhtml.div("",'id=\"container\"'))
self.header=self.container.insertContents(tag(name="header"))
self.main=self.container.insertContents(NMhtml.div("",'id=\"main\" role=\"main\"'))
self.footer=self.container.insertContents(tag(name="footer"))
def docSetFavicon(self,href):
self.head.getByAttr("rel",self._relFavicon,self.head.link("",self._relFavicon,href),True)
def docSetTouchIcon(self,href):
self.head.getByAttr("rel",self._relFavicon,self.head.link("",self._relTouchIcon,href),True)
class html5Twt(html5bp):
def __init__(self,title='',description='',author='',stylesheets=[]):
        super(html5Twt, self).__init__(title=title,description=description,author=author,stylesheets=stylesheets)
self.header.insertContents(script_ganalytics("UA-10439541-3"))
##################################### html5 forms
class label(tag):
def __init__(self, contents='', attributes='', _for=''):
tag.__init__(self,contents, attributes)
self.attrSetFor(_for )
def attrGetFor(self): return self.attrGet('for')
def attrSetFor(self, val): return self.attrSet('for', val)
class form_tag(tag):
_nonclosing=True
def __init__(self, contents='', attributes='', value=False,name=False,placeholder=False,required=False):
tag.__init__(self,contents, attributes)
if value:self.attrSetValue (value)
if name:self.attrSet('name', name)
if placeholder:self.attrSetPlaceholder(placeholder)
if required:self.setRequired()
def setRequired(self):
if self.contents.find('required')==-1:self.insertContents("required",-1)
def attrGetValue(self): return self.attrGet('value')
def attrSetValue(self, val): return self.attrSet('value', val)
def attrGetPlaceholder(self): return self.attrGet('placeholder')
def attrSetPlaceholder(self, val): return self.attrSet('placeholder', val)
class input(form_tag): #@ReservedAssignment
def __init__(self, contents='', attributes='', type='text', value=False,name=False,placeholder=False,required=False): #@ReservedAssignment
form_tag.__init__(self,contents, attributes,value,name,placeholder,required)
#if value:self.attrSet('value', value)
self.attrSet('type', type)
def attrGetMin(self): return self.attrGet('min')
def attrSetMin(self, val): return self.attrSet('min', val)
def attrGetMax(self): return self.attrGet('max')
def attrSetMax(self, val): return self.attrSet('max', val)
def attrGetStep(self): return self.attrGet('step')
def attrSetStep(self, val): return self.attrSet('step', val)
class textarea(form_tag):
_nonclosing=False
def __init__(self, attributes='', type='text', value=False,name=False,placeholder=False,required=False,rows="10",cols="60"): #@ReservedAssignment
form_tag.__init__(self,"", attributes,value,name,placeholder,required)
#if value:self.attrSet('value', value)
if rows:self.attrSet('rows', rows)
if cols:self.attrSet('cols', cols)
class radioList(NMhtml.tag_dummy):
def __init__(self,name="gr1",valList=[],required=False):
NMhtml.tag_dummy.__init__(self)
# align='align=\"xx\"'.replace("xx",align)
for ix, item in enumerate(valList):
idvl="frm-%s-%d" %(name,ix)
inp=input(value=item[0],type="radio",required=True,name=name)
            inp.attrSetId(idvl)
self.insertContents(NMhtml.span([inp,label(item[1],"",idvl)]))
#self.insertContents(NMhtml.span(item[1]))
#if value:self.attrSet('value', value)
#return [ input(align, value=i,type="radio",name=name) for i in valList ]
class select(tag):
__slots__ = []
def __init__(self, optionslst=[], attributes='',selected=None,multiple=None,size=None):
tag.__init__(self,"", attributes)
if multiple is not None:self.attrSet('multiple', "multiple")
        if size is not None:self.attrSet('size', size)
for cnt,opt in enumerate(optionslst):
optSel=True if selected is not None and selected == opt[0] else False
curOption=option(opt[1],"",opt[0],selected=optSel)
curOption.attrAppendCnt(cnt+1)
self.insertContents(curOption)
class option(form_tag):
__slots__ = []
_nonclosing=False
def __init__(self, contents='', attributes='', value=False, selected=False):
form_tag.__init__(self,contents, attributes)
if value:self.attrSet('value', value)
if selected:self.attrSet('selected', 'selected')
class button(form_tag):
__slots__ = []
def __init__(self, contents='', attributes='', OnClick=False):
        form_tag.__init__(self,contents, attributes)
if OnClick:self.attrSet('OnClick', OnClick)
class form(tag):
__slots__ = []
def __init__(self, contents='', attributes='', method="post", action=False):
tag.__init__(self,contents, attributes)
if method:self.attrSetMethod (method)
if action:self.attrSetAction (action)
def attrSetMethod(self, val): return self.attrSet('method', val)
def attrGetAction(self): return self.attrGet('action')
def attrSetAction(self, val): return self.attrSet('action', val)
def setNovalidate(self):self.attributes.setFlag("novalidate")
#if self.contents.find('novalidate')==-1:self.insertContents("novalidate",-1)
def formItemDiv(title,formElem):
return NMhtml.div([title,formElem],'class=\"nm-frm-item\"') | nickmilon/milonpy | milonpy/html_gen/page2011.py | Python | apache-2.0 | 10,463 |
#!/usr/bin/env python3
import os
import sys
if __name__ == "__main__":
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "djing.settings")
from django.core.management import execute_from_command_line
execute_from_command_line(sys.argv)
| bashmak/djing | manage.py | Python | unlicense | 249 |
from webapp2_extras.appengine.auth.models import User
from google.appengine.ext import ndb
from google.appengine.ext import blobstore
upload_url = blobstore.create_upload_url('/upload')
class User(User):
"""
Universal user model. Can be used with App Engine's default users API,
own auth or third party authentication methods (OpenID, OAuth etc).
based on https://gist.github.com/kylefinley
"""
#: Creation date.
created = ndb.DateTimeProperty(auto_now_add=True)
#: Modification date.
updated = ndb.DateTimeProperty(auto_now=True)
#: User defined unique name, also used as key_name.
# Not used by OpenID
username = ndb.StringProperty()
#: User Name
name = ndb.StringProperty()
#: User Last Name
last_name = ndb.StringProperty()
#: User email
email = ndb.StringProperty(required=True)
#: Hashed password. Only set for own authentication.
# Not required because third party authentication
# doesn't use password.
password = ndb.StringProperty()
#: User Country
country = ndb.StringProperty()
#: User TimeZone
tz = ndb.StringProperty()
#: Account activation verifies email
activated = ndb.BooleanProperty(default=False)
@classmethod
def get_by_email(cls, email):
"""Returns a user object based on an email.
:param email:
            String representing the user email.
:returns:
A user object.
"""
return cls.query(cls.email == email).get()
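    # Illustrative usage (added comment; assumes an active App Engine/ndb
    # context and a hypothetical address):
    #     user = User.get_by_email('[email protected]')
    #     if user and user.activated:
    #         ...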
@classmethod
def create_resend_token(cls, user_id):
entity = cls.token_model.create(user_id, 'resend-activation-mail')
return entity.token
@classmethod
def validate_resend_token(cls, user_id, token):
return cls.validate_token(user_id, 'resend-activation-mail', token)
@classmethod
def delete_resend_token(cls, user_id, token):
cls.token_model.get_key(user_id, 'resend-activation-mail', token).delete()
def get_social_providers_names(self):
social_user_objects = SocialUser.get_by_user(self.key)
result = []
# import logging
for social_user_object in social_user_objects:
# logging.error(social_user_object.extra_data['screen_name'])
result.append(social_user_object.provider)
return result
def get_social_providers_info(self):
providers = self.get_social_providers_names()
result = {'used': [], 'unused': []}
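        # Once populated below, `result` looks like (illustrative):
        # {'used': [SocialUser.PROVIDERS_INFO['google'], ...], 'unused': [...]}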
for k,v in SocialUser.PROVIDERS_INFO.items():
if k in providers:
result['used'].append(v)
else:
result['unused'].append(v)
return result
class Picture(ndb.Model):
# All pictures that a User has uploaded
title = ndb.StringProperty(required=True)
description = ndb.StringProperty(required=True)
blobKey = ndb.BlobKeyProperty(required=True)
servingUrl = ndb.StringProperty()
created = ndb.DateTimeProperty(auto_now_add=True)
user = ndb.KeyProperty(kind=User)
class LogVisit(ndb.Model):
user = ndb.KeyProperty(kind=User)
uastring = ndb.StringProperty()
ip = ndb.StringProperty()
timestamp = ndb.StringProperty()
class LogEmail(ndb.Model):
sender = ndb.StringProperty(
required=True)
to = ndb.StringProperty(
required=True)
subject = ndb.StringProperty(
required=True)
body = ndb.TextProperty()
when = ndb.DateTimeProperty()
class SocialUser(ndb.Model):
PROVIDERS_INFO = { # uri is for OpenID only (not OAuth)
'google': {'name': 'google', 'label': 'Google', 'uri': 'gmail.com'},
'github': {'name': 'github', 'label': 'Github', 'uri': ''},
'facebook': {'name': 'facebook', 'label': 'Facebook', 'uri': ''},
'linkedin': {'name': 'linkedin', 'label': 'LinkedIn', 'uri': ''},
'myopenid': {'name': 'myopenid', 'label': 'MyOpenid', 'uri': 'myopenid.com'},
'twitter': {'name': 'twitter', 'label': 'Twitter', 'uri': ''},
'yahoo': {'name': 'yahoo', 'label': 'Yahoo!', 'uri': 'yahoo.com'},
}
user = ndb.KeyProperty(kind=User)
provider = ndb.StringProperty()
uid = ndb.StringProperty()
extra_data = ndb.JsonProperty()
@classmethod
def get_by_user(cls, user):
return cls.query(cls.user == user).fetch()
@classmethod
def get_by_user_and_provider(cls, user, provider):
return cls.query(cls.user == user, cls.provider == provider).get()
@classmethod
def get_by_provider_and_uid(cls, provider, uid):
return cls.query(cls.provider == provider, cls.uid == uid).get()
@classmethod
def check_unique_uid(cls, provider, uid):
# pair (provider, uid) should be unique
test_unique_provider = cls.get_by_provider_and_uid(provider, uid)
if test_unique_provider is not None:
return False
else:
return True
@classmethod
def check_unique_user(cls, provider, user):
# pair (user, provider) should be unique
test_unique_user = cls.get_by_user_and_provider(user, provider)
if test_unique_user is not None:
return False
else:
return True
@classmethod
def check_unique(cls, user, provider, uid):
# pair (provider, uid) should be unique and pair (user, provider) should be unique
return cls.check_unique_uid(provider, uid) and cls.check_unique_user(provider, user)
@staticmethod
def open_id_providers():
return [k for k,v in SocialUser.PROVIDERS_INFO.items() if v['uri']]
| skumar07/Air-Share-Real | boilerplate/models.py | Python | lgpl-3.0 | 5,595 |
# Copyright (c) OpenMMLab. All rights reserved.
# flake8: noqa
import warnings
from .formatting import *
warnings.warn('DeprecationWarning: mmdet.datasets.pipelines.formating will be '
'deprecated, please replace it with '
'mmdet.datasets.pipelines.formatting.')
| open-mmlab/mmdetection | mmdet/datasets/pipelines/formating.py | Python | apache-2.0 | 293 |
# Copyright 2014 Cisco Systems, Inc.
# All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License"); you may
# not use this file except in compliance with the License. You may obtain
# a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS, WITHOUT
# WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. See the
# License for the specific language governing permissions and limitations
# under the License.
import functools
import six
from tempest import config
from tempest.scenario import manager
from tempest import test
CONF = config.CONF
class TestGettingAddress(manager.NetworkScenarioTest):
"""Test Summary:
1. Create network with subnets:
1.1. one IPv4 and
1.2. one or more IPv6 in a given address mode
2. Boot 2 VMs on this network
3. Allocate and assign 2 FIP4
4. Check that vNICs of all VMs gets all addresses actually assigned
5. Each VM will ping the other's v4 private address
6. If ping6 available in VM, each VM will ping all of the other's v6
addresses as well as the router's
"""
@classmethod
def skip_checks(cls):
super(TestGettingAddress, cls).skip_checks()
if not (CONF.network_feature_enabled.ipv6
and CONF.network_feature_enabled.ipv6_subnet_attributes):
raise cls.skipException('IPv6 or its attributes not supported')
if not (CONF.network.tenant_networks_reachable
or CONF.network.public_network_id):
msg = ('Either tenant_networks_reachable must be "true", or '
'public_network_id must be defined.')
raise cls.skipException(msg)
if CONF.baremetal.driver_enabled:
msg = ('Baremetal does not currently support network isolation')
raise cls.skipException(msg)
@classmethod
def setup_credentials(cls):
# Create no network resources for these tests.
cls.set_network_resources()
super(TestGettingAddress, cls).setup_credentials()
def setUp(self):
super(TestGettingAddress, self).setUp()
self.keypair = self.create_keypair()
self.sec_grp = self._create_security_group(tenant_id=self.tenant_id)
def prepare_network(self, address6_mode, n_subnets6=1, dualnet=False):
"""Prepare network
        Creates a network with the given number of IPv6 subnets in the given
        mode and one IPv4 subnet.
Creates router with ports on all subnets.
if dualnet - create IPv6 subnets on a different network
:return: list of created networks
"""
self.network = self._create_network(tenant_id=self.tenant_id)
if dualnet:
self.network_v6 = self._create_network(tenant_id=self.tenant_id)
sub4 = self._create_subnet(network=self.network,
namestart='sub4',
ip_version=4)
router = self._get_router(tenant_id=self.tenant_id)
sub4.add_to_router(router_id=router['id'])
self.addCleanup(sub4.delete)
self.subnets_v6 = []
for _ in range(n_subnets6):
net6 = self.network_v6 if dualnet else self.network
sub6 = self._create_subnet(network=net6,
namestart='sub6',
ip_version=6,
ipv6_ra_mode=address6_mode,
ipv6_address_mode=address6_mode)
sub6.add_to_router(router_id=router['id'])
self.addCleanup(sub6.delete)
self.subnets_v6.append(sub6)
return [self.network, self.network_v6] if dualnet else [self.network]
@staticmethod
def define_server_ips(srv):
ips = {'4': None, '6': []}
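        # Ends up like {'4': '10.100.0.3', '6': ['2003:db8::3']} -- one fixed
        # IPv4 address plus every IPv6 address the API reports (addresses
        # here are illustrative).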
for net_name, nics in six.iteritems(srv['addresses']):
for nic in nics:
if nic['version'] == 6:
ips['6'].append(nic['addr'])
else:
ips['4'] = nic['addr']
return ips
def prepare_server(self, networks=None):
username = CONF.validation.image_ssh_user
networks = networks or [self.network]
srv = self.create_server(
key_name=self.keypair['name'],
security_groups=[{'name': self.sec_grp['name']}],
networks=[{'uuid': n.id} for n in networks],
wait_until='ACTIVE')
fip = self.create_floating_ip(thing=srv)
ips = self.define_server_ips(srv=srv)
ssh = self.get_remote_client(
ip_address=fip.floating_ip_address,
username=username)
return ssh, ips, srv["id"]
def turn_nic6_on(self, ssh, sid):
"""Turns the IPv6 vNIC on
Required because guest images usually set only the first vNIC on boot.
Searches for the IPv6 vNIC's MAC and brings it up.
@param ssh: RemoteClient ssh instance to server
@param sid: server uuid
"""
ports = [p["mac_address"] for p in
self._list_ports(device_id=sid,
network_id=self.network_v6.id)]
self.assertEqual(1, len(ports),
message=("Multiple IPv6 ports found on network %s. "
"ports: %s")
% (self.network_v6, ports))
mac6 = ports[0]
ssh.set_nic_state(ssh.get_nic_name(mac6))
def _prepare_and_test(self, address6_mode, n_subnets6=1, dualnet=False):
net_list = self.prepare_network(address6_mode=address6_mode,
n_subnets6=n_subnets6,
dualnet=dualnet)
sshv4_1, ips_from_api_1, sid1 = self.prepare_server(networks=net_list)
sshv4_2, ips_from_api_2, sid2 = self.prepare_server(networks=net_list)
def guest_has_address(ssh, addr):
return addr in ssh.get_ip_list()
# Turn on 2nd NIC for Cirros when dualnet
if dualnet:
self.turn_nic6_on(sshv4_1, sid1)
self.turn_nic6_on(sshv4_2, sid2)
# get addresses assigned to vNIC as reported by 'ip address' utility
ips_from_ip_1 = sshv4_1.get_ip_list()
ips_from_ip_2 = sshv4_2.get_ip_list()
self.assertIn(ips_from_api_1['4'], ips_from_ip_1)
self.assertIn(ips_from_api_2['4'], ips_from_ip_2)
for i in range(n_subnets6):
# v6 should be configured since the image supports it
# It can take time for ipv6 automatic address to get assigned
srv1_v6_addr_assigned = functools.partial(
guest_has_address, sshv4_1, ips_from_api_1['6'][i])
srv2_v6_addr_assigned = functools.partial(
guest_has_address, sshv4_2, ips_from_api_2['6'][i])
self.assertTrue(test.call_until_true(srv1_v6_addr_assigned,
CONF.validation.ping_timeout, 1))
self.assertTrue(test.call_until_true(srv2_v6_addr_assigned,
CONF.validation.ping_timeout, 1))
self._check_connectivity(sshv4_1, ips_from_api_2['4'])
self._check_connectivity(sshv4_2, ips_from_api_1['4'])
for i in range(n_subnets6):
self._check_connectivity(sshv4_1,
ips_from_api_2['6'][i])
self._check_connectivity(sshv4_1,
self.subnets_v6[i].gateway_ip)
self._check_connectivity(sshv4_2,
ips_from_api_1['6'][i])
self._check_connectivity(sshv4_2,
self.subnets_v6[i].gateway_ip)
def _check_connectivity(self, source, dest):
self.assertTrue(
self._check_remote_connectivity(source, dest),
"Timed out waiting for %s to become reachable from %s" %
(dest, source.ssh_client.host)
)
@test.attr(type='slow')
@test.idempotent_id('2c92df61-29f0-4eaa-bee3-7c65bef62a43')
@test.services('compute', 'network')
def test_slaac_from_os(self):
self._prepare_and_test(address6_mode='slaac')
@test.attr(type='slow')
@test.idempotent_id('d7e1f858-187c-45a6-89c9-bdafde619a9f')
@test.services('compute', 'network')
def test_dhcp6_stateless_from_os(self):
self._prepare_and_test(address6_mode='dhcpv6-stateless')
@test.attr(type='slow')
@test.idempotent_id('7ab23f41-833b-4a16-a7c9-5b42fe6d4123')
@test.services('compute', 'network')
def test_multi_prefix_dhcpv6_stateless(self):
self._prepare_and_test(address6_mode='dhcpv6-stateless', n_subnets6=2)
@test.attr(type='slow')
@test.idempotent_id('dec222b1-180c-4098-b8c5-cc1b8342d611')
@test.services('compute', 'network')
def test_multi_prefix_slaac(self):
self._prepare_and_test(address6_mode='slaac', n_subnets6=2)
@test.attr(type='slow')
@test.idempotent_id('b6399d76-4438-4658-bcf5-0d6c8584fde2')
@test.services('compute', 'network')
def test_dualnet_slaac_from_os(self):
self._prepare_and_test(address6_mode='slaac', dualnet=True)
@test.attr(type='slow')
@test.idempotent_id('76f26acd-9688-42b4-bc3e-cd134c4cb09e')
@test.services('compute', 'network')
def test_dualnet_dhcp6_stateless_from_os(self):
self._prepare_and_test(address6_mode='dhcpv6-stateless', dualnet=True)
@test.idempotent_id('cf1c4425-766b-45b8-be35-e2959728eb00')
@test.services('compute', 'network')
def test_dualnet_multi_prefix_dhcpv6_stateless(self):
self._prepare_and_test(address6_mode='dhcpv6-stateless', n_subnets6=2,
dualnet=True)
@test.idempotent_id('9178ad42-10e4-47e9-8987-e02b170cc5cd')
@test.services('compute', 'network')
def test_dualnet_multi_prefix_slaac(self):
self._prepare_and_test(address6_mode='slaac', n_subnets6=2,
dualnet=True)
| nuagenetworks/tempest | tempest/scenario/test_network_v6.py | Python | apache-2.0 | 10,197 |
# -*- coding: iso-8859-1 -*-
# -----------------------------------------------------------------------
# audiodiskitem.py - Item for CD Audio Disks
# -----------------------------------------------------------------------
# $Id$
#
# Notes:
# Todo:
#
# -----------------------------------------------------------------------
# Freevo - A Home Theater PC framework
# Copyright (C) 2003 Krister Lagerstrom, et al.
# Please see the file freevo/Docs/CREDITS for a complete list of authors.
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful, but
# WITHOUT ANY WARRANTY; without even the implied warranty of MER-
# CHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General
# Public License for more details.
#
# You should have received a copy of the GNU General Public License along
# with this program; if not, write to the Free Software Foundation, Inc.,
# 59 Temple Place, Suite 330, Boston, MA 02111-1307 USA
#
# -----------------------------------------------------------------------
import logging
logger = logging.getLogger("freevo.audio.audiodiskitem")
import config
import menu
import os
from item import Item
from audioitem import AudioItem
from playlist import Playlist
from directory import DirItem
class AudioDiskItem(Playlist):
"""
class for handling audio disks
"""
def __init__(self, disc_id, parent, devicename=None, display_type=None):
logger.log( 9, 'AudioDiskItem.__init__(disc_id=%r, parent=%r, devicename=%r, display_type=%r)', disc_id, parent, devicename, display_type)
Playlist.__init__(self, parent=parent)
self.type = 'audiocd'
self.media = None
self.disc_id = disc_id
self.devicename = devicename
self.name = _('Unknown CD Album')
# variables only for Playlist
self.autoplay = 0
# variables only for DirItem
self.display_type = display_type
cover = '%s/disc/metadata/%s.jpg' % (config.OVERLAY_DIR, disc_id)
if os.path.isfile(cover):
self.image = cover
def actions(self):
"""
return a list of actions for this item
"""
logger.log( 9, 'actions()')
self.cwd()
items = [ ( self.cwd, _('Browse disc') ) ]
return items
def cwd(self, arg=None, menuw=None):
"""
make a menu item for each file in the directory
"""
logger.log( 9, 'cwd(arg=%r, menuw=%r)', arg, menuw)
play_items = []
number = len(self.info['tracks'])
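        # Commentary added: on mixed-mode discs the track list appears to
        # include the data session as its last entry, hence the adjustment
        # below (inferred from the 'mixed' flag handling).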
if hasattr(self.info, 'mixed'):
number -= 1
for i in range(0, number):
title=self.info['tracks'][i]['title']
item = AudioItem('cdda://%d' % (i+1), self, title, scan=False)
# XXX FIXME: set also all the other info here if AudioInfo
# XXX will be based on mmpython
#item.set_info('', self.name, title, i+1, self.disc_id[1], '')
item.info = self.info['tracks'][i]
item.length = item.info['length']
if config.MPLAYER_ARGS.has_key('cd'):
item.mplayer_options += (' ' + config.MPLAYER_ARGS['cd'])
if self.devicename:
item.mplayer_options += ' -cdrom-device %s' % self.devicename
play_items.append(item)
# add all playable items to the playlist of the directory
# to play one files after the other
self.playlist = play_items
# all items together
items = []
# random playlist (only active for audio)
if 'audio' in config.DIRECTORY_ADD_RANDOM_PLAYLIST and len(play_items) > 1:
pl = Playlist(_('Random playlist'), play_items, self, random=True)
pl.autoplay = True
items += [ pl ]
items += play_items
if hasattr(self.info, 'mixed'):
d = DirItem(self.media.mountdir, self)
d.name = _('Data files on disc')
items.append(d)
self.play_items = play_items
title = self.name
if title[0] == '[' and title[-1] == ']':
title = self.name[1:-1]
item_menu = menu.Menu(title, items, item_types = self.display_type)
if menuw:
menuw.pushmenu(item_menu)
return items
| freevo/freevo1 | src/audio/audiodiskitem.py | Python | gpl-2.0 | 4,512 |
# redminehelper: Redmine helper extension for Mercurial
#
# Copyright 2010 Alessio Franceschelli (alefranz.net)
# Copyright 2010-2011 Yuya Nishihara <[email protected]>
#
# This software may be used and distributed according to the terms of the
# GNU General Public License version 2 or any later version.
"""helper commands for Redmine to reduce the number of hg calls
To test this extension, please try::
$ hg --config extensions.redminehelper=redminehelper.py rhsummary
I/O encoding:
:file path: urlencoded, raw string
:tag name: utf-8
:branch name: utf-8
:node: hex string
Output example of rhsummary::
<?xml version="1.0"?>
<rhsummary>
<repository root="/foo/bar">
<tip revision="1234" node="abcdef0123..."/>
<tag revision="123" node="34567abc..." name="1.1.1"/>
<branch .../>
...
</repository>
</rhsummary>
Output example of rhmanifest::
<?xml version="1.0"?>
<rhmanifest>
<repository root="/foo/bar">
<manifest revision="1234" path="lib">
<file name="diff.rb" revision="123" node="34567abc..." time="12345"
size="100"/>
...
<dir name="redmine"/>
...
</manifest>
</repository>
</rhmanifest>
"""
import re, time, cgi, urllib
from mercurial import cmdutil, commands, node, error, hg
_x = cgi.escape
_u = lambda s: cgi.escape(urllib.quote(s))
def _tip(ui, repo):
# see mercurial/commands.py:tip
def tiprev():
try:
return len(repo) - 1
except TypeError: # Mercurial < 1.1
return repo.changelog.count() - 1
tipctx = repo.changectx(tiprev())
ui.write('<tip revision="%d" node="%s"/>\n'
% (tipctx.rev(), _x(node.hex(tipctx.node()))))
_SPECIAL_TAGS = ('tip',)
def _tags(ui, repo):
# see mercurial/commands.py:tags
for t, n in reversed(repo.tagslist()):
if t in _SPECIAL_TAGS:
continue
try:
r = repo.changelog.rev(n)
except error.LookupError:
continue
ui.write('<tag revision="%d" node="%s" name="%s"/>\n'
% (r, _x(node.hex(n)), _x(t)))
def _branches(ui, repo):
# see mercurial/commands.py:branches
def iterbranches():
if getattr(repo, 'branchtags', None) is not None:
# Mercurial < 2.9
for t, n in repo.branchtags().iteritems():
yield t, n, repo.changelog.rev(n)
else:
for tag, heads, tip, isclosed in repo.branchmap().iterbranches():
yield tag, tip, repo.changelog.rev(tip)
def branchheads(branch):
try:
return repo.branchheads(branch, closed=False)
except TypeError: # Mercurial < 1.2
return repo.branchheads(branch)
for t, n, r in sorted(iterbranches(), key=lambda e: e[2], reverse=True):
if repo.lookup(r) in branchheads(t):
ui.write('<branch revision="%d" node="%s" name="%s"/>\n'
% (r, _x(node.hex(n)), _x(t)))
def _manifest(ui, repo, path, rev):
ctx = repo.changectx(rev)
ui.write('<manifest revision="%d" path="%s">\n'
% (ctx.rev(), _u(path)))
known = set()
pathprefix = (path.rstrip('/') + '/').lstrip('/')
for f, n in sorted(ctx.manifest().iteritems(), key=lambda e: e[0]):
if not f.startswith(pathprefix):
continue
name = re.sub(r'/.*', '/', f[len(pathprefix):])
if name in known:
continue
known.add(name)
if name.endswith('/'):
ui.write('<dir name="%s"/>\n'
% _x(urllib.quote(name[:-1])))
else:
fctx = repo.filectx(f, fileid=n)
tm, tzoffset = fctx.date()
ui.write('<file name="%s" revision="%d" node="%s" '
'time="%d" size="%d"/>\n'
% (_u(name), fctx.rev(), _x(node.hex(fctx.node())),
tm, fctx.size(), ))
ui.write('</manifest>\n')
def rhannotate(ui, repo, *pats, **opts):
rev = urllib.unquote_plus(opts.pop('rev', None))
opts['rev'] = rev
return commands.annotate(ui, repo, *map(urllib.unquote_plus, pats), **opts)
def rhcat(ui, repo, file1, *pats, **opts):
rev = urllib.unquote_plus(opts.pop('rev', None))
opts['rev'] = rev
return commands.cat(ui, repo, urllib.unquote_plus(file1), *map(urllib.unquote_plus, pats), **opts)
def rhdiff(ui, repo, *pats, **opts):
"""diff repository (or selected files)"""
change = opts.pop('change', None)
if change: # add -c option for Mercurial<1.1
base = repo.changectx(change).parents()[0].rev()
opts['rev'] = [str(base), change]
opts['nodates'] = True
return commands.diff(ui, repo, *map(urllib.unquote_plus, pats), **opts)
def rhlog(ui, repo, *pats, **opts):
rev = opts.pop('rev')
bra0 = opts.pop('branch')
from_rev = urllib.unquote_plus(opts.pop('from', None))
to_rev = urllib.unquote_plus(opts.pop('to' , None))
bra = urllib.unquote_plus(opts.pop('rhbranch', None))
from_rev = from_rev.replace('"', '\\"')
to_rev = to_rev.replace('"', '\\"')
if hg.util.version() >= '1.6':
opts['rev'] = ['"%s":"%s"' % (from_rev, to_rev)]
else:
opts['rev'] = ['%s:%s' % (from_rev, to_rev)]
opts['branch'] = [bra]
return commands.log(ui, repo, *map(urllib.unquote_plus, pats), **opts)
def rhmanifest(ui, repo, path='', **opts):
"""output the sub-manifest of the specified directory"""
ui.write('<?xml version="1.0"?>\n')
ui.write('<rhmanifest>\n')
ui.write('<repository root="%s">\n' % _u(repo.root))
try:
_manifest(ui, repo, urllib.unquote_plus(path), urllib.unquote_plus(opts.get('rev')))
finally:
ui.write('</repository>\n')
ui.write('</rhmanifest>\n')
def rhsummary(ui, repo, **opts):
"""output the summary of the repository"""
ui.write('<?xml version="1.0"?>\n')
ui.write('<rhsummary>\n')
ui.write('<repository root="%s">\n' % _u(repo.root))
try:
_tip(ui, repo)
_tags(ui, repo)
_branches(ui, repo)
# TODO: bookmarks in core (Mercurial>=1.8)
finally:
ui.write('</repository>\n')
ui.write('</rhsummary>\n')
cmdtable = {
'rhannotate': (rhannotate,
[('r', 'rev', '', 'revision'),
('u', 'user', None, 'list the author (long with -v)'),
('n', 'number', None, 'list the revision number (default)'),
('c', 'changeset', None, 'list the changeset'),
],
'hg rhannotate [-r REV] [-u] [-n] [-c] FILE...'),
'rhcat': (rhcat,
[('r', 'rev', '', 'revision')],
'hg rhcat ([-r REV] ...) FILE...'),
'rhdiff': (rhdiff,
[('r', 'rev', [], 'revision'),
('c', 'change', '', 'change made by revision')],
'hg rhdiff ([-c REV] | [-r REV] ...) [FILE]...'),
'rhlog': (rhlog,
[
('r', 'rev', [], 'show the specified revision'),
('b', 'branch', [],
'show changesets within the given named branch'),
('l', 'limit', '',
'limit number of changes displayed'),
('d', 'date', '',
'show revisions matching date spec'),
('u', 'user', [],
'revisions committed by user'),
('', 'from', '',
''),
('', 'to', '',
''),
('', 'rhbranch', '',
''),
('', 'template', '',
'display with template')],
'hg rhlog [OPTION]... [FILE]'),
'rhmanifest': (rhmanifest,
[('r', 'rev', '', 'show the specified revision')],
'hg rhmanifest [-r REV] [PATH]'),
'rhsummary': (rhsummary, [], 'hg rhsummary'),
}
| sonico999/redmine-heroku | lib/redmine/scm/adapters/mercurial/redminehelper.py | Python | gpl-2.0 | 7,991 |
# Copyright © 2017 Lucas Hoffmann
# Copyright © 2018 Dylan Baker
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
import email.parser
import email.policy
import os
import tempfile
import unittest
from unittest import mock
from alot.db import envelope
SETTINGS = {
'user_agent': 'agent',
}
def email_to_dict(mail):
"""Consumes an email, and returns a dict of headers and 'Body'."""
split = mail.splitlines()
final = {}
for line in split:
if line.strip():
try:
k, v = line.split(':')
final[k.strip()] = v.strip()
except ValueError:
final['Body'] = line.strip()
return final
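# Illustrative example (added): email_to_dict("From: a@b\nSubject: hi\n\nbody text")
# returns {'From': 'a@b', 'Subject': 'hi', 'Body': 'body text'}.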
class TestEnvelope(unittest.TestCase):
def assertEmailEqual(self, first, second):
with self.subTest('body'):
self.assertEqual(first.is_multipart(), second.is_multipart())
if not first.is_multipart():
self.assertEqual(first.get_payload(), second.get_payload())
else:
for f, s in zip(first.walk(), second.walk()):
if f.is_multipart() or s.is_multipart():
self.assertEqual(first.is_multipart(),
second.is_multipart())
else:
self.assertEqual(f.get_payload(), s.get_payload())
with self.subTest('headers'):
self.assertListEqual(first.values(), second.values())
def test_setitem_stores_text_unchanged(self):
"Just ensure that the value is set and unchanged"
e = envelope.Envelope()
e['Subject'] = u'sm\xf8rebr\xf8d'
self.assertEqual(e['Subject'], u'sm\xf8rebr\xf8d')
def _test_mail(self, envelope):
mail = envelope.construct_mail()
raw = mail.as_string(policy=email.policy.SMTP)
actual = email.parser.Parser().parsestr(raw)
self.assertEmailEqual(mail, actual)
@mock.patch('alot.db.envelope.settings', SETTINGS)
def test_construct_mail_simple(self):
"""Very simple envelope with a To, From, Subject, and body."""
headers = {
'From': '[email protected]',
'To': '[email protected]',
'Subject': 'Test email',
}
e = envelope.Envelope(headers={k: [v] for k, v in headers.items()},
bodytext='Test')
self._test_mail(e)
@mock.patch('alot.db.envelope.settings', SETTINGS)
def test_construct_mail_with_attachment(self):
"""Very simple envelope with a To, From, Subject, body and attachment.
"""
headers = {
'From': '[email protected]',
'To': '[email protected]',
'Subject': 'Test email',
}
e = envelope.Envelope(headers={k: [v] for k, v in headers.items()},
bodytext='Test')
with tempfile.NamedTemporaryFile(mode='wt', delete=False) as f:
f.write('blah')
self.addCleanup(os.unlink, f.name)
e.attach(f.name)
self._test_mail(e)
| MacGyverNL/alot | tests/db/test_envelope.py | Python | gpl-3.0 | 3,652 |
# coding: utf-8
from __future__ import unicode_literals, division, absolute_import, print_function
__version__ = '0.24.0'
__version_info__ = (0, 24, 0)
| ctrlaltdel/neutrinator | vendor/asn1crypto/version.py | Python | gpl-3.0 | 154 |
# Licensed to the Software Freedom Conservancy (SFC) under one
# or more contributor license agreements. See the NOTICE file
# distributed with this work for additional information
# regarding copyright ownership. The SFC licenses this file
# to you under the Apache License, Version 2.0 (the
# "License"); you may not use this file except in compliance
# with the License. You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing,
# software distributed under the License is distributed on an
# "AS IS" BASIS, WITHOUT WARRANTIES OR CONDITIONS OF ANY
# KIND, either express or implied. See the License for the
# specific language governing permissions and limitations
# under the License.
import pytest
from selenium.common.exceptions import (
ElementNotVisibleException,
MoveTargetOutOfBoundsException,
WebDriverException)
from selenium.webdriver.common.by import By
from selenium.webdriver.support import expected_conditions as EC
from selenium.webdriver.support.ui import WebDriverWait
def testClickingOnAnchorScrollsPage(driver, pages):
scrollScript = """var pageY;
if (typeof(window.pageYOffset) == 'number') {
pageY = window.pageYOffset;
} else {
pageY = document.documentElement.scrollTop;
}
return pageY;"""
pages.load("macbeth.html")
driver.find_element(By.PARTIAL_LINK_TEXT, "last speech").click()
yOffset = driver.execute_script(scrollScript)
    # Focusing on the link in order to click it, without actually following it,
    # will scroll it into view, which is a few pixels further than 0
assert yOffset > 300
def testShouldScrollToClickOnAnElementHiddenByOverflow(driver, pages):
pages.load("click_out_of_bounds_overflow.html")
link = driver.find_element(By.ID, "link")
try:
link.click()
except MoveTargetOutOfBoundsException as e:
AssertionError("Should not be out of bounds: %s" % e.msg)
@pytest.mark.xfail_marionette(
reason='https://github.com/w3c/webdriver/issues/408')
def testShouldBeAbleToClickOnAnElementHiddenByOverflow(driver, pages):
pages.load("scroll.html")
link = driver.find_element(By.ID, "line8")
# This used to throw a MoveTargetOutOfBoundsException - we don't expect it to
link.click()
assert "line8" == driver.find_element(By.ID, "clicked").text
@pytest.mark.xfail_chrome(
reason='https://bugs.chromium.org/p/chromedriver/issues/detail?id=1536',
raises=WebDriverException)
def testShouldBeAbleToClickOnAnElementHiddenByDoubleOverflow(driver, pages):
pages.load("scrolling_tests/page_with_double_overflow_auto.html")
driver.find_element(By.ID, "link").click()
WebDriverWait(driver, 3).until(EC.title_is("Clicked Successfully!"))
def testShouldBeAbleToClickOnAnElementHiddenByYOverflow(driver, pages):
pages.load("scrolling_tests/page_with_y_overflow_auto.html")
driver.find_element(By.ID, "link").click()
WebDriverWait(driver, 3).until(EC.title_is("Clicked Successfully!"))
def testShouldNotScrollOverflowElementsWhichAreVisible(driver, pages):
pages.load("scroll2.html")
list = driver.find_element(By.TAG_NAME, "ul")
item = list.find_element(By.ID, "desired")
item.click()
yOffset = driver.execute_script("return arguments[0].scrollTop", list)
assert 0 == yOffset, "Should not have scrolled"
@pytest.mark.xfail_chrome(
reason='https://bugs.chromium.org/p/chromedriver/issues/detail?id=1542')
def testShouldNotScrollIfAlreadyScrolledAndElementIsInView(driver, pages):
pages.load("scroll3.html")
driver.find_element(By.ID, "button1").click()
scrollTop = getScrollTop(driver)
driver.find_element(By.ID, "button2").click()
assert scrollTop == getScrollTop(driver)
def testShouldBeAbleToClickRadioButtonScrolledIntoView(driver, pages):
pages.load("scroll4.html")
driver.find_element(By.ID, "radio").click()
# If we don't throw, we're good
@pytest.mark.xfail_marionette(
reason='https://github.com/w3c/webdriver/issues/408',
raises=ElementNotVisibleException)
def testShouldScrollOverflowElementsIfClickPointIsOutOfViewButElementIsInView(driver, pages):
pages.load("scroll5.html")
driver.find_element(By.ID, "inner").click()
assert "clicked" == driver.find_element(By.ID, "clicked").text
@pytest.mark.xfail_marionette(
reason='https://github.com/w3c/webdriver/issues/408')
def testShouldBeAbleToClickElementInAFrameThatIsOutOfView(driver, pages):
pages.load("scrolling_tests/page_with_frame_out_of_view.html")
driver.switch_to.frame(driver.find_element_by_name("frame"))
element = driver.find_element(By.NAME, "checkbox")
element.click()
assert element.is_selected()
def testShouldBeAbleToClickElementThatIsOutOfViewInAFrame(driver, pages):
pages.load("scrolling_tests/page_with_scrolling_frame.html")
driver.switch_to.frame(driver.find_element_by_name("scrolling_frame"))
element = driver.find_element(By.NAME, "scroll_checkbox")
element.click()
assert element.is_selected()
def testShouldNotBeAbleToClickElementThatIsOutOfViewInANonScrollableFrame(driver, pages):
pages.load("scrolling_tests/page_with_non_scrolling_frame.html")
driver.switch_to.frame("scrolling_frame")
element = driver.find_element(By.NAME, "scroll_checkbox")
element.click()
# TODO we should assert that the click was unsuccessful
def testShouldBeAbleToClickElementThatIsOutOfViewInAFrameThatIsOutOfView(driver, pages):
pages.load("scrolling_tests/page_with_scrolling_frame_out_of_view.html")
driver.switch_to.frame(driver.find_element_by_name("scrolling_frame"))
element = driver.find_element(By.NAME, "scroll_checkbox")
element.click()
assert element.is_selected()
def testShouldBeAbleToClickElementThatIsOutOfViewInANestedFrame(driver, pages):
pages.load("scrolling_tests/page_with_nested_scrolling_frames.html")
driver.switch_to.frame(driver.find_element_by_name("scrolling_frame"))
driver.switch_to.frame(driver.find_element_by_name("nested_scrolling_frame"))
element = driver.find_element(By.NAME, "scroll_checkbox")
element.click()
assert element.is_selected()
def testShouldBeAbleToClickElementThatIsOutOfViewInANestedFrameThatIsOutOfView(driver, pages):
pages.load("scrolling_tests/page_with_nested_scrolling_frames_out_of_view.html")
driver.switch_to.frame(driver.find_element_by_name("scrolling_frame"))
driver.switch_to.frame(driver.find_element_by_name("nested_scrolling_frame"))
element = driver.find_element(By.NAME, "scroll_checkbox")
element.click()
assert element.is_selected()
def testShouldNotScrollWhenGettingElementSize(driver, pages):
pages.load("scroll3.html")
scrollTop = getScrollTop(driver)
driver.find_element(By.ID, "button1").size
assert scrollTop == getScrollTop(driver)
def getScrollTop(driver):
return driver.execute_script("return document.body.scrollTop")
@pytest.mark.xfail_marionette(
reason='https://github.com/w3c/webdriver/issues/408')
def testShouldBeAbleToClickElementInATallFrame(driver, pages):
pages.load("scrolling_tests/page_with_tall_frame.html")
driver.switch_to.frame(driver.find_element_by_name("tall_frame"))
element = driver.find_element(By.NAME, "checkbox")
element.click()
assert element.is_selected()
| mojwang/selenium | py/test/selenium/webdriver/common/click_scrolling_tests.py | Python | apache-2.0 | 7,393 |
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Copyright 2017 Jonathan Schultz
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
from __future__ import print_function
import argparse
from requests_oauthlib import OAuth1Session
import webbrowser
import twitter
import sys
import os
import shutil
import unicodecsv
import re
MENTIONREGEXP=re.compile(r'(@\w+)', re.UNICODE)
HASHTAGREGEXP=re.compile(r'(#\w+)', re.UNICODE)
def twitterUserHydrate(arglist):
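    # Example invocation (sketch; keys, secrets and file names are placeholders):
    #   python twitterUserHydrate.py --consumer-key KEY --consumer-secret SECRET \
    #       --application-only-auth -o hydrated.csv screen_names.csv
    # where screen_names.csv is a CSV file with a 'screen_name' column.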
parser = argparse.ArgumentParser(description='Retrieve twitter users from ID.',
fromfile_prefix_chars='@')
parser.add_argument('-v', '--verbosity', type=int, default=1)
# Twitter authentication stuff
parser.add_argument('--consumer-key', type=str, required=True,
help='Consumer key for Twitter authentication')
parser.add_argument('--consumer-secret', type=str, required=True,
help='Consumer secret for Twitter authentication')
parser.add_argument('-a', '--application-only-auth', action='store_true')
parser.add_argument('--access-token-key', type=str,
help='Access token key for Twitter authentication')
parser.add_argument('--access-token-secret', type=str,
help='Access token secret for Twitter authentication')
parser.add_argument( '--retry', type=int, default=5, help='Number of times to retry failed Twitter API call')
parser.add_argument('-l', '--limit', type=int, help='Limit number of tweets to process')
parser.add_argument('-o', '--outfile', type=str, help='Output CSV file, otherwise use stdout')
parser.add_argument('--no-comments', action='store_true', help='Do not output descriptive comments')
parser.add_argument('--no-header', action='store_true', help='Do not output CSV header with column names')
parser.add_argument('infile', type=str, nargs='?', help='Input CSV file, otherwise use stdin')
args = parser.parse_args(arglist)
hiddenargs = ['verbosity', 'consumer_key', 'consumer_secret', 'application_only_auth', 'access_token_key', 'access_token_secret', 'retry', 'no_comments']
if args.infile is None:
infile = sys.stdin
else:
infile = file(args.infile, 'rU')
# Skip comments at start of infile.
incomments = ''
while True:
line = infile.readline()
if line[:1] == '#':
incomments += line
else:
infieldnames = next(unicodecsv.reader([line]))
break
inreader=unicodecsv.DictReader(infile, fieldnames=infieldnames)
if args.outfile is None:
outfile = sys.stdout
else:
if os.path.exists(args.outfile):
shutil.move(args.outfile, args.outfile + '.bak')
outfile = file(args.outfile, 'w')
if not args.no_comments:
comments = ((' ' + args.outfile + ' ') if args.outfile else '').center(80, '#') + '\n'
comments += '# ' + os.path.basename(sys.argv[0]) + '\n'
arglist = args.__dict__.keys()
for arg in arglist:
if arg not in hiddenargs:
val = getattr(args, arg)
if type(val) == str or type(val) == unicode:
comments += '# --' + arg + '="' + val + '"\n'
elif type(val) == bool:
if val:
comments += '# --' + arg + '\n'
elif type(val) == list:
for valitem in val:
if type(valitem) == str:
comments += '# --' + arg + '="' + valitem + '"\n'
else:
comments += '# --' + arg + '=' + str(valitem) + '\n'
elif val is not None:
comments += '# --' + arg + '=' + str(val) + '\n'
outfile.write(comments + incomments)
# Twitter URLs
REQUEST_TOKEN_URL = 'https://api.twitter.com/oauth/request_token'
ACCESS_TOKEN_URL = 'https://api.twitter.com/oauth/access_token'
AUTHORIZATION_URL = 'https://api.twitter.com/oauth/authorize'
SIGNIN_URL = 'https://api.twitter.com/oauth/authenticate'
if args.application_only_auth:
api = twitter.Api(
consumer_key=args.consumer_key,
consumer_secret=args.consumer_secret,
application_only_auth=True,
sleep_on_rate_limit=True
)
else:
if not all([args.access_token_key, args.access_token_secret]):
oauth_client = OAuth1Session(args.consumer_key, client_secret=args.consumer_secret, callback_uri='oob')
resp = oauth_client.fetch_request_token(REQUEST_TOKEN_URL)
url = oauth_client.authorization_url(AUTHORIZATION_URL)
print('Opening browser for Twitter authentication: ' + url, file=sys.stderr)
webbrowser.open(url)
print('Enter your pincode? ', file=sys.stderr)
pincode = raw_input()
oauth_client = OAuth1Session(args.consumer_key, client_secret=args.consumer_secret,
resource_owner_key=resp.get('oauth_token'),
resource_owner_secret=resp.get('oauth_token_secret'),
verifier=pincode)
resp = oauth_client.fetch_access_token(ACCESS_TOKEN_URL)
args.access_token_key = resp.get('oauth_token')
args.access_token_secret = resp.get('oauth_token_secret')
print('To re-use access token next time use the following arguments:', file=sys.stderr)
print(' --access-token-key ' + args.access_token_key + ' --access-token-secret ' + args.access_token_secret, file=sys.stderr)
api = twitter.Api(
consumer_key=args.consumer_key,
consumer_secret=args.consumer_secret,
access_token_key=args.access_token_key,
access_token_secret=args.access_token_secret,
sleep_on_rate_limit=True
)
if args.verbosity >= 1:
print("Loading users.", file=sys.stderr)
fieldnames = None
while True:
if args.verbosity >= 2:
print("Loading batch.", file=sys.stderr)
rows = []
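        # Read the input in batches of up to 100 rows, which matches the
        # documented per-call limit of Twitter's users/lookup endpoint
        # (comment added for clarity).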
while len(rows) < 100:
try:
rows.append(next(inreader))
except StopIteration:
break
if len(rows) == 0:
break
retry = args.retry
while True:
try:
userdata = api.UsersLookup(screen_name=[row['screen_name'].encode('utf-8') for row in rows])
break
except twitter.error.TwitterError as error:
for message in error.message:
if message['code'] == 88 and retry > 0:
if args.verbosity >= 2:
print("Retrying after twitter error: ", error, file=sys.stderr)
retry -= 1
break
else:
raise
for userdatum in userdata:
userdict = userdatum.AsDict()
if not fieldnames:
fieldnames = infieldnames + userdict.keys() + list({'default_profile', 'default_profile_image', 'follow_request_sent', 'geo_enabled', 'is_translator', 'profile_background_tile', 'profile_user_background_image', 'protected', 'verified', 'withheld_in_countries', 'withheld_scope'} - set(infieldnames) - set(userdict.keys()))
outunicodecsv=unicodecsv.DictWriter(outfile, fieldnames=fieldnames, extrasaction='ignore', lineterminator=os.linesep)
if not args.no_header:
outunicodecsv.writeheader()
outunicodecsv.writerow(userdict)
outfile.close()
if __name__ == '__main__':
twitterUserHydrate(None)
| BarraQDA/twitterScrape | twitterScrape/twitterUserHydrate.py | Python | gpl-3.0 | 8,576 |
"""
The following functions provide a set of utilities for working with `Dependent`
and collections of `Dependent`.
* :func:`~revscoring.dependencies.solve` provides basic dependency solving
* :func:`~revscoring.dependencies.expand` provides minimal expansion of
dependency trees
* :func:`~revscoring.dependencies.dig` provides expansion of "root" dependents
-- dependents with no dependencies of their own
* :func:`~revscoring.dependencies.draw` provides a means to print a dependency
tree to the terminal (useful when debugging)
.. autofunction:: revscoring.dependencies.solve
.. autofunction:: revscoring.dependencies.expand
.. autofunction:: revscoring.dependencies.dig
.. autofunction:: revscoring.dependencies.draw
"""
import logging
import time
import traceback
from ..errors import CaughtDependencyError, DependencyError, DependencyLoop
logger = logging.getLogger(__name__)
def solve(dependents, context=None, cache=None, profile=None):
"""
Calculates a dependent's value by solving dependencies.
:Parameters:
dependents : :class:`revscoring.Dependent` | `iterable`
A dependent or collection of dependents to solve
context : `dict` | `iterable`
            A mapping of injected dependency processors to use as context.
Can be specified as a set of new
:class:`revscoring.Dependent` or a map of
:class:`revscoring.Dependent`
pairs.
cache : `dict`
A cache of previously solved dependencies as
:class:`revscoring.Dependent`:`<value>` pairs
profile : `dict`
A mapping of :class:`revscoring.Dependent` to `list` of process
durations for generating the value. The provided `dict` will be
modified in-place and new durations will be appended.
:Returns:
The result of executing the dependents with all dependencies resolved.
If a single dependent is provided, the value will be returned. If a
collection of dependents is provided, a generator of values will be
returned
"""
cache = cache if cache is not None else {}
context = normalize_context(context)
if hasattr(dependents, '__iter__'):
# Multiple values -- return a generator
return _solve_many(dependents, context=context, cache=cache,
profile=profile)
else:
# Singular value -- return it's solution
dependent = dependents
value, _, _ = _solve(dependent, context=context, cache=cache,
profile=profile)
return value
def expand(dependents, context=None, cache=None):
"""
Calculates a dependent's value by solving dependencies.
:Parameters:
dependents : :class:`revscoring.Dependent` | `iterable`
A dependent or collection of dependents to solve
context : `dict` | `iterable`
            A mapping of injected dependency processors to use as context.
Can be specified as a set of new
:class:`revscoring.Dependent` or a map of
:class:`revscoring.Dependent` pairs.
cache : `dict`
A cache of previously solved dependencies as `Dependent`:`<value>`
pairs
:Returns:
A generator over all dependents in the dependency tree with each
dependent occurring only once
"""
cache = set(cache or [])
context = normalize_context(context)
if hasattr(dependents, '__iter__'):
# Multiple values
return _expand_many(dependents, context, cache)
else:
# Singular value
dependent = dependents
return _expand(dependent, context, cache)
def draw(dependent, context=None, cache=None, depth=0):
"""
    Returns a string representation of the dependency tree for a single
:class:`revscoring.Dependent`.
:Parameters:
dependent : :class:`revscoring.Dependent`
The dependent to draw the dependencies for.
context : `dict` | `iterable`
            A mapping of injected dependency processors to use as context.
Can be specified as a set of
:class:`revscoring.Dependent` or a map of
:class:`revscoring.Dependent` pairs.
cache : `dict` | `set`
A cache of previously solved dependencies as `Dependent`:`<value>`
pairs. When these items are reached while scanning the tree,
"CACHED" will be printed.
:Returns:
        The string rendering of the dependency tree
"""
return "\n".join(draw_lines(dependent, context, cache, depth)) + "\n"
def draw_lines(dependent, context, cache, depth):
cache = cache or {}
context = normalize_context(context)
if dependent in cache:
yield "\t" * depth + " - " + repr(dependent) + " CACHED"
else:
if dependent in context:
dependent = context[dependent]
yield "\t" * depth + " - " + repr(dependent)
# Check if we're a dependent with explicit dependencies
if hasattr(dependent, "dependencies"):
for dependency in dependent.dependencies:
yield from draw_lines(dependency, context, cache, depth + 1)
def dig(dependents, context=None, cache=None):
"""
Expands root dependencies. These are dependents at the bottom of the tree
-- :class:`revscoring.Dependent` with no dependencies of
their own.
:Parameters:
dependents : :class:`revscoring.Dependent` | `iterable`
A dependent or collection of dependents to scan
context : `dict` | `iterable`
            A mapping of injected dependency processors to use as context.
Can be specified as a set of new
:class:`revscoring.Dependent` or a map of
:class:`revscoring.Dependent`
pairs.
cache : `dict` | `set`
A cache of previously solved dependencies to not scan beneath
:Returns:
A generator over root dependencies
"""
cache = set(cache or [])
context = normalize_context(context)
if hasattr(dependents, '__iter__'):
# Multiple values
return _dig_many(dependents, context, cache)
else:
# Singular value
dependent = dependents
return _dig(dependent, context, cache)
def normalize_context(context):
"""
Normalizes a context argument. This allows for context to be specified
either as a collection of contextual
:class:`revscoring.Dependent` or a `dict` of
:class:`revscoring.Dependent` pairs.
"""
if context is None:
return {}
elif isinstance(context, dict):
return context
elif hasattr(context, "__iter__"):
return {d: d for d in context}
else:
raise TypeError("'context' is not a dict or iterable: {0}"
.format(str(context)))
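# For example, normalize_context([d1, d2]) returns {d1: d1, d2: d2}, while a
# dict argument is returned unchanged (d1/d2 stand for Dependent instances).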
def _solve(dependent, context, cache, history=None, profile=None):
history = history or set()
# Check if we've already got a value for this dependency
if dependent in cache:
return cache[dependent], cache, history
# Check if a corresponding dependent was injected into the context
else:
# If a dependent is in context here, replace it.
if dependent in context:
dependent = context[dependent]
# Check if the dependency is callable.
if not callable(dependent):
raise RuntimeError("Can't solve dependency " + repr(dependent) +
". " + type(dependent).__name__ +
" is not callable.")
# Check if we're in a loop.
elif dependent in history:
raise DependencyLoop("Dependency loop detected at " +
repr(dependent))
# All is good. Time to generate a value
else:
# Add to history so we can detect any loops on the way down.
history.add(dependent)
# Check if we're a dependent with explicit dependencies
if hasattr(dependent, "dependencies"):
dependencies = dependent.dependencies
else:
# No dependencies? OK. Let's try that.
dependencies = []
# Generate args for process function from dependencies (if any)
args = []
for dependency in dependencies:
value, cache, history = _solve(dependency, context=context,
cache=cache, history=history,
profile=profile)
args.append(value)
# Generate value
try:
start = time.time()
value = dependent(*args)
duration = time.time() - start
if profile is not None:
if dependent in profile:
profile[dependent].append(duration)
else:
profile[dependent] = [duration]
except DependencyError:
raise
except Exception as e:
message = "Failed to process {0}: {1}".format(dependent, e)
tb = traceback.extract_stack()
formatted_exception = traceback.format_exc()
raise CaughtDependencyError(message, e, tb,
formatted_exception)
# Add value to cache
cache[dependent] = value
return cache[dependent], cache, history
def _solve_many(dependents, context, cache, profile=None):
for dependent in dependents:
value, cache, history = _solve(dependent, context=context, cache=cache,
profile=profile)
yield value
def _expand(dependent, context, cache):
if dependent not in cache:
yield dependent
cache.add(dependent)
if hasattr(dependent, "dependencies"):
yield from _expand_many(dependent.dependencies, context, cache)
def _expand_many(dependents, context, cache):
for dependent in dependents:
yield from _expand(dependent, context, cache)
def _dig(dependent, context, cache):
if hasattr(dependent, "dependencies"):
if len(dependent.dependencies) > 0:
yield from _dig_many(dependent.dependencies, context, cache)
else:
yield dependent
else:
yield dependent
def _dig_many(dependents, context, cache):
for dependent in dependents:
if dependent not in cache:
if dependent in context:
# Use contextual dependency
dependent = context[dependent]
cache.add(dependent)
yield from _dig(dependent, context, cache)
| wiki-ai/revscoring | revscoring/dependencies/functions.py | Python | mit | 10,779 |
# -*- coding: utf-8 -*-
""" Sahana Eden Setup Model
@copyright: 2015 (c) Sahana Software Foundation
@license: MIT
Permission is hereby granted, free of charge, to any person
obtaining a copy of this software and associated documentation
files (the "Software"), to deal in the Software without
restriction, including without limitation the rights to use,
copy, modify, merge, publish, distribute, sublicense, and/or sell
copies of the Software, and to permit persons to whom the
Software is furnished to do so, subject to the following
conditions:
The above copyright notice and this permission notice shall be
included in all copies or substantial portions of the Software.
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND,
EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES
OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND
NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT
HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY,
WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR
OTHER DEALINGS IN THE SOFTWARE.
"""
__all__ = ("S3DeployModel",
"setup_create_yaml_file",
"setup_create_playbook",
"setup_get_templates",
"setup_get_prepop_options",
"setup_log",
"setup_rheader",
"setup_management_exists",
"setup_UpgradeMethod",
"setup_refresh",
"setup_getupgrades",
"setup_host_validator",
"setup_upgrade_status",
)
from ..s3 import *
from gluon import *
import json
import os
import socket
import shutil
import sys
import time
try:
import ansible.playbook
import ansible.inventory
from ansible import callbacks
except ImportError:
current.log.warning("ansible module needed for Setup")
try:
import yaml
except ImportError:
current.log.warning("PyYAML module needed for Setup")
TIME_FORMAT = "%b %d %Y %H:%M:%S"
MSG_FORMAT = "%(now)s - %(category)s - %(data)s\n\n"
class S3DeployModel(S3Model):
names = ("setup_deployment",
"setup_server",
"setup_instance",
"setup_host",
"setup_packages",
"setup_upgrade"
)
def model(self):
T = current.T
s3 = current.response.s3
define_table = self.define_table
configure = self.configure
add_components = self.add_components
set_method = self.set_method
tablename = "setup_deployment"
define_table(tablename,
Field("name",
label = T("Name"),
required = True,
),
Field("distro",
label = T("Linux Distribution"),
required = True,
requires = IS_IN_SET(
[
("wheezy", "Debian Wheezy"),
("precise", "Ubuntu 14.04 LTS Precise"),
])
),
Field("remote_user",
label = T("Remote User"),
required = True,
),
Field("secret_key",
label = T("AWS Secret Key"),
required = True,
),
Field("access_key",
label = T("AWS Access Key"),
required = True,
),
Field("private_key", "upload",
custom_retrieve = retrieve_file,
custom_store = store_file,
label = T("Private Key"),
required = True,
),
Field("webserver_type", "integer",
label = T("Web Server"),
required = True,
requires = IS_IN_SET({1:"apache", 2:"cherokee"}),
),
Field("db_type", "integer",
label = T("Database"),
required = True,
requires = IS_IN_SET({1:"mysql", 2: "postgresql"}),
),
Field("db_password", "password",
label = T("Database Password"),
required = True,
readable = False,
),
Field("repo_url",
# @ToDo: Add more advanced options
default = "https://github.com/flavour/eden",
label = T("Eden Repo git URL"),
),
Field("template",
label = T("Template"),
required = True,
requires = IS_IN_SET(setup_get_templates(), zero=None),
),
Field("refresh_lock", "integer",
default = 0,
readable = False,
writable = False,
),
Field("last_refreshed", "datetime",
readable = False,
writable = False,
),
*s3_meta_fields()
)
# CRUD Strings
s3.crud_strings[tablename] = Storage(
label_create_button = T("Add Deployment"),
label_list_button = T("View Deployments"),
label_delete_button = T("Delete Deployment"),
msg_record_created = T("Deployment Created"),
msg_record_modified = T("Deployment updated"),
msg_record_deleted = T("Deployment deleted"),
msg_list_empty = T("No Deployment Saved yet"),
subtitle_create = T("Add Deployment"),
title_create = T("Add Deployment"),
title_list = T("View Deployments"),
title_update = T("Edit Deployment"),
)
configure(tablename,
editable = False,
deletable = False,
insertable = True,
listadd = True
)
tablename = "setup_server"
define_table(tablename,
Field("deployment_id", "reference setup_deployment"),
Field("role", "integer",
requires = IS_IN_SET({1: "all",
2: "db",
3: "webserver",
4: "eden",
}),
),
Field("host_ip",
required = True,
),
Field("hostname",
label = "Hostname",
required = True,
),
)
configure(tablename,
onvalidation = server_validation
)
tablename = "setup_instance"
define_table(tablename,
Field("deployment_id", "reference setup_deployment"),
Field("type", "integer",
requires = IS_IN_SET({1: "prod", 2: "test", 3: "demo", 4: "dev"})
),
Field("url",
requires = IS_URL(),
),
Field("prepop_options",
label = "Prepop Options",
required = True,
requires = IS_IN_SET([], multiple=True),
),
Field("scheduler_id", "reference scheduler_task",
readable = False,
writable = False,
),
)
configure(tablename,
deletable = False,
editable = False,
onaccept = instance_onaccept,
)
add_components("setup_deployment",
setup_instance = "deployment_id",
setup_server = "deployment_id",
)
tablename = "setup_packages"
define_table(tablename,
Field("name",
label = T("Package Name"),
),
Field("cv",
label = T("Current Version"),
),
Field("av",
label = T("Available Version"),
),
Field("type",
label = T("Type of Package"),
requires = IS_IN_SET(["os", "pip", "git"]),
),
Field("deployment",
"reference setup_deployment",
),
)
tablename = "setup_upgrade"
define_table(tablename,
Field("deployment",
"reference setup_deployment"
),
Field("scheduler",
"reference scheduler_task"
),
)
set_method("setup", "deploy",
method = "upgrade",
action = setup_UpgradeMethod,
)
return {}
# -------------------------------------------------------------------------
def defaults(self):
"""
Safe defaults for model-global names in case module is disabled
"""
return {}
# -----------------------------------------------------------------------------
def server_validation(form):
ip = form.vars.host_ip
table = current.s3db.setup_server
db = current.db
rows = db(table.host_ip == ip).select()
if rows:
form.errors["host_ip"] = "Server already in use"
# -----------------------------------------------------------------------------
def instance_onaccept(form):
db = current.db
s3db = current.s3db
form_vars = form.vars
# Get deployment id
itable = s3db.setup_instance
instance = db(itable.id == form_vars.id).select(itable.deployment_id,
limitby = (0, 1)
).first()
deployment_id = instance.deployment_id
stable = s3db.setup_server
query = (stable.deployment_id == deployment_id)
rows = db(query).select(stable.role,
stable.host_ip,
stable.hostname,
orderby = stable.role
)
hosts = []
for row in rows:
hosts.append((row.role, row.host_ip))
if row.role == 1 or row.role == 4:
hostname = row.hostname
dtable = s3db.setup_deployment
deployment = db(dtable.id == deployment_id).select(dtable.db_password,
dtable.webserver_type,
dtable.db_type,
dtable.distro,
dtable.template,
dtable.private_key,
dtable.remote_user,
limitby=(0, 1)
).first()
prepop_options = str(",".join(form_vars.prepop_options))
instance_type = int(form_vars.type)
if instance_type == 2:
demo_type = "na"
elif instance_type == 1 or instance_type == 3:
# find dtype
sctable = s3db.scheduler_task
query = (itable.deployment_id == deployment_id) & \
(sctable.status == "COMPLETED")
existing_instances = db(query).select(itable.type,
join = sctable.on(itable.scheduler_id == sctable.id)
)
if existing_instances:
demo_type = "afterprod"
else:
demo_type = "beforeprod"
webservers = ("apache", "cherokee")
dbs = ("mysql", "postgresql")
prepop = ("prod", "test", "demo")
scheduler_id = setup_create_yaml_file(hosts,
deployment.db_password,
webservers[deployment.webserver_type - 1],
dbs[deployment.db_type - 1],
prepop[instance_type - 1],
prepop_options,
deployment.distro,
False,
hostname,
deployment.template,
form_vars.url,
deployment.private_key,
deployment.remote_user,
demo_type,
)
# add scheduler fk in current record
record = db(itable.id == form_vars.id).select().first()
record.update_record(scheduler_id=scheduler_id)
# -----------------------------------------------------------------------------
def setup_create_yaml_file(hosts, password, web_server, database_type,
prepop, prepop_options, distro, local=False,
hostname=None, template="default", sitename=None,
private_key=None, remote_user=None, demo_type=None):
roles_path = "../private/playbook/roles/"
if len(hosts) == 1:
deployment = [
{
"hosts": hosts[0][1],
"sudo": True,
"remote_user": remote_user,
"vars": {
"password": password,
"template": template,
"web_server": web_server,
"type": prepop,
"distro": distro,
"prepop_options": prepop_options,
"sitename": sitename,
"hostname": hostname,
"dtype": demo_type,
"eden_ip": hosts[0][1],
"db_ip": hosts[0][1],
"db_type": database_type
},
"roles": [
"%s%s" % (roles_path, database_type),
"%scommon" % roles_path,
"%suwsgi" % roles_path,
"%sconfigure" % roles_path,
"%s%s" % (roles_path, web_server),
]
}
]
else:
deployment = [
{
"hosts": hosts[0][1],
"sudo": True,
"remote_user": remote_user,
"vars": {
"distro": distro,
"dtype": demo_type,
"password": password,
"type": prepop
},
"roles": [
"%s%s" % (roles_path, database_type),
]
},
{
"hosts": hosts[2][1],
"sudo": True,
"remote_user": remote_user,
"vars": {
"dtype": demo_type,
"db_ip": hosts[0][1],
"db_type": database_type,
"hostname": hostname,
"password": password,
"prepop_options": prepop_options,
"sitename": sitename,
"template": template,
"type": prepop,
"web_server": web_server,
},
"roles": [
"%scommon" % roles_path,
"%suwsgi" % roles_path,
"%sconfigure" % roles_path,
]
},
{
"hosts": hosts[1][1],
"sudo": True,
"remote_user": remote_user,
"vars": {
"eden_ip": hosts[2][1],
"type": prepop
},
"roles": [
"%s%s" % (roles_path, web_server),
]
}
]
if demo_type == "afterprod":
only_tags = ["demo"]
elif prepop == "test":
only_tags = ["test",]
else:
only_tags = ["all"]
directory = os.path.join(current.request.folder, "yaml")
name = "deployment_%d" % int(time.time())
file_path = os.path.join(directory, "%s.yml" % name)
if not os.path.isdir(directory):
os.mkdir(directory)
with open(file_path, "w") as yaml_file:
yaml_file.write(yaml.dump(deployment, default_flow_style=False))
row = current.s3task.schedule_task(
name,
vars = {
"playbook": file_path,
"private_key": os.path.join(current.request.folder, "uploads", private_key),
"host": [host[1] for host in hosts],
"only_tags": only_tags,
},
function_name = "deploy",
repeats = 1,
timeout = 3600,
sync_output = 300
)
return row
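# For illustration only: with a single server, hosts == [(1, "10.0.0.5")], the
# playbook written above would look roughly like this (values abridged and
# purely hypothetical):
#
#   - hosts: 10.0.0.5
#     sudo: true
#     remote_user: admin
#     vars:
#       password: <db password>
#       db_type: postgresql
#       db_ip: 10.0.0.5
#       eden_ip: 10.0.0.5
#       type: prod
#       ...
#     roles:
#       - ../private/playbook/roles/postgresql
#       - ../private/playbook/roles/common
#       - ../private/playbook/roles/uwsgi
#       - ../private/playbook/roles/configure
#       - ../private/playbook/roles/apache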
# -----------------------------------------------------------------------------
def setup_create_playbook(playbook, hosts, private_key, only_tags):
inventory = ansible.inventory.Inventory(hosts)
#playbook_cb = callbacks.PlaybookCallbacks(verbose=utils.VERBOSITY)
stats = callbacks.AggregateStats()
# runner_cb = callbacks.PlaybookRunnerCallbacks(
# stats, verbose=utils.VERBOSITY)
head, tail = os.path.split(playbook)
deployment_name = tail.rsplit(".")[0]
cb = CallbackModule(deployment_name)
pb = ansible.playbook.PlayBook(
playbook = playbook,
inventory = inventory,
callbacks = cb,
runner_callbacks = cb,
stats = stats,
private_key_file = private_key,
only_tags = only_tags
)
return pb
# -----------------------------------------------------------------------------
def setup_get_prepop_options(template):
module_name = "applications.eden_deployment.modules.templates.%s.config" % template
__import__(module_name)
config = sys.modules[module_name]
prepopulate_options = config.settings.base.get("prepopulate_options")
if isinstance(prepopulate_options, dict):
if "mandatory" in prepopulate_options:
del prepopulate_options["mandatory"]
return prepopulate_options.keys()
else:
return ["mandatory"]
# -----------------------------------------------------------------------------
def setup_log(filename, category, data):
if type(data) == dict:
if 'verbose_override' in data:
# avoid logging extraneous data from facts
data = 'omitted'
else:
data = data.copy()
invocation = data.pop('invocation', None)
data = json.dumps(data)
if invocation is not None:
data = json.dumps(invocation) + " => %s " % data
path = os.path.join(current.request.folder, "yaml", "%s.log" % filename)
now = time.strftime(TIME_FORMAT, time.localtime())
fd = open(path, "a")
fd.write(MSG_FORMAT % dict(now=now, category=category, data=data))
fd.close()
# -----------------------------------------------------------------------------
def setup_get_templates():
path = os.path.join(current.request.folder, "modules", "templates")
templates = set(
os.path.basename(folder) for folder, subfolders, files in os.walk(path) \
for file_ in files if file_ == 'config.py'
)
return templates
# -----------------------------------------------------------------------------
def store_file(file, filename=None, path=None):
path = os.path.join(current.request.folder, "uploads")
if not os.path.exists(path):
os.makedirs(path)
pathfilename = os.path.join(path, filename)
dest_file = open(pathfilename, 'wb')
try:
shutil.copyfileobj(file, dest_file)
finally:
dest_file.close()
    os.chmod(pathfilename, 0o600)
return filename
# -----------------------------------------------------------------------------
def retrieve_file(filename, path=None):
path = os.path.join(current.request.folder, "uploads")
return (filename, open(os.path.join(path, filename), 'rb'))
# -----------------------------------------------------------------------------
class CallbackModule(object):
"""
    Logs playbook results, one log per deployment, in eden/yaml
"""
def __init__(self, filename):
self.filename = filename
def on_any(self, *args, **kwargs):
pass
def on_failed(self, host, res, ignore_errors=False):
setup_log(self.filename, 'FAILED', res)
def on_ok(self, host, res):
setup_log(self.filename, 'OK', res)
def on_error(self, host, msg):
setup_log(self.filename, 'ERROR', msg)
def on_skipped(self, host, item=None):
setup_log(self.filename, 'SKIPPED', '...')
def on_unreachable(self, host, res):
setup_log(self.filename, 'UNREACHABLE', res)
def on_no_hosts(self):
pass
def on_async_poll(self, host, res, jid, clock):
        setup_log(self.filename, 'DEBUG', '%s %s %s %s' % (host, res, jid, clock))
def on_async_ok(self, host, res, jid):
        setup_log(self.filename, 'DEBUG', '%s %s %s' % (host, res, jid))
def on_async_failed(self, host, res, jid):
setup_log(self.filename, 'ASYNC_FAILED', res)
def on_start(self):
setup_log(self.filename, 'DEBUG', 'on_start')
def on_notify(self, host, handler):
setup_log(self.filename, 'DEBUG', host)
def on_no_hosts_matched(self):
setup_log(self.filename, 'DEBUG', 'no_hosts_matched')
def on_no_hosts_remaining(self):
setup_log(self.filename, 'DEBUG', 'no_hosts_remaining')
def on_task_start(self, name, is_conditional):
setup_log(self.filename, 'DEBUG', 'Starting %s' % name)
def on_vars_prompt(self, varname, private=True, prompt=None,
encrypt=None, confirm=False, salt_size=None,
salt=None, default=None):
pass
def on_setup(self):
setup_log(self.filename, 'DEBUG', 'on_setup')
def on_import_for_host(self, host, imported_file):
setup_log(self.filename, 'IMPORTED', imported_file)
def on_not_import_for_host(self, host, missing_file):
setup_log(self.filename, 'NOTIMPORTED', missing_file)
def on_play_start(self, pattern):
setup_log(self.filename, 'play_start', pattern)
def on_stats(self, stats):
setup_log(self.filename, 'DEBUG', stats)
# -----------------------------------------------------------------------------
def setup_rheader(r, tabs=[]):
""" Resource component page header """
if r.representation == "html":
T = current.T
tabs = [(T("Deployment Details"), None),
(T("Servers"), "server"),
(T("Instances"), "instance"),
]
rheader_tabs = s3_rheader_tabs(r, tabs)
rheader = DIV(rheader_tabs)
return rheader
# -----------------------------------------------------------------------------
def setup_management_exists(_type, _id, deployment_id):
""" Returns true/false depending on whether a management task
exists for an instance
"""
db = current.db
ttable = current.s3db.scheduler_task
args = '["%s", "%s", "%s"]' % (_type, _id, deployment_id)
query = ((ttable.function_name == "setup_management") & \
(ttable.args == args) & \
(ttable.status.belongs(["RUNNING", "QUEUED", "ASSIGNED"])))
exists = db(query).select(ttable.id,
limitby = (0, 1)).first()
if exists:
return True
return False
# -----------------------------------------------------------------------------
class setup_UpgradeMethod(S3Method):
def apply_method(self, r, **attr):
s3db = current.s3db
db = current.db
T = current.T
response = current.response
record = r.record
dtable = s3db.setup_deploy
stable = s3db.scheduler_task
query = (dtable.host == record.host) & \
(stable.status == "COMPLETED")
machines = db(query).select(
dtable.id.with_alias("deployment"),
dtable.type.with_alias("type"),
join = [
stable.on(dtable.scheduler_id == stable.id)
],
distinct = True
)
machine_ids = [machine.deployment for machine in machines]
validate = s3db.setup_host_validator(machine_ids)
if r.http == "GET":
if record.last_refreshed is None:
redirect(URL(c="setup", f="refresh", args=record.id))
# Data table
resource = s3db.resource("setup_packages")
totalrows = resource.count()
list_fields = ["id",
"name",
"cv",
"av",
]
package_filter = (s3db.setup_packages.deployment == record.id) & \
(s3db.setup_packages.cv != s3db.setup_packages.av)
resource.add_filter(package_filter)
data = resource.select(list_fields,
limit = totalrows,
)
dt = S3DataTable(data["rfields"], data["rows"])
dt_id = "datatable"
if validate is not None:
dt_bulk_actions = None
appname = current.request.application
current.response.s3.scripts.append("/%s/static/scripts/S3/s3.setup.js" % appname)
else:
dt_bulk_actions = [(T("Upgrade"), "upgrade")]
items = dt.html(totalrows,
totalrows,
dt_pagination = "false",
dt_bulk_actions = dt_bulk_actions,
)
output = dict(items=items)
response.view = "list.html"
elif r.http == "POST":
if validate is not None:
current.session.error = validate
redirect(URL(c="setup", f="%s_deploy" % record.type, args=[record.id, "upgrade"]))
post_vars = r.post_vars
ptable = s3db.setup_packages
selected = post_vars.selected
if selected:
selected = selected.split(",")
else:
selected = []
# query = ptable.id.belongs(selected)
# packages = db(query).select()
query = FS("id").belongs(selected)
presource = s3db.resource("setup_packages", filter=query)
packages = presource.select(["name", "type"], as_rows=True)
system_packages = []
pip_packages = []
git_packages = []
for package in packages:
if package.type == "os":
system_packages.append(package.name)
elif package.type == "pip":
pip_packages.append(package.name)
elif package.type == "git":
if package.name == "web2py":
                        git_packages.append({"name": package.name, "chdir": "/home/%s" % record.type})
directory = os.path.join(current.request.folder, "yaml")
name = "upgrade_%d" % int(time.time())
file_path = os.path.join(directory, "%s.yml" % name)
roles_path = "../private/playbook/roles/"
upgrade = [
{
"hosts": record.host,
"sudo": True,
"vars": {
"system_packages": system_packages,
"pip_packages": pip_packages,
"git_packages": git_packages,
},
"roles": [
"%supgrades" % roles_path,
]
}
]
if record.type == "remote":
upgrade[0]["remote_user"] = record.remote_user
else:
upgrade[0]["connection"] = "local"
if not os.path.isdir(directory):
os.mkdir(directory)
with open(file_path, "w") as yaml_file:
yaml_file.write(yaml.dump(upgrade, default_flow_style=False))
if record.private_key:
private_key = os.path.join(current.request.folder, "uploads", record.private_key)
else:
private_key = None
only_tags = ['all']
row = current.s3task.schedule_task(
name,
vars = {
"playbook": file_path,
"private_key": private_key,
"host": [record.host],
"only_tags": only_tags,
},
function_name = "deploy",
repeats = 1,
timeout = 3600,
sync_output = 300
)
# Add record to setup_upgrade
utable = s3db.setup_upgrade
utable.insert(deployment=record.id, scheduler=row.id)
current.session.flash = T("Upgrade Queued. Please wait while it is completed")
redirect(URL(c="setup", f="%s_deploy" % record.type, args=[record.id, "upgrade"]))
return output
# -----------------------------------------------------------------------------
def setup_refresh(id):
T = current.T
db = current.db
s3db = current.s3db
dtable = s3db.setup_deploy
query = (dtable.id == id)
record = db(query).select(dtable.id,
dtable.host,
dtable.type,
dtable.prepop,
dtable.remote_user,
dtable.private_key,
).first()
if not record:
return {"success": False,
"msg": T("Record Not Found"),
"f": "index",
"args": None
}
# Get machines with the same host as record
ptable = s3db.setup_packages
stable = s3db.scheduler_task
utable = s3db.setup_upgrade
query = (dtable.host == record.host) & \
(stable.status == "COMPLETED")
machines = db(query).select(
dtable.id.with_alias("deployment"),
dtable.type.with_alias("type"),
join = [
stable.on(dtable.scheduler_id == stable.id)
],
distinct = True
)
# Check if machines have a refresh running
machine_ids = [machine.deployment for machine in machines]
validate = s3db.setup_host_validator(machine_ids)
if validate is not None:
return {"success": False,
"msg": validate,
"f": str("%s_deploy" % record.type),
"args": [record.id, "read"]
}
# set the refresh lock
for machine in machines:
db(dtable.id == machine.deployment).update(refresh_lock=1)
# find new packages
if record.type == "local":
response = s3db.setup_getupgrades(record.host, record.prepop)
else:
response = s3db.setup_getupgrades(record.host,
record.prepop,
record.remote_user,
record.private_key,
)
if response["dark"]:
return {"success": False,
"msg": T("Error contacting the server"),
"f": str("%s_deploy" % record.type),
"args": [record.id, "upgrade"]
}
# Call ansible runner
# get a list of current packages
packages = db(ptable.deployment == record.id).select(ptable.name)
old_set = set()
for package in packages:
old_set.add(package.name)
new_set = set()
fetched_packages = response["contacted"][record.host]["packages"]
for package in fetched_packages:
new_set.add(package["name"])
new_packages = new_set.difference(old_set)
upgrade_packages = new_set.intersection(old_set)
uptodate_packages = old_set.difference(new_set)
for package in fetched_packages:
if package["name"] in new_packages:
for machine in machines:
if package["name"] == "web2py" and machine.deployment != record.id:
continue
ptable.insert(name = package["name"],
cv = package["cv"],
av = package["av"],
type = package["type"],
deployment = machine.deployment,
)
elif package["name"] in upgrade_packages:
for machine in machines:
if package["name"] == "web2py" and machine.deployment != record.id:
continue
query = (ptable.name == package["name"]) & \
(ptable.deployment == machine.deployment)
db(query).update(av=package["av"])
for package in uptodate_packages:
for machine in machines:
if package == "web2py" and machine.deployment != record.id:
continue
query = (ptable.name == package) & \
(ptable.deployment == machine.deployment)
row = db(query).select().first()
row.av = row.cv
row.update_record()
# release the refresh lock
for machine in machines:
db(dtable.id == machine.deployment).update(refresh_lock=0)
# update last refreshed
import datetime
record.update_record(last_refreshed=datetime.datetime.now())
return {"success": True,
"msg": T("Refreshed Packages"),
"f": str("%s_deploy" % record.type),
"args": [record.id, "upgrade"]
}
# -----------------------------------------------------------------------------
def setup_host_validator(machine_ids):
""" Helper Function that checks whether it's safe to allow
upgrade/deployments/refresh packages on given instances
"""
s3db = current.s3db
db = current.db
T = current.T
dtable = s3db.setup_deploy
ptable = s3db.setup_packages
stable = s3db.scheduler_task
utable = s3db.setup_upgrade
if len(machine_ids) > 1:
query = (dtable.id.belongs(machine_ids)) & \
(dtable.refresh_lock != 0)
else:
query = (dtable.id == machine_ids[0]) & \
(dtable.refresh_lock != 0)
rows = db(query).select(dtable.id)
if rows:
return T("A refresh is in progress. Please wait for it to finish")
# or an upgrade in process
if len(machine_ids) > 1:
query = (utable.deployment.belongs(machine_ids)) & \
((stable.status != "COMPLETED") & (stable.status != "FAILED"))
else:
query = (utable.deployment == machine_ids[0]) & \
((stable.status != "COMPLETED") & (stable.status != "FAILED"))
rows = db(query).select(utable.deployment,
join=stable.on(utable.scheduler == stable.id)
)
if rows:
return T("An upgrade is in progress. Please wait for it to finish")
# or even a deployment in process
if len(machine_ids) > 1:
query = (dtable.id.belongs(machine_ids)) & \
((stable.status != "COMPLETED") & (stable.status != "FAILED"))
else:
query = (dtable.id == machine_ids[0]) & \
((stable.status != "COMPLETED") & (stable.status != "FAILED"))
rows = db(query).select(dtable.id,
join = stable.on(utable.scheduler == stable.id)
)
if rows:
return T("A deployment is in progress. Please wait for it to finish")
# -----------------------------------------------------------------------------
def setup_getupgrades(host, web2py_path, remote_user=None, private_key=None):
import ansible.runner
module_path = os.path.join(current.request.folder, "private", "playbook", "library")
if private_key:
private_key = os.path.join(current.request.folder, "uploads", private_key)
inventory = ansible.inventory.Inventory([host])
if private_key and remote_user:
runner = ansible.runner.Runner(module_name = "upgrade",
module_path = module_path,
module_args = "web2py_path=/home/%s" % web2py_path,
remote_user = remote_user,
private_key_file = private_key,
pattern = host,
inventory = inventory,
sudo = True,
)
else:
runner = ansible.runner.Runner(module_name = "upgrade",
module_path = module_path,
module_args = "web2py_path=/home/%s" % web2py_path,
pattern = host,
inventory = inventory,
sudo = True,
)
response = runner.run()
return response
def setup_upgrade_status(_id):
s3db = current.s3db
db = current.db
T = current.T
utable = s3db.setup_upgrade
stable = s3db.scheduler_task
query = (utable.deployment == _id)
row = db(query).select(stable.status,
join = utable.on(stable.id == utable.scheduler)
).last()
if row.status == "COMPLETED":
return T("Upgrade Completed! Refreshing the page in 5 seconds")
| gallifrey17/eden | modules/s3db/setup.py | Python | mit | 39,483 |
import _plotly_utils.basevalidators
class ValuessrcValidator(_plotly_utils.basevalidators.SrcValidator):
def __init__(self, plotly_name="valuessrc", parent_name="funnelarea", **kwargs):
super(ValuessrcValidator, self).__init__(
plotly_name=plotly_name,
parent_name=parent_name,
edit_type=kwargs.pop("edit_type", "none"),
**kwargs
)
| plotly/plotly.py | packages/python/plotly/plotly/validators/funnelarea/_valuessrc.py | Python | mit | 402 |
# coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for license information.
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is regenerated.
# --------------------------------------------------------------------------
from typing import TYPE_CHECKING
import warnings
from azure.core.exceptions import ClientAuthenticationError, HttpResponseError, ResourceExistsError, ResourceNotFoundError, map_error
from azure.core.paging import ItemPaged
from azure.core.pipeline import PipelineResponse
from azure.core.pipeline.transport import HttpRequest, HttpResponse
from azure.core.polling import LROPoller, NoPolling, PollingMethod
from azure.mgmt.core.exceptions import ARMErrorFormat
from azure.mgmt.core.polling.arm_polling import ARMPolling
from .. import models as _models
if TYPE_CHECKING:
# pylint: disable=unused-import,ungrouped-imports
from typing import Any, Callable, Dict, Generic, Iterable, Optional, TypeVar, Union
T = TypeVar('T')
ClsType = Optional[Callable[[PipelineResponse[HttpRequest, HttpResponse], T, Dict[str, Any]], Any]]
class AzureFirewallsOperations(object):
"""AzureFirewallsOperations operations.
You should not instantiate this class directly. Instead, you should create a Client instance that
instantiates it for you and attaches it as an attribute.
:ivar models: Alias to model classes used in this operation group.
:type models: ~azure.mgmt.network.v2019_11_01.models
:param client: Client for service requests.
:param config: Configuration of service client.
:param serializer: An object model serializer.
:param deserializer: An object model deserializer.
"""
models = _models
def __init__(self, client, config, serializer, deserializer):
self._client = client
self._serialize = serializer
self._deserialize = deserializer
self._config = config
def _delete_initial(
self,
resource_group_name, # type: str
azure_firewall_name, # type: str
**kwargs # type: Any
):
# type: (...) -> None
cls = kwargs.pop('cls', None) # type: ClsType[None]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2019-11-01"
accept = "application/json"
# Construct URL
url = self._delete_initial.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'azureFirewallName': self._serialize.url("azure_firewall_name", azure_firewall_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
request = self._client.delete(url, query_parameters, header_parameters)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200, 202, 204]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
if cls:
return cls(pipeline_response, None, {})
_delete_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/azureFirewalls/{azureFirewallName}'} # type: ignore
def begin_delete(
self,
resource_group_name, # type: str
azure_firewall_name, # type: str
**kwargs # type: Any
):
# type: (...) -> LROPoller[None]
"""Deletes the specified Azure Firewall.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param azure_firewall_name: The name of the Azure Firewall.
:type azure_firewall_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: By default, your polling method will be ARMPolling.
Pass in False for this operation to not poll, or pass in your own initialized polling object for a personal polling strategy.
:paramtype polling: bool or ~azure.core.polling.PollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
:return: An instance of LROPoller that returns either None or the result of cls(response)
:rtype: ~azure.core.polling.LROPoller[None]
:raises ~azure.core.exceptions.HttpResponseError:
"""
polling = kwargs.pop('polling', True) # type: Union[bool, PollingMethod]
cls = kwargs.pop('cls', None) # type: ClsType[None]
lro_delay = kwargs.pop(
'polling_interval',
self._config.polling_interval
)
cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
if cont_token is None:
raw_result = self._delete_initial(
resource_group_name=resource_group_name,
azure_firewall_name=azure_firewall_name,
cls=lambda x,y,z: x,
**kwargs
)
kwargs.pop('error_map', None)
kwargs.pop('content_type', None)
def get_long_running_output(pipeline_response):
if cls:
return cls(pipeline_response, None, {})
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'azureFirewallName': self._serialize.url("azure_firewall_name", azure_firewall_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
if polling is True: polling_method = ARMPolling(lro_delay, lro_options={'final-state-via': 'location'}, path_format_arguments=path_format_arguments, **kwargs)
elif polling is False: polling_method = NoPolling()
else: polling_method = polling
if cont_token:
return LROPoller.from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output
)
else:
return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
begin_delete.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/azureFirewalls/{azureFirewallName}'} # type: ignore
def get(
self,
resource_group_name, # type: str
azure_firewall_name, # type: str
**kwargs # type: Any
):
# type: (...) -> "_models.AzureFirewall"
"""Gets the specified Azure Firewall.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param azure_firewall_name: The name of the Azure Firewall.
:type azure_firewall_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: AzureFirewall, or the result of cls(response)
:rtype: ~azure.mgmt.network.v2019_11_01.models.AzureFirewall
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.AzureFirewall"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2019-11-01"
accept = "application/json"
# Construct URL
url = self.get.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'azureFirewallName': self._serialize.url("azure_firewall_name", azure_firewall_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
request = self._client.get(url, query_parameters, header_parameters)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
deserialized = self._deserialize('AzureFirewall', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
get.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/azureFirewalls/{azureFirewallName}'} # type: ignore
def _create_or_update_initial(
self,
resource_group_name, # type: str
azure_firewall_name, # type: str
parameters, # type: "_models.AzureFirewall"
**kwargs # type: Any
):
# type: (...) -> "_models.AzureFirewall"
cls = kwargs.pop('cls', None) # type: ClsType["_models.AzureFirewall"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2019-11-01"
content_type = kwargs.pop("content_type", "application/json")
accept = "application/json"
# Construct URL
url = self._create_or_update_initial.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'azureFirewallName': self._serialize.url("azure_firewall_name", azure_firewall_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
body_content_kwargs = {} # type: Dict[str, Any]
body_content = self._serialize.body(parameters, 'AzureFirewall')
body_content_kwargs['content'] = body_content
request = self._client.put(url, query_parameters, header_parameters, **body_content_kwargs)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200, 201]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
if response.status_code == 200:
deserialized = self._deserialize('AzureFirewall', pipeline_response)
if response.status_code == 201:
deserialized = self._deserialize('AzureFirewall', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
_create_or_update_initial.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/azureFirewalls/{azureFirewallName}'} # type: ignore
def begin_create_or_update(
self,
resource_group_name, # type: str
azure_firewall_name, # type: str
parameters, # type: "_models.AzureFirewall"
**kwargs # type: Any
):
# type: (...) -> LROPoller["_models.AzureFirewall"]
"""Creates or updates the specified Azure Firewall.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param azure_firewall_name: The name of the Azure Firewall.
:type azure_firewall_name: str
:param parameters: Parameters supplied to the create or update Azure Firewall operation.
:type parameters: ~azure.mgmt.network.v2019_11_01.models.AzureFirewall
:keyword callable cls: A custom type or function that will be passed the direct response
:keyword str continuation_token: A continuation token to restart a poller from a saved state.
:keyword polling: By default, your polling method will be ARMPolling.
Pass in False for this operation to not poll, or pass in your own initialized polling object for a personal polling strategy.
:paramtype polling: bool or ~azure.core.polling.PollingMethod
:keyword int polling_interval: Default waiting time between two polls for LRO operations if no Retry-After header is present.
:return: An instance of LROPoller that returns either AzureFirewall or the result of cls(response)
:rtype: ~azure.core.polling.LROPoller[~azure.mgmt.network.v2019_11_01.models.AzureFirewall]
:raises ~azure.core.exceptions.HttpResponseError:
"""
polling = kwargs.pop('polling', True) # type: Union[bool, PollingMethod]
cls = kwargs.pop('cls', None) # type: ClsType["_models.AzureFirewall"]
lro_delay = kwargs.pop(
'polling_interval',
self._config.polling_interval
)
cont_token = kwargs.pop('continuation_token', None) # type: Optional[str]
if cont_token is None:
raw_result = self._create_or_update_initial(
resource_group_name=resource_group_name,
azure_firewall_name=azure_firewall_name,
parameters=parameters,
cls=lambda x,y,z: x,
**kwargs
)
kwargs.pop('error_map', None)
kwargs.pop('content_type', None)
def get_long_running_output(pipeline_response):
deserialized = self._deserialize('AzureFirewall', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'azureFirewallName': self._serialize.url("azure_firewall_name", azure_firewall_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
if polling is True: polling_method = ARMPolling(lro_delay, lro_options={'final-state-via': 'azure-async-operation'}, path_format_arguments=path_format_arguments, **kwargs)
elif polling is False: polling_method = NoPolling()
else: polling_method = polling
if cont_token:
return LROPoller.from_continuation_token(
polling_method=polling_method,
continuation_token=cont_token,
client=self._client,
deserialization_callback=get_long_running_output
)
else:
return LROPoller(self._client, raw_result, get_long_running_output, polling_method)
begin_create_or_update.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/azureFirewalls/{azureFirewallName}'} # type: ignore
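    # Illustrative usage sketch (not part of the generated client), assuming a
    # configured NetworkManagementClient named `client` and an AzureFirewall
    # model named `firewall_params`:
    #
    #     poller = client.azure_firewalls.begin_create_or_update(
    #         "my-rg", "my-firewall", firewall_params)
    #     firewall = poller.result()  # blocks until the LRO completes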
def update_tags(
self,
resource_group_name, # type: str
azure_firewall_name, # type: str
parameters, # type: "_models.TagsObject"
**kwargs # type: Any
):
# type: (...) -> "_models.AzureFirewall"
"""Updates tags of an Azure Firewall resource.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:param azure_firewall_name: The name of the Azure Firewall.
:type azure_firewall_name: str
:param parameters: Parameters supplied to update azure firewall tags.
:type parameters: ~azure.mgmt.network.v2019_11_01.models.TagsObject
:keyword callable cls: A custom type or function that will be passed the direct response
:return: AzureFirewall, or the result of cls(response)
:rtype: ~azure.mgmt.network.v2019_11_01.models.AzureFirewall
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.AzureFirewall"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2019-11-01"
content_type = kwargs.pop("content_type", "application/json")
accept = "application/json"
# Construct URL
url = self.update_tags.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'azureFirewallName': self._serialize.url("azure_firewall_name", azure_firewall_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Content-Type'] = self._serialize.header("content_type", content_type, 'str')
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
body_content_kwargs = {} # type: Dict[str, Any]
body_content = self._serialize.body(parameters, 'TagsObject')
body_content_kwargs['content'] = body_content
request = self._client.patch(url, query_parameters, header_parameters, **body_content_kwargs)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
deserialized = self._deserialize('AzureFirewall', pipeline_response)
if cls:
return cls(pipeline_response, deserialized, {})
return deserialized
update_tags.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/azureFirewalls/{azureFirewallName}'} # type: ignore
def list(
self,
resource_group_name, # type: str
**kwargs # type: Any
):
# type: (...) -> Iterable["_models.AzureFirewallListResult"]
"""Lists all Azure Firewalls in a resource group.
:param resource_group_name: The name of the resource group.
:type resource_group_name: str
:keyword callable cls: A custom type or function that will be passed the direct response
:return: An iterator like instance of either AzureFirewallListResult or the result of cls(response)
:rtype: ~azure.core.paging.ItemPaged[~azure.mgmt.network.v2019_11_01.models.AzureFirewallListResult]
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.AzureFirewallListResult"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2019-11-01"
accept = "application/json"
def prepare_request(next_link=None):
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
if not next_link:
# Construct URL
url = self.list.metadata['url'] # type: ignore
path_format_arguments = {
'resourceGroupName': self._serialize.url("resource_group_name", resource_group_name, 'str'),
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
request = self._client.get(url, query_parameters, header_parameters)
else:
url = next_link
query_parameters = {} # type: Dict[str, Any]
request = self._client.get(url, query_parameters, header_parameters)
return request
def extract_data(pipeline_response):
deserialized = self._deserialize('AzureFirewallListResult', pipeline_response)
list_of_elem = deserialized.value
if cls:
list_of_elem = cls(list_of_elem)
return deserialized.next_link or None, iter(list_of_elem)
def get_next(next_link=None):
request = prepare_request(next_link)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
return pipeline_response
return ItemPaged(
get_next, extract_data
)
list.metadata = {'url': '/subscriptions/{subscriptionId}/resourceGroups/{resourceGroupName}/providers/Microsoft.Network/azureFirewalls'} # type: ignore
def list_all(
self,
**kwargs # type: Any
):
# type: (...) -> Iterable["_models.AzureFirewallListResult"]
"""Gets all the Azure Firewalls in a subscription.
:keyword callable cls: A custom type or function that will be passed the direct response
:return: An iterator like instance of either AzureFirewallListResult or the result of cls(response)
:rtype: ~azure.core.paging.ItemPaged[~azure.mgmt.network.v2019_11_01.models.AzureFirewallListResult]
:raises: ~azure.core.exceptions.HttpResponseError
"""
cls = kwargs.pop('cls', None) # type: ClsType["_models.AzureFirewallListResult"]
error_map = {
401: ClientAuthenticationError, 404: ResourceNotFoundError, 409: ResourceExistsError
}
error_map.update(kwargs.pop('error_map', {}))
api_version = "2019-11-01"
accept = "application/json"
def prepare_request(next_link=None):
# Construct headers
header_parameters = {} # type: Dict[str, Any]
header_parameters['Accept'] = self._serialize.header("accept", accept, 'str')
if not next_link:
# Construct URL
url = self.list_all.metadata['url'] # type: ignore
path_format_arguments = {
'subscriptionId': self._serialize.url("self._config.subscription_id", self._config.subscription_id, 'str'),
}
url = self._client.format_url(url, **path_format_arguments)
# Construct parameters
query_parameters = {} # type: Dict[str, Any]
query_parameters['api-version'] = self._serialize.query("api_version", api_version, 'str')
request = self._client.get(url, query_parameters, header_parameters)
else:
url = next_link
query_parameters = {} # type: Dict[str, Any]
request = self._client.get(url, query_parameters, header_parameters)
return request
def extract_data(pipeline_response):
deserialized = self._deserialize('AzureFirewallListResult', pipeline_response)
list_of_elem = deserialized.value
if cls:
list_of_elem = cls(list_of_elem)
return deserialized.next_link or None, iter(list_of_elem)
def get_next(next_link=None):
request = prepare_request(next_link)
pipeline_response = self._client._pipeline.run(request, stream=False, **kwargs)
response = pipeline_response.http_response
if response.status_code not in [200]:
map_error(status_code=response.status_code, response=response, error_map=error_map)
raise HttpResponseError(response=response, error_format=ARMErrorFormat)
return pipeline_response
return ItemPaged(
get_next, extract_data
)
list_all.metadata = {'url': '/subscriptions/{subscriptionId}/providers/Microsoft.Network/azureFirewalls'} # type: ignore
| Azure/azure-sdk-for-python | sdk/network/azure-mgmt-network/azure/mgmt/network/v2019_11_01/operations/_azure_firewalls_operations.py | Python | mit | 27,002 |
"""
Copyright (c) 2002 Intel Corporation
All rights reserved.
Redistribution and use in source and binary forms, with or without
modification, are permitted provided that the following conditions are
met:
* Redistributions of source code must retain the above copyright
notice, this list of conditions and the following disclaimer.
* Redistributions in binary form must reproduce the above
copyright notice, this list of conditions and the following
disclaimer in the documentation and/or other materials provided
with the distribution.
* Neither the name of the Intel Corporation nor the names of its
contributors may be used to endorse or promote products derived
from this software without specific prior written permission.
THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
"AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE INTEL OR
CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL, SPECIAL,
EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT LIMITED TO,
PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE, DATA, OR
PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY OF
LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING
NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS
SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
EXPORT LAWS: THIS LICENSE ADDS NO RESTRICTIONS TO THE EXPORT LAWS OF
YOUR JURISDICTION. It is licensee's responsibility to comply with any
export regulations applicable in licensee's jurisdiction. Under
CURRENT (May 2000) U.S. export regulations this software is eligible
for export from the U.S. and can be downloaded by or otherwise
exported or reexported worldwide EXCEPT to U.S. embargoed destinations
which include Cuba, Iraq, Libya, North Korea, Iran, Syria, Sudan,
Afghanistan and any other country to which the U.S. has embargoed
goods and services.
DESCRIPTION: Proxy class that implements client end of XML-RPC
communication with a node manager.
AUTHOR: Brent Chun ([email protected])
$Id: nodemgrproxy.py,v 1.1 2003-08-19 17:17:22 aclement Exp $
"""
from xmlrpclib import ServerProxy
from M2Crypto.m2xmlrpclib import SSL_Transport
from sslctx import clictxinit
class nodemgrproxy:
def __init__(self, host, port, sslport=None, key=None, cert=None, cacert=None):
self.host = host
self.port = port
self.sslport = sslport
self.key = key
self.cert = cert
self.cacert = cacert
def getconfig(self):
s = ServerProxy("http://%s:%d" % (self.host, self.port))
return s.getconfig()
def getleases(self):
s = ServerProxy("http://%s:%d" % (self.host, self.port))
return s.getleases()
def getslivers(self):
s = ServerProxy("http://%s:%d" % (self.host, self.port))
return s.getslivers()
def getprinciple(self, slice):
s = ServerProxy("http://%s:%d" % (self.host, self.port))
params = { "slice" : slice }
return s.getprinciple(params)
def getsshkeys(self, slice):
s = ServerProxy("http://%s:%d" % (self.host, self.port))
params = { "slice" : slice }
return s.getsshkeys(params)
def newlease(self, ticketdata):
ctx = clictxinit(self.cert, self.key, self.cacert)
s = ServerProxy("https://%s:%d" % (self.host, self.sslport), SSL_Transport(ctx))
params = { "ticketdata" : ticketdata }
return s.newlease(params)
def newvm(self, leasedata, privatekey, publickey):
ctx = clictxinit(self.cert, self.key, self.cacert)
s = ServerProxy("https://%s:%d" % (self.host, self.sslport), SSL_Transport(ctx))
params = { "leasedata" : leasedata, "privatekey" : privatekey,
"publickey" : publickey }
return s.newvm(params)
def newleasevm(self, ticketdata, privatekey, publickey):
ctx = clictxinit(self.cert, self.key, self.cacert)
s = ServerProxy("https://%s:%d" % (self.host, self.sslport), SSL_Transport(ctx))
params = { "ticketdata" : ticketdata, "privatekey" : privatekey,
"publickey" : publickey }
return s.newleasevm(params)
def deletelease(self, slice):
ctx = clictxinit(self.cert, self.key, self.cacert)
s = ServerProxy("https://%s:%d" % (self.host, self.sslport), SSL_Transport(ctx))
params = { "slice" : slice }
return s.deletelease(params)
def renewlease(self, slice):
ctx = clictxinit(self.cert, self.key, self.cacert)
s = ServerProxy("https://%s:%d" % (self.host, self.sslport), SSL_Transport(ctx))
params = { "slice" : slice }
return s.renewlease(params)
def addkey(self, slice, key):
ctx = clictxinit(self.cert, self.key, self.cacert)
s = ServerProxy("https://%s:%d" % (self.host, self.sslport), SSL_Transport(ctx))
params = { "slice" : slice, "key" : key }
return s.addkey(params)
def delkey(self, slice, key):
ctx = clictxinit(self.cert, self.key, self.cacert)
s = ServerProxy("https://%s:%d" % (self.host, self.sslport), SSL_Transport(ctx))
params = { "slice" : slice, "key" : key }
return s.delkey(params)
def nukekeys(self, slice):
ctx = clictxinit(self.cert, self.key, self.cacert)
s = ServerProxy("https://%s:%d" % (self.host, self.sslport), SSL_Transport(ctx))
params = { "slice" : slice }
return s.nukekeys(params)
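# Minimal usage sketch (host, ports and file paths below are placeholders, not
# part of the original module):
#
#     proxy = nodemgrproxy("node.example.org", 800, sslport=801,
#                          key="client.key", cert="client.crt",
#                          cacert="ca.crt")
#     leases = proxy.getleases()      # plain XML-RPC call over self.port
#     proxy.renewlease("myslice")     # SSL XML-RPC call using the client cert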
| nmc-probe/emulab-nome | tbsetup/plab/libdslice/dslice/nodemgrproxy.py | Python | agpl-3.0 | 5,675 |
# Copyright 2017 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
"""Definition of XLA test case."""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
import contextlib
import random
import re
import numpy as np
from tensorflow.contrib.compiler import jit
from tensorflow.core.framework import types_pb2
from tensorflow.core.protobuf import config_pb2
from tensorflow.python.client import session
from tensorflow.python.framework import dtypes
from tensorflow.python.framework import ops
from tensorflow.python.framework import random_seed
from tensorflow.python.ops import array_ops
from tensorflow.python.ops import variables
from tensorflow.python.platform import flags
from tensorflow.python.platform import test
from tensorflow.python.platform import tf_logging as logging
FLAGS = flags.FLAGS
flags.DEFINE_string('test_device', None,
'Tensorflow device on which to place operators under test')
flags.DEFINE_string('types', None, 'Types to test. Comma-separated list.')
flags.DEFINE_string('disabled_manifest', None,
'Path to a file with a list of tests that should not run.')
class XLATestCase(test.TestCase):
"""XLA test cases are parameterized test cases."""
def __init__(self, method_name='runTest'):
super(XLATestCase, self).__init__(method_name)
self.device = FLAGS.test_device
self.has_custom_call = (self.device == 'XLA_CPU')
self._all_tf_types = set([
dtypes.as_dtype(types_pb2.DataType.Value(name))
for name in FLAGS.types.split(',')
])
self.int_tf_types = set([
dtype for dtype in self._all_tf_types if dtype.is_integer
])
self._float_tf_types = set([
dtype for dtype in self._all_tf_types if dtype.is_floating
])
self.complex_tf_types = set([
dtype for dtype in self._all_tf_types if dtype.is_complex
])
self._numeric_tf_types = set(
self.int_tf_types | self._float_tf_types | self.complex_tf_types)
self._all_types = set(
[dtype.as_numpy_dtype for dtype in self._all_tf_types])
self.int_types = set([dtype.as_numpy_dtype for dtype in self.int_tf_types])
self._float_types = set(
[dtype.as_numpy_dtype for dtype in self._float_tf_types])
self.complex_types = set([
dtype.as_numpy_dtype for dtype in self.complex_tf_types
])
self._numeric_types = set(
self.int_types | self._float_types | self.complex_types)
# Parse the manifest file, if any, into a regex identifying tests to
# disable
self.disabled_regex = None
self._method_types_filter = dict()
# TODO(xpan): Make it text proto if it doesn't scale.
# Each line of the manifest file specifies an entry. The entry can be
# 1) TestNameRegex // E.g. CumprodTest.* Or
# 2) TestName TypeName // E.g. AdamOptimizerTest.testSharing DT_BFLOAT16
# The 1) disables the entire test. While 2) only filter some numeric types
# so that they are not used in those tests.
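    # A hypothetical manifest (illustration only, based on the examples above):
    #   CumprodTest.*
    #   AdamOptimizerTest.testSharing DT_BFLOAT16,DT_HALF
    # The first entry disables every CumprodTest method; the second keeps the
    # test but removes bfloat16/half from the numeric types it is run with.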
if FLAGS.disabled_manifest is not None:
comments_re = re.compile('#.*$')
manifest_file = open(FLAGS.disabled_manifest, 'r')
disabled_tests = []
disabled_method_types = []
for l in manifest_file.read().splitlines():
entry = comments_re.sub('', l).strip().split(' ')
if len(entry) == 1:
disabled_tests.append(entry[0])
elif len(entry) == 2:
disabled_method_types.append(
(entry[0], entry[1].strip().split(',')))
else:
raise ValueError('Bad entry in manifest file.')
self.disabled_regex = re.compile('|'.join(disabled_tests))
for method, types in disabled_method_types:
self._method_types_filter[method] = set([
dtypes.as_dtype(types_pb2.DataType.Value(name)).as_numpy_dtype
for name in types])
manifest_file.close()
@property
def all_tf_types(self):
name = '{}.{}'.format(type(self).__name__, self._testMethodName)
tf_types = set([dtypes.as_dtype(t)
for t in self._method_types_filter.get(name, set())])
return self._all_tf_types - tf_types
@property
def float_types(self):
name = '{}.{}'.format(type(self).__name__, self._testMethodName)
return self._float_types - self._method_types_filter.get(name, set())
@property
def float_tf_types(self):
name = '{}.{}'.format(type(self).__name__, self._testMethodName)
return self._float_tf_types - self._method_types_filter.get(name, set())
@property
def numeric_tf_types(self):
name = '{}.{}'.format(type(self).__name__, self._testMethodName)
tf_types = set([dtypes.as_dtype(t)
for t in self._method_types_filter.get(name, set())])
return self._numeric_tf_types - tf_types
@property
def numeric_types(self):
name = '{}.{}'.format(type(self).__name__, self._testMethodName)
return self._numeric_types - self._method_types_filter.get(name, set())
@property
def all_types(self):
name = '{}.{}'.format(type(self).__name__, self._testMethodName)
return self._all_types - self._method_types_filter.get(name, set())
def setUp(self):
super(XLATestCase, self).setUp()
name = '{}.{}'.format(type(self).__name__, self._testMethodName)
if self.disabled_regex is not None and self.disabled_regex.match(name):
logging.info('Disabled test case: %s', name)
self.skipTest('{} is disabled by manifest.'.format(name))
return
logging.info('Start test case: %s', name)
random.seed(random_seed.DEFAULT_GRAPH_SEED)
np.random.seed(random_seed.DEFAULT_GRAPH_SEED)
def tearDown(self):
super(XLATestCase, self).tearDown()
logging.info('End test case: %s', self._testMethodName)
@contextlib.contextmanager
def test_session(self):
"""Custom implementation of test_session() for XLA tests.
We override the standard Tensorflow test_session() since it is too
specific to CPU and GPU tests. In particular, we want to disable soft
placement and explicitly assign ops to devices under test.
Yields:
A session to use when running a test case.
"""
graph = ops.Graph()
with session.Session(graph=graph) as sess, graph.as_default():
yield sess
@contextlib.contextmanager
def test_scope(self):
"""Test scope that runs tests on a Tensorflow/XLA device.
Uses a compilation_scope() to mark operators to compile.
Yields:
A scope to apply to the operators under test.
"""
with ops.device('device:{}:0'.format(self.device)):
yield
def Benchmark(tf_bench,
builder_fn,
use_xla_jit,
device,
separate_compiled_gradients=False):
"""Build a graph and run benchmarks against it, with or without XLA.
Args:
tf_bench: An instance of tf.test.Benchmark, used to run the benchmark.
builder_fn: A function that builds a graph when invoked, and returns
(name, fetches), where name is the name of the test, and fetches
is a list of tensors to fetch as output.
use_xla_jit: If true compile with the XLA JIT, otherwise use regular TF.
device: The tensorflow device to run on, e.g. "cpu", "gpu".
separate_compiled_gradients: If true put each gradient subgraph into a
separate compilation scope. This gives fine-grained control over which
portions of the graph will be compiled as a single unit. Compiling
gradients separately may yield better performance for some graphs.
The scope is named based on the scope of the forward computation as well
as the name of the gradients. As a result, the gradients will be compiled
in a scope that is separate from both the forward computation, and from
other gradients.
"""
with ops.Graph().as_default():
name = None
targets = []
with ops.device(device):
fetches = []
jit_scope = jit.experimental_jit_scope
with jit_scope(
compile_ops=use_xla_jit,
separate_compiled_gradients=separate_compiled_gradients):
name, fetches = builder_fn()
# We only want to benchmark the operations themselves, and not the data
# transfer of the result(s). Non-compiled identity ops ensure XLA
# doesn't know we're dropping the results, otherwise it might compile
# away the entire computation.
for fetch in fetches:
targets.append(array_ops.identity(fetch).op)
config = config_pb2.ConfigProto(allow_soft_placement=True)
with session.Session(config=config) as sess:
sess.run(variables.global_variables_initializer())
xla = 'xla_' if use_xla_jit else ''
tf_bench.run_op_benchmark(
sess, targets, name='%s_%s%s' % (name, xla, device))
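# A minimal usage sketch (hypothetical benchmark class and builder_fn; not part
# of this module). builder_fn must return (name, fetches) as described above,
# and only ops from modules already imported here are used:
#
#   def _builder():
#     x = array_ops.ones([1024, 1024])
#     return 'add', [x + x]
#
#   class AddBenchmark(test.Benchmark):
#     def benchmarkAdd(self):
#       Benchmark(self, _builder, use_xla_jit=True, device='cpu')
#       Benchmark(self, _builder, use_xla_jit=False, device='cpu')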
| eadgarchen/tensorflow | tensorflow/compiler/tests/xla_test.py | Python | apache-2.0 | 9,399 |
# Copyright 2013 Donald Stufft
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from __future__ import absolute_import, division, print_function
from __future__ import unicode_literals
# This file is automatically generated, do not edit it
__all__ = [
"__title__", "__summary__", "__uri__", "__version__",
"__author__", "__email__", "__license__", "__copyright__",
]
__title__ = "warehouse"
__summary__ = "Next Generation Python Package Repository"
__uri__ = "https://github.com/pypa/warehouse"
__version__ = "{version}"
__build__ = "{build}"
__author__ = "Donald Stufft"
__email__ = "[email protected]"
__license__ = "Apache License, Version 2.0"
__copyright__ = "Copyright 2013 Donald Stufft"
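# Note: this is a template ("__about__.tmpl.py"); the {version} and {build}
# placeholders above are presumably substituted (e.g. via str.format) when the
# real __about__.py is generated during packaging.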
| mattrobenolt/warehouse | warehouse/__about__.tmpl.py | Python | apache-2.0 | 1,202 |
import simulation
import network
import power_distribution
import matplotlib.pyplot as plt
import numpy as np
import nodes
from matplotlib import cm
import background_visualisation
import node_visualisation_interpreters
import edge_visualisation_interpreters
import visualisation
hardGenNetwork = network.Network(network.SimpleHardcodedNetworkGenerator())
cS = simulation.coreSimulation(hardGenNetwork)
cS.aYearInTheLife()
con1 = cS.network.consumerAgents[0]
con2 = cS.network.consumerAgents[1]
con3 = cS.network.consumerAgents[2]
supplierAgent = cS.network.supplierAgents[0]
sup1 = cS.network.suppliers[0]
sup2 = cS.network.suppliers[1]
nw_final=cS.network
fig, axes = plt.subplots(2,2)
'''
Consumers: Satisfaction, Power, Social
'''
bg1 = background_visualisation.Background()
#bg2 = background_visualisation.interpBackground(cmap=cm.Accent)
#bg2.delunay_data(ranGenNetwork.consumers,'neighbourhood_id')
ni1= node_visualisation_interpreters.node_interpreter(radius=50,node_type=nodes.Supplier,color='red', transparency=0.5)
ni2= node_visualisation_interpreters.node_interpreter(radius=15,node_type=nodes.Distributor,color='purple')
ni3= node_visualisation_interpreters.node_interpreter(radius=25,node_type=nodes.Consumer,color='green')
ni4= node_visualisation_interpreters.node_scale_color('guaranteed_capacity',node_type=nodes.Consumer,radius=40,min_val=0,max_val=6000,cmap=cm.autumn)
ei1 = edge_visualisation_interpreters.edge_interpreter(thickness=3,color='gray')
grid1 = visualisation.networkPane(nw_final,[ei1],[ni1,ni2,ni3],None,0,0)
grid2 = visualisation.historyPane(nw_final,1,0,'consumers','consumer_agent','memSoc')
grid3 = visualisation.networkPane(nw_final,[ei1],[ni4],None,1,1)
grid4 = visualisation.historyPane(nw_final,0,1,'consumers','consumer_agent','memory')
grid5 = visualisation.historyPane(nw_final,0,2,'consumers','consumer_agent','memPow')
grid6 = visualisation.historyPane(nw_final,1,2,'consumers','consumer_agent','memPowReq')
grid7 = visualisation.historyPane(nw_final,2,2,'consumers','consumer_agent','memPowUsed')
vis= visualisation.Visualisation(grid1,grid2,grid3,grid4,grid5,grid6,grid7)
vis.update()
| ABM-project/power-grid | power_grid/TestCases.py | Python | mit | 2,145 |
import batoid
import numpy as np
from test_helpers import timer, do_pickle, all_obj_diff, init_gpu, rays_allclose
@timer
def test_properties():
rng = np.random.default_rng(5)
for i in range(100):
R = rng.normal(0.0, 0.3) # negative allowed
sphere = batoid.Sphere(R)
assert sphere.R == R
do_pickle(sphere)
@timer
def test_sag():
rng = np.random.default_rng(57)
for i in range(100):
R = 1./rng.normal(0.0, 0.3)
sphere = batoid.Sphere(R)
for j in range(10):
x = rng.uniform(-0.7*abs(R), 0.7*abs(R))
y = rng.uniform(-0.7*abs(R), 0.7*abs(R))
result = sphere.sag(x, y)
np.testing.assert_allclose(
result,
R*(1-np.sqrt(1.0-(x*x + y*y)/R/R))
)
# Check that it returned a scalar float and not an array
assert isinstance(result, float)
# Check 0,0
np.testing.assert_allclose(sphere.sag(0, 0), 0.0, rtol=0, atol=1e-17)
# Check vectorization
x = rng.uniform(-0.7*abs(R), 0.7*abs(R), size=(10, 10))
y = rng.uniform(-0.7*abs(R), 0.7*abs(R), size=(10, 10))
np.testing.assert_allclose(
sphere.sag(x, y),
R*(1-np.sqrt(1.0-(x*x + y*y)/R/R))
)
# Make sure non-unit stride arrays also work
np.testing.assert_allclose(
sphere.sag(x[::5,::2], y[::5,::2]),
R*(1-np.sqrt(1.0-(x*x + y*y)/R/R))[::5,::2]
)
do_pickle(sphere)
@timer
def test_normal():
rng = np.random.default_rng(577)
for i in range(100):
R = 1./rng.normal(0.0, 0.3)
sphere = batoid.Sphere(R)
for j in range(10):
x = rng.uniform(-0.7*abs(R), 0.7*abs(R))
y = rng.uniform(-0.7*abs(R), 0.7*abs(R))
result = sphere.normal(x, y)
r = np.hypot(x, y)
rat = r/R
dzdr = rat/np.sqrt(1-rat*rat)
nz = 1/np.sqrt(1+dzdr*dzdr)
normal = np.array([-x/r*dzdr*nz, -y/r*dzdr*nz, nz])
np.testing.assert_allclose(result, normal)
# Check 0,0
np.testing.assert_equal(sphere.normal(0, 0), np.array([0, 0, 1]))
# Check vectorization
x = rng.uniform(-0.7*abs(R), 0.7*abs(R), size=(10, 10))
y = rng.uniform(-0.7*abs(R), 0.7*abs(R), size=(10, 10))
r = np.hypot(x, y)
rat = r/R
dzdr = rat/np.sqrt(1-rat*rat)
nz = 1/np.sqrt(1+dzdr*dzdr)
normal = np.dstack([-x/r*dzdr*nz, -y/r*dzdr*nz, nz])
np.testing.assert_allclose(
sphere.normal(x, y),
normal
)
# Make sure non-unit stride arrays also work
np.testing.assert_allclose(
sphere.normal(x[::5,::2], y[::5,::2]),
normal[::5, ::2]
)
@timer
def test_intersect():
rng = np.random.default_rng(5772)
size = 10_000
for i in range(100):
R = 1./rng.normal(0.0, 0.3)
sphereCoordSys = batoid.CoordSys(origin=[0, 0, -1])
sphere = batoid.Sphere(R)
x = rng.uniform(-0.3*abs(R), 0.3*abs(R), size=size)
y = rng.uniform(-0.3*abs(R), 0.3*abs(R), size=size)
z = np.full_like(x, -2*abs(R))
# If we shoot rays straight up, then it's easy to predict the intersection
vx = np.zeros_like(x)
vy = np.zeros_like(x)
vz = np.ones_like(x)
rv = batoid.RayVector(x, y, z, vx, vy, vz)
np.testing.assert_allclose(rv.z, -2*abs(R))
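        # Because the sphere's coordinate system origin sits at z = -1 in the
        # global frame, an upward ray launched from (x, y) should come to rest
        # at z = sphere.sag(x, y) - 1, which the assertions below verify.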
rv2 = batoid.intersect(sphere, rv.copy(), sphereCoordSys)
assert rv2.coordSys == sphereCoordSys
rv2 = rv2.toCoordSys(batoid.CoordSys())
np.testing.assert_allclose(rv2.x, x)
np.testing.assert_allclose(rv2.y, y)
np.testing.assert_allclose(rv2.z, sphere.sag(x, y)-1, rtol=0, atol=1e-9)
# Check default intersect coordTransform
rv2 = rv.copy().toCoordSys(sphereCoordSys)
batoid.intersect(sphere, rv2)
assert rv2.coordSys == sphereCoordSys
rv2 = rv2.toCoordSys(batoid.CoordSys())
np.testing.assert_allclose(rv2.x, x)
np.testing.assert_allclose(rv2.y, y)
np.testing.assert_allclose(rv2.z, sphere.sag(x, y)-1, rtol=0, atol=1e-9)
@timer
def test_reflect():
rng = np.random.default_rng(57721)
size = 10_000
for i in range(100):
R = 1./rng.normal(0.0, 0.3)
sphere = batoid.Sphere(R)
x = rng.uniform(-0.3*abs(R), 0.3*abs(R), size=size)
y = rng.uniform(-0.3*abs(R), 0.3*abs(R), size=size)
z = np.full_like(x, -2*abs(R))
vx = rng.uniform(-1e-5, 1e-5, size=size)
vy = rng.uniform(-1e-5, 1e-5, size=size)
vz = np.full_like(x, 1)
rv = batoid.RayVector(x, y, z, vx, vy, vz)
rvr = batoid.reflect(sphere, rv.copy())
rvr2 = sphere.reflect(rv.copy())
rays_allclose(rvr, rvr2)
# print(f"{np.sum(rvr.failed)/len(rvr)*100:.2f}% failed")
normal = sphere.normal(rvr.x, rvr.y)
# Test law of reflection
a0 = np.einsum("ad,ad->a", normal, rv.v)[~rvr.failed]
a1 = np.einsum("ad,ad->a", normal, -rvr.v)[~rvr.failed]
np.testing.assert_allclose(
a0, a1,
rtol=0, atol=1e-12
)
# Test that rv.v, rvr.v and normal are all in the same plane
np.testing.assert_allclose(
np.einsum(
"ad,ad->a",
np.cross(normal, rv.v),
rv.v
)[~rvr.failed],
0.0,
rtol=0, atol=1e-12
)
@timer
def test_refract():
rng = np.random.default_rng(577215)
size = 10_000
for i in range(100):
R = 1./rng.normal(0.0, 0.3)
sphere = batoid.Sphere(R)
m0 = batoid.ConstMedium(rng.normal(1.2, 0.01))
m1 = batoid.ConstMedium(rng.normal(1.3, 0.01))
x = rng.uniform(-0.3*abs(R), 0.3*abs(R), size=size)
y = rng.uniform(-0.3*abs(R), 0.3*abs(R), size=size)
z = np.full_like(x, -2*abs(R))
vx = rng.uniform(-1e-5, 1e-5, size=size)
vy = rng.uniform(-1e-5, 1e-5, size=size)
vz = np.sqrt(1-vx*vx-vy*vy)/m0.n
rv = batoid.RayVector(x, y, z, vx, vy, vz)
rvr = batoid.refract(sphere, rv.copy(), m0, m1)
rvr2 = sphere.refract(rv.copy(), m0, m1)
rays_allclose(rvr, rvr2)
# print(f"{np.sum(rvr.failed)/len(rvr)*100:.2f}% failed")
normal = sphere.normal(rvr.x, rvr.y)
# Test Snell's law
s0 = np.sum(np.cross(normal, rv.v*m0.n)[~rvr.failed], axis=-1)
s1 = np.sum(np.cross(normal, rvr.v*m1.n)[~rvr.failed], axis=-1)
np.testing.assert_allclose(
m0.n*s0, m1.n*s1,
rtol=0, atol=1e-9
)
# Test that rv.v, rvr.v and normal are all in the same plane
np.testing.assert_allclose(
np.einsum(
"ad,ad->a",
np.cross(normal, rv.v),
rv.v
)[~rvr.failed],
0.0,
rtol=0, atol=1e-12
)
@timer
def test_ne():
objs = [
batoid.Sphere(1.0),
batoid.Sphere(2.0),
batoid.Plane()
]
all_obj_diff(objs)
@timer
def test_fail():
sphere = batoid.Sphere(1.0)
rv = batoid.RayVector(0, 10, 0, 0, 0, -1) # Too far to side
rv2 = batoid.intersect(sphere, rv.copy())
np.testing.assert_equal(rv2.failed, np.array([True]))
# This one passes
rv = batoid.RayVector(0, 0, 0, 0, 0, -1)
rv2 = batoid.intersect(sphere, rv.copy())
np.testing.assert_equal(rv2.failed, np.array([False]))
if __name__ == '__main__':
test_properties()
test_sag()
test_normal()
test_intersect()
test_reflect()
test_refract()
test_ne()
test_fail()
| jmeyers314/batoid | tests/test_Sphere.py | Python | bsd-2-clause | 7,740 |
from matplotlib import pyplot as plt
from calculations import calculate_slope_and_intercept, calculate_error
from ..data import get_random_epoch
def launch():
x, y = get_random_epoch()
m, b = calculate_slope_and_intercept(x, y)
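    # calculate_slope_and_intercept is assumed to implement the usual
    # closed-form ordinary-least-squares fit (an assumption about that module):
    #   m = sum((x_i - mean(x)) * (y_i - mean(y))) / sum((x_i - mean(x)) ** 2)
    #   b = mean(y) - m * mean(x)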
y_hats = [b + (val * m) for val in x]
print 'calculated slope: ' + str(m)
print 'calculated intercept: ' + str(b)
print 'calculated error: ' + str(calculate_error(y_hats, y))
plt.figure(1)
plt.xlabel('inputs')
plt.ylabel('outputs')
plt.title('ordinary least squares regression')
plt.plot(x, y_hats, color='red')
plt.scatter(x, y, color='yellowgreen', marker='.')
plt.show()
| DanielDeychakiwsky/python-machine-learning | src/linear_regression/ordinary_least_squares/launch.py | Python | mit | 660 |
from collections import deque
import datetime
import os
import re
import sys
from django.core.exceptions import ImproperlyConfigured
from django.db import models
from django.conf import settings
from django.utils import importlib
from south import exceptions
from south.migration.utils import depends, dfs, flatten, get_app_label
from south.orm import FakeORM
from south.utils import memoize, ask_for_it_by_name, datetime_utils
from south.migration.utils import app_label_to_app_module
def all_migrations(applications=None):
"""
Returns all Migrations for all `applications` that are migrated.
"""
if applications is None:
applications = models.get_apps()
for model_module in applications:
# The app they've passed is the models module - go up one level
app_path = ".".join(model_module.__name__.split(".")[:-1])
app = ask_for_it_by_name(app_path)
try:
yield Migrations(app)
except exceptions.NoMigrations:
pass
def application_to_app_label(application):
"Works out the app label from either the app label, the app name, or the module"
if isinstance(application, basestring):
app_label = application.split('.')[-1]
else:
app_label = application.__name__.split('.')[-1]
return app_label
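# For illustration (hypothetical inputs): both a dotted app name and the app's
# module object reduce to the trailing component, e.g.
#   application_to_app_label("django.contrib.auth")  -> "auth"
#   application_to_app_label(auth_module)            -> "auth"  (a module whose
#                                                      __name__ ends in ".auth")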
class MigrationsMetaclass(type):
"""
Metaclass which ensures there is only one instance of a Migrations for
any given app.
"""
def __init__(self, name, bases, dict):
super(MigrationsMetaclass, self).__init__(name, bases, dict)
self.instances = {}
def __call__(self, application, **kwds):
app_label = application_to_app_label(application)
# If we don't already have an instance, make one
if app_label not in self.instances:
self.instances[app_label] = super(MigrationsMetaclass, self).__call__(app_label_to_app_module(app_label), **kwds)
return self.instances[app_label]
def _clear_cache(self):
"Clears the cache of Migration objects."
self.instances = {}
class Migrations(list):
"""
Holds a list of Migration objects for a particular app.
"""
__metaclass__ = MigrationsMetaclass
if getattr(settings, "SOUTH_USE_PYC", False):
MIGRATION_FILENAME = re.compile(r'(?!__init__)' # Don't match __init__.py
r'[0-9a-zA-Z_]*' # Don't match dotfiles, or names with dots/invalid chars in them
r'(\.pyc?)?$') # Match .py or .pyc files, or module dirs
else:
MIGRATION_FILENAME = re.compile(r'(?!__init__)' # Don't match __init__.py
r'[0-9a-zA-Z_]*' # Don't match dotfiles, or names with dots/invalid chars in them
r'(\.py)?$') # Match only .py files, or module dirs
def __init__(self, application, force_creation=False, verbose_creation=True):
"Constructor. Takes the module of the app, NOT its models (like get_app returns)"
self._cache = {}
self.set_application(application, force_creation, verbose_creation)
def create_migrations_directory(self, verbose=True):
"Given an application, ensures that the migrations directory is ready."
migrations_dir = self.migrations_dir()
# Make the directory if it's not already there
if not os.path.isdir(migrations_dir):
if verbose:
print "Creating migrations directory at '%s'..." % migrations_dir
os.mkdir(migrations_dir)
# Same for __init__.py
init_path = os.path.join(migrations_dir, "__init__.py")
if not os.path.isfile(init_path):
# Touch the init py file
if verbose:
print "Creating __init__.py in '%s'..." % migrations_dir
open(init_path, "w").close()
def migrations_dir(self):
"""
Returns the full path of the migrations directory.
If it doesn't exist yet, returns where it would exist, based on the
app's migrations module (defaults to app.migrations)
"""
module_path = self.migrations_module()
try:
module = importlib.import_module(module_path)
except ImportError:
# There's no migrations module made yet; guess!
try:
parent = importlib.import_module(".".join(module_path.split(".")[:-1]))
except ImportError:
# The parent doesn't even exist, that's an issue.
raise exceptions.InvalidMigrationModule(
application = self.application.__name__,
module = module_path,
)
else:
# Good guess.
return os.path.join(os.path.dirname(parent.__file__), module_path.split(".")[-1])
else:
# Get directory directly
return os.path.dirname(module.__file__)
def migrations_module(self):
"Returns the module name of the migrations module for this"
app_label = application_to_app_label(self.application)
if hasattr(settings, "SOUTH_MIGRATION_MODULES"):
if app_label in settings.SOUTH_MIGRATION_MODULES:
# There's an override.
return settings.SOUTH_MIGRATION_MODULES[app_label]
return self._application.__name__ + '.migrations'
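    # For illustration (hypothetical settings): an override such as
    #   SOUTH_MIGRATION_MODULES = {"blog": "path.to.blog_migrations"}
    # makes migrations_module() return "path.to.blog_migrations" for the "blog"
    # app instead of the default "<app>.migrations".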
def get_application(self):
return self._application
def set_application(self, application, force_creation=False, verbose_creation=True):
"""
Called when the application for this Migrations is set.
Imports the migrations module object, and throws a paddy if it can't.
"""
self._application = application
if not hasattr(application, 'migrations'):
try:
module = importlib.import_module(self.migrations_module())
self._migrations = application.migrations = module
except ImportError:
if force_creation:
self.create_migrations_directory(verbose_creation)
module = importlib.import_module(self.migrations_module())
self._migrations = application.migrations = module
else:
raise exceptions.NoMigrations(application)
self._load_migrations_module(application.migrations)
application = property(get_application, set_application)
def _load_migrations_module(self, module):
self._migrations = module
filenames = []
dirname = self.migrations_dir()
for f in os.listdir(dirname):
if self.MIGRATION_FILENAME.match(os.path.basename(f)):
full_path = os.path.join(dirname, f)
# If it's a .pyc file, only append if the .py isn't already around
if f.endswith(".pyc") and (os.path.isfile(full_path[:-1])):
continue
# If it's a module directory, only append if it contains __init__.py[c].
if os.path.isdir(full_path):
if not (os.path.isfile(os.path.join(full_path, "__init__.py")) or \
(getattr(settings, "SOUTH_USE_PYC", False) and \
os.path.isfile(os.path.join(full_path, "__init__.pyc")))):
continue
filenames.append(f)
filenames.sort()
self.extend(self.migration(f) for f in filenames)
def migration(self, filename):
name = Migration.strip_filename(filename)
if name not in self._cache:
self._cache[name] = Migration(self, name)
return self._cache[name]
def __getitem__(self, value):
if isinstance(value, basestring):
return self.migration(value)
return super(Migrations, self).__getitem__(value)
def _guess_migration(self, prefix):
prefix = Migration.strip_filename(prefix)
matches = [m for m in self if m.name().startswith(prefix)]
if len(matches) == 1:
return matches[0]
elif len(matches) > 1:
raise exceptions.MultiplePrefixMatches(prefix, matches)
else:
raise exceptions.UnknownMigration(prefix, None)
def guess_migration(self, target_name):
if target_name == 'zero' or not self:
return
elif target_name is None:
return self[-1]
else:
return self._guess_migration(prefix=target_name)
def app_label(self):
return self._application.__name__.split('.')[-1]
def full_name(self):
return self._migrations.__name__
@classmethod
def calculate_dependencies(cls, force=False):
"Goes through all the migrations, and works out the dependencies."
if getattr(cls, "_dependencies_done", False) and not force:
return
for migrations in all_migrations():
for migration in migrations:
migration.calculate_dependencies()
cls._dependencies_done = True
@staticmethod
def invalidate_all_modules():
"Goes through all the migrations, and invalidates all cached modules."
for migrations in all_migrations():
for migration in migrations:
migration.invalidate_module()
def next_filename(self, name):
"Returns the fully-formatted filename of what a new migration 'name' would be"
highest_number = 0
for migration in self:
try:
number = int(migration.name().split("_")[0])
highest_number = max(highest_number, number)
except ValueError:
pass
# Work out the new filename
return "%04i_%s.py" % (
highest_number + 1,
name,
)
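    # For illustration (hypothetical state): if the app already holds
    # 0001_initial and 0002_add_author, next_filename("rename_column")
    # returns "0003_rename_column.py".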
class Migration(object):
"""
Class which represents a particular migration file on-disk.
"""
def __init__(self, migrations, filename):
"""
Returns the migration class implied by 'filename'.
"""
self.migrations = migrations
self.filename = filename
self.dependencies = set()
self.dependents = set()
def __str__(self):
return self.app_label() + ':' + self.name()
def __repr__(self):
return u'<Migration: %s>' % unicode(self)
def __eq__(self, other):
return self.app_label() == other.app_label() and self.name() == other.name()
def __hash__(self):
return hash(str(self))
def app_label(self):
return self.migrations.app_label()
@staticmethod
def strip_filename(filename):
return os.path.splitext(os.path.basename(filename))[0]
def name(self):
return self.strip_filename(os.path.basename(self.filename))
def full_name(self):
return self.migrations.full_name() + '.' + self.name()
def migration(self):
"Tries to load the actual migration module"
full_name = self.full_name()
try:
migration = sys.modules[full_name]
except KeyError:
try:
migration = __import__(full_name, {}, {}, ['Migration'])
except ImportError, e:
raise exceptions.UnknownMigration(self, sys.exc_info())
except Exception, e:
raise exceptions.BrokenMigration(self, sys.exc_info())
# Override some imports
migration._ = lambda x: x # Fake i18n
migration.datetime = datetime_utils
return migration
migration = memoize(migration)
def migration_class(self):
"Returns the Migration class from the module"
return self.migration().Migration
def migration_instance(self):
"Instantiates the migration_class"
return self.migration_class()()
migration_instance = memoize(migration_instance)
def previous(self):
"Returns the migration that comes before this one in the sequence."
index = self.migrations.index(self) - 1
if index < 0:
return None
return self.migrations[index]
previous = memoize(previous)
def next(self):
"Returns the migration that comes after this one in the sequence."
index = self.migrations.index(self) + 1
if index >= len(self.migrations):
return None
return self.migrations[index]
next = memoize(next)
def _get_dependency_objects(self, attrname):
"""
Given the name of an attribute (depends_on or needed_by), either yields
a list of migration objects representing it, or errors out.
"""
for app, name in getattr(self.migration_class(), attrname, []):
try:
migrations = Migrations(app)
except ImproperlyConfigured:
raise exceptions.DependsOnUnmigratedApplication(self, app)
migration = migrations.migration(name)
try:
migration.migration()
except exceptions.UnknownMigration:
raise exceptions.DependsOnUnknownMigration(self, migration)
if migration.is_before(self) == False:
raise exceptions.DependsOnHigherMigration(self, migration)
yield migration
def calculate_dependencies(self):
"""
Loads dependency info for this migration, and stores it in itself
and any other relevant migrations.
"""
# Normal deps first
for migration in self._get_dependency_objects("depends_on"):
self.dependencies.add(migration)
migration.dependents.add(self)
# And reverse deps
for migration in self._get_dependency_objects("needed_by"):
self.dependents.add(migration)
migration.dependencies.add(self)
# And implicit ordering deps
previous = self.previous()
if previous:
self.dependencies.add(previous)
previous.dependents.add(self)
def invalidate_module(self):
"""
Removes the cached version of this migration's module import, so we
have to re-import it. Used when south.db.db changes.
"""
reload(self.migration())
self.migration._invalidate()
def forwards(self):
return self.migration_instance().forwards
def backwards(self):
return self.migration_instance().backwards
def forwards_plan(self):
"""
Returns a list of Migration objects to be applied, in order.
This list includes `self`, which will be applied last.
"""
return depends(self, lambda x: x.dependencies)
def _backwards_plan(self):
return depends(self, lambda x: x.dependents)
def backwards_plan(self):
"""
Returns a list of Migration objects to be unapplied, in order.
This list includes `self`, which will be unapplied last.
"""
return list(self._backwards_plan())
def is_before(self, other):
if self.migrations == other.migrations:
if self.filename < other.filename:
return True
return False
def is_after(self, other):
if self.migrations == other.migrations:
if self.filename > other.filename:
return True
return False
def prev_orm(self):
if getattr(self.migration_class(), 'symmetrical', False):
return self.orm()
previous = self.previous()
if previous is None:
# First migration? The 'previous ORM' is empty.
return FakeORM(None, self.app_label())
return previous.orm()
prev_orm = memoize(prev_orm)
def orm(self):
return FakeORM(self.migration_class(), self.app_label())
orm = memoize(orm)
def no_dry_run(self):
migration_class = self.migration_class()
try:
return migration_class.no_dry_run
except AttributeError:
return False
| edisonlz/fruit | web_project/base/site-packages/south/migration/base.py | Python | apache-2.0 | 16,120 |
# Copyright 2013 Huang Ying <[email protected]>
#
# This program is free software; you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation; either version 2 of the License, or
# (at your option) any later version.
import sys
import Image
from ..pdf import generator
from .. import imb
class Config(object):
def __init__(self):
object.__init__(self)
def imb2pdf(in_fn, out_fn):
imbook = imb.Book()
imbook.load(in_fn)
if len(imbook.pages) == 0:
return
img_fns = [pg.img_fn for pg in imbook.pages]
img = Image.open(imbook.pages[0].img_fn)
config = Config()
config.title = imbook.title
config.author = imbook.author
config.bookmarks = imbook.toc_entries
config.out_size = img.size
config.rotate = 0
config.out_file_name = out_fn
    pg = generator.PDFGenerator(config)
pg.generate(img_fns)
def usage():
print 'Usage: %s <input file>.imb <output file>.pdf' % (sys.argv[0],)
sys.exit(-1)
def main():
if len(sys.argv) != 3:
usage()
imb2pdf(sys.argv[1], sys.argv[2])
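# A minimal sketch of wiring this up as a script (the __main__ guard is an
# assumption, not part of the original module):
#
#   if __name__ == '__main__':
#       main()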
| hying-caritas/ibutils | ibutils/imb/to_pdf.py | Python | gpl-2.0 | 1,161 |
# -*- coding: utf-8 -*-
##############################################################################
#
# OpenERP, Open Source Management Solution
# Copyright (C) 2004-2010 Tiny SPRL (<http://tiny.be>).
#
# This program is free software: you can redistribute it and/or modify
# it under the terms of the GNU Affero General Public License as
# published by the Free Software Foundation, either version 3 of the
# License, or (at your option) any later version.
#
# This program is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU Affero General Public License for more details.
#
# You should have received a copy of the GNU Affero General Public License
# along with this program. If not, see <http://www.gnu.org/licenses/>.
#
##############################################################################
import wizard
import product_margin
| addition-it-solutions/project-all | addons/product_margin/__init__.py | Python | agpl-3.0 | 1,024 |
class Foo:
class Bar:
def __init__(self):
print("__init__ from Bar")
def __init__(self):
print("__init__ from Foo")
Foo.Bar()
Foo()
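# Expected output, in order: "__init__ from Bar", "__init__ from Foo",
# and "__init__ from Bar" again for the final call below.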
Foo.Bar() | NeonMercury/python-lua | tests/nestedclass.py | Python | apache-2.0 | 191 |