repo_name (stringlengths 7-94) | repo_path (stringlengths 4-237) | repo_head_hexsha (stringlengths 40-40) | content (stringlengths 10-680k) | apis (stringlengths 2-680k) |
---|---|---|---|---|
kevinxucs/pyston | minibenchmarks/go.py | bdb87c1706ac74a0d15d9bc2bae53798678a5f14 | # from pypy-benchmarks/own/chaos.py, with some minor modifications
# (more output, took out the benchmark harness)
#
import random, math, sys, time
SIZE = 9
GAMES = 200
KOMI = 7.5
EMPTY, WHITE, BLACK = 0, 1, 2
SHOW = {EMPTY: '.', WHITE: 'o', BLACK: 'x'}
PASS = -1
MAXMOVES = SIZE*SIZE*3
TIMESTAMP = 0
MOVES = 0
def to_pos(x,y):
return y * SIZE + x
def to_xy(pos):
y, x = divmod(pos, SIZE)
return x, y
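# A single board point. Stones of one connected group share a root square through the
# 'reference' chain (a small union-find); the root's 'ledges' field holds the group's
# (pseudo-)liberty count.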
class Square:
def __init__(self, board, pos):
self.board = board
self.pos = pos
self.timestamp = TIMESTAMP
self.removestamp = TIMESTAMP
self.zobrist_strings = [random.randrange(sys.maxint) for i in range(3)]
def set_neighbours(self):
x, y = self.pos % SIZE, self.pos / SIZE;
self.neighbours = []
for dx, dy in [(-1, 0), (1, 0), (0, -1), (0, 1)]:
newx, newy = x + dx, y + dy
if 0 <= newx < SIZE and 0 <= newy < SIZE:
self.neighbours.append(self.board.squares[to_pos(newx, newy)])
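# Play a stone of 'color' here: update the Zobrist hash, count empty neighbours as
# liberties, merge adjacent friendly groups by re-pointing their roots at self, and
# remove enemy groups whose liberty count drops to zero.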
def move(self, color):
global TIMESTAMP, MOVES
TIMESTAMP += 1
MOVES += 1
self.board.zobrist.update(self, color)
self.color = color
self.reference = self
self.ledges = 0
self.used = True
for neighbour in self.neighbours:
neighcolor = neighbour.color
if neighcolor == EMPTY:
self.ledges += 1
else:
neighbour_ref = neighbour.find(update=True)
if neighcolor == color:
if neighbour_ref.reference.pos != self.pos:
self.ledges += neighbour_ref.ledges
neighbour_ref.reference = self
self.ledges -= 1
else:
neighbour_ref.ledges -= 1
if neighbour_ref.ledges == 0:
neighbour.remove(neighbour_ref)
self.board.zobrist.add()
def remove(self, reference, update=True):
self.board.zobrist.update(self, EMPTY)
self.removestamp = TIMESTAMP
if update:
self.color = EMPTY
self.board.emptyset.add(self.pos)
# if color == BLACK:
# self.board.black_dead += 1
# else:
# self.board.white_dead += 1
for neighbour in self.neighbours:
if neighbour.color != EMPTY and neighbour.removestamp != TIMESTAMP:
neighbour_ref = neighbour.find(update)
if neighbour_ref.pos == reference.pos:
neighbour.remove(reference, update)
else:
if update:
neighbour_ref.ledges += 1
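# Union-find lookup: follow 'reference' pointers to the group's root square;
# with update=True the chain is path-compressed along the way.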
def find(self, update=False):
reference = self.reference
if reference.pos != self.pos:
reference = reference.find(update)
if update:
self.reference = reference
return reference
def __repr__(self):
return repr(to_xy(self.pos))
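# Bookkeeping for the empty points. 'empties' is an unordered list and 'empty_pos'
# maps position -> index into it, so add/remove are O(1). random_choice() samples
# random empties and swaps rejected (not useful) ones out of the sampling window.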
class EmptySet:
def __init__(self, board):
self.board = board
self.empties = range(SIZE*SIZE)
self.empty_pos = range(SIZE*SIZE)
def random_choice(self):
choices = len(self.empties)
while choices:
i = int(random.random()*choices)
pos = self.empties[i]
if self.board.useful(pos):
return pos
choices -= 1
self.set(i, self.empties[choices])
self.set(choices, pos)
return PASS
def add(self, pos):
self.empty_pos[pos] = len(self.empties)
self.empties.append(pos)
def remove(self, pos):
self.set(self.empty_pos[pos], self.empties[len(self.empties)-1])
self.empties.pop()
def set(self, i, pos):
self.empties[i] = pos
self.empty_pos[pos] = i
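# Incremental Zobrist hash of the whole position, plus the set of hashes already seen;
# Board.useful() uses dupe() to veto moves that would recreate a previous position.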
class ZobristHash:
def __init__(self, board):
self.board = board
self.hash_set = set()
self.hash = 0
for square in self.board.squares:
self.hash ^= square.zobrist_strings[EMPTY]
self.hash_set.clear()
self.hash_set.add(self.hash)
def update(self, square, color):
self.hash ^= square.zobrist_strings[square.color]
self.hash ^= square.zobrist_strings[color]
def add(self):
self.hash_set.add(self.hash)
def dupe(self):
return self.hash in self.hash_set
class Board:
def __init__(self):
self.squares = [Square(self, pos) for pos in range(SIZE*SIZE)]
for square in self.squares:
square.set_neighbours()
self.reset()
def reset(self):
for square in self.squares:
square.color = EMPTY
square.used = False
self.emptyset = EmptySet(self)
self.zobrist = ZobristHash(self)
self.color = BLACK
self.finished = False
self.lastmove = -2
self.history = []
self.white_dead = 0
self.black_dead = 0
def move(self, pos):
square = self.squares[pos]
if pos != PASS:
square.move(self.color)
self.emptyset.remove(square.pos)
elif self.lastmove == PASS:
self.finished = True
if self.color == BLACK: self.color = WHITE
else: self.color = BLACK
self.lastmove = pos
self.history.append(pos)
def random_move(self):
return self.emptyset.random_choice()
def useful_fast(self, square):
if not square.used:
for neighbour in square.neighbours:
if neighbour.color == EMPTY:
return True
return False
def useful(self, pos):
global TIMESTAMP
TIMESTAMP += 1
square = self.squares[pos]
if self.useful_fast(square):
return True
old_hash = self.zobrist.hash
self.zobrist.update(square, self.color)
empties = opps = weak_opps = neighs = weak_neighs = 0
for neighbour in square.neighbours:
neighcolor = neighbour.color
if neighcolor == EMPTY:
empties += 1
continue
neighbour_ref = neighbour.find()
if neighbour_ref.timestamp != TIMESTAMP:
if neighcolor == self.color:
neighs += 1
else:
opps += 1
neighbour_ref.timestamp = TIMESTAMP
neighbour_ref.temp_ledges = neighbour_ref.ledges
neighbour_ref.temp_ledges -= 1
if neighbour_ref.temp_ledges == 0:
if neighcolor == self.color:
weak_neighs += 1
else:
weak_opps += 1
neighbour_ref.remove(neighbour_ref, update=False)
dupe = self.zobrist.dupe()
self.zobrist.hash = old_hash
strong_neighs = neighs-weak_neighs
strong_opps = opps-weak_opps
return not dupe and \
(empties or weak_opps or (strong_neighs and (strong_opps or weak_neighs)))
def useful_moves(self):
return [pos for pos in self.emptyset.empties if self.useful(pos)]
def replay(self, history):
for pos in history:
self.move(pos)
def score(self, color):
if color == WHITE:
count = KOMI + self.black_dead
else:
count = self.white_dead
for square in self.squares:
squarecolor = square.color
if squarecolor == color:
count += 1
elif squarecolor == EMPTY:
surround = 0
for neighbour in square.neighbours:
if neighbour.color == color:
surround += 1
if surround == len(square.neighbours):
count += 1
return count
def check(self):
for square in self.squares:
if square.color == EMPTY:
continue
members1 = set([square])
changed = True
while changed:
changed = False
for member in members1.copy():
for neighbour in member.neighbours:
if neighbour.color == square.color and neighbour not in members1:
changed = True
members1.add(neighbour)
ledges1 = 0
for member in members1:
for neighbour in member.neighbours:
if neighbour.color == EMPTY:
ledges1 += 1
root = square.find()
#print 'members1', square, root, members1
#print 'ledges1', square, ledges1
members2 = set()
for square2 in self.squares:
if square2.color != EMPTY and square2.find() == root:
members2.add(square2)
ledges2 = root.ledges
#print 'members2', square, root, members1
#print 'ledges2', square, ledges2
assert members1 == members2
assert ledges1 == ledges2, ('ledges differ at %r: %d %d' % (square, ledges1, ledges2))
empties1 = set(self.emptyset.empties)
empties2 = set()
for square in self.squares:
if square.color == EMPTY:
empties2.add(square.pos)
def __repr__(self):
result = []
for y in range(SIZE):
start = to_pos(0, y)
result.append(''.join([SHOW[square.color]+' ' for square in self.squares[start:start+SIZE]]))
return '\n'.join(result)
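# A node of the UCT (Monte Carlo tree search) tree. play() descends the tree via
# select(), expands one new child, finishes the game with a random playout, and
# updates win/loss counts back up the visited path.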
class UCTNode:
def __init__(self):
self.bestchild = None
self.pos = -1
self.wins = 0
self.losses = 0
self.pos_child = [None for x in range(SIZE*SIZE)]
self.parent = None
def play(self, board):
""" uct tree search """
color = board.color
node = self
path = [node]
while True:
pos = node.select(board)
if pos == PASS:
break
board.move(pos)
child = node.pos_child[pos]
if not child:
child = node.pos_child[pos] = UCTNode()
child.unexplored = board.useful_moves()
child.pos = pos
child.parent = node
path.append(child)
break
path.append(child)
node = child
self.random_playout(board)
self.update_path(board, color, path)
def select(self, board):
""" select move; unexplored children first, then according to uct value """
if self.unexplored:
i = random.randrange(len(self.unexplored))
pos = self.unexplored[i]
self.unexplored[i] = self.unexplored[len(self.unexplored)-1]
self.unexplored.pop()
return pos
elif self.bestchild:
return self.bestchild.pos
else:
return PASS
def random_playout(self, board):
""" random play until both players pass """
for x in range(MAXMOVES): # XXX while not self.finished?
if board.finished:
break
board.move(board.random_move())
def update_path(self, board, color, path):
""" update win/loss count along path """
wins = board.score(BLACK) >= board.score(WHITE)
for node in path:
if color == BLACK: color = WHITE
else: color = BLACK
if wins == (color == BLACK):
node.wins += 1
else:
node.losses += 1
if node.parent:
node.parent.bestchild = node.parent.best_child()
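# UCT value: win rate plus an exploration bonus of sqrt(ln(parent visits) / (5 * node visits)).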
def score(self):
winrate = self.wins/float(self.wins+self.losses)
parentvisits = self.parent.wins+self.parent.losses
if not parentvisits:
return winrate
nodevisits = self.wins+self.losses
return winrate + math.sqrt((math.log(parentvisits))/(5*nodevisits))
def best_child(self):
maxscore = -1
maxchild = None
for child in self.pos_child:
if child and child.score() > maxscore:
maxchild = child
maxscore = child.score()
return maxchild
def best_visited(self):
maxvisits = -1
maxchild = None
for child in self.pos_child:
# if child:
# print to_xy(child.pos), child.wins, child.losses, child.score()
if child and (child.wins+child.losses) > maxvisits:
maxvisits, maxchild = (child.wins+child.losses), child
return maxchild
def user_move(board):
while True:
text = raw_input('?').strip()
if text == 'p':
return PASS
if text == 'q':
raise EOFError
try:
x, y = [int(i) for i in text.split()]
except ValueError:
continue
if not (0 <= x < SIZE and 0 <= y < SIZE):
continue
pos = to_pos(x, y)
if board.useful(pos):
return pos
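# Choose the computer's move: if any useful move exists, run GAMES Monte Carlo
# simulations from the current position through a fresh UCT tree and return the
# most-visited move at the root.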
def computer_move(board):
global MOVES
pos = board.random_move()
if pos == PASS:
return PASS
tree = UCTNode()
tree.unexplored = board.useful_moves()
nboard = Board()
for game in range(GAMES):
node = tree
nboard.reset()
nboard.replay(board.history)
node.play(nboard)
# print 'moves', MOVES
return tree.best_visited().pos
def versus_cpu():
print "versus_cpu"
random.seed(1)
board = Board()
pos = computer_move(board)
def main(n):
times = []
for i in range(5):
versus_cpu() # warmup
for i in range(n):
t1 = time.time()
versus_cpu()
t2 = time.time()
times.append(t2 - t1)
return times
if __name__ == "__main__":
main(100)
| [] |
BrianPugh/circuitpython | tools/gen_usb_descriptor.py | f0bb9635bf311013e7b1ff69d1a0542575cf9d0a | # SPDX-FileCopyrightText: 2014 MicroPython & CircuitPython contributors (https://github.com/adafruit/circuitpython/graphs/contributors)
#
# SPDX-License-Identifier: MIT
import argparse
import os
import sys
sys.path.append("../../tools/usb_descriptor")
from adafruit_usb_descriptor import audio, audio10, cdc, hid, midi, msc, standard, util
import hid_report_descriptors
DEFAULT_INTERFACE_NAME = 'CircuitPython'
ALL_DEVICES='CDC,MSC,AUDIO,HID'
ALL_DEVICES_SET=frozenset(ALL_DEVICES.split(','))
DEFAULT_DEVICES='CDC,MSC,AUDIO,HID'
ALL_HID_DEVICES='KEYBOARD,MOUSE,CONSUMER,SYS_CONTROL,GAMEPAD,DIGITIZER,XAC_COMPATIBLE_GAMEPAD,RAW'
ALL_HID_DEVICES_SET=frozenset(ALL_HID_DEVICES.split(','))
# Digitizer works on Linux but conflicts with mouse, so omit it.
DEFAULT_HID_DEVICES='KEYBOARD,MOUSE,CONSUMER,GAMEPAD'
parser = argparse.ArgumentParser(description='Generate USB descriptors.')
parser.add_argument('--highspeed', default=False, action='store_true',
help='descriptor for highspeed device')
parser.add_argument('--manufacturer', type=str,
help='manufacturer of the device')
parser.add_argument('--product', type=str,
help='product name of the device')
parser.add_argument('--vid', type=lambda x: int(x, 16),
help='vendor id')
parser.add_argument('--pid', type=lambda x: int(x, 16),
help='product id')
parser.add_argument('--serial_number_length', type=int, default=32,
help='length needed for the serial number in digits')
parser.add_argument('--devices', type=lambda l: tuple(l.split(',')), default=DEFAULT_DEVICES,
help='devices to include in descriptor (AUDIO includes MIDI support)')
parser.add_argument('--hid_devices', type=lambda l: tuple(l.split(',')), default=DEFAULT_HID_DEVICES,
help='HID devices to include in HID report descriptor')
parser.add_argument('--interface_name', type=str,
help='The name/prefix to use in the interface descriptions',
default=DEFAULT_INTERFACE_NAME)
parser.add_argument('--no-renumber_endpoints', dest='renumber_endpoints', action='store_false',
help='use to not renumber endpoint')
parser.add_argument('--cdc_ep_num_notification', type=int, default=0,
help='endpoint number of CDC NOTIFICATION')
parser.add_argument('--cdc_ep_num_data_out', type=int, default=0,
help='endpoint number of CDC DATA OUT')
parser.add_argument('--cdc_ep_num_data_in', type=int, default=0,
help='endpoint number of CDC DATA IN')
parser.add_argument('--msc_ep_num_out', type=int, default=0,
help='endpoint number of MSC OUT')
parser.add_argument('--msc_ep_num_in', type=int, default=0,
help='endpoint number of MSC IN')
parser.add_argument('--hid_ep_num_out', type=int, default=0,
help='endpoint number of HID OUT')
parser.add_argument('--hid_ep_num_in', type=int, default=0,
help='endpoint number of HID IN')
parser.add_argument('--midi_ep_num_out', type=int, default=0,
help='endpoint number of MIDI OUT')
parser.add_argument('--midi_ep_num_in', type=int, default=0,
help='endpoint number of MIDI IN')
parser.add_argument('--output_c_file', type=argparse.FileType('w', encoding='UTF-8'), required=True)
parser.add_argument('--output_h_file', type=argparse.FileType('w', encoding='UTF-8'), required=True)
args = parser.parse_args()
unknown_devices = list(frozenset(args.devices) - ALL_DEVICES_SET)
if unknown_devices:
raise ValueError("Unknown device(s)", unknown_devices)
unknown_hid_devices = list(frozenset(args.hid_devices) - ALL_HID_DEVICES_SET)
if unknown_hid_devices:
raise ValueError("Unknown HID devices(s)", unknown_hid_devices)
if not args.renumber_endpoints:
if 'CDC' in args.devices:
if args.cdc_ep_num_notification == 0:
raise ValueError("CDC notification endpoint number must not be 0")
elif args.cdc_ep_num_data_out == 0:
raise ValueError("CDC data OUT endpoint number must not be 0")
elif args.cdc_ep_num_data_in == 0:
raise ValueError("CDC data IN endpoint number must not be 0")
if 'MSC' in args.devices:
if args.msc_ep_num_out == 0:
raise ValueError("MSC endpoint OUT number must not be 0")
elif args.msc_ep_num_in == 0:
raise ValueError("MSC endpoint IN number must not be 0")
if 'HID' in args.devices:
if args.hid_ep_num_out == 0:
raise ValueError("HID endpoint OUT number must not be 0")
elif args.hid_ep_num_in == 0:
raise ValueError("HID endpoint IN number must not be 0")
if 'AUDIO' in args.devices:
if args.midi_ep_num_out == 0:
raise ValueError("MIDI endpoint OUT number must not be 0")
elif args.midi_ep_num_in == 0:
raise ValueError("MIDI endpoint IN number must not be 0")
class StringIndex:
"""Assign a monotonically increasing index to each unique string. Start with 0."""
string_to_index = {}
index_to_variable = {}
strings = []
@classmethod
def index(cls, string, *, variable_name = None):
if string in cls.string_to_index:
idx = cls.string_to_index[string]
if not cls.index_to_variable[idx]:
cls.index_to_variable[idx] = variable_name
return idx
else:
idx = len(cls.strings)
cls.string_to_index[string] = idx
cls.strings.append(string)
cls.index_to_variable[idx] = variable_name
return idx
@classmethod
def strings_in_order(cls):
return cls.strings
# langid must be the 0th string descriptor
LANGID_INDEX = StringIndex.index("\u0409", variable_name="language_id")
assert LANGID_INDEX == 0
SERIAL_NUMBER_INDEX = StringIndex.index("S" * args.serial_number_length, variable_name="usb_serial_number")
device = standard.DeviceDescriptor(
description="top",
idVendor=args.vid,
idProduct=args.pid,
iManufacturer=StringIndex.index(args.manufacturer),
iProduct=StringIndex.index(args.product),
iSerialNumber=SERIAL_NUMBER_INDEX)
# Interface numbers are interface-set local and endpoints are interface local
# until util.join_interfaces renumbers them.
cdc_union = cdc.Union(
description="CDC comm",
bMasterInterface=0x00, # Adjust this after interfaces are renumbered.
bSlaveInterface_list=[0x01]) # Adjust this after interfaces are renumbered.
cdc_call_management = cdc.CallManagement(
description="CDC comm",
bmCapabilities=0x01,
bDataInterface=0x01) # Adjust this after interfaces are renumbered.
cdc_comm_interface = standard.InterfaceDescriptor(
description="CDC comm",
bInterfaceClass=cdc.CDC_CLASS_COMM, # Communications Device Class
bInterfaceSubClass=cdc.CDC_SUBCLASS_ACM, # Abstract control model
bInterfaceProtocol=cdc.CDC_PROTOCOL_NONE,
iInterface=StringIndex.index("{} CDC control".format(args.interface_name)),
subdescriptors=[
cdc.Header(
description="CDC comm",
bcdCDC=0x0110),
cdc_call_management,
cdc.AbstractControlManagement(
description="CDC comm",
bmCapabilities=0x02),
cdc_union,
standard.EndpointDescriptor(
description="CDC comm in",
bEndpointAddress=args.cdc_ep_num_notification | standard.EndpointDescriptor.DIRECTION_IN,
bmAttributes=standard.EndpointDescriptor.TYPE_INTERRUPT,
wMaxPacketSize=0x0040,
bInterval=0x10)
])
cdc_data_interface = standard.InterfaceDescriptor(
description="CDC data",
bInterfaceClass=cdc.CDC_CLASS_DATA,
iInterface=StringIndex.index("{} CDC data".format(args.interface_name)),
subdescriptors=[
standard.EndpointDescriptor(
description="CDC data out",
bEndpointAddress=args.cdc_ep_num_data_out | standard.EndpointDescriptor.DIRECTION_OUT,
bmAttributes=standard.EndpointDescriptor.TYPE_BULK,
bInterval=0,
wMaxPacketSize=512 if args.highspeed else 64),
standard.EndpointDescriptor(
description="CDC data in",
bEndpointAddress=args.cdc_ep_num_data_in | standard.EndpointDescriptor.DIRECTION_IN,
bmAttributes=standard.EndpointDescriptor.TYPE_BULK,
bInterval=0,
wMaxPacketSize=512 if args.highspeed else 64),
])
cdc_interfaces = [cdc_comm_interface, cdc_data_interface]
msc_interfaces = [
standard.InterfaceDescriptor(
description="MSC",
bInterfaceClass=msc.MSC_CLASS,
bInterfaceSubClass=msc.MSC_SUBCLASS_TRANSPARENT,
bInterfaceProtocol=msc.MSC_PROTOCOL_BULK,
iInterface=StringIndex.index("{} Mass Storage".format(args.interface_name)),
subdescriptors=[
standard.EndpointDescriptor(
description="MSC in",
bEndpointAddress=args.msc_ep_num_in | standard.EndpointDescriptor.DIRECTION_IN,
bmAttributes=standard.EndpointDescriptor.TYPE_BULK,
bInterval=0,
wMaxPacketSize=512 if args.highspeed else 64),
standard.EndpointDescriptor(
description="MSC out",
bEndpointAddress=(args.msc_ep_num_out | standard.EndpointDescriptor.DIRECTION_OUT),
bmAttributes=standard.EndpointDescriptor.TYPE_BULK,
bInterval=0,
wMaxPacketSize=512 if args.highspeed else 64),
]
)
]
# When there's only one hid_device, it shouldn't have a report id.
# Otherwise, report ids are assigned sequentially:
# args.hid_devices[0] has report_id 1
# args.hid_devices[1] has report_id 2
# etc.
report_ids = {}
if len(args.hid_devices) == 1:
name = args.hid_devices[0]
combined_hid_report_descriptor = hid.ReportDescriptor(
description=name,
report_descriptor=bytes(hid_report_descriptors.REPORT_DESCRIPTOR_FUNCTIONS[name](0)))
report_ids[name] = 0
else:
report_id = 1
concatenated_descriptors = bytearray()
for name in args.hid_devices:
concatenated_descriptors.extend(
bytes(hid_report_descriptors.REPORT_DESCRIPTOR_FUNCTIONS[name](report_id)))
report_ids[name] = report_id
report_id += 1
combined_hid_report_descriptor = hid.ReportDescriptor(
description="MULTIDEVICE",
report_descriptor=bytes(concatenated_descriptors))
# ASF4 expects keyboard and generic devices to have both in and out endpoints,
# and will fail (possibly silently) if both are not supplied.
hid_endpoint_in_descriptor = standard.EndpointDescriptor(
description="HID in",
bEndpointAddress=args.hid_ep_num_in | standard.EndpointDescriptor.DIRECTION_IN,
bmAttributes=standard.EndpointDescriptor.TYPE_INTERRUPT,
bInterval=8)
hid_endpoint_out_descriptor = standard.EndpointDescriptor(
description="HID out",
bEndpointAddress=args.hid_ep_num_out | standard.EndpointDescriptor.DIRECTION_OUT,
bmAttributes=standard.EndpointDescriptor.TYPE_INTERRUPT,
bInterval=8)
hid_interfaces = [
standard.InterfaceDescriptor(
description="HID Multiple Devices",
bInterfaceClass=hid.HID_CLASS,
bInterfaceSubClass=hid.HID_SUBCLASS_NOBOOT,
bInterfaceProtocol=hid.HID_PROTOCOL_NONE,
iInterface=StringIndex.index("{} HID".format(args.interface_name)),
subdescriptors=[
hid.HIDDescriptor(
description="HID",
wDescriptorLength=len(bytes(combined_hid_report_descriptor))),
hid_endpoint_in_descriptor,
hid_endpoint_out_descriptor,
]
),
]
# Audio!
# In and out here are relative to CircuitPython
# USB OUT -> midi_in_jack_emb -> midi_out_jack_ext -> CircuitPython
midi_in_jack_emb = midi.InJackDescriptor(
description="MIDI PC -> {}".format(args.interface_name),
bJackType=midi.JACK_TYPE_EMBEDDED,
iJack=StringIndex.index("{} usb_midi.ports[0]".format(args.interface_name)))
midi_out_jack_ext = midi.OutJackDescriptor(
description="MIDI data out to user code.",
bJackType=midi.JACK_TYPE_EXTERNAL,
input_pins=[(midi_in_jack_emb, 1)],
iJack=0)
# USB IN <- midi_out_jack_emb <- midi_in_jack_ext <- CircuitPython
midi_in_jack_ext = midi.InJackDescriptor(
description="MIDI data in from user code.",
bJackType=midi.JACK_TYPE_EXTERNAL,
iJack=0)
midi_out_jack_emb = midi.OutJackDescriptor(
description="MIDI PC <- {}".format(args.interface_name),
bJackType=midi.JACK_TYPE_EMBEDDED,
input_pins=[(midi_in_jack_ext, 1)],
iJack=StringIndex.index("{} usb_midi.ports[1]".format(args.interface_name)))
audio_midi_interface = standard.InterfaceDescriptor(
description="Midi goodness",
bInterfaceClass=audio.AUDIO_CLASS_DEVICE,
bInterfaceSubClass=audio.AUDIO_SUBCLASS_MIDI_STREAMING,
bInterfaceProtocol=audio.AUDIO_PROTOCOL_V1,
iInterface=StringIndex.index("{} MIDI".format(args.interface_name)),
subdescriptors=[
midi.Header(
jacks_and_elements=[
midi_in_jack_emb,
midi_in_jack_ext,
midi_out_jack_emb,
midi_out_jack_ext
],
),
standard.EndpointDescriptor(
description="MIDI data out to {}".format(args.interface_name),
bEndpointAddress=args.midi_ep_num_out | standard.EndpointDescriptor.DIRECTION_OUT,
bmAttributes=standard.EndpointDescriptor.TYPE_BULK,
bInterval=0,
wMaxPacketSize=512 if args.highspeed else 64),
midi.DataEndpointDescriptor(baAssocJack=[midi_in_jack_emb]),
standard.EndpointDescriptor(
description="MIDI data in from {}".format(args.interface_name),
bEndpointAddress=args.midi_ep_num_in | standard.EndpointDescriptor.DIRECTION_IN,
bmAttributes=standard.EndpointDescriptor.TYPE_BULK,
bInterval = 0x0,
wMaxPacketSize=512 if args.highspeed else 64),
midi.DataEndpointDescriptor(baAssocJack=[midi_out_jack_emb]),
])
cs_ac_interface = audio10.AudioControlInterface(
description="Empty audio control",
audio_streaming_interfaces = [],
midi_streaming_interfaces = [
audio_midi_interface
]
)
audio_control_interface = standard.InterfaceDescriptor(
description="All the audio",
bInterfaceClass=audio.AUDIO_CLASS_DEVICE,
bInterfaceSubClass=audio.AUDIO_SUBCLASS_CONTROL,
bInterfaceProtocol=audio.AUDIO_PROTOCOL_V1,
iInterface=StringIndex.index("{} Audio".format(args.interface_name)),
subdescriptors=[
cs_ac_interface,
])
# Audio streaming interfaces must occur before MIDI ones.
audio_interfaces = [audio_control_interface] + cs_ac_interface.audio_streaming_interfaces + cs_ac_interface.midi_streaming_interfaces
interfaces_to_join = []
if 'CDC' in args.devices:
interfaces_to_join.append(cdc_interfaces)
if 'MSC' in args.devices:
interfaces_to_join.append(msc_interfaces)
if 'HID' in args.devices:
interfaces_to_join.append(hid_interfaces)
if 'AUDIO' in args.devices:
interfaces_to_join.append(audio_interfaces)
# util.join_interfaces() will renumber the endpoints to make them unique across descriptors,
# and renumber the interfaces in order. But we still need to fix up certain
# interface cross-references.
interfaces = util.join_interfaces(interfaces_to_join, renumber_endpoints=args.renumber_endpoints)
# Now adjust the CDC interface cross-references.
cdc_union.bMasterInterface = cdc_comm_interface.bInterfaceNumber
cdc_union.bSlaveInterface_list = [cdc_data_interface.bInterfaceNumber]
cdc_call_management.bDataInterface = cdc_data_interface.bInterfaceNumber
cdc_iad = standard.InterfaceAssociationDescriptor(
description="CDC IAD",
bFirstInterface=cdc_comm_interface.bInterfaceNumber,
bInterfaceCount=len(cdc_interfaces),
bFunctionClass=cdc.CDC_CLASS_COMM, # Communications Device Class
bFunctionSubClass=cdc.CDC_SUBCLASS_ACM, # Abstract control model
bFunctionProtocol=cdc.CDC_PROTOCOL_NONE)
descriptor_list = []
if 'CDC' in args.devices:
# Put the CDC IAD just before the CDC interfaces.
# There appears to be a bug in the Windows composite USB driver that requests the
# HID report descriptor with the wrong interface number if the HID interface is not given
# first. However, it still fetches the descriptor anyway. We could reorder the interfaces but
# the Windows 7 Adafruit_usbser.inf file thinks CDC is at Interface 0, so we'll leave it
# there for backwards compatibility.
descriptor_list.append(cdc_iad)
descriptor_list.extend(cdc_interfaces)
if 'MSC' in args.devices:
descriptor_list.extend(msc_interfaces)
if 'HID' in args.devices:
descriptor_list.extend(hid_interfaces)
if 'AUDIO' in args.devices:
# Only add the control interface because other audio interfaces are managed by it to ensure the
# correct ordering.
descriptor_list.append(audio_control_interface)
# Finally, build the composite descriptor.
configuration = standard.ConfigurationDescriptor(
description="Composite configuration",
wTotalLength=(standard.ConfigurationDescriptor.bLength +
sum([len(bytes(x)) for x in descriptor_list])),
bNumInterfaces=len(interfaces))
descriptor_list.insert(0, configuration)
string_descriptors = [standard.StringDescriptor(string) for string in StringIndex.strings_in_order()]
serial_number_descriptor = string_descriptors[SERIAL_NUMBER_INDEX]
c_file = args.output_c_file
h_file = args.output_h_file
c_file.write("""\
#include <stdint.h>
#include "py/objtuple.h"
#include "shared-bindings/usb_hid/Device.h"
#include "{H_FILE_NAME}"
""".format(H_FILE_NAME=h_file.name))
c_file.write("""\
// {DESCRIPTION} : {CLASS}
""".format(DESCRIPTION=device.description,
CLASS=device.__class__))
c_file.write("""\
const uint8_t usb_desc_dev[] = {
""")
for b in bytes(device):
c_file.write("0x{:02x}, ".format(b))
c_file.write("""\
};
""")
c_file.write("""\
const uint8_t usb_desc_cfg[] = {
""")
# Write out all the regular descriptors as one long array (that's how ASF4 does it).
descriptor_length = 0
for descriptor in descriptor_list:
c_file.write("""\
// {DESCRIPTION} : {CLASS}
""".format(DESCRIPTION=descriptor.description,
CLASS=descriptor.__class__))
b = bytes(descriptor)
notes = descriptor.notes()
i = 0
# This prints each subdescriptor on a separate line.
n = 0
while i < len(b):
length = b[i]
for j in range(length):
c_file.write("0x{:02x}, ".format(b[i + j]))
c_file.write("// " + notes[n])
n += 1
c_file.write("\n")
i += length
descriptor_length += len(b)
c_file.write("""\
};
""")
pointers_to_strings = []
for idx, descriptor in enumerate(string_descriptors):
c_file.write("""\
// {DESCRIPTION} : {CLASS}
""".format(DESCRIPTION=descriptor.description,
CLASS=descriptor.__class__))
b = bytes(descriptor)
notes = descriptor.notes()
i = 0
# This prints each subdescriptor on a separate line.
variable_name = StringIndex.index_to_variable[idx]
if not variable_name:
variable_name = "string_descriptor{}".format(idx)
const = "const "
if variable_name == "usb_serial_number":
const = ""
c_file.write("""\
{const}uint16_t {NAME}[] = {{
""".format(const=const, NAME=variable_name))
pointers_to_strings.append("{name}".format(name=variable_name))
n = 0
while i < len(b):
length = b[i]
for j in range(length // 2):
c_file.write("0x{:04x}, ".format(b[i + 2*j + 1] << 8 | b[i + 2*j]))
n += 1
c_file.write("\n")
i += length
c_file.write("""\
};
""")
c_file.write("""\
// array of pointer to string descriptors
uint16_t const * const string_desc_arr [] =
{
""")
c_file.write(""",\
""".join(pointers_to_strings))
c_file.write("""
};
""")
c_file.write("\n")
hid_descriptor_length = len(bytes(combined_hid_report_descriptor))
# Now write out the values we need for the .h file.
h_file.write("""\
#ifndef MICROPY_INCLUDED_AUTOGEN_USB_DESCRIPTOR_H
#define MICROPY_INCLUDED_AUTOGEN_USB_DESCRIPTOR_H
#include <stdint.h>
extern const uint8_t usb_desc_dev[{device_length}];
extern const uint8_t usb_desc_cfg[{configuration_length}];
extern uint16_t usb_serial_number[{serial_number_length}];
extern uint16_t const * const string_desc_arr [{string_descriptor_length}];
extern const uint8_t hid_report_descriptor[{hid_report_descriptor_length}];
#define CFG_TUSB_RHPORT0_MODE ({rhport0_mode})
#define USB_HID_NUM_DEVICES {hid_num_devices}
// Vendor name included in Inquiry response, max 8 bytes
#define CFG_TUD_MSC_VENDOR "{msc_vendor}"
// Product name included in Inquiry response, max 16 bytes
#define CFG_TUD_MSC_PRODUCT "{msc_product}"
"""
.format(serial_number_length=len(bytes(serial_number_descriptor)) // 2,
device_length=len(bytes(device)),
configuration_length=descriptor_length,
max_configuration_length=max(hid_descriptor_length, descriptor_length),
string_descriptor_length=len(pointers_to_strings),
hid_report_descriptor_length=len(bytes(combined_hid_report_descriptor)),
rhport0_mode='OPT_MODE_DEVICE | OPT_MODE_HIGH_SPEED' if args.highspeed else 'OPT_MODE_DEVICE',
hid_num_devices=len(args.hid_devices),
msc_vendor=args.manufacturer[:8],
msc_product=args.product[:16]))
# Write out the report descriptor and info
c_file.write("""\
const uint8_t hid_report_descriptor[{HID_DESCRIPTOR_LENGTH}] = {{
""".format(HID_DESCRIPTOR_LENGTH=hid_descriptor_length))
for b in bytes(combined_hid_report_descriptor):
c_file.write("0x{:02x}, ".format(b))
c_file.write("""\
};
""")
# Write out USB HID report buffer definitions.
for name in args.hid_devices:
c_file.write("""\
static uint8_t {name}_report_buffer[{report_length}];
""".format(name=name.lower(), report_length=hid_report_descriptors.HID_DEVICE_DATA[name].report_length))
if hid_report_descriptors.HID_DEVICE_DATA[name].out_report_length > 0:
c_file.write("""\
static uint8_t {name}_out_report_buffer[{report_length}];
""".format(name=name.lower(), report_length=hid_report_descriptors.HID_DEVICE_DATA[name].out_report_length))
# Write out table of device objects.
c_file.write("""
usb_hid_device_obj_t usb_hid_devices[] = {
""")
for name in args.hid_devices:
device_data = hid_report_descriptors.HID_DEVICE_DATA[name]
out_report_buffer = '{}_out_report_buffer'.format(name.lower()) if device_data.out_report_length > 0 else 'NULL'
c_file.write("""\
{{
.base = {{ .type = &usb_hid_device_type }},
.report_buffer = {name}_report_buffer,
.report_id = {report_id},
.report_length = {report_length},
.usage_page = {usage_page:#04x},
.usage = {usage:#04x},
.out_report_buffer = {out_report_buffer},
.out_report_length = {out_report_length},
}},
""".format(name=name.lower(), report_id=report_ids[name],
report_length=device_data.report_length,
usage_page=device_data.usage_page,
usage=device_data.usage,
out_report_buffer=out_report_buffer,
out_report_length=device_data.out_report_length))
c_file.write("""\
};
""")
# Write out tuple of device objects.
c_file.write("""
mp_obj_tuple_t common_hal_usb_hid_devices = {{
.base = {{
.type = &mp_type_tuple,
}},
.len = {num_devices},
.items = {{
""".format(num_devices=len(args.hid_devices)))
for idx in range(len(args.hid_devices)):
c_file.write("""\
(mp_obj_t) &usb_hid_devices[{idx}],
""".format(idx=idx))
c_file.write("""\
},
};
""")
h_file.write("""\
#endif // MICROPY_INCLUDED_AUTOGEN_USB_DESCRIPTOR_H
""")
| [((209, 254), 'sys.path.append', 'sys.path.append', (['"""../../tools/usb_descriptor"""'], {}), "('../../tools/usb_descriptor')\n", (224, 254), False, 'import sys\n'), ((821, 885), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {'description': '"""Generate USB descriptors."""'}), "(description='Generate USB descriptors.')\n", (844, 885), False, 'import argparse\n'), ((6439, 6518), 'adafruit_usb_descriptor.cdc.Union', 'cdc.Union', ([], {'description': '"""CDC comm"""', 'bMasterInterface': '(0)', 'bSlaveInterface_list': '[1]'}), "(description='CDC comm', bMasterInterface=0, bSlaveInterface_list=[1])\n", (6448, 6518), False, 'from adafruit_usb_descriptor import audio, audio10, cdc, hid, midi, msc, standard, util\n'), ((6661, 6739), 'adafruit_usb_descriptor.cdc.CallManagement', 'cdc.CallManagement', ([], {'description': '"""CDC comm"""', 'bmCapabilities': '(1)', 'bDataInterface': '(1)'}), "(description='CDC comm', bmCapabilities=1, bDataInterface=1)\n", (6679, 6739), False, 'from adafruit_usb_descriptor import audio, audio10, cdc, hid, midi, msc, standard, util\n'), ((10800, 11009), 'adafruit_usb_descriptor.standard.EndpointDescriptor', 'standard.EndpointDescriptor', ([], {'description': '"""HID in"""', 'bEndpointAddress': '(args.hid_ep_num_in | standard.EndpointDescriptor.DIRECTION_IN)', 'bmAttributes': 'standard.EndpointDescriptor.TYPE_INTERRUPT', 'bInterval': '(8)'}), "(description='HID in', bEndpointAddress=args.\n hid_ep_num_in | standard.EndpointDescriptor.DIRECTION_IN, bmAttributes=\n standard.EndpointDescriptor.TYPE_INTERRUPT, bInterval=8)\n", (10827, 11009), False, 'from adafruit_usb_descriptor import audio, audio10, cdc, hid, midi, msc, standard, util\n'), ((11048, 11259), 'adafruit_usb_descriptor.standard.EndpointDescriptor', 'standard.EndpointDescriptor', ([], {'description': '"""HID out"""', 'bEndpointAddress': '(args.hid_ep_num_out | standard.EndpointDescriptor.DIRECTION_OUT)', 'bmAttributes': 'standard.EndpointDescriptor.TYPE_INTERRUPT', 'bInterval': '(8)'}), "(description='HID out', bEndpointAddress=args.\n hid_ep_num_out | standard.EndpointDescriptor.DIRECTION_OUT,\n bmAttributes=standard.EndpointDescriptor.TYPE_INTERRUPT, bInterval=8)\n", (11075, 11259), False, 'from adafruit_usb_descriptor import audio, audio10, cdc, hid, midi, msc, standard, util\n'), ((12235, 12385), 'adafruit_usb_descriptor.midi.OutJackDescriptor', 'midi.OutJackDescriptor', ([], {'description': '"""MIDI data out to user code."""', 'bJackType': 'midi.JACK_TYPE_EXTERNAL', 'input_pins': '[(midi_in_jack_emb, 1)]', 'iJack': '(0)'}), "(description='MIDI data out to user code.', bJackType\n =midi.JACK_TYPE_EXTERNAL, input_pins=[(midi_in_jack_emb, 1)], iJack=0)\n", (12257, 12385), False, 'from adafruit_usb_descriptor import audio, audio10, cdc, hid, midi, msc, standard, util\n'), ((12549, 12663), 'adafruit_usb_descriptor.midi.InJackDescriptor', 'midi.InJackDescriptor', ([], {'description': '"""MIDI data in from user code."""', 'bJackType': 'midi.JACK_TYPE_EXTERNAL', 'iJack': '(0)'}), "(description='MIDI data in from user code.', bJackType\n =midi.JACK_TYPE_EXTERNAL, iJack=0)\n", (12570, 12663), False, 'from adafruit_usb_descriptor import audio, audio10, cdc, hid, midi, msc, standard, util\n'), ((14416, 14570), 'adafruit_usb_descriptor.audio10.AudioControlInterface', 'audio10.AudioControlInterface', ([], {'description': '"""Empty audio control"""', 'audio_streaming_interfaces': '[]', 'midi_streaming_interfaces': '[audio_midi_interface]'}), "(description='Empty audio control',\n 
audio_streaming_interfaces=[], midi_streaming_interfaces=[\n audio_midi_interface])\n", (14445, 14570), False, 'from adafruit_usb_descriptor import audio, audio10, cdc, hid, midi, msc, standard, util\n'), ((15741, 15830), 'adafruit_usb_descriptor.util.join_interfaces', 'util.join_interfaces', (['interfaces_to_join'], {'renumber_endpoints': 'args.renumber_endpoints'}), '(interfaces_to_join, renumber_endpoints=args.\n renumber_endpoints)\n', (15761, 15830), False, 'from adafruit_usb_descriptor import audio, audio10, cdc, hid, midi, msc, standard, util\n'), ((17753, 17786), 'adafruit_usb_descriptor.standard.StringDescriptor', 'standard.StringDescriptor', (['string'], {}), '(string)\n', (17778, 17786), False, 'from adafruit_usb_descriptor import audio, audio10, cdc, hid, midi, msc, standard, util\n'), ((3370, 3410), 'argparse.FileType', 'argparse.FileType', (['"""w"""'], {'encoding': '"""UTF-8"""'}), "('w', encoding='UTF-8')\n", (3387, 3410), False, 'import argparse\n'), ((3471, 3511), 'argparse.FileType', 'argparse.FileType', (['"""w"""'], {'encoding': '"""UTF-8"""'}), "('w', encoding='UTF-8')\n", (3488, 3511), False, 'import argparse\n'), ((7191, 7237), 'adafruit_usb_descriptor.cdc.Header', 'cdc.Header', ([], {'description': '"""CDC comm"""', 'bcdCDC': '(272)'}), "(description='CDC comm', bcdCDC=272)\n", (7201, 7237), False, 'from adafruit_usb_descriptor import audio, audio10, cdc, hid, midi, msc, standard, util\n'), ((7304, 7375), 'adafruit_usb_descriptor.cdc.AbstractControlManagement', 'cdc.AbstractControlManagement', ([], {'description': '"""CDC comm"""', 'bmCapabilities': '(2)'}), "(description='CDC comm', bmCapabilities=2)\n", (7333, 7375), False, 'from adafruit_usb_descriptor import audio, audio10, cdc, hid, midi, msc, standard, util\n'), ((7432, 7680), 'adafruit_usb_descriptor.standard.EndpointDescriptor', 'standard.EndpointDescriptor', ([], {'description': '"""CDC comm in"""', 'bEndpointAddress': '(args.cdc_ep_num_notification | standard.EndpointDescriptor.DIRECTION_IN)', 'bmAttributes': 'standard.EndpointDescriptor.TYPE_INTERRUPT', 'wMaxPacketSize': '(64)', 'bInterval': '(16)'}), "(description='CDC comm in', bEndpointAddress=\n args.cdc_ep_num_notification | standard.EndpointDescriptor.DIRECTION_IN,\n bmAttributes=standard.EndpointDescriptor.TYPE_INTERRUPT, wMaxPacketSize\n =64, bInterval=16)\n", (7459, 7680), False, 'from adafruit_usb_descriptor import audio, audio10, cdc, hid, midi, msc, standard, util\n'), ((7967, 8233), 'adafruit_usb_descriptor.standard.EndpointDescriptor', 'standard.EndpointDescriptor', ([], {'description': '"""CDC data out"""', 'bEndpointAddress': '(args.cdc_ep_num_data_out | standard.EndpointDescriptor.DIRECTION_OUT)', 'bmAttributes': 'standard.EndpointDescriptor.TYPE_BULK', 'bInterval': '(0)', 'wMaxPacketSize': '(512 if args.highspeed else 64)'}), "(description='CDC data out', bEndpointAddress=\n args.cdc_ep_num_data_out | standard.EndpointDescriptor.DIRECTION_OUT,\n bmAttributes=standard.EndpointDescriptor.TYPE_BULK, bInterval=0,\n wMaxPacketSize=512 if args.highspeed else 64)\n", (7994, 8233), False, 'from adafruit_usb_descriptor import audio, audio10, cdc, hid, midi, msc, standard, util\n'), ((8291, 8554), 'adafruit_usb_descriptor.standard.EndpointDescriptor', 'standard.EndpointDescriptor', ([], {'description': '"""CDC data in"""', 'bEndpointAddress': '(args.cdc_ep_num_data_in | standard.EndpointDescriptor.DIRECTION_IN)', 'bmAttributes': 'standard.EndpointDescriptor.TYPE_BULK', 'bInterval': '(0)', 'wMaxPacketSize': '(512 if args.highspeed else 64)'}), 
"(description='CDC data in', bEndpointAddress=\n args.cdc_ep_num_data_in | standard.EndpointDescriptor.DIRECTION_IN,\n bmAttributes=standard.EndpointDescriptor.TYPE_BULK, bInterval=0,\n wMaxPacketSize=512 if args.highspeed else 64)\n", (8318, 8554), False, 'from adafruit_usb_descriptor import audio, audio10, cdc, hid, midi, msc, standard, util\n'), ((13329, 13439), 'adafruit_usb_descriptor.midi.Header', 'midi.Header', ([], {'jacks_and_elements': '[midi_in_jack_emb, midi_in_jack_ext, midi_out_jack_emb, midi_out_jack_ext]'}), '(jacks_and_elements=[midi_in_jack_emb, midi_in_jack_ext,\n midi_out_jack_emb, midi_out_jack_ext])\n', (13340, 13439), False, 'from adafruit_usb_descriptor import audio, audio10, cdc, hid, midi, msc, standard, util\n'), ((13901, 13960), 'adafruit_usb_descriptor.midi.DataEndpointDescriptor', 'midi.DataEndpointDescriptor', ([], {'baAssocJack': '[midi_in_jack_emb]'}), '(baAssocJack=[midi_in_jack_emb])\n', (13928, 13960), False, 'from adafruit_usb_descriptor import audio, audio10, cdc, hid, midi, msc, standard, util\n'), ((14328, 14388), 'adafruit_usb_descriptor.midi.DataEndpointDescriptor', 'midi.DataEndpointDescriptor', ([], {'baAssocJack': '[midi_out_jack_emb]'}), '(baAssocJack=[midi_out_jack_emb])\n', (14355, 14388), False, 'from adafruit_usb_descriptor import audio, audio10, cdc, hid, midi, msc, standard, util\n'), ((9019, 9273), 'adafruit_usb_descriptor.standard.EndpointDescriptor', 'standard.EndpointDescriptor', ([], {'description': '"""MSC in"""', 'bEndpointAddress': '(args.msc_ep_num_in | standard.EndpointDescriptor.DIRECTION_IN)', 'bmAttributes': 'standard.EndpointDescriptor.TYPE_BULK', 'bInterval': '(0)', 'wMaxPacketSize': '(512 if args.highspeed else 64)'}), "(description='MSC in', bEndpointAddress=args.\n msc_ep_num_in | standard.EndpointDescriptor.DIRECTION_IN, bmAttributes=\n standard.EndpointDescriptor.TYPE_BULK, bInterval=0, wMaxPacketSize=512 if\n args.highspeed else 64)\n", (9046, 9273), False, 'from adafruit_usb_descriptor import audio, audio10, cdc, hid, midi, msc, standard, util\n'), ((9354, 9610), 'adafruit_usb_descriptor.standard.EndpointDescriptor', 'standard.EndpointDescriptor', ([], {'description': '"""MSC out"""', 'bEndpointAddress': '(args.msc_ep_num_out | standard.EndpointDescriptor.DIRECTION_OUT)', 'bmAttributes': 'standard.EndpointDescriptor.TYPE_BULK', 'bInterval': '(0)', 'wMaxPacketSize': '(512 if args.highspeed else 64)'}), "(description='MSC out', bEndpointAddress=args.\n msc_ep_num_out | standard.EndpointDescriptor.DIRECTION_OUT,\n bmAttributes=standard.EndpointDescriptor.TYPE_BULK, bInterval=0,\n wMaxPacketSize=512 if args.highspeed else 64)\n", (9381, 9610), False, 'from adafruit_usb_descriptor import audio, audio10, cdc, hid, midi, msc, standard, util\n')] |
Columbine21/THUIAR-ERC | bclstm/train_meld.py | 90e928e1ce777152e459dbc487acf04c32cbc645 | from tqdm import tqdm
import pandas as pd
import numpy as np, argparse, time, pickle, random, os, datetime
import torch
import torch.optim as optim
from model import MaskedNLLLoss, BC_LSTM
from dataloader import MELDDataLoader
from sklearn.metrics import f1_score, confusion_matrix, accuracy_score, classification_report
def setup_seed(seed):
""" Manually Fix the random seed to get deterministic results.
"""
torch.manual_seed(seed)
torch.cuda.manual_seed(seed)
torch.cuda.manual_seed_all(seed)
np.random.seed(seed)
random.seed(seed)
torch.backends.cudnn.benchmark = False
torch.backends.cudnn.deterministic = True
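# Runs one full pass over a dataloader. In 'train' mode the loss is backpropagated per
# batch; in every mode the function returns the masked average loss, accuracy, the
# flattened labels/predictions/masks and the weighted F1 score.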
def train_or_eval_model(model, loss_function, dataloader, epoch, optimizer=None, mode='train'):
losses, preds, labels, masks, losses_sense = [], [], [], [], []
max_sequence_len = []
assert mode != 'train' or optimizer != None
if mode == 'train':
model.train()
else:
model.eval()
with tqdm(dataloader) as td:
for data in td:
if mode == 'train':
optimizer.zero_grad()
textf, acouf, mask, label = [d.cuda() for d in data[:-1]] if args.cuda else data[:-1]
log_prob, _ = model(textf, None, acouf, None, mask)
lp_ = log_prob.transpose(0,1).contiguous().view(-1, log_prob.size()[2]) # batch*seq_len, n_classes
labels_ = label.view(-1) # batch*seq_len
loss = loss_function(lp_, labels_, mask)
pred_ = torch.argmax(lp_,1) # batch*seq_len
preds.append(pred_.data.cpu().numpy())
labels.append(labels_.data.cpu().numpy())
masks.append(mask.view(-1).cpu().numpy())
losses.append(loss.item()*masks[-1].sum())
if mode == 'train':
total_loss = loss
total_loss.backward()
optimizer.step()
if preds!=[]:
preds = np.concatenate(preds)
labels = np.concatenate(labels)
masks = np.concatenate(masks)
else:
return float('nan'), float('nan'), float('nan'), [], [], [], float('nan'),[]
avg_loss = round(np.sum(losses)/np.sum(masks), 4)
avg_sense_loss = round(np.sum(losses_sense)/np.sum(masks), 4)
avg_accuracy = round(accuracy_score(labels,preds, sample_weight=masks)*100, 2)
avg_fscore = round(f1_score(labels,preds, sample_weight=masks, average='weighted')*100, 2)
if mode == 'test':
class_report = classification_report(labels, preds, sample_weight=masks, target_names=['neutral', 'surprise', 'fear', 'sadness', 'joy', 'disgust', 'anger'], digits=6)
print(class_report)
return avg_loss, avg_accuracy, labels, preds, masks, [avg_fscore]
def parse_args():
parser = argparse.ArgumentParser()
parser.add_argument('--num_workers', type=int, default=0,
help='num workers of loading data')
# dataloader settings
parser.add_argument('--batch-size', type=int, default=32, metavar='BS', help='batch size')
parser.add_argument('--data_path', type=str, default='../TextCnn/dataset/MELD_features_raw.pkl')
# model settings.
parser.add_argument('--attention_type', type=str, default='general2')
parser.add_argument('--utterance_dim', type=int, default=600,
help='embedding dims to use')
parser.add_argument('--emotion_state_dim', type=int, default=100)
parser.add_argument('--hidden_layer_dim', type=int, default=100)
parser.add_argument('--dropout', type=float, default=0.25)
parser.add_argument('--n_classes', type=int, default=7)
# late fusion module.
parser.add_argument('--lateFusionModule', type=str, default='concat')
parser.add_argument('--input_features', type=tuple, default=(100, 300))
parser.add_argument('--pre_fusion_hidden_dims', type=tuple, default=(24, 7))
parser.add_argument('--pre_fusion_dropout', type=float, default=0.4)
parser.add_argument('--post_fusion_dropout', type=float, default=0.3)
# train settings.
parser.add_argument('--lr', type=float, default=1e-4, metavar='LR', help='learning rate')
parser.add_argument('--l2', type=float, default=1e-5, metavar='L2', help='L2 regularization weight')
parser.add_argument('--epochs', type=int, default=100, metavar='E', help='number of epochs')
return parser.parse_args()
if __name__ == '__main__':
args = parse_args()
args.cuda = torch.cuda.is_available()
if args.cuda:
print('Running on GPU')
else:
print('Running on CPU')
for seed in [1, 11, 111, 1111, 11111]:
setup_seed(seed)
args.seed = seed
print(args)
model = BC_LSTM(args)
print('MELD BC_LSTM MODULE ...')
if args.cuda:
model.cuda()
loss_weights = torch.FloatTensor([1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0])
loss_function = MaskedNLLLoss(loss_weights.cuda() if args.cuda else loss_weights)
optimizer = optim.Adam(model.parameters(), lr=args.lr, weight_decay=args.l2)
lf = open('logs/cnn_meld_logs.txt', 'a')
dataloader = MELDDataLoader(args)
valid_losses, valid_fscores = [], []
test_fscores, test_accuracys, test_losses = [], [], []
best_loss, best_label, best_pred, best_mask = None, None, None, None
for e in range(args.epochs):
start_time = time.time()
train_loss, train_acc, _, _, _, train_fscore = train_or_eval_model(model, loss_function, dataloader['train'], e, optimizer, mode='train')
valid_loss, valid_acc, _, _, _, valid_fscore = train_or_eval_model(model, loss_function, dataloader['valid'], e, mode='valid')
test_loss, test_acc, test_label, test_pred, test_mask, test_fscore = train_or_eval_model(model, loss_function, dataloader['test'], e, mode='test')
valid_losses.append(valid_loss)
valid_fscores.append(valid_fscore)
test_losses.append(test_loss)
test_accuracys.append(test_acc)
test_fscores.append(test_fscore)
x = 'epoch: {}, train_loss: {}, acc: {}, fscore: {}, valid_loss: {}, acc: {}, fscore: {}, test_loss: {}, acc: {}, fscore: {}, time: {} sec'.format(e+1, train_loss, train_acc, train_fscore, valid_loss, valid_acc, valid_fscore, test_loss, test_acc, test_fscore, round(time.time()-start_time, 2))
print (x)
lf.write(x + '\n')
valid_fscores = np.array(valid_fscores).transpose()
test_fscores = np.array(test_fscores).transpose() # [1, epoches]
test_accuracys = np.array(test_accuracys).transpose() # [epoches]
f1_score1 = test_fscores[0][np.argmin(valid_losses)]
acc_score1 = test_accuracys[np.argmin(valid_losses)]
f1_score2 = test_fscores[0][np.argmax(valid_fscores[0])]
acc_score2 = test_accuracys[np.argmax(valid_fscores[0])]
scores = [acc_score1, f1_score1, acc_score2, f1_score2]
scores = [str(item) for item in scores]
print ('Test Scores: Weighted F1')
print('@Best Valid Loss: Test Acc: {}, Test F1: {}'.format(acc_score1, f1_score1))
print('@Best Valid F1: Test Acc: {}, Test F1: {}'.format(acc_score2, f1_score2))
rf = open('results/cnn_meld_results.txt', 'a')
rf.write('\t'.join(scores) + '\t' + str(args) + '\n')
rf.close()
| [((427, 450), 'torch.manual_seed', 'torch.manual_seed', (['seed'], {}), '(seed)\n', (444, 450), False, 'import torch\n'), ((455, 483), 'torch.cuda.manual_seed', 'torch.cuda.manual_seed', (['seed'], {}), '(seed)\n', (477, 483), False, 'import torch\n'), ((488, 520), 'torch.cuda.manual_seed_all', 'torch.cuda.manual_seed_all', (['seed'], {}), '(seed)\n', (514, 520), False, 'import torch\n'), ((525, 545), 'numpy.random.seed', 'np.random.seed', (['seed'], {}), '(seed)\n', (539, 545), True, 'import numpy as np, argparse, time, pickle, random, os, datetime\n'), ((550, 567), 'random.seed', 'random.seed', (['seed'], {}), '(seed)\n', (561, 567), False, 'import numpy as np, argparse, time, pickle, random, os, datetime\n'), ((2807, 2832), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {}), '()\n', (2830, 2832), False, 'import numpy as np, argparse, time, pickle, random, os, datetime\n'), ((4486, 4511), 'torch.cuda.is_available', 'torch.cuda.is_available', ([], {}), '()\n', (4509, 4511), False, 'import torch\n'), ((978, 994), 'tqdm.tqdm', 'tqdm', (['dataloader'], {}), '(dataloader)\n', (982, 994), False, 'from tqdm import tqdm\n'), ((1982, 2003), 'numpy.concatenate', 'np.concatenate', (['preds'], {}), '(preds)\n', (1996, 2003), True, 'import numpy as np, argparse, time, pickle, random, os, datetime\n'), ((2021, 2043), 'numpy.concatenate', 'np.concatenate', (['labels'], {}), '(labels)\n', (2035, 2043), True, 'import numpy as np, argparse, time, pickle, random, os, datetime\n'), ((2061, 2082), 'numpy.concatenate', 'np.concatenate', (['masks'], {}), '(masks)\n', (2075, 2082), True, 'import numpy as np, argparse, time, pickle, random, os, datetime\n'), ((2524, 2684), 'sklearn.metrics.classification_report', 'classification_report', (['labels', 'preds'], {'sample_weight': 'masks', 'target_names': "['neutral', 'surprise', 'fear', 'sadness', 'joy', 'disgust', 'anger']", 'digits': '(6)'}), "(labels, preds, sample_weight=masks, target_names=[\n 'neutral', 'surprise', 'fear', 'sadness', 'joy', 'disgust', 'anger'],\n digits=6)\n", (2545, 2684), False, 'from sklearn.metrics import f1_score, confusion_matrix, accuracy_score, classification_report\n'), ((4744, 4757), 'model.BC_LSTM', 'BC_LSTM', (['args'], {}), '(args)\n', (4751, 4757), False, 'from model import MaskedNLLLoss, BC_LSTM\n'), ((4879, 4933), 'torch.FloatTensor', 'torch.FloatTensor', (['[1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0]'], {}), '([1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0])\n', (4896, 4933), False, 'import torch\n'), ((5207, 5227), 'dataloader.MELDDataLoader', 'MELDDataLoader', (['args'], {}), '(args)\n', (5221, 5227), False, 'from dataloader import MELDDataLoader\n'), ((1541, 1561), 'torch.argmax', 'torch.argmax', (['lp_', '(1)'], {}), '(lp_, 1)\n', (1553, 1561), False, 'import torch\n'), ((2200, 2214), 'numpy.sum', 'np.sum', (['losses'], {}), '(losses)\n', (2206, 2214), True, 'import numpy as np, argparse, time, pickle, random, os, datetime\n'), ((2215, 2228), 'numpy.sum', 'np.sum', (['masks'], {}), '(masks)\n', (2221, 2228), True, 'import numpy as np, argparse, time, pickle, random, os, datetime\n'), ((2260, 2280), 'numpy.sum', 'np.sum', (['losses_sense'], {}), '(losses_sense)\n', (2266, 2280), True, 'import numpy as np, argparse, time, pickle, random, os, datetime\n'), ((2281, 2294), 'numpy.sum', 'np.sum', (['masks'], {}), '(masks)\n', (2287, 2294), True, 'import numpy as np, argparse, time, pickle, random, os, datetime\n'), ((2325, 2375), 'sklearn.metrics.accuracy_score', 'accuracy_score', (['labels', 'preds'], {'sample_weight': 'masks'}), 
'(labels, preds, sample_weight=masks)\n', (2339, 2375), False, 'from sklearn.metrics import f1_score, confusion_matrix, accuracy_score, classification_report\n'), ((2406, 2470), 'sklearn.metrics.f1_score', 'f1_score', (['labels', 'preds'], {'sample_weight': 'masks', 'average': '"""weighted"""'}), "(labels, preds, sample_weight=masks, average='weighted')\n", (2414, 2470), False, 'from sklearn.metrics import f1_score, confusion_matrix, accuracy_score, classification_report\n'), ((5478, 5489), 'time.time', 'time.time', ([], {}), '()\n', (5487, 5489), False, 'import numpy as np, argparse, time, pickle, random, os, datetime\n'), ((6591, 6614), 'numpy.array', 'np.array', (['valid_fscores'], {}), '(valid_fscores)\n', (6599, 6614), True, 'import numpy as np, argparse, time, pickle, random, os, datetime\n'), ((6650, 6672), 'numpy.array', 'np.array', (['test_fscores'], {}), '(test_fscores)\n', (6658, 6672), True, 'import numpy as np, argparse, time, pickle, random, os, datetime\n'), ((6725, 6749), 'numpy.array', 'np.array', (['test_accuracys'], {}), '(test_accuracys)\n', (6733, 6749), True, 'import numpy as np, argparse, time, pickle, random, os, datetime\n'), ((6811, 6834), 'numpy.argmin', 'np.argmin', (['valid_losses'], {}), '(valid_losses)\n', (6820, 6834), True, 'import numpy as np, argparse, time, pickle, random, os, datetime\n'), ((6872, 6895), 'numpy.argmin', 'np.argmin', (['valid_losses'], {}), '(valid_losses)\n', (6881, 6895), True, 'import numpy as np, argparse, time, pickle, random, os, datetime\n'), ((6933, 6960), 'numpy.argmax', 'np.argmax', (['valid_fscores[0]'], {}), '(valid_fscores[0])\n', (6942, 6960), True, 'import numpy as np, argparse, time, pickle, random, os, datetime\n'), ((6998, 7025), 'numpy.argmax', 'np.argmax', (['valid_fscores[0]'], {}), '(valid_fscores[0])\n', (7007, 7025), True, 'import numpy as np, argparse, time, pickle, random, os, datetime\n'), ((6472, 6483), 'time.time', 'time.time', ([], {}), '()\n', (6481, 6483), False, 'import numpy as np, argparse, time, pickle, random, os, datetime\n')] |
emkailu/PAT3DEM | bin/p3starcoordcheck.py | 74e7a0f30179e49ea5c7da1bea893e21a3ed601a | #!/usr/bin/env python
import os
import sys
import argparse
import pat3dem.star as p3s
import math
def main():
progname = os.path.basename(sys.argv[0])
usage = progname + """ [options] <coord star files>
Output the coord star files after deleting duplicate particles
"""
args_def = {'mindis':150}
parser = argparse.ArgumentParser()
parser.add_argument("star", nargs='*', help="specify coord star files to be processed")
parser.add_argument("-m", "--mindis", type=float, help="specify the minimum distance between particles in pixels, by default {}".format(args_def['mindis']))
args = parser.parse_args()
if len(sys.argv) == 1:
print "usage: " + usage
print "Please run '" + progname + " -h' for detailed options."
sys.exit(1)
# get default values
for i in args_def:
if args.__dict__[i] == None:
args.__dict__[i] = args_def[i]
# loop over all input files
for star in args.star:
star_dict = p3s.star_parse(star, 'data_')
header = star_dict['data_']+star_dict['loop_']
header_len = len(header)
basename = os.path.basename(os.path.splitext(star)[0])
with open(star) as s_read:
lines = s_read.readlines()[header_len:-1]
#
with open(basename+'_checked.star', 'w') as s_w:
s_w.write(''.join(header))
# use list of list to store x and y
xy = []
for line in lines:
good = 1
line = line.split()
# get coord
x, y = float(line[star_dict['_rlnCoordinateX']]), float(line[star_dict['_rlnCoordinateY']])
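# keep this particle only if it is at least --mindis pixels away from every particle kept so far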
for i in xy:
dis = math.sqrt((x - i[0])**2 + (y - i[1])**2)
if dis < args.mindis:
print 'Distance between ({},{}) and {} is {}. Discard.'.format(x,y,i,dis)
good = 0
break
if good == 1:
s_w.write('{:>12} '.format(x) + '{:>12} \n'.format(y))
xy.append((x,y))
s_w.write('\n')
if __name__ == '__main__':
main()
| [] |
ryankirkland/voice-of-the-customer | src/review_scraper.py | 0214af45cc6aa76bfce64065f07c3f4781ee045e | from bs4 import BeautifulSoup
import pandas as pd
import requests
import time
import sys
def reviews_scraper(asin_list, filename):
'''
Takes a list of asins, retrieves html for reviews page, and parses out key data points
Parameters
----------
List of ASINs (list of strings)
Returns:
-------
review information (list), reviews_df (Pandas DataFrame)
'''
asin_list = [asin_list]
print(asin_list)
reviews = []
headers = {"User-Agent":"Mozilla/5.0 (Windows NT 10.0; Win64; x64; rv:66.0) Gecko/20100101 Firefox/66.0", "Accept-Encoding":"gzip, deflate", "Accept":"text/html,application/xhtml+xml,application/xml;q=0.9,*/*;q=0.8", "DNT":"1","Connection":"close", "Upgrade-Insecure-Requests":"1"}
for asin in asin_list:
print(f'Collecting reviews for {asin}')
passed_last_page = None
counter = 1
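# page through the review list (at most 10 pages), stopping once Amazon returns the 'no reviews' section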
while (passed_last_page == None) and (counter <= 10):
print(len(reviews))
reviews_url = f'https://www.amazon.com/product-reviews/{asin}/ref=cm_cr_arp_d_viewopt_srt?ie=UTF8&reviewerType=all_reviews&sortBy=recent&pageNumber={counter}'
print(reviews_url)
rev = requests.get(reviews_url, headers=headers)
print(rev.status_code)
reviews_page_content = rev.content
review_soup = BeautifulSoup(reviews_page_content, features='lxml')
print(review_soup)
passed_last_page = review_soup.find('div', attrs={'class': 'a-section a-spacing-top-large a-text-center no-reviews-section'})
if passed_last_page == None:
for d in review_soup.findAll('div', attrs={'data-hook':'review'}):
# print(d)
try:
date = d.find('span', attrs={'data-hook':'review-date'})
date = date.text.split(' ')[-3:]
date = ' '.join(date)
except:
date = 'null'
try:
title = d.find('a', attrs={'data-hook': 'review-title'})
except:
title = 'null'
try:
product = d.find('a', attrs={'data-hook': 'format-strip'})
product = product.text
except:
product = 'null'
try:
review_asin = product['href'].split('/')[3]
except:
review_asin = asin
try:
verified = d.find('span', attrs={'data-hook':'avp-badge'})
if verified == None:
verified = 'Not Verified'
else:
verified = verified.text
except:
verified = 'null'
try:
description = d.find('span', attrs={'data-hook': 'review-body'})
except:
description = 'null'
try:
reviewer_name = d.find('span', attrs={'class': 'a-profile-name'})
except:
reviewer_name = 'null'
try:
stars = d.find('span', attrs={'class': 'a-icon-alt'})
except:
stars = 'null'
reviews.append([review_asin, product, date, verified, title.text, description.text, reviewer_name.text, float(stars.text[0:3])])
else:
pass
counter += 1
time.sleep(15)
reviews_df = pd.DataFrame(reviews, columns=['asin','product','date', 'verified', 'title', 'desc', 'reviewer_name', 'rating'])
reviews_df.to_csv(f'data/reviews/{filename}')
print(f'{len(reviews)} reviews for {len(asin_list)} asins stored successfully in {filename}')
return reviews, reviews_df
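# Example usage (the ASIN and output file name below are hypothetical; the function
# performs live HTTP requests against Amazon, so the output depends on network access
# and on Amazon's response):
#
#   reviews, reviews_df = reviews_scraper('B000000000', 'example_reviews.csv')
#   print(reviews_df.head())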
if __name__ == '__main__':
reviews_scraper(*sys.argv[1:]) | [((3894, 4012), 'pandas.DataFrame', 'pd.DataFrame', (['reviews'], {'columns': "['asin', 'product', 'date', 'verified', 'title', 'desc', 'reviewer_name',\n 'rating']"}), "(reviews, columns=['asin', 'product', 'date', 'verified',\n 'title', 'desc', 'reviewer_name', 'rating'])\n", (3906, 4012), True, 'import pandas as pd\n'), ((1188, 1230), 'requests.get', 'requests.get', (['reviews_url'], {'headers': 'headers'}), '(reviews_url, headers=headers)\n', (1200, 1230), False, 'import requests\n'), ((1340, 1392), 'bs4.BeautifulSoup', 'BeautifulSoup', (['reviews_page_content'], {'features': '"""lxml"""'}), "(reviews_page_content, features='lxml')\n", (1353, 1392), False, 'from bs4 import BeautifulSoup\n'), ((3853, 3867), 'time.sleep', 'time.sleep', (['(15)'], {}), '(15)\n', (3863, 3867), False, 'import time\n')] |
cglumberjack/lumber_metadata | lumberdata/metadata.py | aebca5dbecb8d7684b1b169bf2961e4ab0daca2b | # noinspection PyUnresolvedReferences
import os
import re
import shutil
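# NOTE: get() below calls cgl_execute() to run the external commands; that helper is
# not imported in this module and is expected to be provided by the surrounding package.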
# TODO I'm going to need to make a dictionary for my big list of stuff i care about and what's needed for
# every file type....
RAF = ['EXIF:LensModel', 'MakerNotes:RawImageHeight', 'MakerNotes:RawImageWidth', 'EXIF:CreateDate', 'EXIF:ModifyDate',
'EXIF:SerialNumber', 'Composite:Aperture', 'EXIF:FocalLength', 'EXIF:Make', 'EXIF:Model', 'EXIF:LensMake']
MOV = ['EXIF:LensModel', 'MakerNotes:RawImageHeight', 'MakerNotes:RawImageWidth', 'EXIF:CreateDate', 'EXIF:ModifyDate',
'EXIF:SerialNumber', 'Composite:Aperture', 'EXIF:FocalLength', 'EXIF:Make', 'EXIF:Model', 'EXIF:LensMake',
'QuickTime:VideoFrameRate', 'QuickTime:Duration']
R3D = ['ClipName', 'EdgeTC', 'EndEdgeTC', 'TotalFrames', 'FrameHeight', 'FrameWidth', 'Aperture', 'ISO', 'Date',
'AudioSlate', 'VideoSlate', 'Camera', 'CameraModel', 'CameraPIN', 'MediaSerialNumber', 'LensSerialNumber',
'FPS', 'AspectRatio', 'Kelvin', 'LensName', 'LensBrand', 'FocalLength', 'Shutter(deg)', 'SensorID', 'SensorName',
'Take']
def check_exiftool():
    """
    checks if exiftool is installed.
    :return: True if the exiftool executable is found on the PATH, False otherwise
    """
    # minimal check: exiftool is the command used by get() below
    return shutil.which('exiftool') is not None
def check_redline():
    """
    checks if redline is installed
    :return: True if the REDLINE executable is found on the PATH, False otherwise
    """
    # minimal check: REDLINE is the command used by get_red_data() below
    return shutil.which('REDLINE') is not None
def check_ffprobe():
    """
    checks if ffprobe is installed
    :return: True if the ffprobe executable is found on the PATH, False otherwise
    """
    # minimal check: ffprobe is the command used by get(..., tool='ffprobe') below
    return shutil.which('ffprobe') is not None
def get(filein, tool='exiftool', print_output=False):
"""
    Due to issues with the exiftool module this is provided as a way to parse output directly
    from exiftool or ffprobe through the system commands and cgl_execute. For the moment it's only
    designed to get the lumberdata for a single file.
    :param filein: path to the media file to inspect
    :param tool: command line tool to use, 'exiftool' (default) or 'ffprobe'
    :param print_output: if True, print the tool's output as it runs
    :return: dictionary containing lumberdata from the chosen tool
"""
ext = os.path.splitext(filein)[-1]
d = {}
if tool == 'exiftool':
command = r'exiftool %s' % filein
output = cgl_execute(command=command, verbose=False, print_output=print_output)
for each in output['printout']:
            key, value = re.split(r"\s+:\s+", each)
d[key] = value
return d
elif tool == 'ffprobe':
command = r'%s %s' % ('ffprobe', filein)
output = cgl_execute(command=command)
for each in output['printout']:
try:
                values = re.split(r":\s+", each)
key = values[0]
values.pop(0)
if 'Stream' in key:
split_v = values[1].split(',')
d['Image Size'] = split_v[2].split()[0]
d['Source Image Width'], d['Source Image Height'] = d['Image Size'].split('x')
d['Video Frame Rate'] = split_v[4].split(' fps')[0].replace(' ', '')
if 'Duration' in key:
d['Track Duration'] = '%s s' % values[0].split(',')[0]
value = ' '.join(values)
d[key] = value
except ValueError:
print('skipping %s' % each)
return d
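# Example usage (hypothetical file path; requires the chosen tool to be on the PATH):
#
#   meta = get('/path/to/clip.mov', tool='ffprobe')
#   print(meta.get('Image Size'))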
def get_red_data(filein):
"""
method for pulling lumberdata from r3d files. REDLINE is a command line interface from RED that is required
for this
https://www.red.com/downloads/options?itemInternalId=16144
:param filein:
:return:
"""
file_, ext_ = os.path.splitext(filein)
if ext_.upper() == '.R3D':
command = r'REDLINE --i %s --printMeta 1' % filein
d = {}
for line in os.popen(command).readlines():
line = line.strip('\n')
line = line.replace('\t', '')
line = line.replace(' ', '')
try:
key_, value = line.split(':', 1)
if key_ != 'None':
d[key_] = value
except ValueError:
pass
return d
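# Example usage (hypothetical file path; requires the REDLINE command line tool):
#
#   red_meta = get_red_data('/path/to/A001_C001_0101.R3D')
#   print(red_meta.get('FPS'))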
| [((3281, 3305), 'os.path.splitext', 'os.path.splitext', (['filein'], {}), '(filein)\n', (3297, 3305), False, 'import os\n'), ((1764, 1788), 'os.path.splitext', 'os.path.splitext', (['filein'], {}), '(filein)\n', (1780, 1788), False, 'import os\n'), ((2026, 2053), 're.split', 're.split', (['"""\\\\s+:\\\\s+"""', 'each'], {}), "('\\\\s+:\\\\s+', each)\n", (2034, 2053), False, 'import re\n'), ((3431, 3448), 'os.popen', 'os.popen', (['command'], {}), '(command)\n', (3439, 3448), False, 'import os\n'), ((2301, 2324), 're.split', 're.split', (['""":\\\\s+"""', 'each'], {}), "(':\\\\s+', each)\n", (2309, 2324), False, 'import re\n')] |
robfiras/RLBench | rlbench/task_environment.py | 97ab9526b6efb718f2b5aae40897ccd75aeff11e | import logging
from typing import List, Callable
import numpy as np
from pyquaternion import Quaternion
from pyrep import PyRep
from pyrep.errors import IKError
from pyrep.objects import Dummy, Object
from rlbench import utils
from rlbench.action_modes import ArmActionMode, ActionMode
from rlbench.backend.exceptions import BoundaryError, WaypointError
from rlbench.backend.observation import Observation
from rlbench.backend.robot import Robot
from rlbench.backend.scene import Scene
from rlbench.backend.task import Task
from rlbench.demo import Demo
from rlbench.observation_config import ObservationConfig
_TORQUE_MAX_VEL = 9999
_DT = 0.05
_MAX_RESET_ATTEMPTS = 40
_MAX_DEMO_ATTEMPTS = 10
class InvalidActionError(Exception):
pass
class TaskEnvironmentError(Exception):
pass
class TaskEnvironment(object):
def __init__(self, pyrep: PyRep, robot: Robot, scene: Scene, task: Task,
action_mode: ActionMode, dataset_root: str,
obs_config: ObservationConfig,
static_positions: bool = False,
attach_grasped_objects: bool = True):
self._pyrep = pyrep
self._robot = robot
self._scene = scene
self._task = task
self._variation_number = 0
self._action_mode = action_mode
self._dataset_root = dataset_root
self._obs_config = obs_config
self._static_positions = static_positions
self._attach_grasped_objects = attach_grasped_objects
self._reset_called = False
self._prev_ee_velocity = None
self._enable_path_observations = False
self._scene.load(self._task)
self._pyrep.start()
self._target_workspace_check = Dummy.create()
self._last_e = None
def get_name(self) -> str:
return self._task.get_name()
def sample_variation(self) -> int:
self._variation_number = np.random.randint(
0, self._task.variation_count())
return self._variation_number
def set_variation(self, v: int) -> None:
if v >= self.variation_count():
raise TaskEnvironmentError(
'Requested variation %d, but there are only %d variations.' % (
v, self.variation_count()))
self._variation_number = v
def variation_count(self) -> int:
return self._task.variation_count()
def reset(self) -> (List[str], Observation):
self._scene.reset()
try:
desc = self._scene.init_episode(
self._variation_number, max_attempts=_MAX_RESET_ATTEMPTS,
randomly_place=not self._static_positions)
except (BoundaryError, WaypointError) as e:
raise TaskEnvironmentError(
'Could not place the task %s in the scene. This should not '
                'happen, please raise an issue on this task.'
% self._task.get_name()) from e
self._reset_called = True
# redundancy resolution
self._last_e = None
# Returns a list of descriptions and the first observation
return desc, self._scene.get_observation()
def get_observation(self) -> Observation:
return self._scene.get_observation()
def get_joint_upper_velocity_limits(self):
return self._robot.arm.get_joint_upper_velocity_limits()
def get_all_graspable_objects(self):
return self._task.get_graspable_objects()
def get_robot_visuals(self):
return self._robot.arm.get_visuals()
    def get_all_graspable_object_positions(self, relative_to_cameras=False):
        """ returns the positions of all graspable objects relative to all enabled cameras """
objects = self._task.get_graspable_objects()
positions = []
for ob in objects:
            if relative_to_cameras:
positions.append(self._scene.get_object_position_relative_to_cameras(ob))
else:
positions.append({"left_shoulder_camera": ob.get_position(),
"right_shoulder_camera": ob.get_position(),
"front_camera": ob.get_position(),
"wrist_camera": ob.get_position()})
return positions
    def get_all_graspable_object_poses(self, relative_to_cameras=False):
        """ returns the poses of all graspable objects relative to all enabled cameras """
objects = self._task.get_graspable_objects()
poses = []
for ob in objects:
if relative_to_cameras:
poses.append(self._scene.get_object_pose_relative_to_cameras(ob))
else:
poses.append({"left_shoulder_camera": ob.get_pose(),
"right_shoulder_camera": ob.get_pose(),
"front_camera": ob.get_pose(),
"wrist_camera": ob.get_pose()})
return poses
def _assert_action_space(self, action, expected_shape):
if np.shape(action) != expected_shape:
raise RuntimeError(
'Expected the action shape to be: %s, but was shape: %s' % (
str(expected_shape), str(np.shape(action))))
def _assert_unit_quaternion(self, quat):
if not np.isclose(np.linalg.norm(quat), 1.0):
raise RuntimeError('Action contained non unit quaternion!')
def _torque_action(self, action):
self._robot.arm.set_joint_target_velocities(
[(_TORQUE_MAX_VEL if t < 0 else -_TORQUE_MAX_VEL)
for t in action])
self._robot.arm.set_joint_forces(np.abs(action))
def _ee_action(self, action, relative_to=None):
self._assert_unit_quaternion(action[3:])
try:
joint_positions = self._robot.arm.solve_ik(
action[:3], quaternion=action[3:], relative_to=relative_to)
self._robot.arm.set_joint_target_positions(joint_positions)
except IKError as e:
raise InvalidActionError('Could not find a path.') from e
done = False
prev_values = None
# Move until reached target joint positions or until we stop moving
        # (e.g. when we collide with something)
while not done:
self._scene.step()
cur_positions = self._robot.arm.get_joint_positions()
reached = np.allclose(cur_positions, joint_positions, atol=0.01)
not_moving = False
if prev_values is not None:
not_moving = np.allclose(
cur_positions, prev_values, atol=0.001)
prev_values = cur_positions
done = reached or not_moving
def _path_action(self, action, relative_to=None):
self._assert_unit_quaternion(action[3:])
try:
# Check if the target is in the workspace; if not, then quick reject
# Only checks position, not rotation
pos_to_check = action[:3]
if relative_to is not None:
self._target_workspace_check.set_position(
pos_to_check, relative_to)
pos_to_check = self._target_workspace_check.get_position()
valid = self._scene.check_target_in_workspace(pos_to_check)
if not valid:
raise InvalidActionError('Target is outside of workspace.')
path = self._robot.arm.get_path(
action[:3], quaternion=action[3:], ignore_collisions=True,
relative_to=relative_to)
done = False
observations = []
while not done:
done = path.step()
self._scene.step()
if self._enable_path_observations:
observations.append(self._scene.get_observation())
success, terminate = self._task.success()
# If the task succeeds while traversing path, then break early
if success:
break
observations.append(self._scene.get_observation())
return observations
except IKError as e:
raise InvalidActionError('Could not find a path.') from e
def step(self, action, camcorder=None) -> (Observation, int, bool):
# returns observation, reward, done, info
if not self._reset_called:
raise RuntimeError(
"Call 'reset' before calling 'step' on a task.")
# action should contain 1 extra value for gripper open close state
arm_action = np.array(action[:-1])
ee_action = action[-1]
        if not 0.0 <= ee_action <= 1.0:
raise ValueError('Gripper action expected to be within 0 and 1.')
# Discretize the gripper action
current_ee = (1.0 if self._robot.gripper.get_open_amount()[0] > 0.9 else 0.0)
if ee_action > 0.5:
ee_action = 1.0
elif ee_action < 0.5:
ee_action = 0.0
if current_ee != ee_action:
arm_action = np.array([0.0]*7)
if self._action_mode.arm == ArmActionMode.ABS_JOINT_VELOCITY:
self._assert_action_space(arm_action,
(len(self._robot.arm.joints),))
self._robot.arm.set_joint_target_velocities(arm_action)
self._scene.step()
# if needed save some images
if camcorder:
obs = self._scene.get_observation()
camcorder.save(obs, self.get_robot_visuals(), self.get_all_graspable_objects())
elif self._action_mode.arm == ArmActionMode.DELTA_JOINT_VELOCITY:
self._assert_action_space(arm_action,
(len(self._robot.arm.joints),))
cur = np.array(self._robot.arm.get_joint_velocities())
self._robot.arm.set_joint_target_velocities(cur + arm_action)
self._scene.step()
elif self._action_mode.arm == ArmActionMode.ABS_JOINT_POSITION:
self._assert_action_space(arm_action,
(len(self._robot.arm.joints),))
self._robot.arm.set_joint_target_positions(arm_action)
self._scene.step()
elif self._action_mode.arm == ArmActionMode.DELTA_JOINT_POSITION:
self._assert_action_space(arm_action,
(len(self._robot.arm.joints),))
cur = np.array(self._robot.arm.get_joint_positions())
self._robot.arm.set_joint_target_positions(cur + arm_action)
self._scene.step()
elif self._action_mode.arm == ArmActionMode.ABS_JOINT_TORQUE:
self._assert_action_space(
arm_action, (len(self._robot.arm.joints),))
self._torque_action(arm_action)
self._scene.step()
elif self._action_mode.arm == ArmActionMode.DELTA_JOINT_TORQUE:
cur = np.array(self._robot.arm.get_joint_forces())
new_action = cur + arm_action
self._torque_action(new_action)
self._scene.step()
elif self._action_mode.arm == ArmActionMode.ABS_EE_POSE_WORLD_FRAME:
self._assert_action_space(arm_action, (7,))
self._ee_action(list(arm_action))
elif self._action_mode.arm == ArmActionMode.ABS_EE_POSE_PLAN_WORLD_FRAME:
self._assert_action_space(arm_action, (7,))
self._path_observations = []
self._path_observations = self._path_action(list(arm_action))
elif self._action_mode.arm == ArmActionMode.DELTA_EE_POSE_PLAN_WORLD_FRAME:
self._assert_action_space(arm_action, (7,))
a_x, a_y, a_z, a_qx, a_qy, a_qz, a_qw = arm_action
x, y, z, qx, qy, qz, qw = self._robot.arm.get_tip().get_pose()
new_rot = Quaternion(a_qw, a_qx, a_qy, a_qz) * Quaternion(qw, qx,
qy, qz)
qw, qx, qy, qz = list(new_rot)
new_pose = [a_x + x, a_y + y, a_z + z] + [qx, qy, qz, qw]
self._path_observations = []
self._path_observations = self._path_action(list(new_pose))
elif self._action_mode.arm == ArmActionMode.DELTA_EE_POSE_WORLD_FRAME:
self._assert_action_space(arm_action, (7,))
a_x, a_y, a_z, a_qx, a_qy, a_qz, a_qw = arm_action
x, y, z, qx, qy, qz, qw = self._robot.arm.get_tip().get_pose()
new_rot = Quaternion(a_qw, a_qx, a_qy, a_qz) * Quaternion(
qw, qx, qy, qz)
qw, qx, qy, qz = list(new_rot)
new_pose = [a_x + x, a_y + y, a_z + z] + [qx, qy, qz, qw]
self._ee_action(list(new_pose))
elif self._action_mode.arm == ArmActionMode.EE_POSE_EE_FRAME:
self._assert_action_space(arm_action, (7,))
self._ee_action(
list(arm_action), relative_to=self._robot.arm.get_tip())
elif self._action_mode.arm == ArmActionMode.EE_POSE_PLAN_EE_FRAME:
self._assert_action_space(arm_action, (7,))
self._path_observations = []
self._path_observations = self._path_action(
list(arm_action), relative_to=self._robot.arm.get_tip())
else:
raise RuntimeError('Unrecognised action mode.')
if current_ee != ee_action:
done = False
while not done:
done = self._robot.gripper.actuate(ee_action, velocity=0.2)
self._pyrep.step()
self._task.step()
# if needed save some images
if camcorder:
obs = self._scene.get_observation()
camcorder.save(obs, self.get_robot_visuals(), self.get_all_graspable_objects())
if ee_action == 0.0 and self._attach_grasped_objects:
                # If it was a gripper close action, then check for a grasp.
for g_obj in self._task.get_graspable_objects():
self._robot.gripper.grasp(g_obj)
else:
                # If it was a gripper open action, then release any grasp.
self._robot.gripper.release()
success, terminate = self._task.success()
task_reward = self._task.reward()
reward = float(success) if task_reward is None else task_reward
return self._scene.get_observation(), reward, terminate
def resolve_redundancy_joint_velocities(self, actions, setup):
"""
Resolves redundant self-motion into the nullspace without changing the gripper tip position
:param actions:
Current actions without redundancy resolution.
:param setup:
Setup for redundancy resolution defining the mode, weighting etc.
:return: Array of joint velocities, which move the robot's tip according to the provided actions yet push
the joint position towards a reference position.
"""
# get the Jacobian
J = self._robot.arm.get_jacobian()
J = np.transpose(J)
J = np.flip(J)
J = J[-3:]
# compute the pseudo inverse
J_plus = np.linalg.pinv(J)
# weighting
if type(setup["W"]) is list:
W = np.array(setup["W"])
elif setup["W"] is None:
# use default weighting later
W = None
else:
raise TypeError("Unsupported type %s for weighting vector." % type(setup["W"]))
# compute the error
if setup["mode"] == "reference_position":
dL, L = self.get_loss_reference_position(setup["ref_position"], W)
elif setup["mode"] == "collision_avoidance":
dL, L = self.get_loss_collision_avoidance(W, setup)
# compute the joint velocities
q_dot_redundancy = setup["alpha"] * np.matmul((np.identity(len(self._robot.arm.joints)) - np.matmul(J_plus, J)), dL)
        # the provided jacobian seems to be inaccurate, resulting in slight movement of the ee. This is why
        # the velocities are set to 0 once the error stops changing much.
e = dL
if setup["cut-off_error"] is not None:
if self._last_e is not None:
e_dot = np.sum(np.abs(e - self._last_e))
if self._last_e is not None and e_dot < setup["cut-off_error"]:
q_dot_redundancy = np.array([0.0] * 7)
self._last_e = e
else:
self._last_e = e
return actions - q_dot_redundancy, L
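    # Standalone sketch of the nullspace projection used above (toy numbers, detached
    # from the simulator): velocities of the form (I - J^+ J) @ dL cause no end-effector
    # motion because J (I - J^+ J) = 0 holds for the Moore-Penrose pseudoinverse J^+.
    #
    #   import numpy as np
    #   J = np.random.rand(3, 7)              # toy 3x7 Jacobian
    #   N = np.identity(7) - np.linalg.pinv(J) @ J
    #   dL = np.random.rand(7)                # any joint-space gradient
    #   assert np.allclose(J @ (N @ dL), 0.0)  # projected motion is invisible at the tip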
def get_loss_reference_position(self, ref_pos, W):
"""
        Calculates the summed squared error between the current and the reference configuration as well as
        its partial derivatives with respect to all q's for redundancy resolution.
-> L(q) = 1/2 sum_{i=1}^N w_i (q_i - \tilde{q}_i)^2
:param ref_pos:
Reference position.
:param W:
Weighting vector.
:return:
            1: The partial derivatives of the summed squared error between the current and the
            reference configuration -> \nabla_q L(q)
2: Summed squarred error between the current and the reference configuration. -> L(q)
"""
if W is None:
# default weighting
W = np.array([1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0])
e = (self._robot.arm.get_joint_positions() - ref_pos)
return e * W, 0.5*np.dot(e,e*W)
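    # Worked toy check of the loss above (independent of the simulator): with
    # q = [0.2, -0.1], ref = [0.0, 0.0] and W = [1.0, 2.0], the error e = [0.2, -0.1],
    # the returned gradient e * W = [0.2, -0.2] and the loss 0.5 * e.(e*W) = 0.03.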
def get_loss_collision_avoidance(self, W, setup):
"""
        Calculates the loss as well as the respective partial derivatives for redundancy resolution with
        collision avoidance. This only works with tasks that include exactly one obstacle!
L(q) = \sum_{i=1}^N d(q)^{-1}
:param W:
Weighting vector.
:return:
            1: The partial derivatives of the loss above. -> \nabla_q L(q)
2: The loss shown above.-> L(q)
"""
# get the position of the object
p_obs = self._task.obstacle.get_position() + np.array([0, 0, 0.33]) - self._robot.arm.joints[0].get_position()
#p_obs = self._task.obstacle.get_position()
p_obs = np.append(p_obs, [1])
# get the transformation matrices, their derivatives, and the positions of the links
A_1, A_2, A_3, A_4, A_5, A_6, A_7 = self._robot.get_transformation_matrices()
dA_1, dA_2, dA_3, dA_4, dA_5, dA_6, dA_7 = self._robot.get_transformation_matrices_derivatives()
p_1, p_2, p_3, p_4, p_5, p_6, p_7 = self._robot.get_link_positions_in_ref_frames()
# we use reciprocal of the distance between each link and an obstacle as our Loss
# the chain rule delivers: d/dq L = (p_i^0 (q_1,..., q_i) - p_obs)^T * d/dq (p_i^0 (q_1,..., q_i) - p_obs)
# where p_i^0 = (\prod_{j=1}^i A_j^{j-1}(q_j)) * p_i
# as the left side of d/dq L is used often, let's calculate it in advance
d_1_T = np.transpose(A_1.dot(p_1) - p_obs)
d_2_T = np.transpose(A_1.dot(A_2).dot(p_2) - p_obs)
d_3_T = np.transpose(A_1.dot(A_2).dot(A_3).dot(p_3) - p_obs)
d_4_T = np.transpose(A_1.dot(A_2).dot(A_3).dot(A_4).dot(p_4) - p_obs)
d_5_T = np.transpose(A_1.dot(A_2).dot(A_3).dot(A_4).dot(A_5).dot(p_5) - p_obs)
d_6_T = np.transpose(A_1.dot(A_2).dot(A_3).dot(A_4).dot(A_5).dot(A_6).dot(p_6) - p_obs)
d_7_T = np.transpose(A_1.dot(A_2).dot(A_3).dot(A_4).dot(A_5).dot(A_6).dot(A_7).dot(p_7) - p_obs)
# now we can calculate the derivatives in each dimension
dq_1 = -np.matmul(d_1_T, dA_1.dot(p_1)) + \
-np.matmul(d_2_T, dA_1.dot(A_2).dot(p_2)) + \
-np.matmul(d_3_T, dA_1.dot(A_2).dot(A_3).dot(p_3)) + \
-np.matmul(d_4_T, dA_1.dot(A_2).dot(A_3).dot(A_4).dot(p_4)) + \
-np.matmul(d_5_T, dA_1.dot(A_2).dot(A_3).dot(A_4).dot(A_5).dot(p_5)) + \
-np.matmul(d_6_T, dA_1.dot(A_2).dot(A_3).dot(A_4).dot(A_5).dot(A_6).dot(p_6)) + \
-np.matmul(d_7_T, dA_1.dot(A_2).dot(A_3).dot(A_4).dot(A_5).dot(A_6).dot(A_7).dot(p_7))
dq_2 = -np.matmul(d_2_T, A_1.dot(dA_2).dot(p_2)) + \
-np.matmul(d_3_T, A_1.dot(dA_2).dot(A_3).dot(p_3)) + \
-np.matmul(d_4_T, A_1.dot(dA_2).dot(A_3).dot(A_4).dot(p_4)) + \
-np.matmul(d_5_T, A_1.dot(dA_2).dot(A_3).dot(A_4).dot(A_5).dot(p_5)) + \
-np.matmul(d_6_T, A_1.dot(dA_2).dot(A_3).dot(A_4).dot(A_5).dot(A_6).dot(p_6)) + \
-np.matmul(d_7_T, A_1.dot(dA_2).dot(A_3).dot(A_4).dot(A_5).dot(A_6).dot(A_7).dot(p_7))
dq_3 = -np.matmul(d_3_T, A_1.dot(A_2).dot(dA_3).dot(p_3)) + \
-np.matmul(d_4_T, A_1.dot(A_2).dot(dA_3).dot(A_4).dot(p_4)) + \
-np.matmul(d_5_T, A_1.dot(A_2).dot(dA_3).dot(A_4).dot(A_5).dot(p_5)) + \
-np.matmul(d_6_T, A_1.dot(A_2).dot(dA_3).dot(A_4).dot(A_5).dot(A_6).dot(p_6)) + \
-np.matmul(d_7_T, A_1.dot(A_2).dot(dA_3).dot(A_4).dot(A_5).dot(A_6).dot(A_7).dot(p_7))
dq_4 = -np.matmul(d_4_T, A_1.dot(A_2).dot(A_3).dot(dA_4).dot(p_4)) + \
-np.matmul(d_5_T, A_1.dot(A_2).dot(A_3).dot(dA_4).dot(A_5).dot(p_5)) + \
-np.matmul(d_6_T, A_1.dot(A_2).dot(A_3).dot(dA_4).dot(A_5).dot(A_6).dot(p_6)) + \
-np.matmul(d_7_T, A_1.dot(A_2).dot(A_3).dot(dA_4).dot(A_5).dot(A_6).dot(A_7).dot(p_7))
dq_5 = -np.matmul(d_5_T, A_1.dot(A_2).dot(A_3).dot(A_4).dot(dA_5).dot(p_5)) + \
-np.matmul(d_6_T, A_1.dot(A_2).dot(A_3).dot(A_4).dot(dA_5).dot(A_6).dot(p_6)) + \
-np.matmul(d_7_T, A_1.dot(A_2).dot(A_3).dot(A_4).dot(dA_5).dot(A_6).dot(A_7).dot(p_7))
dq_6 = -np.matmul(d_6_T, A_1.dot(A_2).dot(A_3).dot(A_4).dot(A_5).dot(dA_6).dot(p_6)) + \
-np.matmul(d_7_T, A_1.dot(A_2).dot(A_3).dot(A_4).dot(A_5).dot(dA_6).dot(A_7).dot(p_7))
dq_7 = -np.matmul(d_7_T, A_1.dot(A_2).dot(A_3).dot(A_4).dot(A_5).dot(A_6).dot(dA_7).dot(p_7))
if W is None:
            # default weighting vector -> based on the reciprocal of the squared distance. The greater the
            # distance the smaller the weight. That is, it is concentrated on close objects.
W = np.array([1 / np.sum(np.square(d_1_T)),
1 / np.sum(np.square(d_2_T)) ,
1 / np.sum(np.square(d_3_T)) ,
1 / np.sum(np.square(d_4_T)) ,
1 / np.sum(np.square(d_5_T)) ,
1 / np.sum(np.square(d_6_T)) ,
1 / np.sum(np.square(d_7_T)) ]) * 0.1
# --- scaling to keep distance to joint limits ---
# get the minimum distance of each joint to its limit
joint_positions = np.array([j.get_joint_position() for j in self._robot.arm.joints])
lower_joint_limits = np.array(setup["lower_joint_pos_limit"])
upper_joint_limits = np.array(setup["upper_joint_pos_limit"])
min_j_distances = [np.minimum(u-j, j-l) for l,u,j in zip(lower_joint_limits, upper_joint_limits,
joint_positions)]
# start scaling down error when joint limit is 15° away.
# Scaling is done linearly from 0 to 1 for 0° <= d <= 15°
rad_thres = 15*(np.pi/180)
W *= np.array([ np.minimum((1/rad_thres)*d, 1.0) for d in min_j_distances])
        # concatenate the derivatives to a vector and apply the weighting
dL = np.array([dq_1, dq_2, dq_3, dq_4, dq_5, dq_6, dq_7])*W
# calculate the loss
L = np.sqrt(np.dot(d_1_T, d_1_T))*W[0] \
+ np.sqrt(np.dot(d_2_T, d_2_T))*W[1] \
+ np.sqrt(np.dot(d_3_T, d_3_T))*W[2] \
+ np.sqrt(np.dot(d_4_T, d_4_T))*W[3] \
+ np.sqrt(np.dot(d_5_T, d_5_T))*W[4] \
+ np.sqrt(np.dot(d_6_T, d_6_T))*W[5] \
+ np.sqrt(np.dot(d_7_T, d_7_T))*W[6]
return dL, L
def enable_path_observations(self, value: bool) -> None:
if (self._action_mode.arm != ArmActionMode.DELTA_EE_POSE_PLAN_WORLD_FRAME and
self._action_mode.arm != ArmActionMode.ABS_EE_POSE_PLAN_WORLD_FRAME and
self._action_mode.arm != ArmActionMode.EE_POSE_PLAN_EE_FRAME):
raise RuntimeError('Only available in DELTA_EE_POSE_PLAN or '
'ABS_EE_POSE_PLAN action mode.')
self._enable_path_observations = value
def get_path_observations(self):
if (self._action_mode.arm != ArmActionMode.DELTA_EE_POSE_PLAN_WORLD_FRAME and
self._action_mode.arm != ArmActionMode.ABS_EE_POSE_PLAN_WORLD_FRAME and
self._action_mode.arm != ArmActionMode.EE_POSE_PLAN_EE_FRAME):
raise RuntimeError('Only available in DELTA_EE_POSE_PLAN or '
'ABS_EE_POSE_PLAN action mode.')
return self._path_observations
def get_demos(self, amount: int, live_demos: bool = False,
image_paths: bool = False,
callable_each_step: Callable[[Observation], None] = None,
max_attempts: int = _MAX_DEMO_ATTEMPTS,
) -> List[Demo]:
"""Negative means all demos"""
if not live_demos and (self._dataset_root is None
or len(self._dataset_root) == 0):
raise RuntimeError(
"Can't ask for a stored demo when no dataset root provided.")
if not live_demos:
if self._dataset_root is None or len(self._dataset_root) == 0:
raise RuntimeError(
"Can't ask for stored demo when no dataset root provided.")
demos = utils.get_stored_demos(
amount, image_paths, self._dataset_root, self._variation_number,
self._task.get_name(), self._obs_config)
else:
ctr_loop = self._robot.arm.joints[0].is_control_loop_enabled()
self._robot.arm.set_control_loop_enabled(True)
demos = self._get_live_demos(
amount, callable_each_step, max_attempts)
self._robot.arm.set_control_loop_enabled(ctr_loop)
return demos
def _get_live_demos(self, amount: int,
callable_each_step: Callable[
[Observation], None] = None,
max_attempts: int = _MAX_DEMO_ATTEMPTS) -> List[Demo]:
demos = []
for i in range(amount):
attempts = max_attempts
while attempts > 0:
random_seed = np.random.get_state()
self.reset()
logging.info('Collecting demo %d' % i)
try:
demo = self._scene.get_demo(
callable_each_step=callable_each_step)
demo.random_seed = random_seed
demos.append(demo)
break
except Exception as e:
attempts -= 1
logging.info('Bad demo. ' + str(e))
if attempts <= 0:
raise RuntimeError(
'Could not collect demos. Maybe a problem with the task?')
return demos
def reset_to_demo(self, demo: Demo) -> (List[str], Observation):
demo.restore_state()
return self.reset()
| [((1721, 1735), 'pyrep.objects.Dummy.create', 'Dummy.create', ([], {}), '()\n', (1733, 1735), False, 'from pyrep.objects import Dummy, Object\n'), ((8536, 8557), 'numpy.array', 'np.array', (['action[:-1]'], {}), '(action[:-1])\n', (8544, 8557), True, 'import numpy as np\n'), ((14997, 15012), 'numpy.transpose', 'np.transpose', (['J'], {}), '(J)\n', (15009, 15012), True, 'import numpy as np\n'), ((15025, 15035), 'numpy.flip', 'np.flip', (['J'], {}), '(J)\n', (15032, 15035), True, 'import numpy as np\n'), ((15110, 15127), 'numpy.linalg.pinv', 'np.linalg.pinv', (['J'], {}), '(J)\n', (15124, 15127), True, 'import numpy as np\n'), ((18089, 18110), 'numpy.append', 'np.append', (['p_obs', '[1]'], {}), '(p_obs, [1])\n', (18098, 18110), True, 'import numpy as np\n'), ((22729, 22769), 'numpy.array', 'np.array', (["setup['lower_joint_pos_limit']"], {}), "(setup['lower_joint_pos_limit'])\n", (22737, 22769), True, 'import numpy as np\n'), ((22799, 22839), 'numpy.array', 'np.array', (["setup['upper_joint_pos_limit']"], {}), "(setup['upper_joint_pos_limit'])\n", (22807, 22839), True, 'import numpy as np\n'), ((5012, 5028), 'numpy.shape', 'np.shape', (['action'], {}), '(action)\n', (5020, 5028), True, 'import numpy as np\n'), ((5620, 5634), 'numpy.abs', 'np.abs', (['action'], {}), '(action)\n', (5626, 5634), True, 'import numpy as np\n'), ((6368, 6422), 'numpy.allclose', 'np.allclose', (['cur_positions', 'joint_positions'], {'atol': '(0.01)'}), '(cur_positions, joint_positions, atol=0.01)\n', (6379, 6422), True, 'import numpy as np\n'), ((9006, 9025), 'numpy.array', 'np.array', (['([0.0] * 7)'], {}), '([0.0] * 7)\n', (9014, 9025), True, 'import numpy as np\n'), ((15202, 15222), 'numpy.array', 'np.array', (["setup['W']"], {}), "(setup['W'])\n", (15210, 15222), True, 'import numpy as np\n'), ((17229, 17274), 'numpy.array', 'np.array', (['[1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0]'], {}), '([1.0, 1.0, 1.0, 1.0, 1.0, 1.0, 1.0])\n', (17237, 17274), True, 'import numpy as np\n'), ((22867, 22891), 'numpy.minimum', 'np.minimum', (['(u - j)', '(j - l)'], {}), '(u - j, j - l)\n', (22877, 22891), True, 'import numpy as np\n'), ((23356, 23408), 'numpy.array', 'np.array', (['[dq_1, dq_2, dq_3, dq_4, dq_5, dq_6, dq_7]'], {}), '([dq_1, dq_2, dq_3, dq_4, dq_5, dq_6, dq_7])\n', (23364, 23408), True, 'import numpy as np\n'), ((5294, 5314), 'numpy.linalg.norm', 'np.linalg.norm', (['quat'], {}), '(quat)\n', (5308, 5314), True, 'import numpy as np\n'), ((6523, 6574), 'numpy.allclose', 'np.allclose', (['cur_positions', 'prev_values'], {'atol': '(0.001)'}), '(cur_positions, prev_values, atol=0.001)\n', (6534, 6574), True, 'import numpy as np\n'), ((16317, 16336), 'numpy.array', 'np.array', (['([0.0] * 7)'], {}), '([0.0] * 7)\n', (16325, 16336), True, 'import numpy as np\n'), ((17363, 17379), 'numpy.dot', 'np.dot', (['e', '(e * W)'], {}), '(e, e * W)\n', (17369, 17379), True, 'import numpy as np\n'), ((17954, 17976), 'numpy.array', 'np.array', (['[0, 0, 0.33]'], {}), '([0, 0, 0.33])\n', (17962, 17976), True, 'import numpy as np\n'), ((23216, 23250), 'numpy.minimum', 'np.minimum', (['(1 / rad_thres * d)', '(1.0)'], {}), '(1 / rad_thres * d, 1.0)\n', (23226, 23250), True, 'import numpy as np\n'), ((26437, 26458), 'numpy.random.get_state', 'np.random.get_state', ([], {}), '()\n', (26456, 26458), True, 'import numpy as np\n'), ((26504, 26542), 'logging.info', 'logging.info', (["('Collecting demo %d' % i)"], {}), "('Collecting demo %d' % i)\n", (26516, 26542), False, 'import logging\n'), ((15838, 15858), 'numpy.matmul', 'np.matmul', 
(['J_plus', 'J'], {}), '(J_plus, J)\n', (15847, 15858), True, 'import numpy as np\n'), ((16180, 16204), 'numpy.abs', 'np.abs', (['(e - self._last_e)'], {}), '(e - self._last_e)\n', (16186, 16204), True, 'import numpy as np\n'), ((23761, 23781), 'numpy.dot', 'np.dot', (['d_7_T', 'd_7_T'], {}), '(d_7_T, d_7_T)\n', (23767, 23781), True, 'import numpy as np\n'), ((23711, 23731), 'numpy.dot', 'np.dot', (['d_6_T', 'd_6_T'], {}), '(d_6_T, d_6_T)\n', (23717, 23731), True, 'import numpy as np\n'), ((5202, 5218), 'numpy.shape', 'np.shape', (['action'], {}), '(action)\n', (5210, 5218), True, 'import numpy as np\n'), ((23661, 23681), 'numpy.dot', 'np.dot', (['d_5_T', 'd_5_T'], {}), '(d_5_T, d_5_T)\n', (23667, 23681), True, 'import numpy as np\n'), ((22117, 22133), 'numpy.square', 'np.square', (['d_1_T'], {}), '(d_1_T)\n', (22126, 22133), True, 'import numpy as np\n'), ((22173, 22189), 'numpy.square', 'np.square', (['d_2_T'], {}), '(d_2_T)\n', (22182, 22189), True, 'import numpy as np\n'), ((22230, 22246), 'numpy.square', 'np.square', (['d_3_T'], {}), '(d_3_T)\n', (22239, 22246), True, 'import numpy as np\n'), ((22287, 22303), 'numpy.square', 'np.square', (['d_4_T'], {}), '(d_4_T)\n', (22296, 22303), True, 'import numpy as np\n'), ((22344, 22360), 'numpy.square', 'np.square', (['d_5_T'], {}), '(d_5_T)\n', (22353, 22360), True, 'import numpy as np\n'), ((22401, 22417), 'numpy.square', 'np.square', (['d_6_T'], {}), '(d_6_T)\n', (22410, 22417), True, 'import numpy as np\n'), ((22458, 22474), 'numpy.square', 'np.square', (['d_7_T'], {}), '(d_7_T)\n', (22467, 22474), True, 'import numpy as np\n'), ((23611, 23631), 'numpy.dot', 'np.dot', (['d_4_T', 'd_4_T'], {}), '(d_4_T, d_4_T)\n', (23617, 23631), True, 'import numpy as np\n'), ((23561, 23581), 'numpy.dot', 'np.dot', (['d_3_T', 'd_3_T'], {}), '(d_3_T, d_3_T)\n', (23567, 23581), True, 'import numpy as np\n'), ((23461, 23481), 'numpy.dot', 'np.dot', (['d_1_T', 'd_1_T'], {}), '(d_1_T, d_1_T)\n', (23467, 23481), True, 'import numpy as np\n'), ((23511, 23531), 'numpy.dot', 'np.dot', (['d_2_T', 'd_2_T'], {}), '(d_2_T, d_2_T)\n', (23517, 23531), True, 'import numpy as np\n'), ((11795, 11829), 'pyquaternion.Quaternion', 'Quaternion', (['a_qw', 'a_qx', 'a_qy', 'a_qz'], {}), '(a_qw, a_qx, a_qy, a_qz)\n', (11805, 11829), False, 'from pyquaternion import Quaternion\n'), ((11832, 11858), 'pyquaternion.Quaternion', 'Quaternion', (['qw', 'qx', 'qy', 'qz'], {}), '(qw, qx, qy, qz)\n', (11842, 11858), False, 'from pyquaternion import Quaternion\n'), ((12452, 12486), 'pyquaternion.Quaternion', 'Quaternion', (['a_qw', 'a_qx', 'a_qy', 'a_qz'], {}), '(a_qw, a_qx, a_qy, a_qz)\n', (12462, 12486), False, 'from pyquaternion import Quaternion\n'), ((12489, 12515), 'pyquaternion.Quaternion', 'Quaternion', (['qw', 'qx', 'qy', 'qz'], {}), '(qw, qx, qy, qz)\n', (12499, 12515), False, 'from pyquaternion import Quaternion\n')] |
Yoann-Vie/esgi-hearthstone | tests/generic_relations/test_forms.py | 115d03426c7e8e80d89883b78ac72114c29bed12 | from django import forms
from django.contrib.contenttypes.forms import generic_inlineformset_factory
from django.contrib.contenttypes.models import ContentType
from django.db import models
from django.test import TestCase
from django.test.utils import isolate_apps
from .models import (
Animal, ForProxyModelModel, Gecko, Mineral, ProxyRelatedModel, TaggedItem,
)
class CustomWidget(forms.TextInput):
pass
class TaggedItemForm(forms.ModelForm):
class Meta:
model = TaggedItem
fields = '__all__'
widgets = {'tag': CustomWidget}
class GenericInlineFormsetTests(TestCase):
def test_output(self):
GenericFormSet = generic_inlineformset_factory(TaggedItem, extra=1)
formset = GenericFormSet()
self.assertHTMLEqual(
''.join(form.as_p() for form in formset.forms),
"""<p><label for="id_generic_relations-taggeditem-content_type-object_id-0-tag">
Tag:</label> <input id="id_generic_relations-taggeditem-content_type-object_id-0-tag" type="text"
name="generic_relations-taggeditem-content_type-object_id-0-tag" maxlength="50"></p>
<p><label for="id_generic_relations-taggeditem-content_type-object_id-0-DELETE">Delete:</label>
<input type="checkbox" name="generic_relations-taggeditem-content_type-object_id-0-DELETE"
id="id_generic_relations-taggeditem-content_type-object_id-0-DELETE">
<input type="hidden" name="generic_relations-taggeditem-content_type-object_id-0-id"
id="id_generic_relations-taggeditem-content_type-object_id-0-id"></p>"""
)
formset = GenericFormSet(instance=Animal())
self.assertHTMLEqual(
''.join(form.as_p() for form in formset.forms),
"""<p><label for="id_generic_relations-taggeditem-content_type-object_id-0-tag">
Tag:</label> <input id="id_generic_relations-taggeditem-content_type-object_id-0-tag"
type="text" name="generic_relations-taggeditem-content_type-object_id-0-tag" maxlength="50"></p>
<p><label for="id_generic_relations-taggeditem-content_type-object_id-0-DELETE">Delete:</label>
<input type="checkbox" name="generic_relations-taggeditem-content_type-object_id-0-DELETE"
id="id_generic_relations-taggeditem-content_type-object_id-0-DELETE"><input type="hidden"
name="generic_relations-taggeditem-content_type-object_id-0-id"
id="id_generic_relations-taggeditem-content_type-object_id-0-id"></p>"""
)
platypus = Animal.objects.create(
common_name='Platypus', latin_name='Ornithorhynchus anatinus',
)
platypus.tags.create(tag='shiny')
GenericFormSet = generic_inlineformset_factory(TaggedItem, extra=1)
formset = GenericFormSet(instance=platypus)
tagged_item_id = TaggedItem.objects.get(tag='shiny', object_id=platypus.id).id
self.assertHTMLEqual(
''.join(form.as_p() for form in formset.forms),
"""<p><label for="id_generic_relations-taggeditem-content_type-object_id-0-tag">Tag:</label>
<input id="id_generic_relations-taggeditem-content_type-object_id-0-tag" type="text"
name="generic_relations-taggeditem-content_type-object_id-0-tag" value="shiny" maxlength="50"></p>
<p><label for="id_generic_relations-taggeditem-content_type-object_id-0-DELETE">Delete:</label>
<input type="checkbox" name="generic_relations-taggeditem-content_type-object_id-0-DELETE"
id="id_generic_relations-taggeditem-content_type-object_id-0-DELETE">
<input type="hidden" name="generic_relations-taggeditem-content_type-object_id-0-id"
value="%s" id="id_generic_relations-taggeditem-content_type-object_id-0-id"></p>
<p><label for="id_generic_relations-taggeditem-content_type-object_id-1-tag">Tag:</label>
<input id="id_generic_relations-taggeditem-content_type-object_id-1-tag" type="text"
name="generic_relations-taggeditem-content_type-object_id-1-tag" maxlength="50"></p>
<p><label for="id_generic_relations-taggeditem-content_type-object_id-1-DELETE">Delete:</label>
<input type="checkbox" name="generic_relations-taggeditem-content_type-object_id-1-DELETE"
id="id_generic_relations-taggeditem-content_type-object_id-1-DELETE">
<input type="hidden" name="generic_relations-taggeditem-content_type-object_id-1-id"
id="id_generic_relations-taggeditem-content_type-object_id-1-id"></p>""" % tagged_item_id
)
lion = Animal.objects.create(common_name='Lion', latin_name='Panthera leo')
formset = GenericFormSet(instance=lion, prefix='x')
self.assertHTMLEqual(
''.join(form.as_p() for form in formset.forms),
"""<p><label for="id_x-0-tag">Tag:</label>
<input id="id_x-0-tag" type="text" name="x-0-tag" maxlength="50"></p>
<p><label for="id_x-0-DELETE">Delete:</label> <input type="checkbox" name="x-0-DELETE" id="id_x-0-DELETE">
<input type="hidden" name="x-0-id" id="id_x-0-id"></p>"""
)
def test_options(self):
TaggedItemFormSet = generic_inlineformset_factory(
TaggedItem,
can_delete=False,
exclude=['tag'],
extra=3,
)
platypus = Animal.objects.create(common_name='Platypus', latin_name='Ornithorhynchus anatinus')
harmless = platypus.tags.create(tag='harmless')
mammal = platypus.tags.create(tag='mammal')
# Works without a queryset.
formset = TaggedItemFormSet(instance=platypus)
self.assertEqual(len(formset.forms), 5)
self.assertHTMLEqual(
formset.forms[0].as_p(),
'<input type="hidden" name="generic_relations-taggeditem-content_type-object_id-0-id" value="%s" '
'id="id_generic_relations-taggeditem-content_type-object_id-0-id">' % harmless.pk
)
self.assertEqual(formset.forms[0].instance, harmless)
self.assertEqual(formset.forms[1].instance, mammal)
self.assertIsNone(formset.forms[2].instance.pk)
# A queryset can be used to alter display ordering.
formset = TaggedItemFormSet(instance=platypus, queryset=TaggedItem.objects.order_by('-tag'))
self.assertEqual(len(formset.forms), 5)
self.assertEqual(formset.forms[0].instance, mammal)
self.assertEqual(formset.forms[1].instance, harmless)
self.assertIsNone(formset.forms[2].instance.pk)
# A queryset that omits items.
formset = TaggedItemFormSet(instance=platypus, queryset=TaggedItem.objects.filter(tag__startswith='harm'))
self.assertEqual(len(formset.forms), 4)
self.assertEqual(formset.forms[0].instance, harmless)
self.assertIsNone(formset.forms[1].instance.pk)
def test_get_queryset_ordering(self):
"""
BaseGenericInlineFormSet.get_queryset() adds default ordering, if
needed.
"""
inline_formset = generic_inlineformset_factory(TaggedItem, exclude=('tag',))
formset = inline_formset(instance=Gecko.objects.create())
self.assertIs(formset.get_queryset().ordered, True)
def test_initial(self):
quartz = Mineral.objects.create(name='Quartz', hardness=7)
GenericFormSet = generic_inlineformset_factory(TaggedItem, extra=1)
ctype = ContentType.objects.get_for_model(quartz)
initial_data = [{
'tag': 'lizard',
'content_type': ctype.pk,
'object_id': quartz.pk,
}]
formset = GenericFormSet(initial=initial_data)
self.assertEqual(formset.forms[0].initial, initial_data[0])
def test_meta_widgets(self):
"""TaggedItemForm has a widget defined in Meta."""
Formset = generic_inlineformset_factory(TaggedItem, TaggedItemForm)
form = Formset().forms[0]
self.assertIsInstance(form['tag'].field.widget, CustomWidget)
@isolate_apps('generic_relations')
def test_incorrect_content_type(self):
class BadModel(models.Model):
content_type = models.PositiveIntegerField()
msg = "fk_name 'generic_relations.BadModel.content_type' is not a ForeignKey to ContentType"
with self.assertRaisesMessage(Exception, msg):
generic_inlineformset_factory(BadModel, TaggedItemForm)
def test_save_new_uses_form_save(self):
class SaveTestForm(forms.ModelForm):
def save(self, *args, **kwargs):
self.instance.saved_by = 'custom method'
return super().save(*args, **kwargs)
Formset = generic_inlineformset_factory(ForProxyModelModel, fields='__all__', form=SaveTestForm)
instance = ProxyRelatedModel.objects.create()
data = {
'form-TOTAL_FORMS': '1',
'form-INITIAL_FORMS': '0',
'form-MAX_NUM_FORMS': '',
'form-0-title': 'foo',
}
formset = Formset(data, instance=instance, prefix='form')
self.assertTrue(formset.is_valid())
new_obj = formset.save()[0]
self.assertEqual(new_obj.saved_by, 'custom method')
def test_save_new_for_proxy(self):
Formset = generic_inlineformset_factory(ForProxyModelModel, fields='__all__', for_concrete_model=False)
instance = ProxyRelatedModel.objects.create()
data = {
'form-TOTAL_FORMS': '1',
'form-INITIAL_FORMS': '0',
'form-MAX_NUM_FORMS': '',
'form-0-title': 'foo',
}
formset = Formset(data, instance=instance, prefix='form')
self.assertTrue(formset.is_valid())
new_obj, = formset.save()
self.assertEqual(new_obj.obj, instance)
def test_save_new_for_concrete(self):
Formset = generic_inlineformset_factory(ForProxyModelModel, fields='__all__', for_concrete_model=True)
instance = ProxyRelatedModel.objects.create()
data = {
'form-TOTAL_FORMS': '1',
'form-INITIAL_FORMS': '0',
'form-MAX_NUM_FORMS': '',
'form-0-title': 'foo',
}
formset = Formset(data, instance=instance, prefix='form')
self.assertTrue(formset.is_valid())
new_obj, = formset.save()
self.assertNotIsInstance(new_obj.obj, ProxyRelatedModel)
def test_initial_count(self):
GenericFormSet = generic_inlineformset_factory(TaggedItem)
data = {
'form-TOTAL_FORMS': '3',
'form-INITIAL_FORMS': '3',
'form-MAX_NUM_FORMS': '',
}
formset = GenericFormSet(data=data, prefix='form')
self.assertEqual(formset.initial_form_count(), 3)
formset = GenericFormSet(data=data, prefix='form', save_as_new=True)
self.assertEqual(formset.initial_form_count(), 0)
def test_save_as_new(self):
"""
The save_as_new parameter creates new items that are associated with
the object.
"""
lion = Animal.objects.create(common_name='Lion', latin_name='Panthera leo')
yellow = lion.tags.create(tag='yellow')
hairy = lion.tags.create(tag='hairy')
GenericFormSet = generic_inlineformset_factory(TaggedItem)
data = {
'form-TOTAL_FORMS': '3',
'form-INITIAL_FORMS': '2',
'form-MAX_NUM_FORMS': '',
'form-0-id': str(yellow.pk),
'form-0-tag': 'hunts',
'form-1-id': str(hairy.pk),
'form-1-tag': 'roars',
}
formset = GenericFormSet(data, instance=lion, prefix='form', save_as_new=True)
self.assertTrue(formset.is_valid())
tags = formset.save()
self.assertEqual([tag.tag for tag in tags], ['hunts', 'roars'])
hunts, roars = tags
self.assertSequenceEqual(lion.tags.order_by('tag'), [hairy, hunts, roars, yellow])
| [((7814, 7847), 'django.test.utils.isolate_apps', 'isolate_apps', (['"""generic_relations"""'], {}), "('generic_relations')\n", (7826, 7847), False, 'from django.test.utils import isolate_apps\n'), ((692, 742), 'django.contrib.contenttypes.forms.generic_inlineformset_factory', 'generic_inlineformset_factory', (['TaggedItem'], {'extra': '(1)'}), '(TaggedItem, extra=1)\n', (721, 742), False, 'from django.contrib.contenttypes.forms import generic_inlineformset_factory\n'), ((2634, 2684), 'django.contrib.contenttypes.forms.generic_inlineformset_factory', 'generic_inlineformset_factory', (['TaggedItem'], {'extra': '(1)'}), '(TaggedItem, extra=1)\n', (2663, 2684), False, 'from django.contrib.contenttypes.forms import generic_inlineformset_factory\n'), ((4952, 5041), 'django.contrib.contenttypes.forms.generic_inlineformset_factory', 'generic_inlineformset_factory', (['TaggedItem'], {'can_delete': '(False)', 'exclude': "['tag']", 'extra': '(3)'}), "(TaggedItem, can_delete=False, exclude=['tag'],\n extra=3)\n", (4981, 5041), False, 'from django.contrib.contenttypes.forms import generic_inlineformset_factory\n'), ((6834, 6893), 'django.contrib.contenttypes.forms.generic_inlineformset_factory', 'generic_inlineformset_factory', (['TaggedItem'], {'exclude': "('tag',)"}), "(TaggedItem, exclude=('tag',))\n", (6863, 6893), False, 'from django.contrib.contenttypes.forms import generic_inlineformset_factory\n'), ((7147, 7197), 'django.contrib.contenttypes.forms.generic_inlineformset_factory', 'generic_inlineformset_factory', (['TaggedItem'], {'extra': '(1)'}), '(TaggedItem, extra=1)\n', (7176, 7197), False, 'from django.contrib.contenttypes.forms import generic_inlineformset_factory\n'), ((7215, 7256), 'django.contrib.contenttypes.models.ContentType.objects.get_for_model', 'ContentType.objects.get_for_model', (['quartz'], {}), '(quartz)\n', (7248, 7256), False, 'from django.contrib.contenttypes.models import ContentType\n'), ((7642, 7699), 'django.contrib.contenttypes.forms.generic_inlineformset_factory', 'generic_inlineformset_factory', (['TaggedItem', 'TaggedItemForm'], {}), '(TaggedItem, TaggedItemForm)\n', (7671, 7699), False, 'from django.contrib.contenttypes.forms import generic_inlineformset_factory\n'), ((8490, 8581), 'django.contrib.contenttypes.forms.generic_inlineformset_factory', 'generic_inlineformset_factory', (['ForProxyModelModel'], {'fields': '"""__all__"""', 'form': 'SaveTestForm'}), "(ForProxyModelModel, fields='__all__', form=\n SaveTestForm)\n", (8519, 8581), False, 'from django.contrib.contenttypes.forms import generic_inlineformset_factory\n'), ((9085, 9182), 'django.contrib.contenttypes.forms.generic_inlineformset_factory', 'generic_inlineformset_factory', (['ForProxyModelModel'], {'fields': '"""__all__"""', 'for_concrete_model': '(False)'}), "(ForProxyModelModel, fields='__all__',\n for_concrete_model=False)\n", (9114, 9182), False, 'from django.contrib.contenttypes.forms import generic_inlineformset_factory\n'), ((9676, 9772), 'django.contrib.contenttypes.forms.generic_inlineformset_factory', 'generic_inlineformset_factory', (['ForProxyModelModel'], {'fields': '"""__all__"""', 'for_concrete_model': '(True)'}), "(ForProxyModelModel, fields='__all__',\n for_concrete_model=True)\n", (9705, 9772), False, 'from django.contrib.contenttypes.forms import generic_inlineformset_factory\n'), ((10282, 10323), 'django.contrib.contenttypes.forms.generic_inlineformset_factory', 'generic_inlineformset_factory', (['TaggedItem'], {}), '(TaggedItem)\n', (10311, 10323), False, 'from 
django.contrib.contenttypes.forms import generic_inlineformset_factory\n'), ((11093, 11134), 'django.contrib.contenttypes.forms.generic_inlineformset_factory', 'generic_inlineformset_factory', (['TaggedItem'], {}), '(TaggedItem)\n', (11122, 11134), False, 'from django.contrib.contenttypes.forms import generic_inlineformset_factory\n'), ((7959, 7988), 'django.db.models.PositiveIntegerField', 'models.PositiveIntegerField', ([], {}), '()\n', (7986, 7988), False, 'from django.db import models\n'), ((8162, 8217), 'django.contrib.contenttypes.forms.generic_inlineformset_factory', 'generic_inlineformset_factory', (['BadModel', 'TaggedItemForm'], {}), '(BadModel, TaggedItemForm)\n', (8191, 8217), False, 'from django.contrib.contenttypes.forms import generic_inlineformset_factory\n')] |
tamnguyen135/sage | src/sage/rings/polynomial/pbori/fglm.py | 2c87dc16f26604033bb1b2d1dc6796d279c88b16 | from .PyPolyBoRi import (BooleSet, Polynomial, BoolePolynomialVector,
FGLMStrategy)
def _fglm(I, from_ring, to_ring):
r"""
Unchecked variant of fglm
"""
vec = BoolePolynomialVector(I)
return FGLMStrategy(from_ring, to_ring, vec).main()
def fglm(I, from_ring, to_ring):
r"""
Convert *reduced* Groebner Basis in from_ring to a GroebnerBasis in to_ring.
    It acts independently of the global ring, which is restored at the end of the
computation.
TESTS::
sage: from sage.rings.polynomial.pbori import *
sage: from sage.rings.polynomial.pbori.PyPolyBoRi import OrderCode
sage: dp_asc = OrderCode.dp_asc
sage: r=declare_ring(['x','y','z'],dict())
sage: old_ring = r
sage: new_ring = old_ring.clone(ordering=dp_asc)
sage: (x,y,z) = [old_ring.variable(i) for i in range(3)]
sage: ideal=[x+z, y+z]# lp Groebner basis
sage: from sage.rings.polynomial.pbori.fglm import fglm
sage: list(fglm(ideal, old_ring, new_ring))
[y + x, z + x]
"""
for poly in I:
if poly.ring().id() != from_ring.id():
raise ValueError("Ideal I must be from the first ring argument")
return _fglm(I, from_ring, to_ring)
def vars_real_divisors(monomial, monomial_set):
r"""
    Returns all elements of monomial_set that yield monomial when multiplied by a single variable.
TESTS::
sage: from sage.rings.polynomial.pbori.pbori import *
sage: from sage.rings.polynomial.pbori.PyPolyBoRi import OrderCode
sage: dp_asc = OrderCode.dp_asc
sage: from sage.rings.polynomial.pbori.PyPolyBoRi import Ring
sage: r=Ring(1000)
sage: x = r.variable
sage: b=BooleSet([x(1)*x(2),x(2)])
sage: from sage.rings.polynomial.pbori.fglm import vars_real_divisors
sage: vars_real_divisors(x(1)*x(2)*x(3),b)
{{x(1),x(2)}}
"""
return BooleSet(Polynomial(monomial_set.divisors_of(monomial)). \
graded_part(monomial.deg() - 1))
def m_k_plus_one(completed_elements, variables):
r"""
    Calculates $m_{k+1}$ from the FGLM algorithm as described in Wichmann's diploma thesis.
It would be nice to be able to efficiently extract the smallest term of a polynomial.
TESTS::
sage: from sage.rings.polynomial.pbori.pbori import *
sage: from sage.rings.polynomial.pbori.PyPolyBoRi import OrderCode
sage: dp_asc = OrderCode.dp_asc
sage: from sage.rings.polynomial.pbori.PyPolyBoRi import Ring
sage: r=Ring(1000)
sage: x = r.variable
sage: from sage.rings.polynomial.pbori.PyPolyBoRi import Monomial
sage: s=BooleSet([x(1)*x(2),x(1),x(2),Monomial(r),x(3)])
sage: from sage.rings.polynomial.pbori.fglm import m_k_plus_one
sage: variables=BooleSet([x(1),x(2),x(3)])
sage: m_k_plus_one(s,variables)
x(2)*x(3)
sage: r2 = r.clone(ordering=dp_asc)
sage: m_k_plus_one(r2(s).set(),r2(variables).set())
x(1)*x(3)
"""
return sorted(completed_elements.cartesian_product(variables).diff(
completed_elements))[0]
| [] |
coursetable/ferry | ferry/embed/umap_reduce.py | f369b9588557c359af8589f2575a03493d6b08b6 | """
Uses UMAP (https://umap-learn.readthedocs.io/en/latest/index.html) to reduce course
embeddings to two dimensions for visualization.
"""
import pandas as pd
import umap
from sklearn.preprocessing import StandardScaler
from ferry import config
courses = pd.read_csv(
config.DATA_DIR / "course_embeddings/courses_deduplicated.csv",
index_col=0,
)
# mypy: ignore-errors
embeddings = pd.read_hdf(
config.DATA_DIR / "course_embeddings/fasttext_embeddings.h5",
key="embeddings",
)
embeddings = StandardScaler().fit_transform(embeddings)
reducer = umap.UMAP()
umap_embeddings = reducer.fit_transform(embeddings)
courses["umap1"] = umap_embeddings[:, 0]
courses["umap2"] = umap_embeddings[:, 1]
courses.to_csv(config.DATA_DIR / "course_embeddings/courses_deduplicated_umap.csv")
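# Optional visual check (a minimal sketch, assuming matplotlib is installed; not part
# of the pipeline above):
#
#   import matplotlib.pyplot as plt
#   plt.scatter(courses["umap1"], courses["umap2"], s=2, alpha=0.5)
#   plt.savefig(config.DATA_DIR / "course_embeddings/umap_scatter.png", dpi=200)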
| [((258, 350), 'pandas.read_csv', 'pd.read_csv', (["(config.DATA_DIR / 'course_embeddings/courses_deduplicated.csv')"], {'index_col': '(0)'}), "(config.DATA_DIR / 'course_embeddings/courses_deduplicated.csv',\n index_col=0)\n", (269, 350), True, 'import pandas as pd\n'), ((394, 489), 'pandas.read_hdf', 'pd.read_hdf', (["(config.DATA_DIR / 'course_embeddings/fasttext_embeddings.h5')"], {'key': '"""embeddings"""'}), "(config.DATA_DIR / 'course_embeddings/fasttext_embeddings.h5',\n key='embeddings')\n", (405, 489), True, 'import pandas as pd\n'), ((565, 576), 'umap.UMAP', 'umap.UMAP', ([], {}), '()\n', (574, 576), False, 'import umap\n'), ((511, 527), 'sklearn.preprocessing.StandardScaler', 'StandardScaler', ([], {}), '()\n', (525, 527), False, 'from sklearn.preprocessing import StandardScaler\n')] |
zhumakova/ClassProject | flora_fauna.py | b869258706dae7c8e8ab723c61a45fd78e26494f | import inheritance
class Flora:
def __init__(self, name, lifespan, habitat, plant_type):
self.name = name
self.lifespan = lifespan
self.habitat = habitat
self.plant_type = plant_type
self.plant_size = 0
class Fauna:
def __init__(self, name):
self.name = name
class Predator(Fauna):
def __init__(self, name:str, predator_type:str, what_eats:str, lifespan:int):
super().__init__(name)
self.predator_type = predator_type
self.what_eats = what_eats
self.lifespan = lifespan
# def check_planet(self,planet:tsk4.Planet):
# if planet.fauna and not planet.humanity:
# print('YES')
# else:
# print('NO')
class Mammal(Fauna):
def __init__(self, name, mammal_type, lifespan):
super().__init__(name)
self.mammal_type = mammal_type
self.lifespan = lifespan
def check_planet(self,planet:inheritance.Planet):
if planet.flora and planet.fauna and not planet.humanity:
planet.add_fauna(self)
shark = Predator('baby shark','sea','all',20)
giraffe = Mammal('malwan','earth',20)
giraffe.check_planet(inheritance.friendly)
marti = Mammal('marti','earth',20)
marti.check_planet(inheritance.friendly)
print(inheritance.friendly.__dict__)
print(inheritance.Planet.__dict__)
| [] |
rdenham/jug | jug/subcommands/demo.py | 40925445a5f96f9eec237de37e46e6fabcce6526 | #!/usr/bin/python
# -*- coding: utf-8 -*-
# Copyright (C) 2017, Luis Pedro Coelho <[email protected]>
# vim: set ts=4 sts=4 sw=4 expandtab smartindent:
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN
# THE SOFTWARE.
from . import SubCommand
__all__ = ['DemoCommand']
class DemoCommand(SubCommand):
'''Create demo directory.
'''
name = "demo"
def run(self, *args, **kwargs):
import os
from os import path
print('''
Jug will create a directory called 'jug-demo/' with a file called 'primes.py'
inside.
You can test jug by switching to that directory and running the commands:
jug status primes.py
followed by
jug execute primes.py
Upon termination of the process, results will be in a file called 'output.txt'.
PARALLEL USAGE
You can speed up the process by running several 'jug execute' in parallel:
jug execute primes.py &
jug execute primes.py &
jug execute primes.py &
jug execute primes.py &
TROUBLE SHOOTING:
Should you run into issues, you can run the internal tests for jug with
jug test-jug
FURTHER READING
The online documentation contains further reading. You can read the next
tutorial here:
http://jug.readthedocs.io/en/latest/decrypt-example.html
''')
if path.exists('jug-demo'):
print("Jug-demo previously created")
return
os.mkdir('jug-demo')
output = open('jug-demo/primes.py', 'wt')
output.write(r'''
from time import sleep
from jug import TaskGenerator
@TaskGenerator
def is_prime(n):
sleep(1.)
for j in range(2, n - 1):
if (n % j) == 0:
return False
return True
@TaskGenerator
def count_primes(ps):
return sum(ps)
@TaskGenerator
def write_output(n):
output = open('output.txt', 'wt')
output.write("Found {0} primes <= 100.\n".format(n))
output.close()
primes100 = []
for n in range(2, 101):
primes100.append(is_prime(n))
n_primes = count_primes(primes100)
write_output(n_primes)
''')
output.close()
demo = DemoCommand()
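# Minimal usage sketch (illustrative only; in normal use `jug demo` reaches
# run() through jug's SubCommand dispatch rather than a direct import):
#   DemoCommand().run()            # writes jug-demo/primes.py unless jug-demo exists
#   # then, from a shell:
#   #   cd jug-demo && jug execute primes.py   # produces output.txt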
| [((2271, 2294), 'os.path.exists', 'path.exists', (['"""jug-demo"""'], {}), "('jug-demo')\n", (2282, 2294), False, 'from os import path\n'), ((2372, 2392), 'os.mkdir', 'os.mkdir', (['"""jug-demo"""'], {}), "('jug-demo')\n", (2380, 2392), False, 'import os\n')] |
ID2797370/arxiv-search | search/controllers/simple/tests.py | 889402e8eef9a2faaa8e900978cd27ff2784ce33 | """Tests for simple search controller, :mod:`search.controllers.simple`."""
from http import HTTPStatus
from unittest import TestCase, mock
from werkzeug.datastructures import MultiDict
from werkzeug.exceptions import InternalServerError, NotFound, BadRequest
from search.domain import SimpleQuery
from search.controllers import simple
from search.controllers.simple.forms import SimpleSearchForm
from search.services.index import (
IndexConnectionError,
QueryError,
DocumentNotFound,
)
class TestRetrieveDocument(TestCase):
"""Tests for :func:`.simple.retrieve_document`."""
@mock.patch("search.controllers.simple.SearchSession")
def test_encounters_queryerror(self, mock_index):
"""There is a bug in the index or query."""
def _raiseQueryError(*args, **kwargs):
raise QueryError("What now")
mock_index.get_document.side_effect = _raiseQueryError
with self.assertRaises(InternalServerError):
try:
response_data, code, headers = simple.retrieve_document(1)
except QueryError as ex:
self.fail("QueryError should be handled (caught %s)" % ex)
self.assertEqual(
mock_index.get_document.call_count,
1,
"A search should be attempted",
)
@mock.patch("search.controllers.simple.SearchSession")
def test_index_raises_connection_exception(self, mock_index):
"""Index service raises a IndexConnectionError."""
mock_index.get_document.side_effect = IndexConnectionError
with self.assertRaises(InternalServerError):
response_data, code, headers = simple.retrieve_document("124.5678")
self.assertEqual(
mock_index.get_document.call_count,
1,
"A search should be attempted",
)
call_args, call_kwargs = mock_index.get_document.call_args
self.assertIsInstance(call_args[0], str, "arXiv ID is passed")
# self.assertEqual(code, status.HTTP_500_INTERNAL_SERVER_ERROR)
@mock.patch("search.controllers.simple.SearchSession")
def test_document_not_found(self, mock_index):
"""The document is not found."""
def _raiseDocumentNotFound(*args, **kwargs):
raise DocumentNotFound("What now")
mock_index.get_document.side_effect = _raiseDocumentNotFound
with self.assertRaises(NotFound):
try:
response_data, code, headers = simple.retrieve_document(1)
except DocumentNotFound as ex:
self.fail(
"DocumentNotFound should be handled (caught %s)" % ex
)
self.assertEqual(
mock_index.get_document.call_count,
1,
"A search should be attempted",
)
class TestSearchController(TestCase):
"""Tests for :func:`.simple.search`."""
@mock.patch(
"search.controllers.simple.url_for",
lambda *a, **k: f'https://arxiv.org/{k["paper_id"]}',
)
@mock.patch("search.controllers.simple.SearchSession")
def test_arxiv_id(self, mock_index):
"""Query parameter contains an arXiv ID."""
request_data = MultiDict({"query": "1702.00123"})
response_data, code, headers = simple.search(request_data)
self.assertEqual(
code,
HTTPStatus.MOVED_PERMANENTLY,
"Response should be a 301 redirect.",
)
self.assertIn("Location", headers, "Location header should be set")
self.assertEqual(
mock_index.search.call_count, 0, "No search should be attempted"
)
@mock.patch("search.controllers.simple.SearchSession")
def test_no_form_data(self, mock_index):
"""No form data has been submitted."""
request_data = MultiDict()
response_data, code, headers = simple.search(request_data)
self.assertEqual(code, HTTPStatus.OK, "Response should be OK.")
self.assertIn("form", response_data, "Response should include form.")
self.assertEqual(
mock_index.search.call_count, 0, "No search should be attempted"
)
@mock.patch("search.controllers.simple.SearchSession")
def test_single_field_term(self, mock_index):
"""Form data are present."""
mock_index.search.return_value = {"metadata": {}, "results": []}
request_data = MultiDict({"searchtype": "title", "query": "foo title"})
response_data, code, headers = simple.search(request_data)
self.assertEqual(
mock_index.search.call_count, 1, "A search should be attempted"
)
call_args, call_kwargs = mock_index.search.call_args
self.assertIsInstance(
call_args[0],
SimpleQuery,
"An SimpleQuery is passed to the search index",
)
self.assertEqual(code, HTTPStatus.OK, "Response should be OK.")
@mock.patch("search.controllers.simple.SearchSession")
def test_invalid_data(self, mock_index):
"""Form data are invalid."""
request_data = MultiDict({"searchtype": "title"})
response_data, code, headers = simple.search(request_data)
self.assertEqual(code, HTTPStatus.OK, "Response should be OK.")
self.assertIn("form", response_data, "Response should include form.")
self.assertEqual(
mock_index.search.call_count, 0, "No search should be attempted"
)
@mock.patch("search.controllers.simple.SearchSession")
def test_index_raises_connection_exception(self, mock_index):
"""Index service raises a IndexConnectionError."""
def _raiseIndexConnectionError(*args, **kwargs):
raise IndexConnectionError("What now")
mock_index.search.side_effect = _raiseIndexConnectionError
request_data = MultiDict({"searchtype": "title", "query": "foo title"})
with self.assertRaises(InternalServerError):
_, _, _ = simple.search(request_data)
self.assertEqual(
mock_index.search.call_count, 1, "A search should be attempted"
)
call_args, call_kwargs = mock_index.search.call_args
self.assertIsInstance(
call_args[0],
SimpleQuery,
"An SimpleQuery is passed to the search index",
)
@mock.patch("search.controllers.simple.SearchSession")
def test_index_raises_query_error(self, mock_index):
"""Index service raises a QueryError."""
def _raiseQueryError(*args, **kwargs):
raise QueryError("What now")
mock_index.search.side_effect = _raiseQueryError
request_data = MultiDict({"searchtype": "title", "query": "foo title"})
with self.assertRaises(InternalServerError):
try:
response_data, code, headers = simple.search(request_data)
except QueryError as ex:
self.fail("QueryError should be handled (caught %s)" % ex)
self.assertEqual(
mock_index.search.call_count, 1, "A search should be attempted"
)
class TestSimpleSearchForm(TestCase):
"""Tests for :class:`.SimpleSearchForm`."""
def test_searchtype_only(self):
"""User has entered only a searchtype (field)."""
data = MultiDict({"searchtype": "title"})
form = SimpleSearchForm(data)
self.assertFalse(form.validate(), "Form should be invalid")
def test_query_only(self):
"""User has entered only a query (value); this should never happen."""
data = MultiDict({"query": "someone monkeyed with the request"})
form = SimpleSearchForm(data)
self.assertFalse(form.validate(), "Form should be invalid")
def test_query_and_searchtype(self):
"""User has entered a searchtype (field) and query (value)."""
data = MultiDict({"searchtype": "title", "query": "foo title"})
form = SimpleSearchForm(data)
self.assertTrue(form.validate(), "Form should be valid")
class TestQueryFromForm(TestCase):
"""Tests for :func:`.simple._query_from_form`."""
def test_multiple_simple(self):
"""Form data has three simple."""
data = MultiDict({"searchtype": "title", "query": "foo title"})
form = SimpleSearchForm(data)
query = simple._query_from_form(form)
self.assertIsInstance(
query, SimpleQuery, "Should return an instance of SimpleQuery"
)
def test_form_data_has_order(self):
"""Form data includes sort order."""
data = MultiDict(
{
"searchtype": "title",
"query": "foo title",
"order": "submitted_date",
}
)
form = SimpleSearchForm(data)
query = simple._query_from_form(form)
self.assertIsInstance(
query, SimpleQuery, "Should return an instance of SimpleQuery"
)
self.assertEqual(query.order, "submitted_date")
def test_form_data_has_no_order(self):
"""Form data includes sort order parameter, but it is 'None'."""
data = MultiDict(
{"searchtype": "title", "query": "foo title", "order": "None"} #
)
form = SimpleSearchForm(data)
query = simple._query_from_form(form)
self.assertIsInstance(
query, SimpleQuery, "Should return an instance of SimpleQuery"
)
self.assertIsNone(query.order, "Order should be None")
def test_querystring_has_wildcard_at_start(self):
"""Querystring starts with a wildcard."""
data = MultiDict({"searchtype": "title", "query": "*foo title"})
form = SimpleSearchForm(data)
self.assertFalse(form.validate(), "Form should be invalid")
def test_input_whitespace_is_stripped(self):
"""If query has padding whitespace, it should be removed."""
data = MultiDict({"searchtype": "title", "query": " foo title "})
form = SimpleSearchForm(data)
self.assertTrue(form.validate(), "Form should be valid.")
self.assertEqual(form.query.data, "foo title")
def test_querystring_has_unbalanced_quotes(self):
"""Querystring has an odd number of quote characters."""
data = MultiDict({"searchtype": "title", "query": '"rhubarb'})
form = SimpleSearchForm(data)
self.assertFalse(form.validate(), "Form should be invalid")
data["query"] = '"rhubarb"'
form = SimpleSearchForm(data)
self.assertTrue(form.validate(), "Form should be valid")
data["query"] = '"rhubarb" "pie'
form = SimpleSearchForm(data)
self.assertFalse(form.validate(), "Form should be invalid")
data["query"] = '"rhubarb" "pie"'
form = SimpleSearchForm(data)
self.assertTrue(form.validate(), "Form should be valid")
class TestPaginationParametersAreFunky(TestCase):
"""
    The user may have monkeyed with the order or size parameters.
Since these are limited to specific values, there is no other reason for
them to be invalid. Given that they are passed around among
views (to persist users' selection), it's important to break the chain.
To do this, we return a 400 Bad Request, with a clean link back to the
search form.
"""
@mock.patch("search.controllers.simple.url_for")
def test_order_is_invalid(self, mock_url_for):
"""The order parameter on the request is invalid."""
request_data = MultiDict(
{
"searchtype": "title",
"query": "foo title",
"size": 50, # Valid.
"order": "foo", # Invalid
}
)
with self.assertRaises(BadRequest):
simple.search(request_data)
@mock.patch("search.controllers.simple.url_for")
def test_size_is_invalid(self, mock_url_for):
"""The order parameter on the request is invalid."""
request_data = MultiDict(
{
"searchtype": "title",
"query": "foo title",
"size": 51, # Invalid
"order": "", # Valid
}
)
with self.assertRaises(BadRequest):
simple.search(request_data)
class TestClassicAuthorSyntaxIsIntercepted(TestCase):
"""
The user may have entered an author query using `surname_f` syntax.
This is an artefact of the classic search system, and not intended to be
supported. Nevertheless, users have become accustomed to this syntax. We
therefore rewrite the query using a comma, and show the user a warning
about the syntax change.
"""
@mock.patch("search.controllers.simple.SearchSession")
def test_all_fields_search_contains_classic_syntax(self, mock_index):
"""User has entered a `surname_f` query in an all-fields search."""
request_data = MultiDict(
{
"searchtype": "all",
"query": "franklin_r",
"size": 50,
"order": "",
}
)
mock_index.search.return_value = {"metadata": {}, "results": []}
data, code, headers = simple.search(request_data)
self.assertEqual(
data["query"].value,
"franklin, r",
"The query should be rewritten.",
)
self.assertTrue(
data["has_classic_format"],
"A flag denoting the syntax interception should be set"
" in the response context, so that a message may be"
" rendered in the template.",
)
@mock.patch("search.controllers.simple.SearchSession")
def test_author_search_contains_classic_syntax(self, mock_index):
"""User has entered a `surname_f` query in an author search."""
request_data = MultiDict(
{
"searchtype": "author",
"query": "franklin_r",
"size": 50,
"order": "",
}
)
mock_index.search.return_value = {"metadata": {}, "results": []}
data, code, headers = simple.search(request_data)
self.assertEqual(
data["query"].value,
"franklin, r",
"The query should be rewritten.",
)
self.assertTrue(
data["has_classic_format"],
"A flag denoting the syntax interception should be set"
" in the response context, so that a message may be"
" rendered in the template.",
)
@mock.patch("search.controllers.simple.SearchSession")
def test_all_fields_search_multiple_classic_syntax(self, mock_index):
"""User has entered a classic query with multiple authors."""
request_data = MultiDict(
{
"searchtype": "all",
"query": "j franklin_r hawking_s",
"size": 50,
"order": "",
}
)
mock_index.search.return_value = {"metadata": {}, "results": []}
data, code, headers = simple.search(request_data)
self.assertEqual(
data["query"].value,
"j franklin, r; hawking, s",
"The query should be rewritten.",
)
self.assertTrue(
data["has_classic_format"],
"A flag denoting the syntax interception should be set"
" in the response context, so that a message may be"
" rendered in the template.",
)
@mock.patch("search.controllers.simple.SearchSession")
def test_title_search_contains_classic_syntax(self, mock_index):
"""User has entered a `surname_f` query in a title search."""
request_data = MultiDict(
{
"searchtype": "title",
"query": "franklin_r",
"size": 50,
"order": "",
}
)
mock_index.search.return_value = {"metadata": {}, "results": []}
data, code, headers = simple.search(request_data)
self.assertEqual(
data["query"].value,
"franklin_r",
"The query should not be rewritten.",
)
self.assertFalse(
data["has_classic_format"],
"Flag should not be set, as no rewrite has occurred.",
)
| [((603, 656), 'unittest.mock.patch', 'mock.patch', (['"""search.controllers.simple.SearchSession"""'], {}), "('search.controllers.simple.SearchSession')\n", (613, 656), False, 'from unittest import TestCase, mock\n'), ((1324, 1377), 'unittest.mock.patch', 'mock.patch', (['"""search.controllers.simple.SearchSession"""'], {}), "('search.controllers.simple.SearchSession')\n", (1334, 1377), False, 'from unittest import TestCase, mock\n'), ((2064, 2117), 'unittest.mock.patch', 'mock.patch', (['"""search.controllers.simple.SearchSession"""'], {}), "('search.controllers.simple.SearchSession')\n", (2074, 2117), False, 'from unittest import TestCase, mock\n'), ((2912, 3017), 'unittest.mock.patch', 'mock.patch', (['"""search.controllers.simple.url_for"""', '(lambda *a, **k: f"https://arxiv.org/{k[\'paper_id\']}")'], {}), '(\'search.controllers.simple.url_for\', lambda *a, **k:\n f"https://arxiv.org/{k[\'paper_id\']}")\n', (2922, 3017), False, 'from unittest import TestCase, mock\n'), ((3042, 3095), 'unittest.mock.patch', 'mock.patch', (['"""search.controllers.simple.SearchSession"""'], {}), "('search.controllers.simple.SearchSession')\n", (3052, 3095), False, 'from unittest import TestCase, mock\n'), ((3656, 3709), 'unittest.mock.patch', 'mock.patch', (['"""search.controllers.simple.SearchSession"""'], {}), "('search.controllers.simple.SearchSession')\n", (3666, 3709), False, 'from unittest import TestCase, mock\n'), ((4175, 4228), 'unittest.mock.patch', 'mock.patch', (['"""search.controllers.simple.SearchSession"""'], {}), "('search.controllers.simple.SearchSession')\n", (4185, 4228), False, 'from unittest import TestCase, mock\n'), ((4939, 4992), 'unittest.mock.patch', 'mock.patch', (['"""search.controllers.simple.SearchSession"""'], {}), "('search.controllers.simple.SearchSession')\n", (4949, 4992), False, 'from unittest import TestCase, mock\n'), ((5471, 5524), 'unittest.mock.patch', 'mock.patch', (['"""search.controllers.simple.SearchSession"""'], {}), "('search.controllers.simple.SearchSession')\n", (5481, 5524), False, 'from unittest import TestCase, mock\n'), ((6343, 6396), 'unittest.mock.patch', 'mock.patch', (['"""search.controllers.simple.SearchSession"""'], {}), "('search.controllers.simple.SearchSession')\n", (6353, 6396), False, 'from unittest import TestCase, mock\n'), ((11297, 11344), 'unittest.mock.patch', 'mock.patch', (['"""search.controllers.simple.url_for"""'], {}), "('search.controllers.simple.url_for')\n", (11307, 11344), False, 'from unittest import TestCase, mock\n'), ((11777, 11824), 'unittest.mock.patch', 'mock.patch', (['"""search.controllers.simple.url_for"""'], {}), "('search.controllers.simple.url_for')\n", (11787, 11824), False, 'from unittest import TestCase, mock\n'), ((12655, 12708), 'unittest.mock.patch', 'mock.patch', (['"""search.controllers.simple.SearchSession"""'], {}), "('search.controllers.simple.SearchSession')\n", (12665, 12708), False, 'from unittest import TestCase, mock\n'), ((13594, 13647), 'unittest.mock.patch', 'mock.patch', (['"""search.controllers.simple.SearchSession"""'], {}), "('search.controllers.simple.SearchSession')\n", (13604, 13647), False, 'from unittest import TestCase, mock\n'), ((14528, 14581), 'unittest.mock.patch', 'mock.patch', (['"""search.controllers.simple.SearchSession"""'], {}), "('search.controllers.simple.SearchSession')\n", (14538, 14581), False, 'from unittest import TestCase, mock\n'), ((15487, 15540), 'unittest.mock.patch', 'mock.patch', (['"""search.controllers.simple.SearchSession"""'], {}), 
"('search.controllers.simple.SearchSession')\n", (15497, 15540), False, 'from unittest import TestCase, mock\n'), ((3212, 3246), 'werkzeug.datastructures.MultiDict', 'MultiDict', (["{'query': '1702.00123'}"], {}), "({'query': '1702.00123'})\n", (3221, 3246), False, 'from werkzeug.datastructures import MultiDict\n'), ((3286, 3313), 'search.controllers.simple.search', 'simple.search', (['request_data'], {}), '(request_data)\n', (3299, 3313), False, 'from search.controllers import simple\n'), ((3825, 3836), 'werkzeug.datastructures.MultiDict', 'MultiDict', ([], {}), '()\n', (3834, 3836), False, 'from werkzeug.datastructures import MultiDict\n'), ((3876, 3903), 'search.controllers.simple.search', 'simple.search', (['request_data'], {}), '(request_data)\n', (3889, 3903), False, 'from search.controllers import simple\n'), ((4412, 4468), 'werkzeug.datastructures.MultiDict', 'MultiDict', (["{'searchtype': 'title', 'query': 'foo title'}"], {}), "({'searchtype': 'title', 'query': 'foo title'})\n", (4421, 4468), False, 'from werkzeug.datastructures import MultiDict\n'), ((4508, 4535), 'search.controllers.simple.search', 'simple.search', (['request_data'], {}), '(request_data)\n', (4521, 4535), False, 'from search.controllers import simple\n'), ((5098, 5132), 'werkzeug.datastructures.MultiDict', 'MultiDict', (["{'searchtype': 'title'}"], {}), "({'searchtype': 'title'})\n", (5107, 5132), False, 'from werkzeug.datastructures import MultiDict\n'), ((5172, 5199), 'search.controllers.simple.search', 'simple.search', (['request_data'], {}), '(request_data)\n', (5185, 5199), False, 'from search.controllers import simple\n'), ((5851, 5907), 'werkzeug.datastructures.MultiDict', 'MultiDict', (["{'searchtype': 'title', 'query': 'foo title'}"], {}), "({'searchtype': 'title', 'query': 'foo title'})\n", (5860, 5907), False, 'from werkzeug.datastructures import MultiDict\n'), ((6674, 6730), 'werkzeug.datastructures.MultiDict', 'MultiDict', (["{'searchtype': 'title', 'query': 'foo title'}"], {}), "({'searchtype': 'title', 'query': 'foo title'})\n", (6683, 6730), False, 'from werkzeug.datastructures import MultiDict\n'), ((7299, 7333), 'werkzeug.datastructures.MultiDict', 'MultiDict', (["{'searchtype': 'title'}"], {}), "({'searchtype': 'title'})\n", (7308, 7333), False, 'from werkzeug.datastructures import MultiDict\n'), ((7349, 7371), 'search.controllers.simple.forms.SimpleSearchForm', 'SimpleSearchForm', (['data'], {}), '(data)\n', (7365, 7371), False, 'from search.controllers.simple.forms import SimpleSearchForm\n'), ((7566, 7623), 'werkzeug.datastructures.MultiDict', 'MultiDict', (["{'query': 'someone monkeyed with the request'}"], {}), "({'query': 'someone monkeyed with the request'})\n", (7575, 7623), False, 'from werkzeug.datastructures import MultiDict\n'), ((7639, 7661), 'search.controllers.simple.forms.SimpleSearchForm', 'SimpleSearchForm', (['data'], {}), '(data)\n', (7655, 7661), False, 'from search.controllers.simple.forms import SimpleSearchForm\n'), ((7858, 7914), 'werkzeug.datastructures.MultiDict', 'MultiDict', (["{'searchtype': 'title', 'query': 'foo title'}"], {}), "({'searchtype': 'title', 'query': 'foo title'})\n", (7867, 7914), False, 'from werkzeug.datastructures import MultiDict\n'), ((7930, 7952), 'search.controllers.simple.forms.SimpleSearchForm', 'SimpleSearchForm', (['data'], {}), '(data)\n', (7946, 7952), False, 'from search.controllers.simple.forms import SimpleSearchForm\n'), ((8203, 8259), 'werkzeug.datastructures.MultiDict', 'MultiDict', (["{'searchtype': 'title', 'query': 'foo 
title'}"], {}), "({'searchtype': 'title', 'query': 'foo title'})\n", (8212, 8259), False, 'from werkzeug.datastructures import MultiDict\n'), ((8275, 8297), 'search.controllers.simple.forms.SimpleSearchForm', 'SimpleSearchForm', (['data'], {}), '(data)\n', (8291, 8297), False, 'from search.controllers.simple.forms import SimpleSearchForm\n'), ((8314, 8343), 'search.controllers.simple._query_from_form', 'simple._query_from_form', (['form'], {}), '(form)\n', (8337, 8343), False, 'from search.controllers import simple\n'), ((8561, 8648), 'werkzeug.datastructures.MultiDict', 'MultiDict', (["{'searchtype': 'title', 'query': 'foo title', 'order': 'submitted_date'}"], {}), "({'searchtype': 'title', 'query': 'foo title', 'order':\n 'submitted_date'})\n", (8570, 8648), False, 'from werkzeug.datastructures import MultiDict\n'), ((8745, 8767), 'search.controllers.simple.forms.SimpleSearchForm', 'SimpleSearchForm', (['data'], {}), '(data)\n', (8761, 8767), False, 'from search.controllers.simple.forms import SimpleSearchForm\n'), ((8784, 8813), 'search.controllers.simple._query_from_form', 'simple._query_from_form', (['form'], {}), '(form)\n', (8807, 8813), False, 'from search.controllers import simple\n'), ((9118, 9191), 'werkzeug.datastructures.MultiDict', 'MultiDict', (["{'searchtype': 'title', 'query': 'foo title', 'order': 'None'}"], {}), "({'searchtype': 'title', 'query': 'foo title', 'order': 'None'})\n", (9127, 9191), False, 'from werkzeug.datastructures import MultiDict\n'), ((9232, 9254), 'search.controllers.simple.forms.SimpleSearchForm', 'SimpleSearchForm', (['data'], {}), '(data)\n', (9248, 9254), False, 'from search.controllers.simple.forms import SimpleSearchForm\n'), ((9271, 9300), 'search.controllers.simple._query_from_form', 'simple._query_from_form', (['form'], {}), '(form)\n', (9294, 9300), False, 'from search.controllers import simple\n'), ((9600, 9657), 'werkzeug.datastructures.MultiDict', 'MultiDict', (["{'searchtype': 'title', 'query': '*foo title'}"], {}), "({'searchtype': 'title', 'query': '*foo title'})\n", (9609, 9657), False, 'from werkzeug.datastructures import MultiDict\n'), ((9673, 9695), 'search.controllers.simple.forms.SimpleSearchForm', 'SimpleSearchForm', (['data'], {}), '(data)\n', (9689, 9695), False, 'from search.controllers.simple.forms import SimpleSearchForm\n'), ((9898, 9956), 'werkzeug.datastructures.MultiDict', 'MultiDict', (["{'searchtype': 'title', 'query': ' foo title '}"], {}), "({'searchtype': 'title', 'query': ' foo title '})\n", (9907, 9956), False, 'from werkzeug.datastructures import MultiDict\n'), ((9972, 9994), 'search.controllers.simple.forms.SimpleSearchForm', 'SimpleSearchForm', (['data'], {}), '(data)\n', (9988, 9994), False, 'from search.controllers.simple.forms import SimpleSearchForm\n'), ((10251, 10306), 'werkzeug.datastructures.MultiDict', 'MultiDict', (['{\'searchtype\': \'title\', \'query\': \'"rhubarb\'}'], {}), '({\'searchtype\': \'title\', \'query\': \'"rhubarb\'})\n', (10260, 10306), False, 'from werkzeug.datastructures import MultiDict\n'), ((10322, 10344), 'search.controllers.simple.forms.SimpleSearchForm', 'SimpleSearchForm', (['data'], {}), '(data)\n', (10338, 10344), False, 'from search.controllers.simple.forms import SimpleSearchForm\n'), ((10465, 10487), 'search.controllers.simple.forms.SimpleSearchForm', 'SimpleSearchForm', (['data'], {}), '(data)\n', (10481, 10487), False, 'from search.controllers.simple.forms import SimpleSearchForm\n'), ((10610, 10632), 'search.controllers.simple.forms.SimpleSearchForm', 
'SimpleSearchForm', (['data'], {}), '(data)\n', (10626, 10632), False, 'from search.controllers.simple.forms import SimpleSearchForm\n'), ((10759, 10781), 'search.controllers.simple.forms.SimpleSearchForm', 'SimpleSearchForm', (['data'], {}), '(data)\n', (10775, 10781), False, 'from search.controllers.simple.forms import SimpleSearchForm\n'), ((11480, 11568), 'werkzeug.datastructures.MultiDict', 'MultiDict', (["{'searchtype': 'title', 'query': 'foo title', 'size': 50, 'order': 'foo'}"], {}), "({'searchtype': 'title', 'query': 'foo title', 'size': 50, 'order':\n 'foo'})\n", (11489, 11568), False, 'from werkzeug.datastructures import MultiDict\n'), ((11959, 12044), 'werkzeug.datastructures.MultiDict', 'MultiDict', (["{'searchtype': 'title', 'query': 'foo title', 'size': 51, 'order': ''}"], {}), "({'searchtype': 'title', 'query': 'foo title', 'size': 51, 'order':\n ''})\n", (11968, 12044), False, 'from werkzeug.datastructures import MultiDict\n'), ((12882, 12967), 'werkzeug.datastructures.MultiDict', 'MultiDict', (["{'searchtype': 'all', 'query': 'franklin_r', 'size': 50, 'order': ''}"], {}), "({'searchtype': 'all', 'query': 'franklin_r', 'size': 50, 'order': ''}\n )\n", (12891, 12967), False, 'from werkzeug.datastructures import MultiDict\n'), ((13168, 13195), 'search.controllers.simple.search', 'simple.search', (['request_data'], {}), '(request_data)\n', (13181, 13195), False, 'from search.controllers import simple\n'), ((13813, 13900), 'werkzeug.datastructures.MultiDict', 'MultiDict', (["{'searchtype': 'author', 'query': 'franklin_r', 'size': 50, 'order': ''}"], {}), "({'searchtype': 'author', 'query': 'franklin_r', 'size': 50,\n 'order': ''})\n", (13822, 13900), False, 'from werkzeug.datastructures import MultiDict\n'), ((14102, 14129), 'search.controllers.simple.search', 'simple.search', (['request_data'], {}), '(request_data)\n', (14115, 14129), False, 'from search.controllers import simple\n'), ((14749, 14846), 'werkzeug.datastructures.MultiDict', 'MultiDict', (["{'searchtype': 'all', 'query': 'j franklin_r hawking_s', 'size': 50,\n 'order': ''}"], {}), "({'searchtype': 'all', 'query': 'j franklin_r hawking_s', 'size': \n 50, 'order': ''})\n", (14758, 14846), False, 'from werkzeug.datastructures import MultiDict\n'), ((15047, 15074), 'search.controllers.simple.search', 'simple.search', (['request_data'], {}), '(request_data)\n', (15060, 15074), False, 'from search.controllers import simple\n'), ((15703, 15789), 'werkzeug.datastructures.MultiDict', 'MultiDict', (["{'searchtype': 'title', 'query': 'franklin_r', 'size': 50, 'order': ''}"], {}), "({'searchtype': 'title', 'query': 'franklin_r', 'size': 50,\n 'order': ''})\n", (15712, 15789), False, 'from werkzeug.datastructures import MultiDict\n'), ((15991, 16018), 'search.controllers.simple.search', 'simple.search', (['request_data'], {}), '(request_data)\n', (16004, 16018), False, 'from search.controllers import simple\n'), ((829, 851), 'search.services.index.QueryError', 'QueryError', (['"""What now"""'], {}), "('What now')\n", (839, 851), False, 'from search.services.index import IndexConnectionError, QueryError, DocumentNotFound\n'), ((1667, 1703), 'search.controllers.simple.retrieve_document', 'simple.retrieve_document', (['"""124.5678"""'], {}), "('124.5678')\n", (1691, 1703), False, 'from search.controllers import simple\n'), ((2282, 2310), 'search.services.index.DocumentNotFound', 'DocumentNotFound', (['"""What now"""'], {}), "('What now')\n", (2298, 2310), False, 'from search.services.index import IndexConnectionError, 
QueryError, DocumentNotFound\n'), ((5726, 5758), 'search.services.index.IndexConnectionError', 'IndexConnectionError', (['"""What now"""'], {}), "('What now')\n", (5746, 5758), False, 'from search.services.index import IndexConnectionError, QueryError, DocumentNotFound\n'), ((5983, 6010), 'search.controllers.simple.search', 'simple.search', (['request_data'], {}), '(request_data)\n', (5996, 6010), False, 'from search.controllers import simple\n'), ((6569, 6591), 'search.services.index.QueryError', 'QueryError', (['"""What now"""'], {}), "('What now')\n", (6579, 6591), False, 'from search.services.index import IndexConnectionError, QueryError, DocumentNotFound\n'), ((11743, 11770), 'search.controllers.simple.search', 'simple.search', (['request_data'], {}), '(request_data)\n', (11756, 11770), False, 'from search.controllers import simple\n'), ((12218, 12245), 'search.controllers.simple.search', 'simple.search', (['request_data'], {}), '(request_data)\n', (12231, 12245), False, 'from search.controllers import simple\n'), ((1034, 1061), 'search.controllers.simple.retrieve_document', 'simple.retrieve_document', (['(1)'], {}), '(1)\n', (1058, 1061), False, 'from search.controllers import simple\n'), ((2488, 2515), 'search.controllers.simple.retrieve_document', 'simple.retrieve_document', (['(1)'], {}), '(1)\n', (2512, 2515), False, 'from search.controllers import simple\n'), ((6848, 6875), 'search.controllers.simple.search', 'simple.search', (['request_data'], {}), '(request_data)\n', (6861, 6875), False, 'from search.controllers import simple\n')] |
hcrlab/kuri_wandering_robot | kuri_wandering_robot/scripts/kuri_wandering_robot_executive_node.py | 9c747bfe27e3c3450fd4717e26b866af2ef70149 | #!/usr/bin/env python
# ROS Libraries
import actionlib
from actionlib_msgs.msg import GoalStatus
from control_msgs.msg import JointTrajectoryControllerState, FollowJointTrajectoryAction, FollowJointTrajectoryGoal
from kuri_wandering_robot.msg import Power
from wandering_behavior.msg import WanderAction, WanderGoal
import rospy
from sensor_msgs.msg import CompressedImage
from std_msgs.msg import Empty
from trajectory_msgs.msg import JointTrajectoryPoint
# Python Default Libraries
import base64
import csv
from enum import Enum
import os
import requests
import threading
import time
import traceback
# Custom Libraries
from sent_messages_database import SentMessagesDatabase
class KuriWanderingRobotState(Enum):
"""
During NORMAL, the base moves according to wandering_behavior.
During CHARGING, the robot's eyes are closed and it is charging. The robot
transitions from NORMAL to CHARGING if its battery is below a threshold and
it is on the charger. It transitions from CHARGING to NORMAL if it's battery
is above a threshold or it is off the charger.
"""
NORMAL = 1
CHARGING = 2
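# Transition sketch for the states above, as implemented in
# KuriWanderingRobot.state_machine_control_loop (thresholds are rosparams read
# in __init__; the defaults shown here are the ones used below):
#   NORMAL   -> CHARGING : battery < to_charge_threshold (50)  and dock present
#   CHARGING -> NORMAL   : battery >= charging_done_threshold (90) or dock absent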
class KuriWanderingRobot(object):
"""
The central executive node. This node runs a control loop that manages the
robot's state: turning on and monitoring progress of the wandering module
in NORMAL, turning off wandering in CHARGING, and switching back to NORMAL
when the robot is sufficiently charged.
This node also runs anomaly detection to detect low battery; when it detects
low battery, it sends a low battery request to the Slackbot, which then
sends it to the helpers. This node can be extended with additional anomaly
detection and help requests, as needed. This node also subscribes to a dummy
`where_am_i_help` topic, which sends helpers the sample `where_am_i` help
    message. Note that this is only in place to illustrate the sample
`where_am_i` help message, and actually using that would require developing
a custom anomaly detection system to trigger the robot asking for that type
of help.
Finally, this node has a separate thread that continually queries the
Slackbot for responses to its help requests.
"""
def __init__(self):
"""
Initialize an instance of the KuriWanderingRobot class
"""
self.has_loaded = False
# Get the Slackbot URL
self.slackbot_url = rospy.get_param('~slackbot_url')
# Initialize the state.
self.state_lock = threading.Lock()
self.state_changed = True
self.state = KuriWanderingRobotState.NORMAL
# Initialize the wandering module
self.wandering_module_action = actionlib.SimpleActionClient('/wandering_behavior/navigate', WanderAction)
# Initialize the eye controller
self.eyelid_controller_action = actionlib.SimpleActionClient('/eyelids_controller/follow_joint_trajectory', FollowJointTrajectoryAction)
self.eye_closed_position = 0.41
self.eye_open_position = 0.0
# Initialize the camera
self.img_sub = rospy.Subscriber(
'/upward_looking_camera/compressed', CompressedImage, self.image_callback, queue_size=1)
self.latest_image = None
self.latest_image_lock = threading.Lock()
# Initialize low battery anomaly detector
self.battery_sub = rospy.Subscriber(
"/mobile_base/power", Power, self.power_callback, queue_size=1)
self.previous_battery_lock = threading.Lock()
self.previous_battery = None
self.previous_dock_present = None
self.battery_notification_thresholds = rospy.get_param('~battery_notification_thresholds', [40, 20, 10, 5, 4, 3, 2, 1])
# if the battery is less than this and Kuri is docked, charge
self.to_charge_threshold = rospy.get_param('~to_charge_threshold', 50)
        # if the battery is greater than this and Kuri is charging, switch back to NORMAL
self.charging_done_threshold = rospy.get_param('~charging_done_threshold', 90)
# Whether the low battery message should include Kuri's current camera image
self.low_battery_message_include_image = rospy.get_param('~low_battery_message_include_image', True)
# Initialize the dummy `where_am_i` anomaly detector
self.where_am_i_help_sub = rospy.Subscriber(
"/where_am_i_help", Empty, self.where_am_i_help_callback, queue_size=1)
# Initialize storing images and message IDs
self.sent_messages_database_filepath = rospy.get_param('~send_messages_database_filepath')
self.sent_messages_database = SentMessagesDatabase.load(
self.sent_messages_database_filepath)
self.database_save_interval = 1
self.database_updates_since_last_save = 0
# Initialize the head controller
self.head_state_sub = rospy.Subscriber(
"/head_controller/state", JointTrajectoryControllerState, self.head_state_callback, queue_size=1)
self.head_controller_action = actionlib.SimpleActionClient('/head_controller/follow_joint_trajectory', FollowJointTrajectoryAction)
self.head_tilt_speed = 0.2 # head tilt is in [-0.8, 0.3]
self.head_pan_speed = 0.2 # head pan is in [-0.75, 0.75]
# Initialize the Slackbot updates thread
self.slackbot_responses_thread = threading.Thread(
target=self.get_slackbot_updates,
)
self.slackbot_responses_thread.start()
# Initialize the state machine
self.state_machine_thread = threading.Thread(
target=self.state_machine_control_loop,
)
self.state_machine_thread.start()
self.has_centered_head = False
self.has_loaded = True
def database_updated(self, num_updates=1):
"""
        Called every time the database is updated. Saves the database every
self.database_save_interval updates
"""
self.database_updates_since_last_save += num_updates
if self.database_updates_since_last_save % self.database_save_interval == 0:
self.sent_messages_database.save(self.sent_messages_database_filepath)
rospy.logdebug("Saved sent_messages_database!")
def open_eyes(self, duration_secs=0.2):
"""
Open the robot's eyes
"""
rospy.logdebug("Open Eyes")
duration = rospy.Duration.from_sec(duration_secs)
goal = FollowJointTrajectoryGoal()
goal.trajectory.header.stamp = rospy.Time.now()
goal.trajectory.joint_names = ["eyelids_joint"]
point = JointTrajectoryPoint()
point.positions = [self.eye_open_position]
point.velocities = []
point.accelerations = []
point.effort = []
point.time_from_start = duration
goal.trajectory.points = [point]
# Send the goal
self.eyelid_controller_action.wait_for_server()
self.eyelid_controller_action.send_goal(goal)
self.eyelid_controller_action.wait_for_result(duration)
def close_eyes(self, duration_secs=0.2):
"""
Close the robot's eyes
"""
rospy.logdebug("Close Eyes")
duration = rospy.Duration.from_sec(duration_secs)
goal = FollowJointTrajectoryGoal()
goal.trajectory.header.stamp = rospy.Time.now()
goal.trajectory.joint_names = ["eyelids_joint"]
point = JointTrajectoryPoint()
point.positions = [self.eye_closed_position]
point.velocities = []
point.accelerations = []
point.effort = []
point.time_from_start = duration
goal.trajectory.points = [point]
# Send the goal
self.eyelid_controller_action.wait_for_server()
self.eyelid_controller_action.send_goal(goal)
self.eyelid_controller_action.wait_for_result(duration)
def head_state_callback(self, head_state_msg):
"""
Get the head's current position
"""
if not self.has_loaded:
return
if not self.has_centered_head:
self.center_head(head_state_msg.actual.positions[0], head_state_msg.actual.positions[1])
def center_head(self, current_pan, current_tilt):
"""
Center Kuri's head. This involves moving from the current pan and tilt
to the centered values of (0.0, -0.3)
"""
pan_endpoint = 0.0
tilt_endpoint = -0.3
n_waypoints = 10
# Compute the actual endpoint and duration_secs
duration_secs = max(
abs(pan_endpoint-current_pan)/self.head_pan_speed,
abs(tilt_endpoint-current_tilt)/self.head_tilt_speed)
duration = rospy.Duration.from_sec(duration_secs)
# Create the goal
goal = FollowJointTrajectoryGoal()
goal.trajectory.header.stamp = rospy.Time.now()
goal.trajectory.joint_names = ["head_1_joint", "head_2_joint"]
goal.trajectory.points = []
pan_interval = (pan_endpoint-current_pan)/(n_waypoints-1)
tilt_interval = (tilt_endpoint-current_tilt)/(n_waypoints-1)
time_interval = duration/n_waypoints
for i in range(n_waypoints):
point = JointTrajectoryPoint()
point.positions = [current_pan + i*pan_interval, current_tilt + i*tilt_interval]
point.velocities = []
point.accelerations = []
point.effort = []
point.time_from_start = (i+1)*time_interval
goal.trajectory.points.append(point)
# Send the goal
self.head_controller_action.wait_for_server()
self.head_controller_action.send_goal(goal)
self.head_controller_action.wait_for_result(duration)
self.has_centered_head = True
def state_machine_control_loop(self, rate_hz=10):
"""
The control loop for the state machine. All of the state machine logic
is handled in this function and the functions it calls.
During NORMAL, the base moves according to wandering_behavior.
During CHARGING, the robot's eyes are closed and it is charging. The
robot transitions from NORMAL to CHARGING if its battery is below a
threshold and it is on the charger. It transitions from CHARGING to
        NORMAL if its battery is above a threshold or it is off the charger.
"""
rate = rospy.Rate(rate_hz)
while not rospy.is_shutdown():
rate.sleep()
with self.state_lock:
state_at_start_of_loop = self.state
if (self.state == KuriWanderingRobotState.NORMAL):
goal_state = self.wandering_module_action.get_state()
if (self.state_changed or goal_state == GoalStatus.ABORTED or goal_state == GoalStatus.SUCCEEDED):
rospy.logdebug("Waiting for wandering_module_action server")
self.wandering_module_action.wait_for_server()
rospy.logdebug("Sending goal to wandering_module_action")
# Effort -1 means "don't stop unless preempted"
self.wandering_module_action.send_goal(WanderGoal(effort=-1))
self.open_eyes()
with self.previous_battery_lock:
if (self.previous_battery is not None and self.previous_battery < self.to_charge_threshold and self.previous_dock_present):
self.close_eyes()
self.state = KuriWanderingRobotState.CHARGING
self.wandering_module_action.cancel_all_goals()
rospy.loginfo("State: NORMAL ==> CHARGING")
elif self.state == KuriWanderingRobotState.CHARGING:
with self.previous_battery_lock:
if (self.previous_battery is None or not self.previous_dock_present or self.previous_battery >= self.charging_done_threshold):
self.state = KuriWanderingRobotState.NORMAL
rospy.loginfo("State: CHARGING ==> NORMAL")
state_at_end_of_loop = self.state
self.state_changed = (state_at_start_of_loop != state_at_end_of_loop)
def image_callback(self, img_msg):
"""
Store the latest image.
"""
if not self.has_loaded: return
with self.latest_image_lock:
self.latest_image = img_msg
def power_callback(self, msg):
"""
        Callback function for Kuri's power update. If Kuri's battery has crossed
a battery_notification_threshold, notify the Slackbot.
"""
if not self.has_loaded: return
with self.state_lock:
with self.previous_battery_lock:
self.previous_dock_present = msg.dock_present
if self.state == KuriWanderingRobotState.CHARGING:
self.previous_battery = msg.battery.pct
else:
update_previous_battery = True
if msg.battery.pct <= self.battery_notification_thresholds[0]:
# Send the low-battery helper notifications when the battery
# crosses the thresholds defined in self.battery_notification_thresholds
for i in range(len(self.battery_notification_thresholds)):
if (self.previous_battery is None or (self.previous_battery > self.battery_notification_thresholds[i]) and msg.battery.pct <= self.battery_notification_thresholds[i]):
try:
# Send a low_battery_alert
dict_to_send = {'battery_pct':msg.battery.pct}
if self.low_battery_message_include_image:
with self.latest_image_lock:
if self.latest_image is not None:
image_contents = base64.b64encode(bytearray(self.latest_image.data)).decode('ascii')
dict_to_send['image'] = image_contents
rospy.loginfo("Sending battery request for pct %s" % msg.battery.pct)
res = requests.post(
os.path.join(self.slackbot_url, 'low_battery'),
json=dict_to_send,
)
res_json = res.json()
if not res_json['success']:
update_previous_battery = False
except Exception as e:
rospy.logwarn("Error communicating with Slackbot /low_battery at URL %s." % self.slackbot_url)
if "res" in locals():
rospy.logwarn("Response text %s." % res.text)
rospy.logwarn(traceback.format_exc())
rospy.logwarn("Error %s." % e)
update_previous_battery = False
break
if (update_previous_battery and (self.previous_battery is None or msg.battery.pct < self.previous_battery)):
self.previous_battery = msg.battery.pct
def where_am_i_help_callback(self, msg):
"""
A dummy callback that triggers sending a where_am_i help message to the
Slackbot. This is merely intended to showcase some of the Slackbot's
capabilities. Users who want a robot that autonomously asks the human to
tell it where it is should implement their own anomaly detection system
for triggering this help request.
"""
with self.latest_image_lock:
if self.latest_image is None:
rospy.loginfo("Attempted to send where_am_i help request but have no image.")
return
try:
            # Send a where_am_i help request
rospy.loginfo("Sending where_am_i help request")
with self.latest_image_lock:
image_contents = base64.b64encode(bytearray(self.latest_image.data)).decode('ascii')
res = requests.post(
os.path.join(self.slackbot_url, 'where_am_i'),
json={'image':image_contents, 'options':['Lounge', "Office#252", "200 Corridoor", "Atrium"]},
)
res_json = res.json()
message_id = res_json['message_id']
self.sent_messages_database.add_respondable_message(message_id)
self.database_updated()
except Exception as e:
rospy.logwarn("Error communicating with Slackbot /where_am_i at URL %s." % self.slackbot_url)
if "res" in locals():
rospy.logwarn("Response text %s." % res.text)
rospy.logwarn(traceback.format_exc())
rospy.logwarn("Error %s." % e)
def get_slackbot_updates(self, refresh_secs=5.0):
"""
Once every refresh_secs seconds, request updates (e.g., human responses)
from the Slackbot. Note that you can optionally request updates for
        particular message_ids (e.g., those that have not received responses yet)
"""
r = rospy.Rate(1.0/refresh_secs)
while not rospy.is_shutdown():
            if not self.has_loaded:
                # Skip polling until __init__ has finished setting everything up.
                r.sleep()
                continue
try:
message_ids_and_action_ts = self.sent_messages_database.get_message_ids_and_latest_action_ts()
# Request responses for those message_ids
res = requests.post(
os.path.join(self.slackbot_url, 'get_updates'),
json={'message_ids_and_action_ts':message_ids_and_action_ts},
)
res_json = res.json()
rospy.logdebug("Got updates from Slackbot %s" % res_json)
message_id_to_responses = res_json["message_id_to_responses"]
if len(message_id_to_responses) > 0:
num_updates = 0
# Insert reactions into the database
for message_id in message_id_to_responses:
for action_ts, response in message_id_to_responses[message_id]:
rospy.loginfo("Got reaction %s from at ts %s" % (response, action_ts))
self.sent_messages_database.add_user_response(message_id, action_ts, response)
num_updates += 1
self.database_updated(num_updates)
except Exception as e:
rospy.logwarn("Error communicating with Slackbot /get_updates at URL %s." % self.slackbot_url)
if "res" in locals():
rospy.logwarn("Response text %s." % res.text)
rospy.logwarn(traceback.format_exc())
rospy.logwarn("Error %s." % e)
r.sleep()
if __name__ == "__main__":
rospy.init_node("kuri_wandering_robot")
kuri_wandering_robot = KuriWanderingRobot()
rospy.spin()
| [((19163, 19202), 'rospy.init_node', 'rospy.init_node', (['"""kuri_wandering_robot"""'], {}), "('kuri_wandering_robot')\n", (19178, 19202), False, 'import rospy\n'), ((19257, 19269), 'rospy.spin', 'rospy.spin', ([], {}), '()\n', (19267, 19269), False, 'import rospy\n'), ((2419, 2451), 'rospy.get_param', 'rospy.get_param', (['"""~slackbot_url"""'], {}), "('~slackbot_url')\n", (2434, 2451), False, 'import rospy\n'), ((2511, 2527), 'threading.Lock', 'threading.Lock', ([], {}), '()\n', (2525, 2527), False, 'import threading\n'), ((2696, 2770), 'actionlib.SimpleActionClient', 'actionlib.SimpleActionClient', (['"""/wandering_behavior/navigate"""', 'WanderAction'], {}), "('/wandering_behavior/navigate', WanderAction)\n", (2724, 2770), False, 'import actionlib\n'), ((2852, 2960), 'actionlib.SimpleActionClient', 'actionlib.SimpleActionClient', (['"""/eyelids_controller/follow_joint_trajectory"""', 'FollowJointTrajectoryAction'], {}), "('/eyelids_controller/follow_joint_trajectory',\n FollowJointTrajectoryAction)\n", (2880, 2960), False, 'import actionlib\n'), ((3090, 3200), 'rospy.Subscriber', 'rospy.Subscriber', (['"""/upward_looking_camera/compressed"""', 'CompressedImage', 'self.image_callback'], {'queue_size': '(1)'}), "('/upward_looking_camera/compressed', CompressedImage, self\n .image_callback, queue_size=1)\n", (3106, 3200), False, 'import rospy\n'), ((3275, 3291), 'threading.Lock', 'threading.Lock', ([], {}), '()\n', (3289, 3291), False, 'import threading\n'), ((3370, 3455), 'rospy.Subscriber', 'rospy.Subscriber', (['"""/mobile_base/power"""', 'Power', 'self.power_callback'], {'queue_size': '(1)'}), "('/mobile_base/power', Power, self.power_callback, queue_size=1\n )\n", (3386, 3455), False, 'import rospy\n'), ((3501, 3517), 'threading.Lock', 'threading.Lock', ([], {}), '()\n', (3515, 3517), False, 'import threading\n'), ((3644, 3729), 'rospy.get_param', 'rospy.get_param', (['"""~battery_notification_thresholds"""', '[40, 20, 10, 5, 4, 3, 2, 1]'], {}), "('~battery_notification_thresholds', [40, 20, 10, 5, 4, 3, 2, 1]\n )\n", (3659, 3729), False, 'import rospy\n'), ((3830, 3873), 'rospy.get_param', 'rospy.get_param', (['"""~to_charge_threshold"""', '(50)'], {}), "('~to_charge_threshold', 50)\n", (3845, 3873), False, 'import rospy\n'), ((4002, 4049), 'rospy.get_param', 'rospy.get_param', (['"""~charging_done_threshold"""', '(90)'], {}), "('~charging_done_threshold', 90)\n", (4017, 4049), False, 'import rospy\n'), ((4184, 4243), 'rospy.get_param', 'rospy.get_param', (['"""~low_battery_message_include_image"""', '(True)'], {}), "('~low_battery_message_include_image', True)\n", (4199, 4243), False, 'import rospy\n'), ((4341, 4433), 'rospy.Subscriber', 'rospy.Subscriber', (['"""/where_am_i_help"""', 'Empty', 'self.where_am_i_help_callback'], {'queue_size': '(1)'}), "('/where_am_i_help', Empty, self.where_am_i_help_callback,\n queue_size=1)\n", (4357, 4433), False, 'import rospy\n'), ((4543, 4594), 'rospy.get_param', 'rospy.get_param', (['"""~send_messages_database_filepath"""'], {}), "('~send_messages_database_filepath')\n", (4558, 4594), False, 'import rospy\n'), ((4633, 4696), 'sent_messages_database.SentMessagesDatabase.load', 'SentMessagesDatabase.load', (['self.sent_messages_database_filepath'], {}), '(self.sent_messages_database_filepath)\n', (4658, 4696), False, 'from sent_messages_database import SentMessagesDatabase\n'), ((4872, 4990), 'rospy.Subscriber', 'rospy.Subscriber', (['"""/head_controller/state"""', 'JointTrajectoryControllerState', 'self.head_state_callback'], {'queue_size': 
'(1)'}), "('/head_controller/state', JointTrajectoryControllerState,\n self.head_state_callback, queue_size=1)\n", (4888, 4990), False, 'import rospy\n'), ((5038, 5143), 'actionlib.SimpleActionClient', 'actionlib.SimpleActionClient', (['"""/head_controller/follow_joint_trajectory"""', 'FollowJointTrajectoryAction'], {}), "('/head_controller/follow_joint_trajectory',\n FollowJointTrajectoryAction)\n", (5066, 5143), False, 'import actionlib\n'), ((5361, 5411), 'threading.Thread', 'threading.Thread', ([], {'target': 'self.get_slackbot_updates'}), '(target=self.get_slackbot_updates)\n', (5377, 5411), False, 'import threading\n'), ((5558, 5614), 'threading.Thread', 'threading.Thread', ([], {'target': 'self.state_machine_control_loop'}), '(target=self.state_machine_control_loop)\n', (5574, 5614), False, 'import threading\n'), ((6338, 6365), 'rospy.logdebug', 'rospy.logdebug', (['"""Open Eyes"""'], {}), "('Open Eyes')\n", (6352, 6365), False, 'import rospy\n'), ((6385, 6423), 'rospy.Duration.from_sec', 'rospy.Duration.from_sec', (['duration_secs'], {}), '(duration_secs)\n', (6408, 6423), False, 'import rospy\n'), ((6439, 6466), 'control_msgs.msg.FollowJointTrajectoryGoal', 'FollowJointTrajectoryGoal', ([], {}), '()\n', (6464, 6466), False, 'from control_msgs.msg import JointTrajectoryControllerState, FollowJointTrajectoryAction, FollowJointTrajectoryGoal\n'), ((6506, 6522), 'rospy.Time.now', 'rospy.Time.now', ([], {}), '()\n', (6520, 6522), False, 'import rospy\n'), ((6595, 6617), 'trajectory_msgs.msg.JointTrajectoryPoint', 'JointTrajectoryPoint', ([], {}), '()\n', (6615, 6617), False, 'from trajectory_msgs.msg import JointTrajectoryPoint\n'), ((7148, 7176), 'rospy.logdebug', 'rospy.logdebug', (['"""Close Eyes"""'], {}), "('Close Eyes')\n", (7162, 7176), False, 'import rospy\n'), ((7196, 7234), 'rospy.Duration.from_sec', 'rospy.Duration.from_sec', (['duration_secs'], {}), '(duration_secs)\n', (7219, 7234), False, 'import rospy\n'), ((7250, 7277), 'control_msgs.msg.FollowJointTrajectoryGoal', 'FollowJointTrajectoryGoal', ([], {}), '()\n', (7275, 7277), False, 'from control_msgs.msg import JointTrajectoryControllerState, FollowJointTrajectoryAction, FollowJointTrajectoryGoal\n'), ((7317, 7333), 'rospy.Time.now', 'rospy.Time.now', ([], {}), '()\n', (7331, 7333), False, 'import rospy\n'), ((7406, 7428), 'trajectory_msgs.msg.JointTrajectoryPoint', 'JointTrajectoryPoint', ([], {}), '()\n', (7426, 7428), False, 'from trajectory_msgs.msg import JointTrajectoryPoint\n'), ((8679, 8717), 'rospy.Duration.from_sec', 'rospy.Duration.from_sec', (['duration_secs'], {}), '(duration_secs)\n', (8702, 8717), False, 'import rospy\n'), ((8760, 8787), 'control_msgs.msg.FollowJointTrajectoryGoal', 'FollowJointTrajectoryGoal', ([], {}), '()\n', (8785, 8787), False, 'from control_msgs.msg import JointTrajectoryControllerState, FollowJointTrajectoryAction, FollowJointTrajectoryGoal\n'), ((8827, 8843), 'rospy.Time.now', 'rospy.Time.now', ([], {}), '()\n', (8841, 8843), False, 'import rospy\n'), ((10359, 10378), 'rospy.Rate', 'rospy.Rate', (['rate_hz'], {}), '(rate_hz)\n', (10369, 10378), False, 'import rospy\n'), ((17475, 17505), 'rospy.Rate', 'rospy.Rate', (['(1.0 / refresh_secs)'], {}), '(1.0 / refresh_secs)\n', (17485, 17505), False, 'import rospy\n'), ((6183, 6230), 'rospy.logdebug', 'rospy.logdebug', (['"""Saved sent_messages_database!"""'], {}), "('Saved sent_messages_database!')\n", (6197, 6230), False, 'import rospy\n'), ((9188, 9210), 'trajectory_msgs.msg.JointTrajectoryPoint', 'JointTrajectoryPoint', ([], {}), 
'()\n', (9208, 9210), False, 'from trajectory_msgs.msg import JointTrajectoryPoint\n'), ((10397, 10416), 'rospy.is_shutdown', 'rospy.is_shutdown', ([], {}), '()\n', (10414, 10416), False, 'import rospy\n'), ((16216, 16264), 'rospy.loginfo', 'rospy.loginfo', (['"""Sending where_am_i help request"""'], {}), "('Sending where_am_i help request')\n", (16229, 16264), False, 'import rospy\n'), ((17522, 17541), 'rospy.is_shutdown', 'rospy.is_shutdown', ([], {}), '()\n', (17539, 17541), False, 'import rospy\n'), ((16051, 16128), 'rospy.loginfo', 'rospy.loginfo', (['"""Attempted to send where_am_i help request but have no image."""'], {}), "('Attempted to send where_am_i help request but have no image.')\n", (16064, 16128), False, 'import rospy\n'), ((16456, 16501), 'os.path.join', 'os.path.join', (['self.slackbot_url', '"""where_am_i"""'], {}), "(self.slackbot_url, 'where_am_i')\n", (16468, 16501), False, 'import os\n'), ((16864, 16961), 'rospy.logwarn', 'rospy.logwarn', (["('Error communicating with Slackbot /where_am_i at URL %s.' % self.slackbot_url\n )"], {}), "('Error communicating with Slackbot /where_am_i at URL %s.' %\n self.slackbot_url)\n", (16877, 16961), False, 'import rospy\n'), ((17116, 17146), 'rospy.logwarn', 'rospy.logwarn', (["('Error %s.' % e)"], {}), "('Error %s.' % e)\n", (17129, 17146), False, 'import rospy\n'), ((18032, 18089), 'rospy.logdebug', 'rospy.logdebug', (["('Got updates from Slackbot %s' % res_json)"], {}), "('Got updates from Slackbot %s' % res_json)\n", (18046, 18089), False, 'import rospy\n'), ((17008, 17053), 'rospy.logwarn', 'rospy.logwarn', (["('Response text %s.' % res.text)"], {}), "('Response text %s.' % res.text)\n", (17021, 17053), False, 'import rospy\n'), ((17080, 17102), 'traceback.format_exc', 'traceback.format_exc', ([], {}), '()\n', (17100, 17102), False, 'import traceback\n'), ((17830, 17876), 'os.path.join', 'os.path.join', (['self.slackbot_url', '"""get_updates"""'], {}), "(self.slackbot_url, 'get_updates')\n", (17842, 17876), False, 'import os\n'), ((18821, 18919), 'rospy.logwarn', 'rospy.logwarn', (["('Error communicating with Slackbot /get_updates at URL %s.' % self.\n slackbot_url)"], {}), "('Error communicating with Slackbot /get_updates at URL %s.' %\n self.slackbot_url)\n", (18834, 18919), False, 'import rospy\n'), ((19078, 19108), 'rospy.logwarn', 'rospy.logwarn', (["('Error %s.' % e)"], {}), "('Error %s.' % e)\n", (19091, 19108), False, 'import rospy\n'), ((10813, 10873), 'rospy.logdebug', 'rospy.logdebug', (['"""Waiting for wandering_module_action server"""'], {}), "('Waiting for wandering_module_action server')\n", (10827, 10873), False, 'import rospy\n'), ((10969, 11026), 'rospy.logdebug', 'rospy.logdebug', (['"""Sending goal to wandering_module_action"""'], {}), "('Sending goal to wandering_module_action')\n", (10983, 11026), False, 'import rospy\n'), ((18968, 19013), 'rospy.logwarn', 'rospy.logwarn', (["('Response text %s.' % res.text)"], {}), "('Response text %s.' 
% res.text)\n", (18981, 19013), False, 'import rospy\n'), ((19041, 19063), 'traceback.format_exc', 'traceback.format_exc', ([], {}), '()\n', (19061, 19063), False, 'import traceback\n'), ((11162, 11183), 'wandering_behavior.msg.WanderGoal', 'WanderGoal', ([], {'effort': '(-1)'}), '(effort=-1)\n', (11172, 11183), False, 'from wandering_behavior.msg import WanderAction, WanderGoal\n'), ((11651, 11694), 'rospy.loginfo', 'rospy.loginfo', (['"""State: NORMAL ==> CHARGING"""'], {}), "('State: NORMAL ==> CHARGING')\n", (11664, 11694), False, 'import rospy\n'), ((18494, 18564), 'rospy.loginfo', 'rospy.loginfo', (["('Got reaction %s from at ts %s' % (response, action_ts))"], {}), "('Got reaction %s from at ts %s' % (response, action_ts))\n", (18507, 18564), False, 'import rospy\n'), ((12068, 12111), 'rospy.loginfo', 'rospy.loginfo', (['"""State: CHARGING ==> NORMAL"""'], {}), "('State: CHARGING ==> NORMAL')\n", (12081, 12111), False, 'import rospy\n'), ((14250, 14319), 'rospy.loginfo', 'rospy.loginfo', (["('Sending battery request for pct %s' % msg.battery.pct)"], {}), "('Sending battery request for pct %s' % msg.battery.pct)\n", (14263, 14319), False, 'import rospy\n'), ((14417, 14463), 'os.path.join', 'os.path.join', (['self.slackbot_url', '"""low_battery"""'], {}), "(self.slackbot_url, 'low_battery')\n", (14429, 14463), False, 'import os\n'), ((14847, 14945), 'rospy.logwarn', 'rospy.logwarn', (["('Error communicating with Slackbot /low_battery at URL %s.' % self.\n slackbot_url)"], {}), "('Error communicating with Slackbot /low_battery at URL %s.' %\n self.slackbot_url)\n", (14860, 14945), False, 'import rospy\n'), ((15196, 15226), 'rospy.logwarn', 'rospy.logwarn', (["('Error %s.' % e)"], {}), "('Error %s.' % e)\n", (15209, 15226), False, 'import rospy\n'), ((15040, 15085), 'rospy.logwarn', 'rospy.logwarn', (["('Response text %s.' % res.text)"], {}), "('Response text %s.' % res.text)\n", (15053, 15085), False, 'import rospy\n'), ((15136, 15158), 'traceback.format_exc', 'traceback.format_exc', ([], {}), '()\n', (15156, 15158), False, 'import traceback\n')] |
montehoover/NimbusML | src/python/nimbusml/internal/entrypoints/trainers_lightgbmbinaryclassifier.py | f6be39ce9359786976429bab0ccd837e849b4ba5 | # - Generated by tools/entrypoint_compiler.py: do not edit by hand
"""
Trainers.LightGbmBinaryClassifier
"""
import numbers
from ..utils.entrypoints import EntryPoint
from ..utils.utils import try_set, unlist
def trainers_lightgbmbinaryclassifier(
training_data,
predictor_model=None,
number_of_iterations=100,
learning_rate=None,
number_of_leaves=None,
minimum_example_count_per_leaf=None,
feature_column_name='Features',
booster=None,
label_column_name='Label',
example_weight_column_name=None,
row_group_column_name=None,
normalize_features='Auto',
caching='Auto',
unbalanced_sets=False,
weight_of_positive_examples=1.0,
sigmoid=0.5,
evaluation_metric='Logloss',
maximum_bin_count_per_feature=255,
verbose=False,
silent=True,
number_of_threads=None,
early_stopping_round=0,
batch_size=1048576,
use_categorical_split=None,
handle_missing_value=True,
use_zero_as_missing_value=False,
minimum_example_count_per_group=100,
maximum_categorical_split_point_count=32,
categorical_smoothing=10.0,
l2_categorical_regularization=10.0,
seed=None,
parallel_trainer=None,
**params):
"""
**Description**
Train a LightGBM binary classification model.
:param number_of_iterations: Number of iterations. (inputs).
:param training_data: The data to be used for training (inputs).
:param learning_rate: Shrinkage rate for trees, used to prevent
over-fitting. Range: (0,1]. (inputs).
:param number_of_leaves: Maximum leaves for trees. (inputs).
:param minimum_example_count_per_leaf: Minimum number of
instances needed in a child. (inputs).
:param feature_column_name: Column to use for features (inputs).
:param booster: Which booster to use, can be gbtree, gblinear or
dart. gbtree and dart use tree based model while gblinear
uses linear function. (inputs).
:param label_column_name: Column to use for labels (inputs).
:param example_weight_column_name: Column to use for example
weight (inputs).
:param row_group_column_name: Column to use for example groupId
(inputs).
:param normalize_features: Normalize option for the feature
column (inputs).
:param caching: Whether trainer should cache input training data
(inputs).
:param unbalanced_sets: Use for binary classification when
training data is not balanced. (inputs).
:param weight_of_positive_examples: Control the balance of
positive and negative weights, useful for unbalanced classes.
A typical value to consider: sum(negative cases) /
sum(positive cases). (inputs).
:param sigmoid: Parameter for the sigmoid function. (inputs).
:param evaluation_metric: Evaluation metrics. (inputs).
:param maximum_bin_count_per_feature: Maximum number of bucket
bin for features. (inputs).
:param verbose: Verbose (inputs).
:param silent: Printing running messages. (inputs).
:param number_of_threads: Number of parallel threads used to run
LightGBM. (inputs).
:param early_stopping_round: Rounds of early stopping, 0 will
disable it. (inputs).
:param batch_size: Number of entries in a batch when loading
data. (inputs).
:param use_categorical_split: Enable categorical split or not.
(inputs).
:param handle_missing_value: Enable special handling of missing
value or not. (inputs).
:param use_zero_as_missing_value: Enable usage of zero (0) as
missing value. (inputs).
:param minimum_example_count_per_group: Minimum number of
instances per categorical group. (inputs).
:param maximum_categorical_split_point_count: Max number of
categorical thresholds. (inputs).
    :param categorical_smoothing: Laplace smoothing term in categorical
        feature split. Avoids the bias of small categories. (inputs).
:param l2_categorical_regularization: L2 Regularization for
categorical split. (inputs).
:param seed: Sets the random seed for LightGBM to use. (inputs).
:param parallel_trainer: Parallel LightGBM Learning Algorithm
(inputs).
:param predictor_model: The trained model (outputs).
"""
entrypoint_name = 'Trainers.LightGbmBinaryClassifier'
inputs = {}
outputs = {}
if number_of_iterations is not None:
inputs['NumberOfIterations'] = try_set(
obj=number_of_iterations,
none_acceptable=True,
is_of_type=numbers.Real)
if training_data is not None:
inputs['TrainingData'] = try_set(
obj=training_data,
none_acceptable=False,
is_of_type=str)
if learning_rate is not None:
inputs['LearningRate'] = try_set(
obj=learning_rate,
none_acceptable=True,
is_of_type=numbers.Real)
if number_of_leaves is not None:
inputs['NumberOfLeaves'] = try_set(
obj=number_of_leaves,
none_acceptable=True,
is_of_type=numbers.Real)
if minimum_example_count_per_leaf is not None:
inputs['MinimumExampleCountPerLeaf'] = try_set(
obj=minimum_example_count_per_leaf,
none_acceptable=True,
is_of_type=numbers.Real)
if feature_column_name is not None:
inputs['FeatureColumnName'] = try_set(
obj=feature_column_name,
none_acceptable=True,
is_of_type=str,
is_column=True)
if booster is not None:
inputs['Booster'] = try_set(
obj=booster,
none_acceptable=True,
is_of_type=dict)
if label_column_name is not None:
inputs['LabelColumnName'] = try_set(
obj=label_column_name,
none_acceptable=True,
is_of_type=str,
is_column=True)
if example_weight_column_name is not None:
inputs['ExampleWeightColumnName'] = try_set(
obj=example_weight_column_name,
none_acceptable=True,
is_of_type=str,
is_column=True)
if row_group_column_name is not None:
inputs['RowGroupColumnName'] = try_set(
obj=row_group_column_name,
none_acceptable=True,
is_of_type=str,
is_column=True)
if normalize_features is not None:
inputs['NormalizeFeatures'] = try_set(
obj=normalize_features,
none_acceptable=True,
is_of_type=str,
values=[
'No',
'Warn',
'Auto',
'Yes'])
if caching is not None:
inputs['Caching'] = try_set(
obj=caching,
none_acceptable=True,
is_of_type=str,
values=[
'Auto',
'Memory',
'None'])
if unbalanced_sets is not None:
inputs['UnbalancedSets'] = try_set(
obj=unbalanced_sets,
none_acceptable=True,
is_of_type=bool)
if weight_of_positive_examples is not None:
inputs['WeightOfPositiveExamples'] = try_set(
obj=weight_of_positive_examples,
none_acceptable=True,
is_of_type=numbers.Real)
if sigmoid is not None:
inputs['Sigmoid'] = try_set(
obj=sigmoid,
none_acceptable=True,
is_of_type=numbers.Real)
if evaluation_metric is not None:
inputs['EvaluationMetric'] = try_set(
obj=evaluation_metric,
none_acceptable=True,
is_of_type=str,
values=[
'None',
'Default',
'Logloss',
'Error',
'AreaUnderCurve'])
if maximum_bin_count_per_feature is not None:
inputs['MaximumBinCountPerFeature'] = try_set(
obj=maximum_bin_count_per_feature,
none_acceptable=True,
is_of_type=numbers.Real)
if verbose is not None:
inputs['Verbose'] = try_set(
obj=verbose,
none_acceptable=True,
is_of_type=bool)
if silent is not None:
inputs['Silent'] = try_set(
obj=silent,
none_acceptable=True,
is_of_type=bool)
if number_of_threads is not None:
inputs['NumberOfThreads'] = try_set(
obj=number_of_threads,
none_acceptable=True,
is_of_type=numbers.Real)
if early_stopping_round is not None:
inputs['EarlyStoppingRound'] = try_set(
obj=early_stopping_round,
none_acceptable=True,
is_of_type=numbers.Real)
if batch_size is not None:
inputs['BatchSize'] = try_set(
obj=batch_size,
none_acceptable=True,
is_of_type=numbers.Real)
if use_categorical_split is not None:
inputs['UseCategoricalSplit'] = try_set(
obj=use_categorical_split, none_acceptable=True, is_of_type=bool)
if handle_missing_value is not None:
inputs['HandleMissingValue'] = try_set(
obj=handle_missing_value,
none_acceptable=True,
is_of_type=bool)
if use_zero_as_missing_value is not None:
inputs['UseZeroAsMissingValue'] = try_set(
obj=use_zero_as_missing_value,
none_acceptable=True,
is_of_type=bool)
if minimum_example_count_per_group is not None:
inputs['MinimumExampleCountPerGroup'] = try_set(
obj=minimum_example_count_per_group,
none_acceptable=True,
is_of_type=numbers.Real,
valid_range={
'Inf': 0,
'Max': 2147483647})
if maximum_categorical_split_point_count is not None:
inputs['MaximumCategoricalSplitPointCount'] = try_set(
obj=maximum_categorical_split_point_count,
none_acceptable=True,
is_of_type=numbers.Real,
valid_range={
'Inf': 0,
'Max': 2147483647})
if categorical_smoothing is not None:
inputs['CategoricalSmoothing'] = try_set(
obj=categorical_smoothing,
none_acceptable=True,
is_of_type=numbers.Real, valid_range={'Min': 0.0})
if l2_categorical_regularization is not None:
inputs['L2CategoricalRegularization'] = try_set(
obj=l2_categorical_regularization,
none_acceptable=True,
is_of_type=numbers.Real, valid_range={'Min': 0.0})
if seed is not None:
inputs['Seed'] = try_set(
obj=seed,
none_acceptable=True,
is_of_type=numbers.Real)
if parallel_trainer is not None:
inputs['ParallelTrainer'] = try_set(
obj=parallel_trainer,
none_acceptable=True,
is_of_type=dict)
if predictor_model is not None:
outputs['PredictorModel'] = try_set(
obj=predictor_model, none_acceptable=False, is_of_type=str)
input_variables = {
x for x in unlist(inputs.values())
if isinstance(x, str) and x.startswith("$")}
output_variables = {
x for x in unlist(outputs.values())
if isinstance(x, str) and x.startswith("$")}
entrypoint = EntryPoint(
name=entrypoint_name, inputs=inputs, outputs=outputs,
input_variables=input_variables,
output_variables=output_variables)
return entrypoint
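# --- Illustrative usage (editorial addition; not emitted by
# tools/entrypoint_compiler.py). The '$'-prefixed strings follow the variable
# convention scanned for above: string values starting with "$" become the
# EntryPoint's input/output variables. How the resulting node is submitted to
# the experiment graph lives outside this module and is omitted here.
#
#   node = trainers_lightgbmbinaryclassifier(
#       training_data='$training_data',
#       predictor_model='$predictor_model',
#       number_of_iterations=200,
#       learning_rate=0.1,
#       feature_column_name='Features',
#       label_column_name='Label')
#   # The sets passed as input_variables / output_variables now contain
#   # '$training_data' and '$predictor_model' respectively.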
| [] |
Candida18/Job-Portal-with-Automated-Resume-Screening | Job Portal with Automated Resume Screening/gensim-4.1.2/gensim/test/test_rpmodel.py | 19d19464ad3d1714da856656753a4afdfe257b31 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Copyright (C) 2010 Radim Rehurek <[email protected]>
# Licensed under the GNU LGPL v2.1 - http://www.gnu.org/licenses/lgpl.html
"""
Automated tests for checking transformation algorithms (the models package).
"""
import logging
import unittest
import numpy as np
from gensim.corpora.mmcorpus import MmCorpus
from gensim.models import rpmodel
from gensim import matutils
from gensim.test.utils import datapath, get_tmpfile
class TestRpModel(unittest.TestCase):
def setUp(self):
self.corpus = MmCorpus(datapath('testcorpus.mm'))
def test_transform(self):
# create the transformation model
# HACK; set fixed seed so that we always get the same random matrix (and can compare against expected results)
np.random.seed(13)
model = rpmodel.RpModel(self.corpus, num_topics=2)
# transform one document
doc = list(self.corpus)[0]
transformed = model[doc]
vec = matutils.sparse2full(transformed, 2) # convert to dense vector, for easier equality tests
expected = np.array([-0.70710677, 0.70710677])
self.assertTrue(np.allclose(vec, expected)) # transformed entries must be equal up to sign
def test_persistence(self):
fname = get_tmpfile('gensim_models.tst')
model = rpmodel.RpModel(self.corpus, num_topics=2)
model.save(fname)
model2 = rpmodel.RpModel.load(fname)
self.assertEqual(model.num_topics, model2.num_topics)
self.assertTrue(np.allclose(model.projection, model2.projection))
tstvec = []
self.assertTrue(np.allclose(model[tstvec], model2[tstvec])) # try projecting an empty vector
def test_persistence_compressed(self):
fname = get_tmpfile('gensim_models.tst.gz')
model = rpmodel.RpModel(self.corpus, num_topics=2)
model.save(fname)
model2 = rpmodel.RpModel.load(fname, mmap=None)
self.assertEqual(model.num_topics, model2.num_topics)
self.assertTrue(np.allclose(model.projection, model2.projection))
tstvec = []
self.assertTrue(np.allclose(model[tstvec], model2[tstvec])) # try projecting an empty vector
if __name__ == '__main__':
logging.basicConfig(format='%(asctime)s : %(levelname)s : %(message)s', level=logging.DEBUG)
unittest.main()
| [((2240, 2336), 'logging.basicConfig', 'logging.basicConfig', ([], {'format': '"""%(asctime)s : %(levelname)s : %(message)s"""', 'level': 'logging.DEBUG'}), "(format='%(asctime)s : %(levelname)s : %(message)s',\n level=logging.DEBUG)\n", (2259, 2336), False, 'import logging\n'), ((2337, 2352), 'unittest.main', 'unittest.main', ([], {}), '()\n', (2350, 2352), False, 'import unittest\n'), ((801, 819), 'numpy.random.seed', 'np.random.seed', (['(13)'], {}), '(13)\n', (815, 819), True, 'import numpy as np\n'), ((836, 878), 'gensim.models.rpmodel.RpModel', 'rpmodel.RpModel', (['self.corpus'], {'num_topics': '(2)'}), '(self.corpus, num_topics=2)\n', (851, 878), False, 'from gensim.models import rpmodel\n'), ((995, 1031), 'gensim.matutils.sparse2full', 'matutils.sparse2full', (['transformed', '(2)'], {}), '(transformed, 2)\n', (1015, 1031), False, 'from gensim import matutils\n'), ((1106, 1141), 'numpy.array', 'np.array', (['[-0.70710677, 0.70710677]'], {}), '([-0.70710677, 0.70710677])\n', (1114, 1141), True, 'import numpy as np\n'), ((1291, 1323), 'gensim.test.utils.get_tmpfile', 'get_tmpfile', (['"""gensim_models.tst"""'], {}), "('gensim_models.tst')\n", (1302, 1323), False, 'from gensim.test.utils import datapath, get_tmpfile\n'), ((1340, 1382), 'gensim.models.rpmodel.RpModel', 'rpmodel.RpModel', (['self.corpus'], {'num_topics': '(2)'}), '(self.corpus, num_topics=2)\n', (1355, 1382), False, 'from gensim.models import rpmodel\n'), ((1426, 1453), 'gensim.models.rpmodel.RpModel.load', 'rpmodel.RpModel.load', (['fname'], {}), '(fname)\n', (1446, 1453), False, 'from gensim.models import rpmodel\n'), ((1772, 1807), 'gensim.test.utils.get_tmpfile', 'get_tmpfile', (['"""gensim_models.tst.gz"""'], {}), "('gensim_models.tst.gz')\n", (1783, 1807), False, 'from gensim.test.utils import datapath, get_tmpfile\n'), ((1824, 1866), 'gensim.models.rpmodel.RpModel', 'rpmodel.RpModel', (['self.corpus'], {'num_topics': '(2)'}), '(self.corpus, num_topics=2)\n', (1839, 1866), False, 'from gensim.models import rpmodel\n'), ((1910, 1948), 'gensim.models.rpmodel.RpModel.load', 'rpmodel.RpModel.load', (['fname'], {'mmap': 'None'}), '(fname, mmap=None)\n', (1930, 1948), False, 'from gensim.models import rpmodel\n'), ((574, 599), 'gensim.test.utils.datapath', 'datapath', (['"""testcorpus.mm"""'], {}), "('testcorpus.mm')\n", (582, 599), False, 'from gensim.test.utils import datapath, get_tmpfile\n'), ((1166, 1192), 'numpy.allclose', 'np.allclose', (['vec', 'expected'], {}), '(vec, expected)\n', (1177, 1192), True, 'import numpy as np\n'), ((1540, 1588), 'numpy.allclose', 'np.allclose', (['model.projection', 'model2.projection'], {}), '(model.projection, model2.projection)\n', (1551, 1588), True, 'import numpy as np\n'), ((1634, 1676), 'numpy.allclose', 'np.allclose', (['model[tstvec]', 'model2[tstvec]'], {}), '(model[tstvec], model2[tstvec])\n', (1645, 1676), True, 'import numpy as np\n'), ((2035, 2083), 'numpy.allclose', 'np.allclose', (['model.projection', 'model2.projection'], {}), '(model.projection, model2.projection)\n', (2046, 2083), True, 'import numpy as np\n'), ((2129, 2171), 'numpy.allclose', 'np.allclose', (['model[tstvec]', 'model2[tstvec]'], {}), '(model[tstvec], model2[tstvec])\n', (2140, 2171), True, 'import numpy as np\n')] |
tianhaoz95/mangekyo | playground/tianhaoz95/gan_getting_started/cgan_model.py | fd2b151538d0c15cca60e05a844baffcbe08e68c | import tensorflow as tf
from tensorflow import keras
class CondGeneratorModel(keras.Model):
def __init__(self):
super(CondGeneratorModel, self).__init__()
        # Project the latent features to 7*7*256 and reshape to (7, 7, 256)
self.dense_1 = keras.layers.Dense(7*7*256)
self.reshape_1 = keras.layers.Reshape((7, 7, 256))
        # Embed the integer class label (0-9) and project it to 7*7*256
self.embedder = keras.layers.Embedding(10, 100)
self.dense_2 = keras.layers.Dense(7*7*256)
# From (7,7,256) to (7,7,128)
self.convt_1 = keras.layers.Conv2DTranspose(
128, (5, 5), strides=1, padding='same', use_bias=False)
self.convt_bn_1 = keras.layers.BatchNormalization()
self.convt_relu_1 = keras.layers.LeakyReLU()
# From (7,7,128) to (14,14,64)
self.convt_2 = keras.layers.Conv2DTranspose(
64, (5, 5), strides=2, padding='same', use_bias=False)
self.convt_bn_2 = keras.layers.BatchNormalization()
self.convt_relu_2 = keras.layers.LeakyReLU()
# From (14,14,64) to (28,28,1)
self.convt_out = keras.layers.Conv2DTranspose(
1, (5, 5), strides=2, padding='same', use_bias=False)
def call(self, inputs):
feat_x = inputs[0]
label = inputs[2]
        # Expand the label input to the same size as the latent features
label_x = self.embedder(label)
label_x = self.dense_2(label_x)
label_x = tf.squeeze(label_x, 1)
        # Project the latent vector to 7*7*256 features
feat_x = self.dense_1(feat_x)
# Combine latent feature and label input
x = tf.math.multiply(feat_x, label_x)
x = self.reshape_1(x)
# From (7,7,256) to (7,7,128)
x = self.convt_1(x)
x = self.convt_bn_1(x)
x = self.convt_relu_1(x)
# From (7,7,128) to (14,14,64)
x = self.convt_2(x)
x = self.convt_bn_2(x)
x = self.convt_relu_2(x)
# From (14,14,64) to (28,28,1)
x = self.convt_out(x)
return [x, None, label]
class CondDiscriminatorModel(keras.Model):
def __init__(self):
super(CondDiscriminatorModel, self).__init__()
self.embedder = keras.layers.Embedding(10, 100)
self.expand_layer = keras.layers.Dense(28*28*1)
self.reshape_layer = keras.layers.Reshape((28, 28, 1))
self.conv_1 = keras.layers.Conv2D(
64, (5, 5), strides=2, padding='same', input_shape=(28, 28, 1))
self.relu_1 = keras.layers.LeakyReLU()
self.drop_1 = keras.layers.Dropout(0.3)
self.conv_2 = keras.layers.Conv2D(
128, (5, 5), strides=2, padding='same')
self.relu_2 = keras.layers.LeakyReLU()
self.drop_2 = keras.layers.Dropout(0.3)
self.flatten = keras.layers.Flatten()
self.out = keras.layers.Dense(1)
def call(self, inputs):
images_x = inputs[0]
labels = inputs[2]
labels_x = self.embedder(labels)
labels_x = self.expand_layer(labels_x)
labels_x = self.reshape_layer(labels_x)
x = tf.math.multiply(images_x, labels_x)
x = self.conv_1(x)
x = self.relu_1(x)
x = self.drop_1(x)
x = self.conv_2(x)
x = self.relu_2(x)
x = self.drop_2(x)
x = self.flatten(x)
x = self.out(x)
return x
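# Minimal smoke test (editorial addition, not part of the original file):
# instantiates both conditional models and runs a single forward pass on
# random data, assuming TensorFlow 2.x eager execution. call() only reads
# list slots 0 and 2, so a dummy tensor fills the unused middle slot.
if __name__ == '__main__':
    noise = tf.random.normal([4, 100])                         # latent vectors
    labels = tf.random.uniform([4, 1], 0, 10, dtype=tf.int32)  # class ids 0-9
    dummy = tf.zeros([4, 1])                                   # ignored slot
    generator = CondGeneratorModel()
    fake_images, _, fake_labels = generator([noise, dummy, labels])
    discriminator = CondDiscriminatorModel()
    logits = discriminator([fake_images, dummy, fake_labels])
    print(fake_images.shape, logits.shape)  # expect (4, 28, 28, 1) and (4, 1)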
| [((250, 281), 'tensorflow.keras.layers.Dense', 'keras.layers.Dense', (['(7 * 7 * 256)'], {}), '(7 * 7 * 256)\n', (268, 281), False, 'from tensorflow import keras\n'), ((303, 336), 'tensorflow.keras.layers.Reshape', 'keras.layers.Reshape', (['(7, 7, 256)'], {}), '((7, 7, 256))\n', (323, 336), False, 'from tensorflow import keras\n'), ((395, 426), 'tensorflow.keras.layers.Embedding', 'keras.layers.Embedding', (['(10)', '(100)'], {}), '(10, 100)\n', (417, 426), False, 'from tensorflow import keras\n'), ((450, 481), 'tensorflow.keras.layers.Dense', 'keras.layers.Dense', (['(7 * 7 * 256)'], {}), '(7 * 7 * 256)\n', (468, 481), False, 'from tensorflow import keras\n'), ((539, 627), 'tensorflow.keras.layers.Conv2DTranspose', 'keras.layers.Conv2DTranspose', (['(128)', '(5, 5)'], {'strides': '(1)', 'padding': '"""same"""', 'use_bias': '(False)'}), "(128, (5, 5), strides=1, padding='same',\n use_bias=False)\n", (567, 627), False, 'from tensorflow import keras\n'), ((663, 696), 'tensorflow.keras.layers.BatchNormalization', 'keras.layers.BatchNormalization', ([], {}), '()\n', (694, 696), False, 'from tensorflow import keras\n'), ((725, 749), 'tensorflow.keras.layers.LeakyReLU', 'keras.layers.LeakyReLU', ([], {}), '()\n', (747, 749), False, 'from tensorflow import keras\n'), ((812, 899), 'tensorflow.keras.layers.Conv2DTranspose', 'keras.layers.Conv2DTranspose', (['(64)', '(5, 5)'], {'strides': '(2)', 'padding': '"""same"""', 'use_bias': '(False)'}), "(64, (5, 5), strides=2, padding='same',\n use_bias=False)\n", (840, 899), False, 'from tensorflow import keras\n'), ((935, 968), 'tensorflow.keras.layers.BatchNormalization', 'keras.layers.BatchNormalization', ([], {}), '()\n', (966, 968), False, 'from tensorflow import keras\n'), ((997, 1021), 'tensorflow.keras.layers.LeakyReLU', 'keras.layers.LeakyReLU', ([], {}), '()\n', (1019, 1021), False, 'from tensorflow import keras\n'), ((1086, 1173), 'tensorflow.keras.layers.Conv2DTranspose', 'keras.layers.Conv2DTranspose', (['(1)', '(5, 5)'], {'strides': '(2)', 'padding': '"""same"""', 'use_bias': '(False)'}), "(1, (5, 5), strides=2, padding='same', use_bias\n =False)\n", (1114, 1173), False, 'from tensorflow import keras\n'), ((1423, 1445), 'tensorflow.squeeze', 'tf.squeeze', (['label_x', '(1)'], {}), '(label_x, 1)\n', (1433, 1445), True, 'import tensorflow as tf\n'), ((1589, 1622), 'tensorflow.math.multiply', 'tf.math.multiply', (['feat_x', 'label_x'], {}), '(feat_x, label_x)\n', (1605, 1622), True, 'import tensorflow as tf\n'), ((2163, 2194), 'tensorflow.keras.layers.Embedding', 'keras.layers.Embedding', (['(10)', '(100)'], {}), '(10, 100)\n', (2185, 2194), False, 'from tensorflow import keras\n'), ((2223, 2254), 'tensorflow.keras.layers.Dense', 'keras.layers.Dense', (['(28 * 28 * 1)'], {}), '(28 * 28 * 1)\n', (2241, 2254), False, 'from tensorflow import keras\n'), ((2280, 2313), 'tensorflow.keras.layers.Reshape', 'keras.layers.Reshape', (['(28, 28, 1)'], {}), '((28, 28, 1))\n', (2300, 2313), False, 'from tensorflow import keras\n'), ((2336, 2423), 'tensorflow.keras.layers.Conv2D', 'keras.layers.Conv2D', (['(64)', '(5, 5)'], {'strides': '(2)', 'padding': '"""same"""', 'input_shape': '(28, 28, 1)'}), "(64, (5, 5), strides=2, padding='same', input_shape=(28,\n 28, 1))\n", (2355, 2423), False, 'from tensorflow import keras\n'), ((2455, 2479), 'tensorflow.keras.layers.LeakyReLU', 'keras.layers.LeakyReLU', ([], {}), '()\n', (2477, 2479), False, 'from tensorflow import keras\n'), ((2502, 2527), 'tensorflow.keras.layers.Dropout', 'keras.layers.Dropout', (['(0.3)'], 
{}), '(0.3)\n', (2522, 2527), False, 'from tensorflow import keras\n'), ((2550, 2609), 'tensorflow.keras.layers.Conv2D', 'keras.layers.Conv2D', (['(128)', '(5, 5)'], {'strides': '(2)', 'padding': '"""same"""'}), "(128, (5, 5), strides=2, padding='same')\n", (2569, 2609), False, 'from tensorflow import keras\n'), ((2645, 2669), 'tensorflow.keras.layers.LeakyReLU', 'keras.layers.LeakyReLU', ([], {}), '()\n', (2667, 2669), False, 'from tensorflow import keras\n'), ((2692, 2717), 'tensorflow.keras.layers.Dropout', 'keras.layers.Dropout', (['(0.3)'], {}), '(0.3)\n', (2712, 2717), False, 'from tensorflow import keras\n'), ((2741, 2763), 'tensorflow.keras.layers.Flatten', 'keras.layers.Flatten', ([], {}), '()\n', (2761, 2763), False, 'from tensorflow import keras\n'), ((2783, 2804), 'tensorflow.keras.layers.Dense', 'keras.layers.Dense', (['(1)'], {}), '(1)\n', (2801, 2804), False, 'from tensorflow import keras\n'), ((3038, 3074), 'tensorflow.math.multiply', 'tf.math.multiply', (['images_x', 'labels_x'], {}), '(images_x, labels_x)\n', (3054, 3074), True, 'import tensorflow as tf\n')] |
miracum/ahd2fhir | ahd2fhir/utils/resource_handler.py | 0c1bf3e0d86278145f9f1fa5c99a121f8e961d5f | import base64
import datetime
import logging
import os
import time
from typing import List, Tuple
import structlog
import tenacity
from averbis import Pipeline
from fhir.resources.bundle import Bundle
from fhir.resources.codeableconcept import CodeableConcept
from fhir.resources.composition import Composition, CompositionSection
from fhir.resources.documentreference import DocumentReference
from fhir.resources.fhirtypes import DateTime
from fhir.resources.identifier import Identifier
from fhir.resources.reference import Reference
from fhir.resources.resource import Resource
from prometheus_client import Counter, Histogram, Summary
from tenacity.after import after_log
from ahd2fhir.mappers import ahd_to_condition, ahd_to_medication_statement
from ahd2fhir.utils.bundle_builder import BundleBuilder
from ahd2fhir.utils.custom_mappers import custom_mappers, mapper_functions
from ahd2fhir.utils.device_builder import build_device
from ahd2fhir.utils.fhir_utils import sha256_of_identifier
MAPPING_FAILURES_COUNTER = Counter("mapping_failures", "Exceptions during mapping")
MAPPING_DURATION_SUMMARY = Histogram(
"map_duration_seconds",
"Time spent mapping",
buckets=(
0.05,
0.1,
0.5,
1.0,
2.0,
3.0,
5.0,
8.0,
13.0,
21.0,
34.0,
55.0,
"inf",
),
)
EXTRACTED_RESOURCES_COUNT_SUMMARY = Summary(
"extracted_resources", "Number of extracted resources for each processed document"
)
DOCUMENT_LENGTH_SUMMARY = Summary(
"document_length",
"Length of each processed document's text in charactes",
)
DISCHARGE_SUMMARY_CONCEPT_TEXT = (
"Clinical document Kind of document from LOINC Document Ontology"
)
DISCHARGE_SUMMARY_CONCEPT = CodeableConcept(
**{
"coding": [
{
"system": "http://loinc.org",
"code": "74477-1",
"display": DISCHARGE_SUMMARY_CONCEPT_TEXT,
},
],
"text": DISCHARGE_SUMMARY_CONCEPT_TEXT,
}
)
AHD_TYPE_DOCUMENT_ANNOTATION = "de.averbis.types.health.DocumentAnnotation"
AHD_TYPE_MEDICATION = "de.averbis.types.health.Medication"
AHD_TYPE_DIAGNOSIS = "de.averbis.types.health.Diagnosis"
log = structlog.get_logger()
class TransientError(Exception):
pass
class ResourceHandler:
def __init__(self, averbis_pipeline: Pipeline):
self.pipeline = averbis_pipeline
self.bundle_builder = BundleBuilder()
@MAPPING_FAILURES_COUNTER.count_exceptions()
@MAPPING_DURATION_SUMMARY.time()
def handle_documents(self, document_references: List[DocumentReference]) -> Bundle:
"""
Process a list of DocumentReferences
"""
all_resources = []
bundle_id = None
for document_reference in document_references:
resources_from_document = self._process_documentreference(
document_reference
)
composition = self._build_composition(
document_reference, resources_from_document
)
bundle_id = composition.id
all_resources.extend(resources_from_document)
all_resources.append(composition)
EXTRACTED_RESOURCES_COUNT_SUMMARY.observe(len(all_resources))
result_bundle = self.bundle_builder.build_from_resources(
all_resources, bundle_id
)
return result_bundle
def handle_bundle(self, bundle: Bundle):
"""
Process all FHIR DocumentReference resources from a given bundle
"""
document_references = []
for entry in bundle.entry:
if entry.resource.resource_type == "DocumentReference":
document_references.append(entry.resource)
return self.handle_documents(document_references)
def _build_composition(
self, document_reference: DocumentReference, all_resources: List[Resource]
):
composition_type = (
document_reference.type
if document_reference.type is not None
else DISCHARGE_SUMMARY_CONCEPT
)
composition_subject = document_reference.subject
composition_category = document_reference.category
composition_encounter = None
if document_reference.context is not None:
if len(document_reference.context.encounter) > 1:
log.warning(
"DocumentReference contains more than one encounter. "
+ "Using the first."
)
composition_encounter = document_reference.context.encounter[0]
composition_author = None
composition_sections = []
for resource in all_resources:
resource_type = resource.resource_type
if resource_type == "Device":
author = Reference.construct()
author.reference = f"Device/{resource.id}"
author.type = "Device"
composition_author = author
continue
            # Create a resource-specific section if none exists yet,
            # otherwise select the existing section
if not any(
section.title == resource_type for section in composition_sections
):
resource_section = CompositionSection.construct()
resource_section.title = resource_type
resource_section.entry = []
composition_sections.append(resource_section)
ind = len(composition_sections) - 1
else:
ind = [
ind
for ind, section in enumerate(composition_sections)
if section.title == resource_type
][0]
entry_reference = Reference.construct()
entry_reference.reference = resource_type + "/" + resource.id
composition_sections[ind].entry.append(entry_reference)
if composition_author is None:
composition_author = Reference(**{"display": "Averbis Health Discovery"})
composition_identifier = (
self._build_composition_identifier_from_documentreference(
document_reference
)
)
composition = Composition(
**{
"title": "NLP FHIR Results " + time.strftime("%Y-%m-%dT%H:%M"),
"status": "final",
"date": DateTime.validate(datetime.datetime.now(datetime.timezone.utc)),
"type": composition_type,
"identifier": composition_identifier,
"id": sha256_of_identifier(composition_identifier),
"subject": composition_subject,
"category": composition_category,
"encounter": composition_encounter,
"author": [composition_author],
"section": composition_sections,
}
)
return composition
def _process_documentreference(self, document_reference: DocumentReference):
log = structlog.get_logger().bind(
document_id=f"{document_reference.get_resource_type()}/"
+ f"{document_reference.id}"
)
# Text extraction and text analysis
(text, content_type, lang) = self._extract_text_from_resource(
document_reference
)
DOCUMENT_LENGTH_SUMMARY.observe(len(text))
averbis_result = None
try:
averbis_result = self._perform_text_analysis(
text=text, mime_type=content_type, lang=lang
)
except Exception as exc:
log.exception(exc)
log.error("Failed to perform text analysis", error=exc)
raise TransientError(exc)
total_results = []
# Building FHIR resources as results
medication_statement_lists = []
for val in averbis_result:
if val["type"] == AHD_TYPE_DIAGNOSIS:
mapped_condition = ahd_to_condition.get_fhir_condition(
val, document_reference
)
if mapped_condition is not None:
total_results.append(mapped_condition)
if val["type"] == AHD_TYPE_DOCUMENT_ANNOTATION:
device = build_device(val)
if device is not None:
total_results.append(device)
if val["type"] == AHD_TYPE_MEDICATION:
statement = ahd_to_medication_statement.get_fhir_medication_statement(
val, document_reference
)
if statement is not None:
medication_statement_lists.append(statement)
# if custom_mappers_enabled
if os.getenv("CUSTOM_MAPPERS_ENABLED", "False").lower() in ["true", "1"]:
total_results.extend(custom_mappers(val, document_reference))
medication_results = []
medication_statement_results = []
for medication_statement_list in medication_statement_lists:
for medication_statement_dict in medication_statement_list:
medication_results.append(medication_statement_dict["medication"])
medication_statement_results.append(
medication_statement_dict["statement"]
)
# de-duplicate any Medication and MedicationStatement resources
medication_resources_unique = {m.id: m for m in medication_results}.values()
medication_statements_unique = {
m.id: m for m in medication_statement_results
}.values()
total_results.extend(medication_resources_unique)
total_results.extend(medication_statements_unique)
return total_results
def _extract_text_from_resource(
self,
document_reference: DocumentReference,
    ) -> Tuple[str, str, str]:
valid_content = [
content
for content in document_reference.content
if content.attachment.data is not None
]
if len(valid_content) == 0:
raise ValueError(
f"Document {document_reference.id} contains no valid content"
)
if len(valid_content) > 1:
raise ValueError(
f"Document {document_reference.id} contains more than one attachment"
)
content = valid_content[0]
language = None
if content.attachment.language:
language = content.attachment.language.lower().split("-")[0]
return (
base64.b64decode(content.attachment.data).decode("utf8"),
content.attachment.contentType,
language,
)
@tenacity.retry(
stop=tenacity.stop.stop_after_attempt(10),
wait=tenacity.wait.wait_fixed(5)
+ tenacity.wait.wait_random_exponential(multiplier=1, max=30),
after=after_log(logging.getLogger(), logging.WARNING),
reraise=True,
)
def _perform_text_analysis(
self, text: str, mime_type: str = "text/plain", lang: str = None
):
types = ",".join(
[
AHD_TYPE_DIAGNOSIS,
AHD_TYPE_MEDICATION,
AHD_TYPE_DOCUMENT_ANNOTATION,
*mapper_functions.keys(),
]
)
analyse_args = {"language": lang, "annotation_types": types}
try:
if mime_type == "text/html":
return self.pipeline.analyse_html(text, **analyse_args)
else:
return self.pipeline.analyse_text(text, **analyse_args)
except Exception as exc:
log.exception(exc)
log.error("Text analysis failed")
raise exc
def _build_composition_identifier_from_documentreference(
self,
doc_ref: DocumentReference,
):
"""
construct a hopefully unqiue identifier for the condition from
the document identifier as well as the offset into the text
and the unique id of the annotation
"""
doc_ref_identifier = None
if doc_ref.identifier is None or len(doc_ref.identifier) == 0:
log.warning(
"No identifier specified on the document. "
+ "Trying to fall-back to the DocumentReference.id"
)
doc_ref_identifier = doc_ref.id
else:
if len(doc_ref.identifier) > 1:
log.warning(
"More than one identifier specified on the document. "
+ "Using the first occurrence."
)
doc_ref_identifier = doc_ref.identifier[0].value
composition_identifier_system = (
"https://fhir.miracum.org/nlp/identifiers/ahd-analysis-result-composition"
)
composition_identifier_value = f"{doc_ref_identifier}_ahd-analysis-result"
return Identifier(
**{
"system": composition_identifier_system,
"value": composition_identifier_value,
}
)
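# --- Illustrative wiring sketch (editorial addition, not part of this module).
# It assumes an already-configured averbis ``Pipeline`` instance (construction
# and authentication omitted) and a JSON file containing a FHIR Bundle of
# DocumentReference resources; both names below are placeholders.
#
#   handler = ResourceHandler(pipeline)
#   bundle = Bundle.parse_file("document-references.bundle.json")
#   result_bundle = handler.handle_bundle(bundle)
#   print(result_bundle.json(indent=2))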
| [((1026, 1082), 'prometheus_client.Counter', 'Counter', (['"""mapping_failures"""', '"""Exceptions during mapping"""'], {}), "('mapping_failures', 'Exceptions during mapping')\n", (1033, 1082), False, 'from prometheus_client import Counter, Histogram, Summary\n'), ((1110, 1251), 'prometheus_client.Histogram', 'Histogram', (['"""map_duration_seconds"""', '"""Time spent mapping"""'], {'buckets': "(0.05, 0.1, 0.5, 1.0, 2.0, 3.0, 5.0, 8.0, 13.0, 21.0, 34.0, 55.0, 'inf')"}), "('map_duration_seconds', 'Time spent mapping', buckets=(0.05, 0.1,\n 0.5, 1.0, 2.0, 3.0, 5.0, 8.0, 13.0, 21.0, 34.0, 55.0, 'inf'))\n", (1119, 1251), False, 'from prometheus_client import Counter, Histogram, Summary\n'), ((1410, 1505), 'prometheus_client.Summary', 'Summary', (['"""extracted_resources"""', '"""Number of extracted resources for each processed document"""'], {}), "('extracted_resources',\n 'Number of extracted resources for each processed document')\n", (1417, 1505), False, 'from prometheus_client import Counter, Histogram, Summary\n'), ((1534, 1621), 'prometheus_client.Summary', 'Summary', (['"""document_length"""', '"""Length of each processed document\'s text in charactes"""'], {}), '(\'document_length\',\n "Length of each processed document\'s text in charactes")\n', (1541, 1621), False, 'from prometheus_client import Counter, Histogram, Summary\n'), ((1765, 1938), 'fhir.resources.codeableconcept.CodeableConcept', 'CodeableConcept', ([], {}), "(**{'coding': [{'system': 'http://loinc.org', 'code':\n '74477-1', 'display': DISCHARGE_SUMMARY_CONCEPT_TEXT}], 'text':\n DISCHARGE_SUMMARY_CONCEPT_TEXT})\n", (1780, 1938), False, 'from fhir.resources.codeableconcept import CodeableConcept\n'), ((2246, 2268), 'structlog.get_logger', 'structlog.get_logger', ([], {}), '()\n', (2266, 2268), False, 'import structlog\n'), ((2461, 2476), 'ahd2fhir.utils.bundle_builder.BundleBuilder', 'BundleBuilder', ([], {}), '()\n', (2474, 2476), False, 'from ahd2fhir.utils.bundle_builder import BundleBuilder\n'), ((12951, 13049), 'fhir.resources.identifier.Identifier', 'Identifier', ([], {}), "(**{'system': composition_identifier_system, 'value':\n composition_identifier_value})\n", (12961, 13049), False, 'from fhir.resources.identifier import Identifier\n'), ((5817, 5838), 'fhir.resources.reference.Reference.construct', 'Reference.construct', ([], {}), '()\n', (5836, 5838), False, 'from fhir.resources.reference import Reference\n'), ((6055, 6107), 'fhir.resources.reference.Reference', 'Reference', ([], {}), "(**{'display': 'Averbis Health Discovery'})\n", (6064, 6107), False, 'from fhir.resources.reference import Reference\n'), ((10778, 10814), 'tenacity.stop.stop_after_attempt', 'tenacity.stop.stop_after_attempt', (['(10)'], {}), '(10)\n', (10810, 10814), False, 'import tenacity\n'), ((4856, 4877), 'fhir.resources.reference.Reference.construct', 'Reference.construct', ([], {}), '()\n', (4875, 4877), False, 'from fhir.resources.reference import Reference\n'), ((5327, 5357), 'fhir.resources.composition.CompositionSection.construct', 'CompositionSection.construct', ([], {}), '()\n', (5355, 5357), False, 'from fhir.resources.composition import Composition, CompositionSection\n'), ((7088, 7110), 'structlog.get_logger', 'structlog.get_logger', ([], {}), '()\n', (7108, 7110), False, 'import structlog\n'), ((8029, 8089), 'ahd2fhir.mappers.ahd_to_condition.get_fhir_condition', 'ahd_to_condition.get_fhir_condition', (['val', 'document_reference'], {}), '(val, document_reference)\n', (8064, 8089), False, 'from ahd2fhir.mappers import 
ahd_to_condition, ahd_to_medication_statement\n'), ((8322, 8339), 'ahd2fhir.utils.device_builder.build_device', 'build_device', (['val'], {}), '(val)\n', (8334, 8339), False, 'from ahd2fhir.utils.device_builder import build_device\n'), ((8508, 8594), 'ahd2fhir.mappers.ahd_to_medication_statement.get_fhir_medication_statement', 'ahd_to_medication_statement.get_fhir_medication_statement', (['val', 'document_reference'], {}), '(val,\n document_reference)\n', (8565, 8594), False, 'from ahd2fhir.mappers import ahd_to_condition, ahd_to_medication_statement\n'), ((10829, 10856), 'tenacity.wait.wait_fixed', 'tenacity.wait.wait_fixed', (['(5)'], {}), '(5)\n', (10853, 10856), False, 'import tenacity\n'), ((10867, 10926), 'tenacity.wait.wait_random_exponential', 'tenacity.wait.wait_random_exponential', ([], {'multiplier': '(1)', 'max': '(30)'}), '(multiplier=1, max=30)\n', (10904, 10926), False, 'import tenacity\n'), ((10952, 10971), 'logging.getLogger', 'logging.getLogger', ([], {}), '()\n', (10969, 10971), False, 'import logging\n'), ((6648, 6692), 'ahd2fhir.utils.fhir_utils.sha256_of_identifier', 'sha256_of_identifier', (['composition_identifier'], {}), '(composition_identifier)\n', (6668, 6692), False, 'from ahd2fhir.utils.fhir_utils import sha256_of_identifier\n'), ((8900, 8939), 'ahd2fhir.utils.custom_mappers.custom_mappers', 'custom_mappers', (['val', 'document_reference'], {}), '(val, document_reference)\n', (8914, 8939), False, 'from ahd2fhir.utils.custom_mappers import custom_mappers, mapper_functions\n'), ((10609, 10650), 'base64.b64decode', 'base64.b64decode', (['content.attachment.data'], {}), '(content.attachment.data)\n', (10625, 10650), False, 'import base64\n'), ((11307, 11330), 'ahd2fhir.utils.custom_mappers.mapper_functions.keys', 'mapper_functions.keys', ([], {}), '()\n', (11328, 11330), False, 'from ahd2fhir.utils.custom_mappers import custom_mappers, mapper_functions\n'), ((6373, 6404), 'time.strftime', 'time.strftime', (['"""%Y-%m-%dT%H:%M"""'], {}), "('%Y-%m-%dT%H:%M')\n", (6386, 6404), False, 'import time\n'), ((6483, 6527), 'datetime.datetime.now', 'datetime.datetime.now', (['datetime.timezone.utc'], {}), '(datetime.timezone.utc)\n', (6504, 6527), False, 'import datetime\n'), ((8792, 8836), 'os.getenv', 'os.getenv', (['"""CUSTOM_MAPPERS_ENABLED"""', '"""False"""'], {}), "('CUSTOM_MAPPERS_ENABLED', 'False')\n", (8801, 8836), False, 'import os\n')] |
Infinityloopsistemas/SIVA | maestros/lookups.py | 92b6c82f018d39ef405989639974d1f2757476ed | # -*- coding: utf-8 -*-
from selectable.decorators import login_required
from maestros.models import TiposMedidasActuacion, TiposLimitesCriticos, TiposMedidasVigilancia, TiposTemperaturas, TiposFrecuencias, Zonas, Terceros, CatalogoEquipos, Personal, Consumibles, ParametrosAnalisis, Actividades, Etapas, Peligros, TiposCursos, TiposLegislacion, Unidades, Firmas, HorarioTurnos
from selectable.base import ModelLookup
from selectable.registry import registry
from maestros_generales.models import Empresas
from siva import settings
__author__ = 'julian'
@login_required
class TPActuacionPrevLookup(ModelLookup):
model = TiposMedidasActuacion
search_fields = ('denominacion__icontains',)
def get_query(self, request, term):
results = super(TPActuacionPrevLookup, self).get_query(request, term)
results = results.filter(tipo="P",empresa__in=Empresas.objects.filter(usuario__username=request.user))
return results
def get_item_value(self, item):
return item.denominacion
def get_item_label(self, item):
return "%s" % (item.denominacion)
registry.register(TPActuacionPrevLookup)
@login_required
class TPActuacionCorrLookup(ModelLookup):
model = TiposMedidasActuacion
search_fields = ('denominacion__icontains',)
def get_query(self, request, term):
results = super(TPActuacionCorrLookup, self).get_query(request, term)
results = results.filter(tipo="C",empresa__in=Empresas.objects.filter(usuario__username=request.user))
return results
def get_item_value(self, item):
return item.denominacion
def get_item_label(self, item):
return "%s" % (item.denominacion)
registry.register(TPActuacionCorrLookup)
@login_required
class TPLimitesCritLookup(ModelLookup):
model = TiposLimitesCriticos
search_fields = ('denominacion__icontains',)
def get_query(self, request, term):
results = super(TPLimitesCritLookup, self).get_query(request, term)
results = results.filter(empresa__in=Empresas.objects.filter(usuario__username=request.user))
return results
def get_item_value(self, item):
return item.denominacion
def get_item_label(self, item):
return "%s" % (item.denominacion)
registry.register(TPLimitesCritLookup)
@login_required
class ActividadesLookup(ModelLookup):
model = Actividades
search_fields = ('denominacion__icontains',)
def get_query(self, request, term):
results = super(ActividadesLookup, self).get_query(request, term)
results = results.filter(empresa__in=Empresas.objects.filter(usuario__username=request.user))
return results
def get_item_value(self, item):
return item.denominacion
def get_item_label(self, item):
return "%s" % (item.denominacion)
registry.register(ActividadesLookup)
@login_required
class TipoMedidasVigilanciaLookup(ModelLookup):
model = TiposMedidasVigilancia
search_fields = ('denominacion__icontains',)
def get_query(self, request, term):
results = super(TipoMedidasVigilanciaLookup, self).get_query(request, term)
results = results.filter(empresa__in=Empresas.objects.filter(usuario__username=request.user))
return results
def get_item_value(self, item):
return item.denominacion
def get_item_label(self, item):
return "%s" % (item.denominacion)
registry.register(TipoMedidasVigilanciaLookup)
@login_required
class TiposTemperaturasLookup(ModelLookup):
model = TiposTemperaturas
search_fields = ('denominacion__icontains',)
def get_query(self, request, term):
results = super(TiposTemperaturasLookup, self).get_query(request, term)
results = results.filter(empresa__in=Empresas.objects.filter(usuario__username=request.user))
return results
def get_item_value(self, item):
return item.denominacion
def get_item_label(self, item):
return "%s" % (item.denominacion)
registry.register(TiposTemperaturasLookup)
@login_required
class TiposFrecuenciasLookup(ModelLookup):
model = TiposFrecuencias
search_fields = ('denominacion__icontains',)
def get_query(self, request, term):
results = super(TiposFrecuenciasLookup, self).get_query(request, term)
results = results.filter(empresa__in=Empresas.objects.filter(usuario__username=request.user))
return results
def get_item_value(self, item):
return item.denominacion
def get_item_label(self, item):
return "%s" % (item.denominacion)
registry.register(TiposFrecuenciasLookup)
@login_required
class ZonasLookup(ModelLookup):
model = Zonas
search_fields = ('denominacion__icontains',)
def get_query(self, request, term):
results = super(ZonasLookup, self).get_query(request, term)
results = results.filter(empresa__in=Empresas.objects.filter(usuario__username=request.user))
return results
def get_item_value(self, item):
return item.denominacion
def get_item_label(self, item):
return "%s" % (item.denominacion)
registry.register(ZonasLookup)
@login_required
class TercerosLookup(ModelLookup):
model = Terceros
search_fields = ('denominacion__icontains',)
def get_query(self, request, term):
results = super(TercerosLookup, self).get_query(request, term)
results = results.filter(empresa__in=Empresas.objects.filter(usuario__username=request.user))
return results
def get_item_value(self, item):
return item.denominacion
def get_item_label(self, item):
return "%s" % (item.denominacion)
registry.register(TercerosLookup)
@login_required
class TercerosTiposLookup(ModelLookup):
model = Terceros
search_fields = ('denominacion__icontains',)
def get_query(self, request, term):
results = super(TercerosTiposLookup, self).get_query(request, term)
results = results.filter(tipotercero__descripcion=settings.ASESORSANITARIO, empresa__in=Empresas.objects.filter(usuario__username=request.user))
return results
def get_item_value(self, item):
return item.denominacion
def get_item_label(self, item):
return "%s" % (item.denominacion)
registry.register(TercerosTiposLookup)
@login_required
class CatalogoEquiposLookup(ModelLookup):
model = CatalogoEquipos
search_fields = ('denominacion__icontains',)
def get_query(self, request, term):
results = super(CatalogoEquiposLookup, self).get_query(request, term)
results = results.filter(empresa__in=Empresas.objects.filter(usuario__username=request.user))
return results
def get_item_value(self, item):
return item.denominacion
def get_item_label(self, item):
return "%s" % (item.denominacion)
registry.register(CatalogoEquiposLookup)
@login_required
class PersonalLookup(ModelLookup):
model = Personal
search_fields = ('apellidos__icontains',)
def get_query(self, request, term):
results = super(PersonalLookup, self).get_query(request, term)
results = results.filter(empresa__in=Empresas.objects.filter(usuario__username=request.user))
return results
def get_item_value(self, item):
return item.apellidos
def get_item_label(self, item):
return "%s %s" % (item.apellidos, item.nombres)
registry.register(PersonalLookup)
@login_required
class TiposCursosLookup(ModelLookup):
model = TiposCursos
search_fields = ('denominacion__icontains',)
def get_query(self, request, term):
results = super(TiposCursosLookup, self).get_query(request, term)
results = results.filter(empresa__in=Empresas.objects.filter(usuario__username=request.user))
return results
def get_item_value(self, item):
return item.denominacion
def get_item_label(self, item):
return "%s" % (item.denominacion)
registry.register(TiposCursosLookup)
@login_required
class TiposLegislacionLookup(ModelLookup):
model = TiposLegislacion
search_fields = ('denominacion__icontains',)
def get_query(self, request, term):
results = super(TiposLegislacionLookup, self).get_query(request, term)
results = results.filter(empresa__in=Empresas.objects.filter(usuario__username=request.user))
return results
def get_item_value(self, item):
return item.denominacion
def get_item_label(self, item):
return "%s" % (item.denominacion)
registry.register(TiposLegislacionLookup)
@login_required
class ConsumiblesLookup(ModelLookup):
model = Consumibles
search_fields = ('denominacion__icontains',)
def get_query(self, request, term):
results = super(ConsumiblesLookup, self).get_query(request, term)
results = results.filter(empresa__in=Empresas.objects.filter(usuario__username=request.user))
return results
def get_item_value(self, item):
return item.denominacion
def get_item_label(self, item):
return "%s" % (item.denominacion)
registry.register(ConsumiblesLookup)
@login_required
class ParametrosAnalisisLookup(ModelLookup):
model = ParametrosAnalisis
search_fields = ('denominacion__icontains',)
def get_query(self, request, term):
results = super(ParametrosAnalisisLookup, self).get_query(request, term)
results = results.filter(empresa__in=Empresas.objects.filter(usuario__username=request.user))
return results
def get_item_value(self, item):
return item.denominacion
def get_item_label(self, item):
return "%s" % (item.denominacion)
registry.register(ParametrosAnalisisLookup)
@login_required
class EtapasLookup(ModelLookup):
model = Etapas
search_fields = ('denominacion__icontains',)
def get_query(self, request, term):
results = super(EtapasLookup, self).get_query(request, term)
results = results.filter(empresa__in=Empresas.objects.filter(usuario__username=request.user))
return results
def get_item_value(self, item):
return item.denominacion
def get_item_label(self, item):
return "%s" % (item.denominacion)
registry.register(EtapasLookup)
@login_required
class PeligrosLookup(ModelLookup):
model = Peligros
search_fields = ('denominacion__icontains',)
def get_query(self, request, term):
results = super(PeligrosLookup, self).get_query(request, term)
results = results.filter(empresa__in=Empresas.objects.filter(usuario__username=request.user))
return results
def get_item_value(self, item):
return item.denominacion
def get_item_label(self, item):
return "%s" % (item.denominacion)
registry.register(PeligrosLookup)
@login_required
class UnidadesLookup(ModelLookup):
model = Unidades
search_fields = ('denominacion__icontains',)
def get_query(self, request, term):
results = super(UnidadesLookup, self).get_query(request, term)
results = results.filter(empresa__in=Empresas.objects.filter(usuario__username=request.user))
return results
def get_item_value(self, item):
return item.denominacion
def get_item_label(self, item):
return "%s" % (item.denominacion)
registry.register(UnidadesLookup)
@login_required
class FirmasLookup(ModelLookup):
model = Firmas
search_fields = ('personal__apellidos__icontains',)
def get_query(self, request, term):
results = super(FirmasLookup, self).get_query(request, term)
results = results.filter(empresa__in=Empresas.objects.filter(usuario__username=request.user))
return results
def get_item_value(self, item):
return item.personal.apellidos
def get_item_label(self, item):
return "%s %s" % (item.personal__apellidos, item.personal__nombres)
registry.register(FirmasLookup)
@login_required
class HorarioTurnoLookup(ModelLookup):
model = HorarioTurnos
search_fields = ('ihora__icontains','fhora__icontains')
def get_query(self, request, term):
results = super(HorarioTurnoLookup, self).get_query(request, term)
idtpturno = request.GET.get('idtpturno', '')
if idtpturno:
results = results.filter(tpturnos_id=idtpturno)
return results
def get_item_value(self, item):
return "%s - %s" % (item.ihora, item.fhora)
def get_item_label(self, item):
return "%s - %s" % (item.ihora, item.fhora)
registry.register(HorarioTurnoLookup)
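# --- Illustrative usage (editorial addition): a registered lookup is normally
# wired into a Django form via django-selectable's auto-complete fields. The
# form and field names below are assumptions, not part of this project.
#
#   from django import forms
#   from selectable.forms import AutoCompleteSelectField
#
#   class PersonalSelectForm(forms.Form):
#       persona = AutoCompleteSelectField(lookup_class=PersonalLookup,
#                                          required=False)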
| [((1100, 1140), 'selectable.registry.registry.register', 'registry.register', (['TPActuacionPrevLookup'], {}), '(TPActuacionPrevLookup)\n', (1117, 1140), False, 'from selectable.registry import registry\n'), ((1686, 1726), 'selectable.registry.registry.register', 'registry.register', (['TPActuacionCorrLookup'], {}), '(TPActuacionCorrLookup)\n', (1703, 1726), False, 'from selectable.registry import registry\n'), ((2258, 2296), 'selectable.registry.registry.register', 'registry.register', (['TPLimitesCritLookup'], {}), '(TPLimitesCritLookup)\n', (2275, 2296), False, 'from selectable.registry import registry\n'), ((2816, 2852), 'selectable.registry.registry.register', 'registry.register', (['ActividadesLookup'], {}), '(ActividadesLookup)\n', (2833, 2852), False, 'from selectable.registry import registry\n'), ((3402, 3448), 'selectable.registry.registry.register', 'registry.register', (['TipoMedidasVigilanciaLookup'], {}), '(TipoMedidasVigilanciaLookup)\n', (3419, 3448), False, 'from selectable.registry import registry\n'), ((3985, 4027), 'selectable.registry.registry.register', 'registry.register', (['TiposTemperaturasLookup'], {}), '(TiposTemperaturasLookup)\n', (4002, 4027), False, 'from selectable.registry import registry\n'), ((4561, 4602), 'selectable.registry.registry.register', 'registry.register', (['TiposFrecuenciasLookup'], {}), '(TiposFrecuenciasLookup)\n', (4578, 4602), False, 'from selectable.registry import registry\n'), ((5103, 5133), 'selectable.registry.registry.register', 'registry.register', (['ZonasLookup'], {}), '(ZonasLookup)\n', (5120, 5133), False, 'from selectable.registry import registry\n'), ((5643, 5676), 'selectable.registry.registry.register', 'registry.register', (['TercerosLookup'], {}), '(TercerosLookup)\n', (5660, 5676), False, 'from selectable.registry import registry\n'), ((6247, 6285), 'selectable.registry.registry.register', 'registry.register', (['TercerosTiposLookup'], {}), '(TercerosTiposLookup)\n', (6264, 6285), False, 'from selectable.registry import registry\n'), ((6817, 6857), 'selectable.registry.registry.register', 'registry.register', (['CatalogoEquiposLookup'], {}), '(CatalogoEquiposLookup)\n', (6834, 6857), False, 'from selectable.registry import registry\n'), ((7375, 7408), 'selectable.registry.registry.register', 'registry.register', (['PersonalLookup'], {}), '(PersonalLookup)\n', (7392, 7408), False, 'from selectable.registry import registry\n'), ((7927, 7963), 'selectable.registry.registry.register', 'registry.register', (['TiposCursosLookup'], {}), '(TiposCursosLookup)\n', (7944, 7963), False, 'from selectable.registry import registry\n'), ((8498, 8539), 'selectable.registry.registry.register', 'registry.register', (['TiposLegislacionLookup'], {}), '(TiposLegislacionLookup)\n', (8515, 8539), False, 'from selectable.registry import registry\n'), ((9059, 9095), 'selectable.registry.registry.register', 'registry.register', (['ConsumiblesLookup'], {}), '(ConsumiblesLookup)\n', (9076, 9095), False, 'from selectable.registry import registry\n'), ((9636, 9679), 'selectable.registry.registry.register', 'registry.register', (['ParametrosAnalisisLookup'], {}), '(ParametrosAnalisisLookup)\n', (9653, 9679), False, 'from selectable.registry import registry\n'), ((10183, 10214), 'selectable.registry.registry.register', 'registry.register', (['EtapasLookup'], {}), '(EtapasLookup)\n', (10200, 10214), False, 'from selectable.registry import registry\n'), ((10724, 10757), 'selectable.registry.registry.register', 'registry.register', (['PeligrosLookup'], 
{}), '(PeligrosLookup)\n', (10741, 10757), False, 'from selectable.registry import registry\n'), ((11268, 11301), 'selectable.registry.registry.register', 'registry.register', (['UnidadesLookup'], {}), '(UnidadesLookup)\n', (11285, 11301), False, 'from selectable.registry import registry\n'), ((11853, 11884), 'selectable.registry.registry.register', 'registry.register', (['FirmasLookup'], {}), '(FirmasLookup)\n', (11870, 11884), False, 'from selectable.registry import registry\n'), ((12482, 12519), 'selectable.registry.registry.register', 'registry.register', (['HorarioTurnoLookup'], {}), '(HorarioTurnoLookup)\n', (12499, 12519), False, 'from selectable.registry import registry\n'), ((870, 925), 'maestros_generales.models.Empresas.objects.filter', 'Empresas.objects.filter', ([], {'usuario__username': 'request.user'}), '(usuario__username=request.user)\n', (893, 925), False, 'from maestros_generales.models import Empresas\n'), ((1456, 1511), 'maestros_generales.models.Empresas.objects.filter', 'Empresas.objects.filter', ([], {'usuario__username': 'request.user'}), '(usuario__username=request.user)\n', (1479, 1511), False, 'from maestros_generales.models import Empresas\n'), ((2028, 2083), 'maestros_generales.models.Empresas.objects.filter', 'Empresas.objects.filter', ([], {'usuario__username': 'request.user'}), '(usuario__username=request.user)\n', (2051, 2083), False, 'from maestros_generales.models import Empresas\n'), ((2586, 2641), 'maestros_generales.models.Empresas.objects.filter', 'Empresas.objects.filter', ([], {'usuario__username': 'request.user'}), '(usuario__username=request.user)\n', (2609, 2641), False, 'from maestros_generales.models import Empresas\n'), ((3172, 3227), 'maestros_generales.models.Empresas.objects.filter', 'Empresas.objects.filter', ([], {'usuario__username': 'request.user'}), '(usuario__username=request.user)\n', (3195, 3227), False, 'from maestros_generales.models import Empresas\n'), ((3755, 3810), 'maestros_generales.models.Empresas.objects.filter', 'Empresas.objects.filter', ([], {'usuario__username': 'request.user'}), '(usuario__username=request.user)\n', (3778, 3810), False, 'from maestros_generales.models import Empresas\n'), ((4331, 4386), 'maestros_generales.models.Empresas.objects.filter', 'Empresas.objects.filter', ([], {'usuario__username': 'request.user'}), '(usuario__username=request.user)\n', (4354, 4386), False, 'from maestros_generales.models import Empresas\n'), ((4873, 4928), 'maestros_generales.models.Empresas.objects.filter', 'Empresas.objects.filter', ([], {'usuario__username': 'request.user'}), '(usuario__username=request.user)\n', (4896, 4928), False, 'from maestros_generales.models import Empresas\n'), ((5413, 5468), 'maestros_generales.models.Empresas.objects.filter', 'Empresas.objects.filter', ([], {'usuario__username': 'request.user'}), '(usuario__username=request.user)\n', (5436, 5468), False, 'from maestros_generales.models import Empresas\n'), ((6017, 6072), 'maestros_generales.models.Empresas.objects.filter', 'Empresas.objects.filter', ([], {'usuario__username': 'request.user'}), '(usuario__username=request.user)\n', (6040, 6072), False, 'from maestros_generales.models import Empresas\n'), ((6587, 6642), 'maestros_generales.models.Empresas.objects.filter', 'Empresas.objects.filter', ([], {'usuario__username': 'request.user'}), '(usuario__username=request.user)\n', (6610, 6642), False, 'from maestros_generales.models import Empresas\n'), ((7134, 7189), 'maestros_generales.models.Empresas.objects.filter', 'Empresas.objects.filter', 
([], {'usuario__username': 'request.user'}), '(usuario__username=request.user)\n', (7157, 7189), False, 'from maestros_generales.models import Empresas\n'), ((7697, 7752), 'maestros_generales.models.Empresas.objects.filter', 'Empresas.objects.filter', ([], {'usuario__username': 'request.user'}), '(usuario__username=request.user)\n', (7720, 7752), False, 'from maestros_generales.models import Empresas\n'), ((8268, 8323), 'maestros_generales.models.Empresas.objects.filter', 'Empresas.objects.filter', ([], {'usuario__username': 'request.user'}), '(usuario__username=request.user)\n', (8291, 8323), False, 'from maestros_generales.models import Empresas\n'), ((8829, 8884), 'maestros_generales.models.Empresas.objects.filter', 'Empresas.objects.filter', ([], {'usuario__username': 'request.user'}), '(usuario__username=request.user)\n', (8852, 8884), False, 'from maestros_generales.models import Empresas\n'), ((9406, 9461), 'maestros_generales.models.Empresas.objects.filter', 'Empresas.objects.filter', ([], {'usuario__username': 'request.user'}), '(usuario__username=request.user)\n', (9429, 9461), False, 'from maestros_generales.models import Empresas\n'), ((9953, 10008), 'maestros_generales.models.Empresas.objects.filter', 'Empresas.objects.filter', ([], {'usuario__username': 'request.user'}), '(usuario__username=request.user)\n', (9976, 10008), False, 'from maestros_generales.models import Empresas\n'), ((10494, 10549), 'maestros_generales.models.Empresas.objects.filter', 'Empresas.objects.filter', ([], {'usuario__username': 'request.user'}), '(usuario__username=request.user)\n', (10517, 10549), False, 'from maestros_generales.models import Empresas\n'), ((11038, 11093), 'maestros_generales.models.Empresas.objects.filter', 'Empresas.objects.filter', ([], {'usuario__username': 'request.user'}), '(usuario__username=request.user)\n', (11061, 11093), False, 'from maestros_generales.models import Empresas\n'), ((11583, 11638), 'maestros_generales.models.Empresas.objects.filter', 'Empresas.objects.filter', ([], {'usuario__username': 'request.user'}), '(usuario__username=request.user)\n', (11606, 11638), False, 'from maestros_generales.models import Empresas\n')] |
dew-uff/julynter | julynter/oldcmd.py | f4657aba4fa3e17af2cd241f0c3170b76df7c57c | """Define commands for Python 2.7"""
import argparse
import traceback
from . import util
from .cmd import run
from .cmd import extractpipenv
def main():
"""Main function"""
print("This version is not supported! It has limitted analysis features")
parser = argparse.ArgumentParser(description='Analyze Jupyter Notebooks')
subparsers = parser.add_subparsers()
run.create_subparsers(subparsers)
extractpipenv.create_subparsers(subparsers)
args, rest = parser.parse_known_args()
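    # Dispatch to the selected sub-command; exit 0 on success and 1 if it raises.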
try:
if not getattr(args, 'func', None):
parser.print_help()
else:
args.func(args, rest)
if not util.EXITED:
util.do_exit(0)
except: # pylint: disable=bare-except
if not util.EXITED:
traceback.print_exc()
util.do_exit(1)
| [((270, 334), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {'description': '"""Analyze Jupyter Notebooks"""'}), "(description='Analyze Jupyter Notebooks')\n", (293, 334), False, 'import argparse\n'), ((778, 799), 'traceback.print_exc', 'traceback.print_exc', ([], {}), '()\n', (797, 799), False, 'import traceback\n')] |
nurikk/gpdb | gpMgmt/bin/gppylib/test/unit/test_unit_gpcrondump.py | 04fe0202c59721826d1eda2b19d73e5572893fcb | #!/usr/bin/env python
import os
import imp
gpcrondump_path = os.path.abspath('gpcrondump')
gpcrondump = imp.load_source('gpcrondump', gpcrondump_path)
import unittest2 as unittest
from datetime import datetime
from gppylib import gplog
from gpcrondump import GpCronDump
from gppylib.operations.utils import DEFAULT_NUM_WORKERS
from mock import patch, Mock
from gppylib.operations.dump import MailDumpEvent
from gppylib.operations.backup_utils import get_backup_directory, write_lines_to_file
import mock
logger = gplog.get_unittest_logger()
class GpCronDumpTestCase(unittest.TestCase):
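    # Unit tests for GpCronDump option validation and helper methods: most tests
    # build an Options stub, tweak a few attributes, and assert that GpCronDump
    # either accepts the combination or raises with the expected message.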
class Options:
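        # Stand-in for the parsed gpcrondump command-line options; individual
        # tests override attributes to build the scenario under test.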
def __init__(self):
self.masterDataDirectory = ""
self.interactive = False
self.clear_dumps_only = False
self.post_script = None
self.dump_config = False
self.history = False
self.pre_vacuum = False
self.post_vacuum = False
self.rollback = False
self.compress = True
self.free_space_percent = None
self.clear_dumps = False
self.cleanup_date = None
self.cleanup_total = None
self.dump_schema = False
self.dump_databases = ['testdb']
self.bypass_disk_check = True
self.backup_set = None
self.dump_global = False
self.clear_catalog_dumps = False
self.batch_default = DEFAULT_NUM_WORKERS
self.include_dump_tables = None
self.exclude_dump_tables = None
self.include_dump_tables_file = None
self.exclude_dump_tables_file = None
self.backup_dir = None
self.encoding = None
self.output_options = None
self.report_dir = None
self.timestamp_key = None
self.list_backup_files = None
self.quiet = False
self.verbose = False
self.local_dump_prefix = ''
self.list_filter_tables = None
self.include_email_file = None
self.email_details = None
self.include_schema_file = None
self.exclude_schema_file = None
self.exclude_dump_schema = None
self.dump_stats = None
## Enterprise init
self.incremental = False
self.ddboost = False
self.ddboost_hosts = None
self.ddboost_user = None
self.ddboost_config_remove = False
self.ddboost_verify = False
self.ddboost_remote = None
self.ddboost_ping = None
self.ddboost_backupdir = None
self.replicate = None
self.max_streams = None
self.netbackup_service_host = None
self.netbackup_policy = None
self.netbackup_schedule = None
self.netbackup_block_size = None
self.netbackup_keyword = None
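    # Schema filter validation: -s/-S and --schema-file/--exclude-schema-file are
    # mutually exclusive with each other, with table filters, and with incremental
    # backups, and may not name catalog schemas such as information_schema.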
@patch('gpcrondump.GpCronDump._get_master_port')
@patch('gpcrondump.GpCronDump.validate_dump_schema')
@patch('gpcrondump.validate_current_timestamp')
def test_option_schema_filter_1(self, mock, mock2, mock3):
options = GpCronDumpTestCase.Options()
options.include_schema_file = '/tmp/foo'
options.incremental = True
with self.assertRaisesRegexp(Exception, '--schema-file option can not be selected with incremental backup'):
cron = GpCronDump(options, None)
@patch('gpcrondump.GpCronDump._get_master_port')
@patch('gpcrondump.GpCronDump.validate_dump_schema')
@patch('gpcrondump.validate_current_timestamp')
def test_option_schema_filter_2(self, mock, mock2, mock3):
options = GpCronDumpTestCase.Options()
options.exclude_schema_file = '/tmp/foo'
options.incremental = True
with self.assertRaisesRegexp(Exception, '--exclude-schema-file option can not be selected with incremental backup'):
cron = GpCronDump(options, None)
@patch('gpcrondump.GpCronDump._get_master_port')
@patch('gpcrondump.validate_current_timestamp')
def test_options_schema_filter_3(self, mock, mock2):
options = GpCronDumpTestCase.Options()
options.exclude_dump_schema = 'foo'
options.incremental = True
with self.assertRaisesRegexp(Exception, '-S option can not be selected with incremental backup'):
cron = GpCronDump(options, None)
@patch('gpcrondump.GpCronDump._get_master_port')
@patch('gpcrondump.validate_current_timestamp')
def test_options_schema_filter_4(self, mock, mock2):
options = GpCronDumpTestCase.Options()
options.dump_schema = 'foo'
options.incremental = True
with self.assertRaisesRegexp(Exception, '-s option can not be selected with incremental backup'):
cron = GpCronDump(options, None)
@patch('gpcrondump.GpCronDump._get_master_port')
@patch('gpcrondump.validate_current_timestamp')
def test_options_schema_filter_5(self, mock, mock2):
options = GpCronDumpTestCase.Options()
options.dump_schema = 'foo'
options.exclude_schema_file = '/tmp/foo'
with self.assertRaisesRegexp(Exception, '-s can not be selected with --exclude-schema-file option'):
cron = GpCronDump(options, None)
@patch('gpcrondump.GpCronDump._get_master_port')
@patch('gpcrondump.validate_current_timestamp')
def test_options_schema_filter_6(self, mock, mock2):
options = GpCronDumpTestCase.Options()
options.dump_schema = 'foo'
options.include_schema_file = '/tmp/foo'
with self.assertRaisesRegexp(Exception, '-s can not be selected with --schema-file option'):
cron = GpCronDump(options, None)
@patch('gpcrondump.GpCronDump._get_master_port')
@patch('gpcrondump.validate_current_timestamp')
def test_options_schema_filter_7(self, mock, mock2):
options = GpCronDumpTestCase.Options()
options.dump_schema = 'foo'
options.exclude_dump_schema = 'foo'
with self.assertRaisesRegexp(Exception, '-s can not be selected with -S option'):
cron = GpCronDump(options, None)
@patch('gpcrondump.GpCronDump._get_master_port')
@patch('gpcrondump.validate_current_timestamp')
def test_options_schema_filter_8(self, mock, mock2):
options = GpCronDumpTestCase.Options()
options.exclude_dump_schema = 'foo'
options.exclude_schema_file = '/tmp/foo'
with self.assertRaisesRegexp(Exception, '-S can not be selected with --exclude-schema-file option'):
cron = GpCronDump(options, None)
@patch('gpcrondump.GpCronDump._get_master_port')
@patch('gpcrondump.validate_current_timestamp')
def test_options_schema_filter_9(self, mock, mock2):
options = GpCronDumpTestCase.Options()
options.exclude_dump_schema = 'foo'
options.include_schema_file = '/tmp/foo'
with self.assertRaisesRegexp(Exception, '-S can not be selected with --schema-file option'):
cron = GpCronDump(options, None)
@patch('gpcrondump.GpCronDump._get_master_port')
@patch('gpcrondump.validate_current_timestamp')
def test_options_schema_filter_10(self, mock, mock2):
options = GpCronDumpTestCase.Options()
options.exclude_schema_file = 'foo'
options.include_schema_file = '/tmp/foo'
with self.assertRaisesRegexp(Exception, '--exclude-schema-file can not be selected with --schema-file option'):
cron = GpCronDump(options, None)
@patch('gpcrondump.GpCronDump._get_master_port')
@patch('gpcrondump.validate_current_timestamp')
def test_options_schema_filter_11(self, mock, mock2):
options = GpCronDumpTestCase.Options()
options.exclude_schema_file = 'foo'
options.include_dump_tables_file = '/tmp/foo'
with self.assertRaisesRegexp(Exception, '--table-file and --exclude-table-file can not be selected with --exclude-schema-file option'):
cron = GpCronDump(options, None)
@patch('gpcrondump.GpCronDump._get_master_port')
@patch('gpcrondump.validate_current_timestamp')
def test_options_schema_filter_12(self, mock, mock2):
options = GpCronDumpTestCase.Options()
options.exclude_schema_file = 'foo'
options.exclude_dump_tables_file = '/tmp/foo'
with self.assertRaisesRegexp(Exception, '--table-file and --exclude-table-file can not be selected with --exclude-schema-file option'):
cron = GpCronDump(options, None)
@patch('gpcrondump.GpCronDump._get_master_port')
@patch('gpcrondump.validate_current_timestamp')
def test_options_schema_filter_13(self, mock, mock2):
options = GpCronDumpTestCase.Options()
options.include_schema_file = 'foo'
options.exclude_dump_tables_file = '/tmp/foo'
with self.assertRaisesRegexp(Exception, '--table-file and --exclude-table-file can not be selected with --schema-file option'):
cron = GpCronDump(options, None)
@patch('gpcrondump.GpCronDump._get_master_port')
@patch('gpcrondump.validate_current_timestamp')
def test_options_schema_filter_14(self, mock, mock2):
options = GpCronDumpTestCase.Options()
options.include_schema_file = 'foo'
options.include_dump_tables_file = '/tmp/foo'
with self.assertRaisesRegexp(Exception, '--table-file and --exclude-table-file can not be selected with --schema-file option'):
cron = GpCronDump(options, None)
@patch('gpcrondump.GpCronDump._get_master_port')
@patch('gpcrondump.validate_current_timestamp')
def test_options_schema_filter_15(self, mock, mock2):
options = GpCronDumpTestCase.Options()
options.dump_schema = 'foo'
options.include_dump_tables_file = '/tmp/foo'
with self.assertRaisesRegexp(Exception, '--table-file and --exclude-table-file can not be selected with -s option'):
cron = GpCronDump(options, None)
@patch('gpcrondump.GpCronDump._get_master_port')
@patch('gpcrondump.validate_current_timestamp')
def test_options_schema_filter_16(self, mock, mock2):
options = GpCronDumpTestCase.Options()
options.dump_schema = 'foo'
options.exclude_dump_tables_file = '/tmp/foo'
with self.assertRaisesRegexp(Exception, '--table-file and --exclude-table-file can not be selected with -s option'):
cron = GpCronDump(options, None)
@patch('gpcrondump.GpCronDump._get_master_port')
@patch('gpcrondump.validate_current_timestamp')
def test_options_schema_filter_17(self, mock, mock2):
options = GpCronDumpTestCase.Options()
options.exclude_dump_schema = 'foo'
options.include_dump_tables_file = '/tmp/foo'
with self.assertRaisesRegexp(Exception, '--table-file and --exclude-table-file can not be selected with -S option'):
cron = GpCronDump(options, None)
@patch('gpcrondump.GpCronDump._get_master_port')
@patch('gpcrondump.validate_current_timestamp')
def test_options_schema_filter_18(self, mock, mock2):
options = GpCronDumpTestCase.Options()
options.exclude_dump_schema = 'foo'
options.exclude_dump_tables_file = '/tmp/foo'
with self.assertRaisesRegexp(Exception, '--table-file and --exclude-table-file can not be selected with -S option'):
cron = GpCronDump(options, None)
@patch('gpcrondump.GpCronDump._get_master_port')
@patch('gpcrondump.validate_current_timestamp')
def test_options_schema_filter_19(self, mock, mock2):
options = GpCronDumpTestCase.Options()
options.exclude_schema_file = 'foo'
options.exclude_dump_tables = '/tmp/foo'
with self.assertRaisesRegexp(Exception, '-t and -T can not be selected with --exclude-schema-file option'):
cron = GpCronDump(options, None)
@patch('gpcrondump.GpCronDump._get_master_port')
@patch('gpcrondump.validate_current_timestamp')
def test_options_schema_filter_20(self, mock, mock2):
options = GpCronDumpTestCase.Options()
options.exclude_schema_file = 'foo'
options.include_dump_tables = '/tmp/foo'
with self.assertRaisesRegexp(Exception, '-t and -T can not be selected with --exclude-schema-file option'):
cron = GpCronDump(options, None)
@patch('gpcrondump.GpCronDump._get_master_port')
@patch('gpcrondump.validate_current_timestamp')
def test_options_schema_filter_21(self, mock, mock2):
options = GpCronDumpTestCase.Options()
options.include_schema_file = 'foo'
options.exclude_dump_tables = '/tmp/foo'
with self.assertRaisesRegexp(Exception, '-t and -T can not be selected with --schema-file option'):
cron = GpCronDump(options, None)
@patch('gpcrondump.GpCronDump._get_master_port')
@patch('gpcrondump.validate_current_timestamp')
def test_options_schema_filter_22(self, mock, mock2):
options = GpCronDumpTestCase.Options()
options.include_schema_file = 'foo'
options.include_dump_tables = '/tmp/foo'
with self.assertRaisesRegexp(Exception, '-t and -T can not be selected with --schema-file option'):
cron = GpCronDump(options, None)
@patch('gpcrondump.GpCronDump._get_master_port')
@patch('gpcrondump.validate_current_timestamp')
def test_options_schema_filter_23(self, mock, mock2):
options = GpCronDumpTestCase.Options()
options.dump_schema = 'foo'
options.exclude_dump_tables = '/tmp/foo'
with self.assertRaisesRegexp(Exception, '-t and -T can not be selected with -s option'):
cron = GpCronDump(options, None)
@patch('gpcrondump.GpCronDump._get_master_port')
@patch('gpcrondump.validate_current_timestamp')
def test_options_schema_filter_24(self, mock, mock2):
options = GpCronDumpTestCase.Options()
options.dump_schema = 'foo'
options.include_dump_tables = '/tmp/foo'
with self.assertRaisesRegexp(Exception, '-t and -T can not be selected with -s option'):
cron = GpCronDump(options, None)
@patch('gpcrondump.GpCronDump._get_master_port')
@patch('gpcrondump.validate_current_timestamp')
def test_options_schema_filter_25(self, mock, mock2):
options = GpCronDumpTestCase.Options()
options.exclude_dump_schema = 'foo'
options.exclude_dump_tables = '/tmp/foo'
with self.assertRaisesRegexp(Exception, '-t and -T can not be selected with -S option'):
cron = GpCronDump(options, None)
@patch('gpcrondump.GpCronDump._get_master_port')
@patch('gpcrondump.validate_current_timestamp')
def test_options_schema_filter_26(self, mock, mock2):
options = GpCronDumpTestCase.Options()
options.exclude_dump_schema = 'foo'
options.include_dump_tables = '/tmp/foo'
with self.assertRaisesRegexp(Exception, '-t and -T can not be selected with -S option'):
cron = GpCronDump(options, None)
@patch('gpcrondump.GpCronDump._get_master_port')
@patch('gpcrondump.validate_current_timestamp')
def test_options_schema_filter_27(self, mock, mock2):
options = GpCronDumpTestCase.Options()
options.dump_schema = ['information_schema']
with self.assertRaisesRegexp(Exception, "can not specify catalog schema 'information_schema' using -s option"):
GpCronDump(options, None)
@patch('gpcrondump.GpCronDump._get_master_port')
@patch('gpcrondump.validate_current_timestamp')
def test_options_schema_filter_28(self, mock, mock2):
options = GpCronDumpTestCase.Options()
options.exclude_dump_schema = ['information_schema']
with self.assertRaisesRegexp(Exception, "can not specify catalog schema 'information_schema' using -S option"):
GpCronDump(options, None)
@patch('gpcrondump.GpCronDump._get_master_port')
@patch('gpcrondump.validate_current_timestamp')
@patch('gpcrondump.get_lines_from_file', return_value=['public', 'information_schema'])
def test_options_schema_filter_29(self, mock, mock2, mock3):
options = GpCronDumpTestCase.Options()
options.exclude_schema_file = '/tmp/foo'
with self.assertRaisesRegexp(Exception, "can not exclude catalog schema 'information_schema' in schema file '/tmp/foo'"):
GpCronDump(options, None)
@patch('gpcrondump.GpCronDump._get_master_port')
@patch('gpcrondump.validate_current_timestamp')
@patch('gpcrondump.get_lines_from_file', return_value=['public', 'information_schema'])
def test_options_schema_filter_30(self, mock, mock2, mock3):
options = GpCronDumpTestCase.Options()
options.include_schema_file = '/tmp/foo'
with self.assertRaisesRegexp(Exception, "can not include catalog schema 'information_schema' in schema file '/tmp/foo'"):
GpCronDump(options, None)
@patch('gpcrondump.GpCronDump._get_master_port')
@patch('gpcrondump.validate_current_timestamp')
def test_options_schema_filter_31(self, mock, mock2):
options = GpCronDumpTestCase.Options()
options.masterDataDirectory = '/tmp/foobar'
gpcd = GpCronDump(options, None)
dbname = 'foo'
timestamp = '20141016010101'
file = gpcd.get_schema_list_file(dbname)
self.assertEquals(file, None)
@patch('gpcrondump.GpCronDump._get_master_port')
@patch('gpcrondump.validate_current_timestamp')
def test_options_schema_filter_32(self, mock1, mock2):
options = GpCronDumpTestCase.Options()
options.dump_schema = ['public']
gpcd = GpCronDump(options, None)
dbname = 'foo'
timestamp = '20141016010101'
file = gpcd.get_schema_list_file(dbname)
self.assertTrue(file.startswith('/tmp/schema_list'))
@patch('gpcrondump.GpCronDump._get_master_port')
@patch('gpcrondump.validate_current_timestamp')
def test_options_schema_filter_33(self, mock1, mock2):
options = GpCronDumpTestCase.Options()
options.include_schema_file = '/tmp/foo'
write_lines_to_file('/tmp/foo', ['public'])
gpcd = GpCronDump(options, None)
dbname = 'foo'
timestamp = '20141016010101'
file = gpcd.get_schema_list_file(dbname)
self.assertTrue(file.startswith('/tmp/schema_list'))
if os.path.exists('/tmp/foo'):
os.remove('/tmp/foo')
@patch('gpcrondump.GpCronDump._get_master_port')
@patch('gpcrondump.validate_current_timestamp')
@patch('gpcrondump.get_include_schema_list_from_exclude_schema', return_value=['public'])
def test_options_schema_filter_34(self, mock1, mock2, mock3):
options = GpCronDumpTestCase.Options()
options.exclude_schema_file = '/tmp/foo'
write_lines_to_file('/tmp/foo', ['public'])
gpcd = GpCronDump(options, None)
dbname = 'foo'
timestamp = '20141016010101'
file = gpcd.get_schema_list_file(dbname)
self.assertTrue(file.startswith('/tmp/schema_list'))
if os.path.exists('/tmp/foo'):
os.remove('/tmp/foo')
@patch('gpcrondump.GpCronDump._get_master_port')
@patch('gpcrondump.validate_current_timestamp')
@patch('gpcrondump.get_include_schema_list_from_exclude_schema', return_value=['public'])
def test_options_schema_filter_35(self, mock1, mock2, mock3):
options = GpCronDumpTestCase.Options()
options.exclude_dump_schema = 'public'
gpcd = GpCronDump(options, None)
dbname = 'foo'
timestamp = '20141016010101'
file = gpcd.get_schema_list_file(dbname)
self.assertTrue(file.startswith('/tmp/schema_list'))
@patch('gpcrondump.GpCronDump._get_master_port')
@patch('gpcrondump.validate_current_timestamp')
@patch('gpcrondump.get_lines_from_file', return_value=['public'])
@patch('gpcrondump.get_user_table_list_for_schema', return_value=['public', 'table1', 'public', 'table2'])
def test_options_schema_filter_36(self, mock1, mock2, mock3, mock4):
options = GpCronDumpTestCase.Options()
gpcd = GpCronDump(options, None)
dbname = 'foo'
schema_file = '/tmp/foo'
inc = gpcd.generate_include_table_list_from_schema_file(dbname, schema_file)
self.assertTrue(inc.startswith('/tmp/include_dump_tables_file'))
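    # General option validation: table filters with incremental backups, the
    # --prefix requirement for listing filter tables, multi-database limits,
    # ddboost --replicate/--max-streams, -K timestamp keys, and NetBackup.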
@patch('gpcrondump.GpCronDump._get_master_port')
@patch('gpcrondump.validate_current_timestamp')
def test_options1(self, mock, mock2):
options = GpCronDumpTestCase.Options()
options.include_dump_tables = 'foo'
options.incremental = True
with self.assertRaisesRegexp(Exception, 'include table list can not be selected with incremental backup'):
cron = GpCronDump(options, None)
@patch('gpcrondump.GpCronDump._get_master_port')
@patch('gpcrondump.validate_current_timestamp')
def test_options2(self, mock, mock2):
options = GpCronDumpTestCase.Options()
options.exclude_dump_tables = 'foo'
options.incremental = True
with self.assertRaisesRegexp(Exception, 'exclude table list can not be selected with incremental backup'):
cron = GpCronDump(options, None)
@patch('gpcrondump.GpCronDump._get_master_port')
@patch('gpcrondump.validate_current_timestamp')
def test_options3(self, mock, mock2):
options = GpCronDumpTestCase.Options()
options.include_dump_tables_file = 'foo'
options.incremental = True
with self.assertRaisesRegexp(Exception, 'include table file can not be selected with incremental backup'):
cron = GpCronDump(options, None)
@patch('gpcrondump.GpCronDump._get_master_port')
@patch('gpcrondump.validate_current_timestamp')
def test_options4(self, mock, mock2):
options = GpCronDumpTestCase.Options()
options.exclude_dump_tables_file = 'foo'
options.incremental = True
with self.assertRaisesRegexp(Exception, 'exclude table file can not be selected with incremental backup'):
cron = GpCronDump(options, None)
@patch('gpcrondump.GpCronDump._get_master_port')
@patch('gpcrondump.validate_current_timestamp')
def test_options10(self, mock, mock2):
options = GpCronDumpTestCase.Options()
options.local_dump_prefix = 'foo'
options.incremental = False
options.list_filter_tables = True
try:
with self.assertRaisesRegexp(Exception, 'list filter tables option requires --prefix and --incremental'):
cron = GpCronDump(options, None)
finally:
options.list_filter_tables = False
@patch('gpcrondump.GpCronDump._get_master_port')
@patch('gpcrondump.validate_current_timestamp')
@patch('gpcrondump.get_latest_full_dump_timestamp', return_value='20121225090000')
def test_options11(self, mock, mock2, mock3):
options = GpCronDumpTestCase.Options()
options.incremental = True
cron = GpCronDump(options, None)
self.assertEquals(cron.full_dump_timestamp, '20121225090000')
@patch('gpcrondump.GpCronDump._get_master_port')
@patch('gpcrondump.validate_current_timestamp')
def test_options12(self, mock, mock2):
options = GpCronDumpTestCase.Options()
options.incremental = True
options.dump_databases = 'bkdb,fulldb'
with self.assertRaisesRegexp(Exception, 'multi-database backup is not supported with incremental backup'):
cron = GpCronDump(options, None)
@patch('gpcrondump.get_latest_full_dump_timestamp', return_value='20120330090000')
@patch('gpcrondump.validate_current_timestamp')
@patch('gpcrondump.GpCronDump._get_master_port')
def test_options13(self, mock, mock2, mock3):
options = GpCronDumpTestCase.Options()
options.incremental = True
options.dump_databases = ['bkdb']
#If this is successful then it should not raise an exception
GpCronDump(options, None)
@patch('gpcrondump.GpCronDump._get_master_port')
@patch('gpcrondump.validate_current_timestamp')
def test_options14(self, mock, mock2):
options = GpCronDumpTestCase.Options()
options.dump_databases = 'bkdb'
options.incremental = False
#If this is successful then it should not raise an exception
GpCronDump(options, None)
@patch('gpcrondump.GpCronDump._get_master_port')
@patch('gpcrondump.validate_current_timestamp')
def test_options15(self, mock, mock2):
options = GpCronDumpTestCase.Options()
options.dump_databases = 'bkdb,fulldb'
options.incremental = False
#If this is successful then it should not raise an exception
GpCronDump(options, None)
@patch('gpcrondump.GpCronDump._get_master_port')
@patch('gpcrondump.validate_current_timestamp')
def test_options16(self, mock, mock2):
options = GpCronDumpTestCase.Options()
options.masterDataDirectory = '/tmp/foobar'
options.backup_dir = '/foo1'
gpcd = GpCronDump(options, None)
self.assertEquals(gpcd.getBackupDirectoryRoot(), '/foo1')
@patch('gpcrondump.GpCronDump._get_master_port')
@patch('gpcrondump.validate_current_timestamp')
def test_options17(self, mock, mock2):
options = GpCronDumpTestCase.Options()
options.masterDataDirectory = '/tmp/foobar'
options.backup_dir = None
gpcd = GpCronDump(options, None)
self.assertEquals(gpcd.getBackupDirectoryRoot(), '/tmp/foobar')
@patch('gpcrondump.GpCronDump._get_master_port')
@patch('gpcrondump.validate_current_timestamp')
def test_options18(self, mock, mock2):
options = GpCronDumpTestCase.Options()
options.dump_schema = 'foo'
options.incremental = True
with self.assertRaisesRegexp(Exception, '-s option can not be selected with incremental backup'):
cron = GpCronDump(options, None)
@patch('gpcrondump.GpCronDump._get_master_port')
@patch('gpcrondump.validate_current_timestamp')
def test_options19(self, mock, mock2):
options = GpCronDumpTestCase.Options()
options.clear_dumps = True
options.incremental = True
with self.assertRaisesRegexp(Exception, '-c option can not be selected with incremental backup'):
cron = GpCronDump(options, None)
@patch('gpcrondump.GpCronDump._get_master_port')
@patch('gpcrondump.validate_current_timestamp')
def test_options20(self, mock, mock2):
options = GpCronDumpTestCase.Options()
options.dump_databases = []
options.incremental = True
with self.assertRaisesRegexp(Exception, 'Must supply -x <database name> with incremental option'):
cron = GpCronDump(options, None)
@patch('gpcrondump.GpCronDump._get_master_port')
@patch('gpcrondump.validate_current_timestamp')
def test_options21(self, mock, mock2):
options = GpCronDumpTestCase.Options()
options.ddboost = True
options.replicate = False
options.max_streams = 20
with self.assertRaisesRegexp(Exception, '--max-streams must be specified along with --replicate'):
cron = GpCronDump(options, None)
@patch('gpcrondump.GpCronDump._get_master_port')
@patch('gpcrondump.validate_current_timestamp')
def test_options22(self, mock, mock2):
options = GpCronDumpTestCase.Options()
options.ddboost = True
options.replicate = True
options.max_streams = None
with self.assertRaisesRegexp(Exception, '--max-streams must be specified along with --replicate'):
cron = GpCronDump(options, None)
@patch('gpcrondump.GpCronDump._get_master_port')
@patch('gpcrondump.validate_current_timestamp')
def test_options23(self, mock, mock2):
options = GpCronDumpTestCase.Options()
options.ddboost = True
options.replicate = True
options.max_streams = 0
with self.assertRaisesRegexp(Exception, '--max-streams must be a number greater than zero'):
cron = GpCronDump(options, None)
@patch('gpcrondump.GpCronDump._get_master_port')
@patch('gpcrondump.validate_current_timestamp')
def test_options24(self, mock, mock2):
options = GpCronDumpTestCase.Options()
options.ddboost = True
options.replicate = True
options.max_streams = "abc"
with self.assertRaisesRegexp(Exception, '--max-streams must be a number greater than zero'):
cron = GpCronDump(options, None)
@patch('gpcrondump.GpCronDump._get_master_port')
@patch('gpcrondump.validate_current_timestamp')
def test_options25(self, mock, mock2):
options = GpCronDumpTestCase.Options()
options.ddboost = False
options.replicate = False
options.max_streams = 20
with self.assertRaisesRegexp(Exception, '--replicate and --max-streams cannot be used without --ddboost'):
cron = GpCronDump(options, None)
@patch('gpcrondump.GpCronDump._get_master_port')
@patch('gpcrondump.validate_current_timestamp')
def test_options26(self, mock1, mock2):
options = GpCronDumpTestCase.Options()
options.list_backup_files = True
options.timestamp_key = None
with self.assertRaisesRegexp(Exception, 'Must supply -K option when listing backup files'):
GpCronDump(options, None)
@patch('gpcrondump.GpCronDump._get_master_port')
@patch('gpcrondump.validate_current_timestamp')
def test_options27(self, mock, mock2):
options = GpCronDumpTestCase.Options()
options.dump_databases = 'bkdb,fulldb'
options.timestamp_key = True
with self.assertRaisesRegexp(Exception, 'multi-database backup is not supported with -K option'):
GpCronDump(options, None)
@patch('gpcrondump.GpCronDump._get_master_port')
@patch('gpcrondump.validate_current_timestamp')
def test_options28(self, mock, mock2):
options = GpCronDumpTestCase.Options()
options.dump_databases = ['bkdb']
options.timestamp_key = True
options.ddboost = True
options.list_backup_files = True
with self.assertRaisesRegexp(Exception, 'list backup files not supported with ddboost option'):
GpCronDump(options, None)
@patch('gpcrondump.GpCronDump._get_master_port')
@patch('gpcrondump.validate_current_timestamp')
def test_options29(self, mock, mock2):
options = GpCronDumpTestCase.Options()
options.dump_databases = ['bkdb']
options.timestamp_key = True
options.ddboost = True
options.netbackup_service_host = "mdw"
options.netbackup_policy = "test_policy"
options.netbackup_schedule = "test_schedule"
with self.assertRaisesRegexp(Exception, '--ddboost is not supported with NetBackup'):
GpCronDump(options, None)
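    # get_include_exclude_for_dump_database should return (None, None) with no
    # filters, the dirty-table file for incremental dumps, and temporary
    # include/exclude filter files when table filters are supplied.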
@patch('gpcrondump.GpCronDump._get_master_port')
@patch('gpcrondump.validate_current_timestamp')
def test_get_include_exclude_for_dump_database00(self, mock1, mock2):
options = GpCronDumpTestCase.Options()
options.masterDataDirectory = '/tmp/foobar'
gpcd = GpCronDump(options, None)
dirtyfile = '/tmp/dirty'
dbname = 'foo'
(inc, exc) = gpcd.get_include_exclude_for_dump_database(dirtyfile, dbname)
self.assertEquals(inc, None)
self.assertEquals(exc, None)
@patch('gpcrondump.GpCronDump._get_master_port')
@patch('gpcrondump.validate_current_timestamp')
@patch('gpcrondump.expand_partitions_and_populate_filter_file', return_value='/tmp/include_dump_tables_file')
@patch('gpcrondump.get_lines_from_file', return_value=['public.t1', 'public.t2'])
def test_get_include_exclude_for_dump_database01(self, mock1, mock2, mock3, mock4):
options = GpCronDumpTestCase.Options()
options.masterDataDirectory = '/tmp/foobar'
options.include_dump_tables_file = '/mydir/incfile'
gpcd = GpCronDump(options, None)
dirtyfile = '/tmp/dirty'
dbname = 'foo'
(inc, exc) = gpcd.get_include_exclude_for_dump_database(dirtyfile, dbname)
self.assertTrue(inc.startswith('/tmp/include_dump_tables_file'))
@patch('gpcrondump.GpCronDump._get_master_port')
@patch('gpcrondump.validate_current_timestamp')
@patch('gpcrondump.expand_partitions_and_populate_filter_file', return_value='/tmp/include_dump_tables_file')
@patch('gpcrondump.get_lines_from_file')
def test_get_include_exclude_for_dump_database02(self, mock1, mock2, mock3, mock4):
options = GpCronDumpTestCase.Options()
options.masterDataDirectory = '/tmp/foobar'
options.include_dump_tables = ['public.t1', 'public.t2', 'public.t3']
gpcd = GpCronDump(options, None)
dirtyfile = '/tmp/dirty'
dbname = 'foo'
(inc, exc) = gpcd.get_include_exclude_for_dump_database(dirtyfile, dbname)
self.assertTrue(inc.startswith('/tmp/include_dump_tables_file'))
@patch('gpcrondump.GpCronDump._get_master_port')
@patch('gpcrondump.validate_current_timestamp')
@patch('gpcrondump.get_latest_full_dump_timestamp', return_value='20121225090000')
def test_get_include_exclude_for_dump_database03(self, mock1, mock2, mock3):
options = GpCronDumpTestCase.Options()
options.masterDataDirectory = '/tmp/foobar'
options.incremental = True
gpcd = GpCronDump(options, None)
dirtyfile = '/tmp/dirty'
dbname = 'foo'
(inc, exc) = gpcd.get_include_exclude_for_dump_database(dirtyfile, dbname)
self.assertEquals(inc, '/tmp/dirty')
self.assertEquals(exc, None)
@patch('gpcrondump.GpCronDump._get_master_port')
@patch('gpcrondump.validate_current_timestamp')
@patch('gpcrondump.expand_partitions_and_populate_filter_file', return_value='/tmp/exclude_dump_tables_file')
@patch('gpcrondump.get_lines_from_file', return_value=['public.t1', 'public.t2'])
def test_get_include_exclude_for_dump_database04(self, mock1, mock2, mock3, mock4):
options = GpCronDumpTestCase.Options()
options.masterDataDirectory = '/tmp/foobar'
options.exclude_dump_tables_file = '/odir/exfile'
gpcd = GpCronDump(options, None)
dirtyfile = '/tmp/dirty'
dbname = 'foo'
(inc, exc) = gpcd.get_include_exclude_for_dump_database(dirtyfile, dbname)
self.assertTrue(exc.startswith('/tmp/exclude_dump_tables_file'))
@patch('gpcrondump.GpCronDump._get_master_port')
@patch('gpcrondump.validate_current_timestamp')
@patch('gpcrondump.expand_partitions_and_populate_filter_file', return_value='/tmp/exclude_dump_tables_file')
@patch('gpcrondump.get_lines_from_file')
def test_get_include_exclude_for_dump_database06(self, mock1, mock2, mock3, mock4):
options = GpCronDumpTestCase.Options()
options.masterDataDirectory = '/tmp/foobar'
options.exclude_dump_tables = ['public.t4', 'public.t5', 'public.t6']
gpcd = GpCronDump(options, None)
dirtyfile = '/tmp/dirty'
dbname = 'foo'
(inc, exc) = gpcd.get_include_exclude_for_dump_database(dirtyfile, dbname)
self.assertTrue(exc.startswith('/tmp/exclude_dump_tables_file'))
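    # _verify_tablenames accepts well-formed partition entries and raises when a
    # schema or table name contains invalid characters.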
@patch('gpcrondump.GpCronDump._get_master_port')
@patch('gpcrondump.validate_current_timestamp')
@patch('gpcrondump.GpCronDump._get_table_names_from_partition_list', side_effect = [['public.aot1', 'public.aot2'], ['public.cot1', 'public.cot2']])
def test_verify_tablenames_00(self, mock1, mock2, mock3):
options = GpCronDumpTestCase.Options()
cron = GpCronDump(options, None)
ao_partition_list = ['public, aot1, 2190', 'public, aot2, 3190']
co_partition_list = ['public, cot1, 2190', 'public, cot2, 3190']
heap_partition_list = ['public.heapt1', 'public.heapt2']
cron._verify_tablenames(ao_partition_list, co_partition_list, heap_partition_list) #Should not raise an exception
@patch('gpcrondump.GpCronDump._get_master_port')
@patch('gpcrondump.validate_current_timestamp')
@patch('gpcrondump.GpCronDump._get_table_names_from_partition_list', side_effect = [['public.aot1:asd', 'public.aot2'], ['public.cot1', 'public.cot2:asd']])
def test_verify_tablenames_00_bad(self, mock1, mock2, mock3):
options = GpCronDumpTestCase.Options()
cron = GpCronDump(options, None)
ao_partition_list = ['public, aot1!asd, 2190', 'public, aot2, 3190']
co_partition_list = ['public, cot1, 2190', 'public, cot2\nasd, 3190']
heap_partition_list = ['public, heapt1, 2190', 'public, heapt2!asdasd , 3190']
with self.assertRaisesRegexp(Exception, ''):
cron._verify_tablenames(ao_partition_list, co_partition_list, heap_partition_list)
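    # --inserts, --column-inserts, and --oids are rejected for incremental
    # backups; partition entries are parsed into schema.table names; -t/-T and
    # the table-file options are mutually exclusive with one another.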
@patch('gpcrondump.GpCronDump._get_master_port')
@patch('gpcrondump.validate_current_timestamp')
def test_options_inserts_with_incremental(self, mock, mock2):
options = GpCronDumpTestCase.Options()
options.output_options = ['--inserts']
options.incremental = True
with self.assertRaisesRegexp(Exception, '--inserts, --column-inserts, --oids cannot be selected with incremental backup'):
cron = GpCronDump(options, None)
@patch('gpcrondump.GpCronDump._get_master_port')
@patch('gpcrondump.validate_current_timestamp')
def test_options_oids_with_incremental(self, mock, mock2):
options = GpCronDumpTestCase.Options()
options.output_options = ['--oids']
options.incremental = True
with self.assertRaisesRegexp(Exception, '--inserts, --column-inserts, --oids cannot be selected with incremental backup'):
cron = GpCronDump(options, None)
@patch('gpcrondump.GpCronDump._get_master_port')
@patch('gpcrondump.validate_current_timestamp')
def test_options_column_inserts_with_incremental(self, mock, mock2):
options = GpCronDumpTestCase.Options()
options.output_options = ['--column-inserts']
options.incremental = True
with self.assertRaisesRegexp(Exception, '--inserts, --column-inserts, --oids cannot be selected with incremental backup'):
cron = GpCronDump(options, None)
@patch('gpcrondump.GpCronDump._get_master_port')
@patch('gpcrondump.validate_current_timestamp')
def test_get_table_names_from_partition_list_00(self, mock1, mock2):
options = GpCronDumpTestCase.Options()
cron = GpCronDump(options, None)
partition_list = ['public, aot1, 2190', 'public, aot2:aot, 3190']
expected_output = ['public.aot1', 'public.aot2:aot']
result = cron._get_table_names_from_partition_list(partition_list)
self.assertEqual(result, expected_output)
@patch('gpcrondump.GpCronDump._get_master_port')
@patch('gpcrondump.validate_current_timestamp')
def test_get_table_names_from_partition_list_01(self, mock1, mock2):
options = GpCronDumpTestCase.Options()
cron = GpCronDump(options, None)
partition_list = ['public, aot1, 2190', 'public, aot2,aot, 3190']
with self.assertRaisesRegexp(Exception, 'Invalid partition entry "public, aot2,aot, 3190"'):
cron._get_table_names_from_partition_list(partition_list)
@patch('gpcrondump.GpCronDump._get_master_port')
@patch('gpcrondump.validate_current_timestamp')
def test_options_table_filter1(self, mock, mock2):
options = GpCronDumpTestCase.Options()
options.include_dump_tables = 'foo'
options.include_dump_tables_file = 'foo'
with self.assertRaisesRegexp(Exception, '-t can not be selected with --table-file option'):
cron = GpCronDump(options, None)
@patch('gpcrondump.GpCronDump._get_master_port')
@patch('gpcrondump.validate_current_timestamp')
def test_options_table_filter2(self, mock, mock2):
options = GpCronDumpTestCase.Options()
options.include_dump_tables = 'foo'
options.exclude_dump_tables_file = 'foo'
with self.assertRaisesRegexp(Exception, '-t can not be selected with --exclude-table-file option'):
cron = GpCronDump(options, None)
@patch('gpcrondump.GpCronDump._get_master_port')
@patch('gpcrondump.validate_current_timestamp')
def test_options_table_filter3(self, mock, mock2):
options = GpCronDumpTestCase.Options()
options.exclude_dump_tables = 'foo'
options.exclude_dump_tables_file = 'foo'
with self.assertRaisesRegexp(Exception, '-T can not be selected with --exclude-table-file option'):
cron = GpCronDump(options, None)
@patch('gpcrondump.GpCronDump._get_master_port')
@patch('gpcrondump.validate_current_timestamp')
def test_options_table_filter4(self, mock, mock2):
options = GpCronDumpTestCase.Options()
options.exclude_dump_tables = 'foo'
options.include_dump_tables_file = 'foo'
with self.assertRaisesRegexp(Exception, '-T can not be selected with --table-file option'):
cron = GpCronDump(options, None)
@patch('gpcrondump.GpCronDump._get_master_port')
@patch('gpcrondump.validate_current_timestamp')
def test_options_table_filter5(self, mock, mock2):
options = GpCronDumpTestCase.Options()
options.include_dump_tables = 'foo'
options.exclude_dump_tables = 'foo'
with self.assertRaisesRegexp(Exception, '-t can not be selected with -T option'):
cron = GpCronDump(options, None)
@patch('gpcrondump.GpCronDump._get_master_port')
@patch('gpcrondump.validate_current_timestamp')
def test_options_table_filter6(self, mock, mock2):
options = GpCronDumpTestCase.Options()
options.include_dump_tables_file = 'foo'
options.exclude_dump_tables_file = 'foo'
with self.assertRaisesRegexp(Exception, '--table-file can not be selected with --exclude-table-file option'):
cron = GpCronDump(options, None)
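    # _get_timestamp_object parses a 14-digit -K key into a datetime, rejects
    # malformed keys, and falls back to the current time when no key is given.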
@patch('gpcrondump.GpCronDump._get_master_port')
@patch('gpcrondump.validate_current_timestamp')
def test_get_timestamp_object1(self, mock1, mock2):
options = GpCronDumpTestCase.Options()
options.timestamp_key = '20130101010101'
gpcd = GpCronDump(options, None)
timestamp = gpcd._get_timestamp_object(options.timestamp_key)
self.assertEquals(timestamp, datetime(2013, 1, 1, 1, 1, 1))
@patch('gpcrondump.GpCronDump._get_master_port')
@patch('gpcrondump.validate_current_timestamp')
def test_get_timestamp_object2(self, mock1, mock2):
options = GpCronDumpTestCase.Options()
options.timestamp_key = '20130101010'
gpcd = GpCronDump(options, None)
with self.assertRaisesRegexp(Exception, 'Invalid timestamp key'):
gpcd._get_timestamp_object(options.timestamp_key)
@patch('gpcrondump.GpCronDump._get_master_port')
@patch('gpcrondump.validate_current_timestamp')
def test_get_timestamp_object3(self, mock1, mock2):
options = GpCronDumpTestCase.Options()
options.timestamp_key = None
gpcd = GpCronDump(options, None)
timestamp = gpcd._get_timestamp_object(options.timestamp_key)
self.assertTrue(isinstance(timestamp, datetime))
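    # _get_files_file_list should name the master-side dump report, state, status,
    # and increments/filter files for the given timestamp, honoring any prefix.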
@patch('gpcrondump.GpCronDump._get_master_port')
@patch('gpcrondump.validate_current_timestamp')
def test_get_files_file_list1(self, mock1, mock2):
options = GpCronDumpTestCase.Options()
options.timestamp_key = None
options.masterDataDirectory = '/foo'
gpcd = GpCronDump(options, None)
master = Mock()
master.getSegmentHostName.return_value = 'foo1'
timestamp = '20130101010101'
dump_dir = get_backup_directory(options.masterDataDirectory, options.backup_dir, gpcd.dump_dir, timestamp)
files_file_list = gpcd._get_files_file_list(master, dump_dir, timestamp)
expected_files_list = ['foo1:%s/db_dumps/20130101/gp_cdatabase_1_1_20130101010101' % options.masterDataDirectory,
'foo1:%s/db_dumps/20130101/gp_dump_20130101010101_ao_state_file' % options.masterDataDirectory,
'foo1:%s/db_dumps/20130101/gp_dump_20130101010101_co_state_file' % options.masterDataDirectory,
'foo1:%s/db_dumps/20130101/gp_dump_20130101010101_last_operation' % options.masterDataDirectory,
'foo1:%s/db_dumps/20130101/gp_dump_20130101010101.rpt' % options.masterDataDirectory,
'foo1:%s/db_dumps/20130101/gp_dump_status_1_1_20130101010101' % options.masterDataDirectory]
self.assertEqual(files_file_list, expected_files_list)
@patch('gpcrondump.GpCronDump._get_master_port')
@patch('gpcrondump.validate_current_timestamp')
def test_get_files_file_list2(self, mock1, mock2):
options = GpCronDumpTestCase.Options()
options.timestamp_key = None
options.masterDataDirectory = '/foo'
gpcd = GpCronDump(options, None)
master = Mock()
master.getSegmentHostName.return_value = 'foo2'
timestamp = '20130101010101'
dump_dir = get_backup_directory(options.masterDataDirectory, options.backup_dir, gpcd.dump_dir, timestamp)
files_file_list = gpcd._get_files_file_list(master, dump_dir, timestamp)
expected_files_list = ['foo2:%s/db_dumps/20130101/gp_cdatabase_1_1_20130101010101' % options.masterDataDirectory,
'foo2:%s/db_dumps/20130101/gp_dump_20130101010101_ao_state_file' % options.masterDataDirectory,
'foo2:%s/db_dumps/20130101/gp_dump_20130101010101_co_state_file' % options.masterDataDirectory,
'foo2:%s/db_dumps/20130101/gp_dump_20130101010101_last_operation' % options.masterDataDirectory,
'foo2:%s/db_dumps/20130101/gp_dump_20130101010101.rpt' % options.masterDataDirectory,
'foo2:%s/db_dumps/20130101/gp_dump_status_1_1_20130101010101' % options.masterDataDirectory]
self.assertEqual(files_file_list, expected_files_list)
@patch('gpcrondump.GpCronDump._get_master_port')
@patch('gpcrondump.validate_current_timestamp')
@patch('gpcrondump.get_latest_full_dump_timestamp', return_value='20130101000000')
def test_get_files_file_list3(self, mock1, mock2, mock3):
options = GpCronDumpTestCase.Options()
options.timestamp_key = '20130101010101'
options.incremental = True
options.masterDataDirectory = '/data/foo'
gpcd = GpCronDump(options, None)
master = Mock()
master.getSegmentHostName.return_value = 'foo1'
timestamp = '20130101010101'
dump_dir = get_backup_directory(options.masterDataDirectory, None, gpcd.dump_dir, timestamp)
files_file_list = gpcd._get_files_file_list(master, dump_dir, timestamp)
expected_files_list = ['foo1:%s/db_dumps/20130101/gp_cdatabase_1_1_20130101010101' % options.masterDataDirectory,
'foo1:%s/db_dumps/20130101/gp_dump_20130101010101_ao_state_file' % options.masterDataDirectory,
'foo1:%s/db_dumps/20130101/gp_dump_20130101010101_co_state_file' % options.masterDataDirectory,
'foo1:%s/db_dumps/20130101/gp_dump_20130101010101_last_operation' % options.masterDataDirectory,
'foo1:%s/db_dumps/20130101/gp_dump_20130101010101.rpt' % options.masterDataDirectory,
'foo1:%s/db_dumps/20130101/gp_dump_status_1_1_20130101010101' % options.masterDataDirectory,
'foo1:%s/db_dumps/20130101/gp_dump_20130101000000_increments' % options.masterDataDirectory]
self.assertEqual(sorted(files_file_list), sorted(expected_files_list))
@patch('gpcrondump.validate_current_timestamp')
@patch('gpcrondump.GpCronDump._get_master_port')
@patch('gppylib.operations.backup_utils.get_latest_full_dump_timestamp', return_value='20130101000000')
def test_get_files_file_list_with_filter(self, mock1, mock2, mock3):
options = GpCronDumpTestCase.Options()
options.timestamp_key = '20130101010101'
options.local_dump_prefix = 'metro'
options.include_dump_tables_file = 'bar'
options.masterDataDirectory = '/data/foo'
gpcd = GpCronDump(options, None)
master = Mock()
master.getSegmentHostName.return_value = 'foo1'
timestamp = '20130101010101'
dump_dir = get_backup_directory(options.masterDataDirectory, options.backup_dir, gpcd.dump_dir, timestamp)
files_file_list = gpcd._get_files_file_list(master, dump_dir, timestamp)
expected_files_list = ['foo1:%s/db_dumps/20130101/metro_gp_cdatabase_1_1_20130101010101' % options.masterDataDirectory,
'foo1:%s/db_dumps/20130101/metro_gp_dump_20130101010101_ao_state_file' % options.masterDataDirectory,
'foo1:%s/db_dumps/20130101/metro_gp_dump_20130101010101_co_state_file' % options.masterDataDirectory,
'foo1:%s/db_dumps/20130101/metro_gp_dump_20130101010101_last_operation' % options.masterDataDirectory,
'foo1:%s/db_dumps/20130101/metro_gp_dump_20130101010101.rpt' % options.masterDataDirectory,
'foo1:%s/db_dumps/20130101/metro_gp_dump_status_1_1_20130101010101' % options.masterDataDirectory,
'foo1:%s/db_dumps/20130101/metro_gp_dump_20130101010101_filter' % options.masterDataDirectory]
self.assertEqual(sorted(files_file_list), sorted(expected_files_list))
@patch('gpcrondump.validate_current_timestamp')
@patch('gpcrondump.get_latest_full_dump_timestamp', return_value='20130101000000')
@patch('gpcrondump.GpCronDump._get_master_port')
def test_get_files_file_list_with_prefix(self, mock1, mock2, mock3):
options = GpCronDumpTestCase.Options()
options.timestamp_key = '20130101010101'
options.incremental = True
options.local_dump_prefix = 'metro'
options.masterDataDirectory = '/data/foo'
gpcd = GpCronDump(options, None)
master = Mock()
master.getSegmentHostName.return_value = 'foo1'
timestamp = '20130101010101'
dump_dir = get_backup_directory(options.masterDataDirectory, None, gpcd.dump_dir, timestamp)
files_file_list = gpcd._get_files_file_list(master, dump_dir, timestamp)
expected_files_list = ['foo1:%s/db_dumps/20130101/metro_gp_cdatabase_1_1_20130101010101' % options.masterDataDirectory,
'foo1:%s/db_dumps/20130101/metro_gp_dump_20130101010101_ao_state_file' % options.masterDataDirectory,
'foo1:%s/db_dumps/20130101/metro_gp_dump_20130101010101_co_state_file' % options.masterDataDirectory,
'foo1:%s/db_dumps/20130101/metro_gp_dump_20130101010101_last_operation' % options.masterDataDirectory,
'foo1:%s/db_dumps/20130101/metro_gp_dump_20130101010101.rpt' % options.masterDataDirectory,
'foo1:%s/db_dumps/20130101/metro_gp_dump_status_1_1_20130101010101' % options.masterDataDirectory,
'foo1:%s/db_dumps/20130101/metro_gp_dump_20130101000000_increments' % options.masterDataDirectory]
self.assertEqual(sorted(files_file_list), sorted(expected_files_list))
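    # _get_pipes_file_list should cover the master and segment dump pipes, plus
    # the global dump and config tar files when dump_global/dump_config are set.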
@patch('gpcrondump.GpCronDump._get_master_port')
@patch('gpcrondump.validate_current_timestamp')
def test_get_pipes_file_list1(self, mock1, mock2):
options = GpCronDumpTestCase.Options()
options.timestamp_key = None
options.masterDataDirectory = '/foo'
gpcd = GpCronDump(options, None)
master = Mock()
master.getSegmentHostName.return_value = 'foo2'
mock_segs = []
timestamp = '20130101010101'
dump_dir = get_backup_directory(options.masterDataDirectory, options.backup_dir, gpcd.dump_dir, timestamp)
pipes_file_list = gpcd._get_pipes_file_list(master, mock_segs, dump_dir, timestamp)
expected_files_list = ['foo2:%s/db_dumps/20130101/gp_dump_1_1_20130101010101.gz' % options.masterDataDirectory,
'foo2:%s/db_dumps/20130101/gp_dump_1_1_20130101010101_post_data.gz' % options.masterDataDirectory]
self.assertEqual(pipes_file_list, expected_files_list)
@patch('gpcrondump.GpCronDump._get_master_port')
@patch('gpcrondump.validate_current_timestamp')
def test_get_pipes_file_list2(self, mock1, mock2):
options = GpCronDumpTestCase.Options()
options.timestamp_key = None
options.masterDataDirectory = '/foo'
gpcd = GpCronDump(options, None)
master = Mock()
master.getSegmentHostName.return_value = 'foo1'
mock_segs = [Mock(), Mock()]
for id, seg in enumerate(mock_segs):
seg.getSegmentDataDirectory.return_value = '/bar'
seg.getSegmentHostName.return_value = 'foo1'
seg.getSegmentDbId.return_value = id + 1
timestamp = '20130101010101'
dump_dir = get_backup_directory(options.masterDataDirectory, options.backup_dir, gpcd.dump_dir, timestamp)
pipes_file_list = gpcd._get_pipes_file_list(master, mock_segs, dump_dir, timestamp)
expected_files_list = ['foo1:%s/db_dumps/20130101/gp_dump_1_1_20130101010101.gz' % options.masterDataDirectory,
'foo1:%s/db_dumps/20130101/gp_dump_1_1_20130101010101_post_data.gz' % options.masterDataDirectory,
'foo1:/bar/db_dumps/20130101/gp_dump_0_1_20130101010101.gz',
'foo1:/bar/db_dumps/20130101/gp_dump_0_2_20130101010101.gz']
self.assertEqual(sorted(pipes_file_list), sorted(expected_files_list))
@patch('gpcrondump.GpCronDump._get_master_port')
@patch('gpcrondump.validate_current_timestamp')
def test_get_pipes_file_list3(self, mock1, mock2):
options = GpCronDumpTestCase.Options()
options.timestamp_key = None
options.dump_global = True
options.masterDataDirectory = '/foo'
gpcd = GpCronDump(options, None)
master = Mock()
master.getSegmentHostName.return_value = 'foo1'
mock_segs = [Mock(), Mock()]
for id, seg in enumerate(mock_segs):
seg.getSegmentDataDirectory.return_value = '/bar'
seg.getSegmentHostName.return_value = 'foo1'
seg.getSegmentDbId.return_value = id + 1
timestamp = '20130101010101'
dump_dir = get_backup_directory(options.masterDataDirectory, options.backup_dir, gpcd.dump_dir, timestamp)
pipes_file_list = gpcd._get_pipes_file_list(master, mock_segs, dump_dir, timestamp)
expected_files_list = ['foo1:%s/db_dumps/20130101/gp_dump_1_1_20130101010101.gz' % options.masterDataDirectory,
'foo1:%s/db_dumps/20130101/gp_dump_1_1_20130101010101_post_data.gz' % options.masterDataDirectory,
'foo1:%s/db_dumps/20130101/gp_global_1_1_20130101010101' % options.masterDataDirectory,
'foo1:/bar/db_dumps/20130101/gp_dump_0_1_20130101010101.gz',
'foo1:/bar/db_dumps/20130101/gp_dump_0_2_20130101010101.gz']
self.assertEqual(sorted(pipes_file_list), sorted(expected_files_list))
@patch('gpcrondump.GpCronDump._get_master_port')
@patch('gpcrondump.validate_current_timestamp')
def test_get_pipes_file_list4(self, mock1, mock2):
options = GpCronDumpTestCase.Options()
options.timestamp_key = None
options.masterDataDirectory = '/foo'
options.dump_config = True
gpcd = GpCronDump(options, None)
master = Mock()
master.getSegmentHostName.return_value = 'foo1'
mock_segs = [Mock(), Mock()]
for id, seg in enumerate(mock_segs):
seg.getSegmentDataDirectory.return_value = '/bar'
seg.getSegmentHostName.return_value = 'foo1'
seg.getSegmentDbId.return_value = id + 1
timestamp = '20130101010101'
dump_dir = get_backup_directory(options.masterDataDirectory, options.backup_dir, gpcd.dump_dir, timestamp)
pipes_file_list = gpcd._get_pipes_file_list(master, mock_segs, dump_dir, timestamp)
expected_files_list = ['foo1:%s/db_dumps/20130101/gp_dump_1_1_20130101010101.gz' % options.masterDataDirectory,
'foo1:%s/db_dumps/20130101/gp_dump_1_1_20130101010101_post_data.gz' % options.masterDataDirectory,
'foo1:%s/db_dumps/20130101/gp_master_config_files_20130101010101.tar' % options.masterDataDirectory,
'foo1:/bar/db_dumps/20130101/gp_segment_config_files_0_1_20130101010101.tar',
'foo1:/bar/db_dumps/20130101/gp_segment_config_files_0_2_20130101010101.tar',
'foo1:/bar/db_dumps/20130101/gp_dump_0_1_20130101010101.gz',
'foo1:/bar/db_dumps/20130101/gp_dump_0_2_20130101010101.gz']
self.assertEqual(sorted(pipes_file_list), sorted(expected_files_list))
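    # The remaining tests cover dump prefix initialization, validation of the
    # include_email_file YAML (existence, extension, size, required keys), and
    # the _send_email helper.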
@patch('gpcrondump.GpCronDump._get_master_port')
@patch('gpcrondump.validate_current_timestamp')
def test_gpcrondump_init0(self, mock1, mock2):
options = GpCronDumpTestCase.Options()
options.timestamp_key = None
options.local_dump_prefix = 'foo'
options.ddboost = False
options.ddboost_verify = False
options.ddboost_config_remove = False
options.ddboost_user = False
options.ddboost_host = False
options.max_streams = None
options.list_backup_files = False
gpcd = GpCronDump(options, None)
self.assertEqual(gpcd.dump_prefix, 'foo_')
@patch('gpcrondump.os.path.isfile', return_value=True)
@patch('gpcrondump.GpCronDump._get_master_port')
@patch('gpcrondump.os.path.getsize', return_value=111)
@patch('gpcrondump.yaml.load', return_value={'EMAIL_DETAILS': [{'FROM': 'RRP_MPE2_DCA_1', 'DBNAME': 'testdb100', 'SUBJECT': "backup completed for Database 'testdb100'"}]})
def test_validate_parse_email_File00(self, mock1, mock2, mock3, mock4):
options = GpCronDumpTestCase.Options()
options.include_email_file = "/tmp/abc.yaml"
m = mock.MagicMock()
with patch('__builtin__.open', m, create=True):
cron = GpCronDump(options, None)
@patch('gpcrondump.os.path.isfile', return_value=False)
@patch('gpcrondump.GpCronDump._get_master_port')
def test_validate_parse_email_File01(self, mock1, mock2):
options = GpCronDumpTestCase.Options()
options.include_email_file = "/tmp/abc.yaml"
with self.assertRaisesRegexp(Exception, "\'%s\' file does not exist." % options.include_email_file):
cron = GpCronDump(options, None)
@patch('gpcrondump.os.path.isfile', return_value=True)
@patch('gpcrondump.GpCronDump._get_master_port')
@patch('gpcrondump.os.path.getsize', return_value=111)
def test_validate_parse_email_File02(self, mock1, mock2, mock3):
options = GpCronDumpTestCase.Options()
options.include_email_file = "/tmp/abc"
with self.assertRaisesRegexp(Exception, "'%s' is not '.yaml' file. File containing email details should be '.yaml' file." % options.include_email_file):
cron = GpCronDump(options, None)
@patch('gpcrondump.os.path.isfile', return_value=True)
@patch('gpcrondump.GpCronDump._get_master_port')
@patch('gpcrondump.os.path.getsize', return_value=0)
def test_validate_parse_email_File03(self, mock1, mock2, mock3):
options = GpCronDumpTestCase.Options()
options.include_email_file = "/tmp/abc.yaml"
with self.assertRaisesRegexp(Exception, "'%s' file is empty." % options.include_email_file):
cron = GpCronDump(options, None)
@patch('gpcrondump.os.path.isfile', return_value=True)
@patch('gpcrondump.GpCronDump._get_master_port')
@patch('gpcrondump.os.path.getsize', return_value=111)
@patch('gpcrondump.yaml.load', return_value={'EMAIL_DETAILS': [{'FROM': 'RRP_MPE2_DCA_1', 'NAME': 'testdb100', 'SUBJECT': "backup completed for Database 'testdb100'"}]})
def test_validate_parse_email_File04(self, mock1, mock2, mock3, mock4):
options = GpCronDumpTestCase.Options()
options.include_email_file = "/tmp/abc.yaml"
m = mock.MagicMock()
with self.assertRaisesRegexp(Exception, "\'%s\' file is not formatted properly." % options.include_email_file):
with patch('__builtin__.open', m, create=True):
cron = GpCronDump(options, None)
@patch('gpcrondump.os.path.isfile', return_value=True)
@patch('gpcrondump.GpCronDump._get_master_port')
@patch('gpcrondump.os.path.getsize', return_value=111)
@patch('gpcrondump.yaml.load', return_value={'EMAIL_DETAILS': [{'FROM': 'RRP_MPE2_DCA_1', 'DBNAME': None, 'SUBJECT': "backup completed for Database 'testdb100'"}]})
def test_validate_parse_email_File05(self, mock1, mock2, mock3, mock4):
options = GpCronDumpTestCase.Options()
options.include_email_file = "/tmp/abc.yaml"
m = mock.MagicMock()
with self.assertRaisesRegexp(Exception, "\'%s\' file is not formatted properly." % options.include_email_file):
with patch('__builtin__.open', m, create=True):
cron = GpCronDump(options, None)
@patch('gpcrondump.MailDumpEvent')
@patch('gpcrondump.GpCronDump._get_master_port')
def test_send_email00(self, mock1, MailDumpEvent):
options = GpCronDumpTestCase.Options()
dump_database = 'testdb1'
current_exit_status = 0
time_start = '12:07:09'
time_end = '12:08:18'
cron = GpCronDump(options, None)
cron._send_email(dump_database, current_exit_status, time_start, time_end)
#------------------------------- Mainline --------------------------------
if __name__ == '__main__':
unittest.main()
| [machine-extracted API-call metadata for the unit-test file above, one tuple per call site.
The recoverable gist: the suite repeatedly applies mock.patch to 'gpcrondump.GpCronDump._get_master_port',
'gpcrondump.validate_current_timestamp', 'gpcrondump.get_lines_from_file',
'gpcrondump.get_latest_full_dump_timestamp', 'gpcrondump.expand_partitions_and_populate_filter_file',
'gpcrondump.os.path.isfile', 'gpcrondump.os.path.getsize', 'gpcrondump.yaml.load' and 'gpcrondump.MailDumpEvent';
constructs GpCronDump(options, None) in most tests; calls gppylib.operations.backup_utils.get_backup_directory
and write_lines_to_file; and imports from os, imp, mock (patch, Mock, MagicMock), unittest2 (as unittest),
gpcrondump, gppylib.gplog and gppylib.operations.backup_utils.]
GpCronDump\n'), ((42481, 42510), 'datetime.datetime', 'datetime', (['(2013)', '(1)', '(1)', '(1)', '(1)', '(1)'], {}), '(2013, 1, 1, 1, 1, 1)\n', (42489, 42510), False, 'from datetime import datetime\n'), ((53099, 53105), 'mock.Mock', 'Mock', ([], {}), '()\n', (53103, 53105), False, 'from mock import patch, Mock\n'), ((53107, 53113), 'mock.Mock', 'Mock', ([], {}), '()\n', (53111, 53113), False, 'from mock import patch, Mock\n'), ((54556, 54562), 'mock.Mock', 'Mock', ([], {}), '()\n', (54560, 54562), False, 'from mock import patch, Mock\n'), ((54564, 54570), 'mock.Mock', 'Mock', ([], {}), '()\n', (54568, 54570), False, 'from mock import patch, Mock\n'), ((56132, 56138), 'mock.Mock', 'Mock', ([], {}), '()\n', (56136, 56138), False, 'from mock import patch, Mock\n'), ((56140, 56146), 'mock.Mock', 'Mock', ([], {}), '()\n', (56144, 56146), False, 'from mock import patch, Mock\n'), ((58681, 58722), 'mock.patch', 'patch', (['"""__builtin__.open"""', 'm'], {'create': '(True)'}), "('__builtin__.open', m, create=True)\n", (58686, 58722), False, 'from mock import patch, Mock\n'), ((58743, 58768), 'gpcrondump.GpCronDump', 'GpCronDump', (['options', 'None'], {}), '(options, None)\n', (58753, 58768), False, 'from gpcrondump import GpCronDump\n'), ((59173, 59198), 'gpcrondump.GpCronDump', 'GpCronDump', (['options', 'None'], {}), '(options, None)\n', (59183, 59198), False, 'from gpcrondump import GpCronDump\n'), ((59716, 59741), 'gpcrondump.GpCronDump', 'GpCronDump', (['options', 'None'], {}), '(options, None)\n', (59726, 59741), False, 'from gpcrondump import GpCronDump\n'), ((60201, 60226), 'gpcrondump.GpCronDump', 'GpCronDump', (['options', 'None'], {}), '(options, None)\n', (60211, 60226), False, 'from gpcrondump import GpCronDump\n'), ((22588, 22613), 'gpcrondump.GpCronDump', 'GpCronDump', (['options', 'None'], {}), '(options, None)\n', (22598, 22613), False, 'from gpcrondump import GpCronDump\n'), ((60915, 60956), 'mock.patch', 'patch', (['"""__builtin__.open"""', 'm'], {'create': '(True)'}), "('__builtin__.open', m, create=True)\n", (60920, 60956), False, 'from mock import patch, Mock\n'), ((60981, 61006), 'gpcrondump.GpCronDump', 'GpCronDump', (['options', 'None'], {}), '(options, None)\n', (60991, 61006), False, 'from gpcrondump import GpCronDump\n'), ((61690, 61731), 'mock.patch', 'patch', (['"""__builtin__.open"""', 'm'], {'create': '(True)'}), "('__builtin__.open', m, create=True)\n", (61695, 61731), False, 'from mock import patch, Mock\n'), ((61756, 61781), 'gpcrondump.GpCronDump', 'GpCronDump', (['options', 'None'], {}), '(options, None)\n', (61766, 61781), False, 'from gpcrondump import GpCronDump\n')] |
vsosrc/ambari | ambari-server/src/test/python/stacks/2.0.6/HIVE/test_hive_service_check.py | e3cc898672707bedf7597f2e16d684c8a00bba3b | #!/usr/bin/env python
'''
Licensed to the Apache Software Foundation (ASF) under one
or more contributor license agreements. See the NOTICE file
distributed with this work for additional information
regarding copyright ownership. The ASF licenses this file
to you under the Apache License, Version 2.0 (the
"License"); you may not use this file except in compliance
with the License. You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
'''
import os
from mock.mock import MagicMock, call, patch
from stacks.utils.RMFTestCase import *
import datetime, sys, socket
import resource_management.libraries.functions
@patch.object(resource_management.libraries.functions, "get_unique_id_and_date", new = MagicMock(return_value=''))
@patch("socket.socket", new = MagicMock())
class TestServiceCheck(RMFTestCase):
@patch("sys.exit")
def test_service_check_default(self, sys_exit_mock):
self.executeScript("2.0.6/services/HIVE/package/scripts/service_check.py",
classname="HiveServiceCheck",
command="service_check",
config_file="default.json"
)
self.assertResourceCalled('File', '/tmp/hcatSmoke.sh',
content = StaticFile('hcatSmoke.sh'),
mode = 0755,
)
self.assertResourceCalled('Execute', 'env JAVA_HOME=/usr/jdk64/jdk1.7.0_45 /tmp/hcatSmoke.sh hcatsmoke prepare',
logoutput = True,
path = ['/usr/sbin', '/usr/local/nin', '/bin', '/usr/bin'],
tries = 3,
user = 'ambari-qa',
environment = {'PATH' : os.environ['PATH'] + os.pathsep + "/usr/lib/hive/bin"},
try_sleep = 5,
)
self.assertResourceCalled('ExecuteHadoop', 'fs -test -e /apps/hive/warehouse/hcatsmoke',
logoutput = True,
user = 'hdfs',
conf_dir = '/etc/hadoop/conf',
keytab=UnknownConfigurationMock(),
kinit_path_local='/usr/bin/kinit',
bin_dir = '/usr/lib/hive/bin',
security_enabled=False
)
self.assertResourceCalled('Execute', ' /tmp/hcatSmoke.sh hcatsmoke cleanup',
logoutput = True,
path = ['/usr/sbin', '/usr/local/nin', '/bin', '/usr/bin'],
tries = 3,
user = 'ambari-qa',
environment = {'PATH' : os.environ['PATH'] + os.pathsep + "/usr/lib/hive/bin"},
try_sleep = 5,
)
self.assertNoMoreResources()
@patch("sys.exit")
def test_service_check_secured(self, sys_exit_mock):
self.executeScript("2.0.6/services/HIVE/package/scripts/service_check.py",
classname="HiveServiceCheck",
command="service_check",
config_file="secured.json"
)
self.assertResourceCalled('File', '/tmp/hcatSmoke.sh',
content = StaticFile('hcatSmoke.sh'),
mode = 0755,
)
self.assertResourceCalled('Execute', '/usr/bin/kinit -kt /etc/security/keytabs/smokeuser.headless.keytab ambari-qa; env JAVA_HOME=/usr/jdk64/jdk1.7.0_45 /tmp/hcatSmoke.sh hcatsmoke prepare',
logoutput = True,
path = ['/usr/sbin', '/usr/local/nin', '/bin', '/usr/bin'],
tries = 3,
user = 'ambari-qa',
environment = {'PATH' : os.environ['PATH'] + os.pathsep + "/usr/lib/hive/bin"},
try_sleep = 5,
)
self.assertResourceCalled('ExecuteHadoop', 'fs -test -e /apps/hive/warehouse/hcatsmoke',
logoutput = True,
user = 'hdfs',
conf_dir = '/etc/hadoop/conf',
keytab='/etc/security/keytabs/hdfs.headless.keytab',
kinit_path_local='/usr/bin/kinit',
security_enabled=True,
bin_dir = '/usr/lib/hive/bin',
principal='hdfs'
)
self.assertResourceCalled('Execute', '/usr/bin/kinit -kt /etc/security/keytabs/smokeuser.headless.keytab ambari-qa; /tmp/hcatSmoke.sh hcatsmoke cleanup',
logoutput = True,
path = ['/usr/sbin', '/usr/local/nin', '/bin', '/usr/bin'],
tries = 3,
user = 'ambari-qa',
environment = {'PATH' : os.environ['PATH'] + os.pathsep + "/usr/lib/hive/bin"},
try_sleep = 5,
)
self.assertNoMoreResources()
| [] |
chlorm-forks/gyp | test/linux/gyptest-ldflags-from-environment.py | a8921fcaab1a18c8cf7e4ab09ceb940e336918ec | #!/usr/bin/env python
# Copyright (c) 2017 Google Inc. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""
Verifies the use of linker flags in environment variables.
In this test, gyp and build both run in same local environment.
"""
import TestGyp
import re
import subprocess
import sys
FORMATS = ('make', 'ninja')
if sys.platform.startswith('linux'):
test = TestGyp.TestGyp(formats=FORMATS)
CHDIR = 'ldflags-from-environment'
with TestGyp.LocalEnv({'LDFLAGS': '-Wl,--dynamic-linker=/target',
'LDFLAGS_host': '-Wl,--dynamic-linker=/host',
'GYP_CROSSCOMPILE': '1'}):
test.run_gyp('test.gyp', chdir=CHDIR)
test.build('test.gyp', chdir=CHDIR)
def GetDynamicLinker(p):
p = test.built_file_path(p, chdir=CHDIR)
r = re.compile(r'\[Requesting program interpreter: ([^\]]+)\]')
proc = subprocess.Popen(['readelf', '-l', p], stdout=subprocess.PIPE)
o = proc.communicate()[0].decode('utf-8')
assert not proc.returncode
return r.search(o).group(1)
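  # Illustrative note (not part of the original test): the regex above matches the
  # program-interpreter line printed by `readelf -l`, e.g.
  #   [Requesting program interpreter: /lib64/ld-linux-x86-64.so.2]
  # Here the LDFLAGS set above request '--dynamic-linker=/target', so the check
  # below expects GetDynamicLinker('ldflags') to return '/target'.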
if GetDynamicLinker('ldflags') != '/target':
test.fail_test()
if GetDynamicLinker('ldflags_host') != '/host':
test.fail_test()
test.pass_test()
| [((401, 433), 'sys.platform.startswith', 'sys.platform.startswith', (['"""linux"""'], {}), "('linux')\n", (424, 433), False, 'import sys\n'), ((444, 476), 'TestGyp.TestGyp', 'TestGyp.TestGyp', ([], {'formats': 'FORMATS'}), '(formats=FORMATS)\n', (459, 476), False, 'import TestGyp\n'), ((522, 658), 'TestGyp.LocalEnv', 'TestGyp.LocalEnv', (["{'LDFLAGS': '-Wl,--dynamic-linker=/target', 'LDFLAGS_host':\n '-Wl,--dynamic-linker=/host', 'GYP_CROSSCOMPILE': '1'}"], {}), "({'LDFLAGS': '-Wl,--dynamic-linker=/target', 'LDFLAGS_host':\n '-Wl,--dynamic-linker=/host', 'GYP_CROSSCOMPILE': '1'})\n", (538, 658), False, 'import TestGyp\n'), ((869, 930), 're.compile', 're.compile', (['"""\\\\[Requesting program interpreter: ([^\\\\]]+)\\\\]"""'], {}), "('\\\\[Requesting program interpreter: ([^\\\\]]+)\\\\]')\n", (879, 930), False, 'import re\n'), ((940, 1002), 'subprocess.Popen', 'subprocess.Popen', (["['readelf', '-l', p]"], {'stdout': 'subprocess.PIPE'}), "(['readelf', '-l', p], stdout=subprocess.PIPE)\n", (956, 1002), False, 'import subprocess\n')] |
dhaitz/python-package-template | tests/test_advanced.py | b4c636e48ae192e5efe30fe71af37be6f8273d29 | # -*- coding: utf-8 -*-
from .context import sample
def test_thoughts():
assert(sample.hmm() is None)
| [] |
dhrubach/python-code-recipes | binary_tree/m_post_order_traversal.py | 14356c6adb1946417482eaaf6f42dde4b8351d2f | ######################################################################
# LeetCode Problem Number : 145
# Difficulty Level : Medium
# URL : https://leetcode.com/problems/binary-tree-postorder-traversal/
######################################################################
from binary_search_tree.tree_node import TreeNode
class BinaryTree:
# runtime --> 77.59%, memory --> 50.59%
def postOrderRecursive(self, root: TreeNode) -> [int]:
if not root:
return []
res = []
""" post - order traversal
visit left sub - tree
visit right sub - tree
visit node
"""
res += self.postOrderRecursive(root.left)
res += self.postOrderRecursive(root.right)
res.append(root.val)
""" return visited node + child nodes """
return res
def postOrderIterative(self, root: TreeNode) -> [int]:
if not root:
return []
ret = []
""" on visiting a node, push 2 copies to the stack.
use 1st copy to process the child nodes
use 2nd copy to insert into result
"""
st = [root] * 2
while st:
cur = st.pop()
""" if current node is the last node in the stack,
                then visit its child nodes
if current node is not the last node in the stack,
then current node is the 2nd copy. Insert node into
result list
"""
if st and st[-1] is cur:
"""insert right child node followed by left.
this ensures processing is done from left to right.
"""
if cur.right:
st += [cur.right] * 2
if cur.left:
st += [cur.left] * 2
else:
ret.append(cur.val)
return ret
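    # Minimal usage sketch (hedged; it assumes TreeNode from
    # binary_search_tree.tree_node exposes `val`, `left` and `right` and takes the
    # value in its constructor, which is an assumption here):
    #   root = TreeNode(1); root.right = TreeNode(2); root.right.left = TreeNode(3)
    #   BinaryTree().postOrderIterative(root)  # -> [3, 2, 1]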
# runtime --> 54.35%, memory --> 5.09%
def postOrderIterativeReverse(self, root: TreeNode) -> [int]:
if not root:
return []
res, stack = [], [root]
while stack:
cur = stack.pop()
if cur:
""" visit the nodes in reverse order
i.e. node -> right child node -> left child node
similar to right-first pre-order traversal
"""
res.append(cur.val)
stack.append(cur.left)
stack.append(cur.right)
""" reversed result will give post-order traversal """
return res[::-1]
| [] |
luminisward/python-dokuwiki | dokuwiki.py | 329862e6c91a79b2ad9f0b7616f7591459f2d4fd | # -*- coding: utf-8 -*-
"""This python module aims to manage
`DokuWiki <https://www.dokuwiki.org/dokuwiki>`_ wikis by using the
provided `XML-RPC API <https://www.dokuwiki.org/devel:xmlrpc>`_. It is
compatible with python2.7 and python3+.
Installation
------------
It is on `PyPi <https://pypi.python.org/pypi/dokuwiki>`_ so you can use
the ``pip`` command to install it::
pip install dokuwiki
Otherwise sources are in `github <https://github.com/fmenabe/python-dokuwiki>`_
"""
import re
import sys
import base64
import weakref
from xml.parsers.expat import ExpatError
if sys.version_info[0] == 3:
from xmlrpc.client import ServerProxy, Binary, Fault, Transport
from urllib.parse import urlencode
else:
from xmlrpclib import ServerProxy, Binary, Fault, Transport
from urllib import urlencode
from datetime import datetime, timedelta
ERR = 'XML or text declaration not at start of entity: line 2, column 0'
_URL_RE = re.compile(r'(?P<proto>https?)://(?P<host>[^/]*)(?P<uri>/.*)?')
def date(date):
"""DokuWiki returns dates of `xmlrpclib`/`xmlrpc.client` ``DateTime``
    type, and the format changes between DokuWiki versions ... This function
    converts *date* to a `datetime` object.
"""
date = date.value
return (datetime.strptime(date[:-5], '%Y-%m-%dT%H:%M:%S')
if len(date) == 24
else datetime.strptime(date, '%Y%m%dT%H:%M:%S'))
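# For illustration only (derived from the two formats handled above): a
# 24-character value such as '2016-01-01T00:00:00+0000' takes the first branch,
# while a compact value such as '20160101T00:00:00' takes the second one.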
def utc2local(date):
"""DokuWiki returns date with a +0000 timezone. This function convert *date*
to the local time.
"""
date_offset = (datetime.now() - datetime.utcnow())
    # Python < 2.7 doesn't have the 'total_seconds' method, so calculate it by hand!
date_offset = (date_offset.microseconds +
(date_offset.seconds + date_offset.days * 24 * 3600) * 1e6) / 1e6
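    # (this is exactly what timedelta.total_seconds() computes on Python >= 2.7)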
date_offset = int(round(date_offset / 60 / 60))
return date + timedelta(hours=date_offset)
class DokuWikiError(Exception):
"""Exception raised by this module when there is an error."""
pass
class CookiesTransport(Transport):
"""A Python3 xmlrpc.client.Transport subclass that retains cookies."""
def __init__(self):
Transport.__init__(self)
self._cookies = dict()
def send_headers(self, connection, headers):
if self._cookies:
cookies = map(lambda x: x[0] + '=' + x[1], self._cookies.items())
connection.putheader("Cookie", "; ".join(cookies))
Transport.send_headers(self, connection, headers)
def parse_response(self, response):
"""parse and store cookie"""
try:
for header in response.msg.get_all("Set-Cookie"):
cookie = header.split(";", 1)[0]
cookieKey, cookieValue = cookie.split("=", 1)
self._cookies[cookieKey] = cookieValue
finally:
return Transport.parse_response(self, response)
class CookiesTransport2(Transport):
"""A Python2 xmlrpclib.Transport subclass that retains cookies."""
def __init__(self):
Transport.__init__(self)
self._cookies = dict()
def send_request(self, connection, handler, request_body):
Transport.send_request(self, connection, handler, request_body)
# set cookie below handler
if self._cookies:
cookies = map(lambda x: x[0] + '=' + x[1], self._cookies.items())
connection.putheader("Cookie", "; ".join(cookies))
def parse_response(self, response):
"""parse and store cookie"""
try:
for header in response.getheader("set-cookie").split(", "):
# filter 'expire' information
if not header.startswith("D"):
continue
cookie = header.split(";", 1)[0]
cookieKey, cookieValue = cookie.split("=", 1)
self._cookies[cookieKey] = cookieValue
finally:
return Transport.parse_response(self, response)
class DokuWiki(object):
"""Initialize a connection to a DokuWiki wiki. *url*, *user* and
*password* are respectively the URL, the login and the password for
connecting to the wiki. *kwargs* are `xmlrpclib`/`xmlrpc.client`
**ServerProxy** parameters.
    The exception `DokuWikiError` is raised if the authentication
    fails, but other exceptions (like ``gaierror`` for an invalid domain,
    ``ProtocolError`` for an invalid wiki, ...) are not caught.
.. code::
try:
wiki = dokuwiki.DokuWiki('URL', 'USER', 'PASSWORD', cookieAuth=False)
except (DokuWikiError, Exception) as err:
print('unable to connect: %s' % err)
"""
def __init__(self, url, user, password, cookieAuth=False, **kwargs):
"""Initialize the object by connecting to the XMLRPC server."""
# Initialize XMLRPC client.
try:
params = _URL_RE.search(url).groupdict()
if cookieAuth == False:
url = '%s://%s:%s@%s%s/lib/exe/xmlrpc.php' % (
params['proto'], user, password, params['host'], params['uri'] or '')
else:
url = '%s://%s%s/lib/exe/xmlrpc.php' % (
params['proto'], params['host'], params['uri'] or '')
except AttributeError:
raise DokuWikiError("invalid url '%s'" % url)
if cookieAuth == False:
self.proxy = ServerProxy(url, **kwargs)
else:
if sys.version_info[0] == 3:
self.proxy = ServerProxy(url, CookiesTransport(), **kwargs)
else:
self.proxy = ServerProxy(url, CookiesTransport2(), **kwargs)
# Force login to check the connection.
if not self.login(user, password):
raise DokuWikiError('invalid login or password!')
# Set "namespaces" for pages and medias functions.
self.pages = _Pages(weakref.ref(self)())
self.medias = _Medias(weakref.ref(self)())
def send(self, command, *args, **kwargs):
"""Generic method for executing an XML-RPC *command*. *args* and
*kwargs* are the arguments and parameters needed by the command.
"""
args = list(args)
if kwargs:
args.append(kwargs)
method = self.proxy
for elt in command.split('.'):
method = getattr(method, elt)
try:
return method(*args)
except Fault as err:
if err.faultCode == 121:
return {}
elif err.faultCode == 321:
return []
raise DokuWikiError(err)
except ExpatError as err:
if str(err) != ERR:
raise DokuWikiError(err)
@property
def version(self):
"""Property that returns the DokuWiki version of the remote Wiki."""
return self.send('dokuwiki.getVersion')
@property
def time(self):
"""Property that returns the current time at the remote wiki server as
Unix timestamp.
"""
return self.send('dokuwiki.getTime')
@property
def xmlrpc_version(self):
"""Property that returns the XML RPC interface version of the remote
Wiki. This is DokuWiki implementation specific and independent of the
supported standard API version returned by ``wiki.getRPCVersionSupported``.
"""
return self.send('dokuwiki.getXMLRPCAPIVersion')
@property
def xmlrpc_supported_version(self):
"""Property that returns *2* with the supported RPC API version."""
return self.send('wiki.getRPCVersionSupported')
@property
def title(self):
"""Property that returns the title of the wiki."""
return self.send('dokuwiki.getTitle')
def login(self, user, password):
"""Log to the wiki using *user* and *password* credentials. It returns
        a boolean that indicates whether the user successfully authenticated."""
return self.send('dokuwiki.login', user, password)
def add_acl(self, scope, user, permission):
"""Add an `ACL <https://www.dokuwiki.org/acl>`_ rule that restricts
the page/namespace *scope* to *user* (use *@group* syntax for groups)
        with *permission* level. It returns a boolean that indicates whether the rule
was correctly added.
"""
return self.send('plugin.acl.addAcl', scope, user, permission)
def del_acl(self, scope, user):
"""Delete any ACL matching the given *scope* and *user* (or group if
        *@group* syntax is used). It returns a boolean that indicates whether the rule
was correctly removed.
"""
return self.send('plugin.acl.delAcl', scope, user)
class _Pages(object):
"""This object regroup methods for managing pages of a DokuWiki. This object
is accessible from the ``pages`` property of an `DokuWiki` instance::
wiki = dokuwiki.DokuWiki('URL', 'User', 'Password')
wiki.pages.list()
"""
def __init__(self, dokuwiki):
self._dokuwiki = dokuwiki
def list(self, namespace='/', **options):
"""List all pages of the given *namespace*.
Valid *options* are:
* *depth*: (int) recursion level, 0 for all
* *hash*: (bool) do an md5 sum of content
* *skipacl*: (bool) list everything regardless of ACL
"""
return self._dokuwiki.send('dokuwiki.getPagelist', namespace, options)
def changes(self, timestamp):
"""Returns a list of changes since given *timestamp*.
For example, for returning all changes since *2016-01-01*::
from datetime import datetime
wiki.pages.changes(datetime(2016, 1, 1).timestamp())
"""
return self._dokuwiki.send('wiki.getRecentChanges', timestamp)
def search(self, string):
"""Performs a fulltext search on *string* and returns the first 15
results.
"""
return self._dokuwiki.send('dokuwiki.search', string)
def versions(self, page, offset=0):
"""Returns the available versions of *page*. *offset* can be used to
list earlier versions in the history.
"""
return self._dokuwiki.send('wiki.getPageVersions', page, offset)
def info(self, page, version=None):
"""Returns informations of *page*. Informations of the last version
is returned if *version* is not set.
"""
return (self._dokuwiki.send('wiki.getPageInfoVersion', page, version)
if version is not None
else self._dokuwiki.send('wiki.getPageInfo', page))
def get(self, page, version=None):
"""Returns the content of *page*. The content of the last version is
returned if *version* is not set.
"""
return (self._dokuwiki.send('wiki.getPageVersion', page, version)
if version is not None
else self._dokuwiki.send('wiki.getPage', page))
def append(self, page, content, **options):
"""Appends *content* text to *page*.
Valid *options* are:
* *sum*: (str) change summary
* *minor*: (bool) whether this is a minor change
"""
return self._dokuwiki.send('dokuwiki.appendPage', page, content, options)
def html(self, page, version=None):
"""Returns HTML content of *page*. The HTML content of the last version
of the page is returned if *version* is not set.
"""
return (self._dokuwiki.send('wiki.getPageHTMLVersion', page, version)
if version is not None
else self._dokuwiki.send('wiki.getPageHTML', page))
def set(self, page, content, **options):
"""Set/replace the *content* of *page*.
Valid *options* are:
* *sum*: (str) change summary
* *minor*: (bool) whether this is a minor change
"""
try:
return self._dokuwiki.send('wiki.putPage', page, content, options)
except ExpatError as err:
            # Sometimes the first line of the XML response is blank, which raises
            # the 'ExpatError' exception even though the change has been applied.
            # This allows us to ignore the error.
if str(err) != ERR:
raise DokuWikiError(err)
def delete(self, page):
"""Delete *page* by setting an empty content."""
return self.set(page, '')
def lock(self, page):
"""Locks *page*."""
result = self._dokuwiki.send('dokuwiki.setLocks',
lock=[page], unlock=[])
if result['lockfail']:
raise DokuWikiError('unable to lock page')
def unlock(self, page):
"""Unlocks *page*."""
result = self._dokuwiki.send('dokuwiki.setLocks',
lock=[], unlock=[page])
if result['unlockfail']:
raise DokuWikiError('unable to unlock page')
def permission(self, page):
"""Returns the permission level of *page*."""
return self._dokuwiki.send('wiki.aclCheck', page)
def links(self, page):
"""Returns a list of all links contained in *page*."""
return self._dokuwiki.send('wiki.listLinks', page)
def backlinks(self, page):
"""Returns a list of all links referencing *page*."""
return self._dokuwiki.send('wiki.getBackLinks', page)
class _Medias(object):
"""This object regroup methods for managing medias of a DokuWiki. This
object is accessible from the ``medias`` property of an `DokuWiki`
instance::
wiki = dokuwiki.DokuWiki('URL', 'User', 'Password')
wiki.medias.list()
"""
def __init__(self, dokuwiki):
self._dokuwiki = dokuwiki
def list(self, namespace='/', **options):
"""Returns all medias of the given *namespace*.
Valid *options* are:
* *depth*: (int) recursion level, 0 for all
* *skipacl*: (bool) skip acl checking
* *pattern*: (str) check given pattern
* *hash*: (bool) add hashes to result list
"""
return self._dokuwiki.send('wiki.getAttachments', namespace, options)
def changes(self, timestamp):
"""Returns the list of medias changed since given *timestamp*.
For example, for returning all changes since *2016-01-01*::
from datetime import datetime
wiki.medias.changes(datetime(2016, 1, 1).timestamp())
"""
return self._dokuwiki.send('wiki.getRecentMediaChanges', timestamp)
def get(self, media, dirpath=None, filename=None, overwrite=False, b64decode=False):
"""Returns the binary data of *media* or save it to a file. If *dirpath*
is not set the binary data is returned, otherwise the data is saved
to a file. By default, the filename is the name of the media but it can
        be changed with the *filename* parameter. The *overwrite* parameter allows
        overwriting the file if it already exists locally.
"""
import os
data = self._dokuwiki.send('wiki.getAttachment', media)
data = base64.b64decode(data) if b64decode else data.data
if dirpath is None:
return data
if filename is None:
filename = media.replace('/', ':').split(':')[-1]
if not os.path.exists(dirpath):
os.makedirs(dirpath)
filepath = os.path.join(dirpath, filename)
if os.path.exists(filepath) and not overwrite:
raise FileExistsError("[Errno 17] File exists: '%s'" % filepath)
with open(filepath, 'wb') as fhandler:
fhandler.write(data)
def info(self, media):
"""Returns informations of *media*."""
return self._dokuwiki.send('wiki.getAttachmentInfo', media)
def add(self, media, filepath, overwrite=True):
"""Set *media* from local file *filepath*. *overwrite* parameter specify
if the media must be overwrite if it exists remotely.
"""
with open(filepath, 'rb') as fhandler:
self._dokuwiki.send('wiki.putAttachment', media,
Binary(fhandler.read()), ow=overwrite)
def set(self, media, _bytes, overwrite=True, b64encode=False):
"""Set *media* from *_bytes*. *overwrite* parameter specify if the media
must be overwrite if it exists remotely.
"""
data = base64.b64encode(_bytes) if b64encode else Binary(_bytes)
self._dokuwiki.send('wiki.putAttachment', media, data, ow=overwrite)
def delete(self, media):
"""Delete *media*."""
return self._dokuwiki.send('wiki.deleteAttachment', media)
class Dataentry(object):
"""Object that manage `data entries <https://www.dokuwiki.org/plugin:data>`_."""
@staticmethod
def get(content, keep_order=False):
"""Get dataentry from *content*. *keep_order* indicates whether to
        return an ordered dictionary."""
if keep_order:
from collections import OrderedDict
dataentry = OrderedDict()
else:
dataentry = {}
found = False
for line in content.split('\n'):
if line.strip().startswith('---- dataentry'):
found = True
continue
elif line == '----':
break
elif not found:
continue
line_split = line.split(':')
key = line_split[0].strip()
value = re.sub('#.*$', '', ':'.join(line_split[1:])).strip()
dataentry.setdefault(key, value)
if not found:
raise DokuWikiError('no dataentry found')
return dataentry
@staticmethod
def gen(name, data):
"""Generate dataentry *name* from *data*."""
return '---- dataentry %s ----\n%s\n----' % (name, '\n'.join(
'%s:%s' % (attr, value) for attr, value in data.items()))
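    # Illustrative output, derived from the format string above (the entry name
    # and fields are made-up examples): Dataentry.gen('person', {'name': 'Bob'})
    # returns:
    #   ---- dataentry person ----
    #   name:Bob
    #   ----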
@staticmethod
def ignore(content):
"""Remove dataentry from *content*."""
page_content = []
start = False
for line in content.split('\n'):
if line == '----' and not start:
start = True
continue
if start:
page_content.append(line)
return '\n'.join(page_content) if page_content else content
| [((945, 1007), 're.compile', 're.compile', (['"""(?P<proto>https?)://(?P<host>[^/]*)(?P<uri>/.*)?"""'], {}), "('(?P<proto>https?)://(?P<host>[^/]*)(?P<uri>/.*)?')\n", (955, 1007), False, 'import re\n'), ((1261, 1310), 'datetime.datetime.strptime', 'datetime.strptime', (['date[:-5]', '"""%Y-%m-%dT%H:%M:%S"""'], {}), "(date[:-5], '%Y-%m-%dT%H:%M:%S')\n", (1278, 1310), False, 'from datetime import datetime, timedelta\n'), ((1359, 1401), 'datetime.datetime.strptime', 'datetime.strptime', (['date', '"""%Y%m%dT%H:%M:%S"""'], {}), "(date, '%Y%m%dT%H:%M:%S')\n", (1376, 1401), False, 'from datetime import datetime, timedelta\n'), ((1556, 1570), 'datetime.datetime.now', 'datetime.now', ([], {}), '()\n', (1568, 1570), False, 'from datetime import datetime, timedelta\n'), ((1573, 1590), 'datetime.datetime.utcnow', 'datetime.utcnow', ([], {}), '()\n', (1588, 1590), False, 'from datetime import datetime, timedelta\n'), ((1875, 1903), 'datetime.timedelta', 'timedelta', ([], {'hours': 'date_offset'}), '(hours=date_offset)\n', (1884, 1903), False, 'from datetime import datetime, timedelta\n'), ((2156, 2180), 'xmlrpclib.Transport.__init__', 'Transport.__init__', (['self'], {}), '(self)\n', (2174, 2180), False, 'from xmlrpclib import ServerProxy, Binary, Fault, Transport\n'), ((2437, 2486), 'xmlrpclib.Transport.send_headers', 'Transport.send_headers', (['self', 'connection', 'headers'], {}), '(self, connection, headers)\n', (2459, 2486), False, 'from xmlrpclib import ServerProxy, Binary, Fault, Transport\n'), ((3023, 3047), 'xmlrpclib.Transport.__init__', 'Transport.__init__', (['self'], {}), '(self)\n', (3041, 3047), False, 'from xmlrpclib import ServerProxy, Binary, Fault, Transport\n'), ((3151, 3214), 'xmlrpclib.Transport.send_request', 'Transport.send_request', (['self', 'connection', 'handler', 'request_body'], {}), '(self, connection, handler, request_body)\n', (3173, 3214), False, 'from xmlrpclib import ServerProxy, Binary, Fault, Transport\n'), ((15357, 15388), 'os.path.join', 'os.path.join', (['dirpath', 'filename'], {}), '(dirpath, filename)\n', (15369, 15388), False, 'import os\n'), ((2842, 2882), 'xmlrpclib.Transport.parse_response', 'Transport.parse_response', (['self', 'response'], {}), '(self, response)\n', (2866, 2882), False, 'from xmlrpclib import ServerProxy, Binary, Fault, Transport\n'), ((3904, 3944), 'xmlrpclib.Transport.parse_response', 'Transport.parse_response', (['self', 'response'], {}), '(self, response)\n', (3928, 3944), False, 'from xmlrpclib import ServerProxy, Binary, Fault, Transport\n'), ((5369, 5395), 'xmlrpclib.ServerProxy', 'ServerProxy', (['url'], {}), '(url, **kwargs)\n', (5380, 5395), False, 'from xmlrpclib import ServerProxy, Binary, Fault, Transport\n'), ((15070, 15092), 'base64.b64decode', 'base64.b64decode', (['data'], {}), '(data)\n', (15086, 15092), False, 'import base64\n'), ((15280, 15303), 'os.path.exists', 'os.path.exists', (['dirpath'], {}), '(dirpath)\n', (15294, 15303), False, 'import os\n'), ((15317, 15337), 'os.makedirs', 'os.makedirs', (['dirpath'], {}), '(dirpath)\n', (15328, 15337), False, 'import os\n'), ((15400, 15424), 'os.path.exists', 'os.path.exists', (['filepath'], {}), '(filepath)\n', (15414, 15424), False, 'import os\n'), ((16357, 16381), 'base64.b64encode', 'base64.b64encode', (['_bytes'], {}), '(_bytes)\n', (16373, 16381), False, 'import base64\n'), ((16400, 16414), 'xmlrpclib.Binary', 'Binary', (['_bytes'], {}), '(_bytes)\n', (16406, 16414), False, 'from xmlrpclib import ServerProxy, Binary, Fault, Transport\n'), ((17001, 17014), 
'collections.OrderedDict', 'OrderedDict', ([], {}), '()\n', (17012, 17014), False, 'from collections import OrderedDict\n'), ((5863, 5880), 'weakref.ref', 'weakref.ref', (['self'], {}), '(self)\n', (5874, 5880), False, 'import weakref\n'), ((5914, 5931), 'weakref.ref', 'weakref.ref', (['self'], {}), '(self)\n', (5925, 5931), False, 'import weakref\n')] |
lvgig/test-aide | setup.py | 60a9420062dd778ce9dad43993dd8ab4f300ac4e | import setuptools
import re
with open("README.md", "r") as fh:
long_description = fh.read()
# get version from the _version.py file; approach adapted from
# https://stackoverflow.com/questions/458550/standard-way-to-embed-version-into-python-package
VERSION_FILE = "test_aide/_version.py"
version_file_str = open(VERSION_FILE, "rt").read()
VERSION_STR_RE = r"^__version__ = ['\"]([^'\"]*)['\"]"
mo = re.search(VERSION_STR_RE, version_file_str, re.M)
if mo:
version = mo.group(1)
else:
raise RuntimeError("Unable to find version string in %s." % (VERSION_FILE,))
def list_reqs(fname="requirements.txt"):
with open(fname) as fd:
return fd.read().splitlines()
setuptools.setup(
name="test-aide",
version=version,
author="LV GI Data Science Team",
author_email="#[email protected]",
description="Package of helper functions to be used for unit testing",
long_description=long_description,
long_description_content_type="text/markdown",
packages=setuptools.find_packages(),
install_requires=list_reqs(),
python_requires=">=3.6",
classifiers=[
"Programming Language :: Python",
"Programming Language :: Python :: 3",
"Programming Language :: Python :: 3.6",
"Programming Language :: Python :: 3.7",
"Programming Language :: Python :: 3.8",
"Operating System :: OS Independent",
"License :: OSI Approved :: BSD License",
],
)
| [((391, 440), 're.search', 're.search', (['VERSION_STR_RE', 'version_file_str', 're.M'], {}), '(VERSION_STR_RE, version_file_str, re.M)\n', (400, 440), False, 'import re\n'), ((999, 1025), 'setuptools.find_packages', 'setuptools.find_packages', ([], {}), '()\n', (1023, 1025), False, 'import setuptools\n')] |
jbbrokaw/matplotlib | examples/pylab_examples/matshow.py | 86ec1b6fc5628bfb2d09797c58d7eed0ca8c2427 | """Simple matshow() example."""
from matplotlib.pylab import *
def samplemat(dims):
"""Make a matrix with all zeros and increasing elements on the diagonal"""
aa = zeros(dims)
for i in range(min(dims)):
aa[i, i] = i
return aa
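# For instance, samplemat((3, 3)) returns a 3x3 array of zeros whose diagonal
# holds 0, 1 and 2.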
# Display 2 matrices of different sizes
dimlist = [(12, 12), (15, 35)]
for d in dimlist:
matshow(samplemat(d))
# Display a random matrix with a specified figure number and a grayscale
# colormap
matshow(rand(64, 64), fignum=100, cmap=cm.gray)
show()
| [] |
HeyLifeHD/rp-bp | setup.py | 9c59b1bc0267400747477467c45f96364d5528e1 | #! /usr/bin/env python3
import importlib
import logging
import os
import subprocess
from setuptools import setup
from setuptools.command.install import install as install
from setuptools.command.develop import develop as develop
logger = logging.getLogger(__name__)
stan_model_files = [
os.path.join("nonperiodic", "no-periodicity.stan"),
os.path.join("nonperiodic", "start-high-high-low.stan"),
os.path.join("nonperiodic", "start-high-low-high.stan"),
os.path.join("periodic", "start-high-low-low.stan"),
os.path.join("untranslated", "gaussian-naive-bayes.stan"),
os.path.join("translated", "periodic-gaussian-mixture.stan")
]
stan_pickle_files = [
os.path.join("nonperiodic", "no-periodicity.pkl"),
os.path.join("nonperiodic", "start-high-high-low.pkl"),
os.path.join("nonperiodic", "start-high-low-high.pkl"),
os.path.join("periodic", "start-high-low-low.pkl"),
os.path.join("untranslated", "gaussian-naive-bayes.pkl"),
os.path.join("translated", "periodic-gaussian-mixture.pkl")
]
def _pickle_it(stan, pickle):
import shlex
dirname = os.path.dirname(pickle)
if not os.path.exists(dirname):
os.makedirs(dirname)
cmd = "pickle-stan {} {}".format(shlex.quote(stan), shlex.quote(pickle))
logging.info(cmd)
subprocess.call(cmd, shell=True)
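# Illustrative call (the pickle path below is a made-up example; the real base
# directory comes from filenames.get_default_models_base()):
#   _pickle_it('rpbp_models/periodic/start-high-low-low.stan',
#              '/opt/rpbp/models/periodic/start-high-low-low.pkl')
# which runs: pickle-stan <stan file> <pickle file>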
def _post_install(force_recompile):
import site
importlib.reload(site)
import pbio.ribo.ribo_filenames as filenames
import pbio.misc.shell_utils as shell_utils
smf = [os.path.join("rpbp_models", s) for s in stan_model_files]
models_base = filenames.get_default_models_base()
spf = [os.path.join(models_base, s) for s in stan_pickle_files]
# Compile and pickle the Stan models
if force_recompile:
for stan, pickle in zip(smf, spf):
_pickle_it(stan, pickle)
else: # default
for stan, pickle in zip(smf, spf):
if os.path.exists(pickle):
msg = "A model already exists at: {}. Skipping.".format(pickle)
logging.warning(msg)
continue
_pickle_it(stan, pickle)
# Check for the prerequisite programs
programs = ['flexbar']
shell_utils.check_programs_exist(programs, raise_on_error=False,
package_name='flexbar', logger=logger)
programs = ['STAR']
shell_utils.check_programs_exist(programs, raise_on_error=False,
package_name='STAR', logger=logger)
programs = ['bowtie2', 'bowtie2-build-s']
shell_utils.check_programs_exist(programs, raise_on_error=False,
package_name='bowtie2', logger=logger)
programs = ['samtools']
shell_utils.check_programs_exist(programs, raise_on_error=False,
package_name='SAMtools', logger=logger)
class SetupInstall(install):
user_options = install.user_options + [
('force-recompile', None, 'Set this flag to recompile the Stan models'),
]
def initialize_options(self):
install.initialize_options(self)
self.force_recompile = None
def finalize_options(self):
install.finalize_options(self)
def run(self):
force_recompile = self.force_recompile # 0 or 1
level = logging.getLevelName("INFO")
logging.basicConfig(level=level,
format='%(levelname)-8s : %(message)s')
install.run(self)
# skip if RTD
if not os.environ.get('READTHEDOCS') == 'True':
_post_install(force_recompile)
class SetupDevelop(develop):
user_options = develop.user_options + [
('force-recompile', None, 'Set this flag to recompile the Stan models'),
]
def initialize_options(self):
develop.initialize_options(self)
self.force_recompile = None
def finalize_options(self):
develop.finalize_options(self)
def run(self):
force_recompile = self.force_recompile # 0 or 1
level = logging.getLevelName("INFO")
logging.basicConfig(level=level,
format='%(levelname)-8s : %(message)s')
develop.run(self)
# skip if RTD
if not os.environ.get('READTHEDOCS') == 'True':
_post_install(force_recompile)
setup(
cmdclass={
'install': SetupInstall,
'develop': SetupDevelop
}
)
| [((242, 269), 'logging.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (259, 269), False, 'import logging\n'), ((4350, 4416), 'setuptools.setup', 'setup', ([], {'cmdclass': "{'install': SetupInstall, 'develop': SetupDevelop}"}), "(cmdclass={'install': SetupInstall, 'develop': SetupDevelop})\n", (4355, 4416), False, 'from setuptools import setup\n'), ((297, 347), 'os.path.join', 'os.path.join', (['"""nonperiodic"""', '"""no-periodicity.stan"""'], {}), "('nonperiodic', 'no-periodicity.stan')\n", (309, 347), False, 'import os\n'), ((353, 408), 'os.path.join', 'os.path.join', (['"""nonperiodic"""', '"""start-high-high-low.stan"""'], {}), "('nonperiodic', 'start-high-high-low.stan')\n", (365, 408), False, 'import os\n'), ((414, 469), 'os.path.join', 'os.path.join', (['"""nonperiodic"""', '"""start-high-low-high.stan"""'], {}), "('nonperiodic', 'start-high-low-high.stan')\n", (426, 469), False, 'import os\n'), ((475, 526), 'os.path.join', 'os.path.join', (['"""periodic"""', '"""start-high-low-low.stan"""'], {}), "('periodic', 'start-high-low-low.stan')\n", (487, 526), False, 'import os\n'), ((532, 589), 'os.path.join', 'os.path.join', (['"""untranslated"""', '"""gaussian-naive-bayes.stan"""'], {}), "('untranslated', 'gaussian-naive-bayes.stan')\n", (544, 589), False, 'import os\n'), ((595, 655), 'os.path.join', 'os.path.join', (['"""translated"""', '"""periodic-gaussian-mixture.stan"""'], {}), "('translated', 'periodic-gaussian-mixture.stan')\n", (607, 655), False, 'import os\n'), ((686, 735), 'os.path.join', 'os.path.join', (['"""nonperiodic"""', '"""no-periodicity.pkl"""'], {}), "('nonperiodic', 'no-periodicity.pkl')\n", (698, 735), False, 'import os\n'), ((741, 795), 'os.path.join', 'os.path.join', (['"""nonperiodic"""', '"""start-high-high-low.pkl"""'], {}), "('nonperiodic', 'start-high-high-low.pkl')\n", (753, 795), False, 'import os\n'), ((801, 855), 'os.path.join', 'os.path.join', (['"""nonperiodic"""', '"""start-high-low-high.pkl"""'], {}), "('nonperiodic', 'start-high-low-high.pkl')\n", (813, 855), False, 'import os\n'), ((861, 911), 'os.path.join', 'os.path.join', (['"""periodic"""', '"""start-high-low-low.pkl"""'], {}), "('periodic', 'start-high-low-low.pkl')\n", (873, 911), False, 'import os\n'), ((917, 973), 'os.path.join', 'os.path.join', (['"""untranslated"""', '"""gaussian-naive-bayes.pkl"""'], {}), "('untranslated', 'gaussian-naive-bayes.pkl')\n", (929, 973), False, 'import os\n'), ((979, 1038), 'os.path.join', 'os.path.join', (['"""translated"""', '"""periodic-gaussian-mixture.pkl"""'], {}), "('translated', 'periodic-gaussian-mixture.pkl')\n", (991, 1038), False, 'import os\n'), ((1106, 1129), 'os.path.dirname', 'os.path.dirname', (['pickle'], {}), '(pickle)\n', (1121, 1129), False, 'import os\n'), ((1277, 1294), 'logging.info', 'logging.info', (['cmd'], {}), '(cmd)\n', (1289, 1294), False, 'import logging\n'), ((1299, 1331), 'subprocess.call', 'subprocess.call', (['cmd'], {'shell': '(True)'}), '(cmd, shell=True)\n', (1314, 1331), False, 'import subprocess\n'), ((1391, 1413), 'importlib.reload', 'importlib.reload', (['site'], {}), '(site)\n', (1407, 1413), False, 'import importlib\n'), ((1605, 1640), 'pbio.ribo.ribo_filenames.get_default_models_base', 'filenames.get_default_models_base', ([], {}), '()\n', (1638, 1640), True, 'import pbio.ribo.ribo_filenames as filenames\n'), ((2211, 2318), 'pbio.misc.shell_utils.check_programs_exist', 'shell_utils.check_programs_exist', (['programs'], {'raise_on_error': '(False)', 'package_name': '"""flexbar"""', 
'logger': 'logger'}), "(programs, raise_on_error=False,\n package_name='flexbar', logger=logger)\n", (2243, 2318), True, 'import pbio.misc.shell_utils as shell_utils\n'), ((2389, 2493), 'pbio.misc.shell_utils.check_programs_exist', 'shell_utils.check_programs_exist', (['programs'], {'raise_on_error': '(False)', 'package_name': '"""STAR"""', 'logger': 'logger'}), "(programs, raise_on_error=False,\n package_name='STAR', logger=logger)\n", (2421, 2493), True, 'import pbio.misc.shell_utils as shell_utils\n'), ((2578, 2685), 'pbio.misc.shell_utils.check_programs_exist', 'shell_utils.check_programs_exist', (['programs'], {'raise_on_error': '(False)', 'package_name': '"""bowtie2"""', 'logger': 'logger'}), "(programs, raise_on_error=False,\n package_name='bowtie2', logger=logger)\n", (2610, 2685), True, 'import pbio.misc.shell_utils as shell_utils\n'), ((2752, 2860), 'pbio.misc.shell_utils.check_programs_exist', 'shell_utils.check_programs_exist', (['programs'], {'raise_on_error': '(False)', 'package_name': '"""SAMtools"""', 'logger': 'logger'}), "(programs, raise_on_error=False,\n package_name='SAMtools', logger=logger)\n", (2784, 2860), True, 'import pbio.misc.shell_utils as shell_utils\n'), ((1141, 1164), 'os.path.exists', 'os.path.exists', (['dirname'], {}), '(dirname)\n', (1155, 1164), False, 'import os\n'), ((1174, 1194), 'os.makedirs', 'os.makedirs', (['dirname'], {}), '(dirname)\n', (1185, 1194), False, 'import os\n'), ((1233, 1250), 'shlex.quote', 'shlex.quote', (['stan'], {}), '(stan)\n', (1244, 1250), False, 'import shlex\n'), ((1252, 1271), 'shlex.quote', 'shlex.quote', (['pickle'], {}), '(pickle)\n', (1263, 1271), False, 'import shlex\n'), ((1528, 1558), 'os.path.join', 'os.path.join', (['"""rpbp_models"""', 's'], {}), "('rpbp_models', s)\n", (1540, 1558), False, 'import os\n'), ((1652, 1680), 'os.path.join', 'os.path.join', (['models_base', 's'], {}), '(models_base, s)\n', (1664, 1680), False, 'import os\n'), ((3100, 3132), 'setuptools.command.install.install.initialize_options', 'install.initialize_options', (['self'], {}), '(self)\n', (3126, 3132), True, 'from setuptools.command.install import install as install\n'), ((3210, 3240), 'setuptools.command.install.install.finalize_options', 'install.finalize_options', (['self'], {}), '(self)\n', (3234, 3240), True, 'from setuptools.command.install import install as install\n'), ((3335, 3363), 'logging.getLevelName', 'logging.getLevelName', (['"""INFO"""'], {}), "('INFO')\n", (3355, 3363), False, 'import logging\n'), ((3372, 3444), 'logging.basicConfig', 'logging.basicConfig', ([], {'level': 'level', 'format': '"""%(levelname)-8s : %(message)s"""'}), "(level=level, format='%(levelname)-8s : %(message)s')\n", (3391, 3444), False, 'import logging\n'), ((3482, 3499), 'setuptools.command.install.install.run', 'install.run', (['self'], {}), '(self)\n', (3493, 3499), True, 'from setuptools.command.install import install as install\n'), ((3827, 3859), 'setuptools.command.develop.develop.initialize_options', 'develop.initialize_options', (['self'], {}), '(self)\n', (3853, 3859), True, 'from setuptools.command.develop import develop as develop\n'), ((3937, 3967), 'setuptools.command.develop.develop.finalize_options', 'develop.finalize_options', (['self'], {}), '(self)\n', (3961, 3967), True, 'from setuptools.command.develop import develop as develop\n'), ((4062, 4090), 'logging.getLevelName', 'logging.getLevelName', (['"""INFO"""'], {}), "('INFO')\n", (4082, 4090), False, 'import logging\n'), ((4099, 4171), 'logging.basicConfig', 
'logging.basicConfig', ([], {'level': 'level', 'format': '"""%(levelname)-8s : %(message)s"""'}), "(level=level, format='%(levelname)-8s : %(message)s')\n", (4118, 4171), False, 'import logging\n'), ((4209, 4226), 'setuptools.command.develop.develop.run', 'develop.run', (['self'], {}), '(self)\n', (4220, 4226), True, 'from setuptools.command.develop import develop as develop\n'), ((1934, 1956), 'os.path.exists', 'os.path.exists', (['pickle'], {}), '(pickle)\n', (1948, 1956), False, 'import os\n'), ((2054, 2074), 'logging.warning', 'logging.warning', (['msg'], {}), '(msg)\n', (2069, 2074), False, 'import logging\n'), ((3537, 3566), 'os.environ.get', 'os.environ.get', (['"""READTHEDOCS"""'], {}), "('READTHEDOCS')\n", (3551, 3566), False, 'import os\n'), ((4264, 4293), 'os.environ.get', 'os.environ.get', (['"""READTHEDOCS"""'], {}), "('READTHEDOCS')\n", (4278, 4293), False, 'import os\n')] |
BorisMansencal/quickNAT_pytorch | utils/data_utils.py | 1853afbe409f2fec6db298c70a3dd0ae088091f0 | import os
import h5py
import nibabel as nb
import numpy as np
import torch
import torch.utils.data as data
from torchvision import transforms
import utils.preprocessor as preprocessor
# transform_train = transforms.Compose([
# transforms.RandomCrop(200, padding=56),
# transforms.ToTensor(),
# ])
class ImdbData(data.Dataset):
def __init__(self, X, y, w, transforms=None):
self.X = X if len(X.shape) == 4 else X[:, np.newaxis, :, :]
self.y = y
self.w = w
self.transforms = transforms
def __getitem__(self, index):
img = torch.from_numpy(self.X[index])
label = torch.from_numpy(self.y[index])
weight = torch.from_numpy(self.w[index])
return img, label, weight
def __len__(self):
return len(self.y)
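# A minimal usage sketch (hedged; it assumes X, y and w are numpy arrays of
# matching length, and the batch size is arbitrary):
#   loader = data.DataLoader(ImdbData(X, y, w), batch_size=4, shuffle=True)
#   for img, label, weight in loader:
#       ...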
def get_imdb_dataset(data_params):
data_train = h5py.File(os.path.join(data_params['data_dir'], data_params['train_data_file']), 'r')
label_train = h5py.File(os.path.join(data_params['data_dir'], data_params['train_label_file']), 'r')
class_weight_train = h5py.File(os.path.join(data_params['data_dir'], data_params['train_class_weights_file']), 'r')
weight_train = h5py.File(os.path.join(data_params['data_dir'], data_params['train_weights_file']), 'r')
data_test = h5py.File(os.path.join(data_params['data_dir'], data_params['test_data_file']), 'r')
label_test = h5py.File(os.path.join(data_params['data_dir'], data_params['test_label_file']), 'r')
class_weight_test = h5py.File(os.path.join(data_params['data_dir'], data_params['test_class_weights_file']), 'r')
weight_test = h5py.File(os.path.join(data_params['data_dir'], data_params['test_weights_file']), 'r')
return (ImdbData(data_train['data'][()], label_train['label'][()], class_weight_train['class_weights'][()]),
ImdbData(data_test['data'][()], label_test['label'][()], class_weight_test['class_weights'][()]))
def load_dataset(file_paths,
orientation,
remap_config,
return_weights=False,
reduce_slices=False,
remove_black=False):
print("Loading and preprocessing data...")
volume_list, labelmap_list, headers, class_weights_list, weights_list = [], [], [], [], []
for file_path in file_paths:
volume, labelmap, class_weights, weights, header = load_and_preprocess(file_path, orientation,
remap_config=remap_config,
reduce_slices=reduce_slices,
remove_black=remove_black,
return_weights=return_weights)
volume_list.append(volume)
labelmap_list.append(labelmap)
if return_weights:
class_weights_list.append(class_weights)
weights_list.append(weights)
headers.append(header)
print("#", end='', flush=True)
print("100%", flush=True)
if return_weights:
return volume_list, labelmap_list, class_weights_list, weights_list, headers
else:
return volume_list, labelmap_list, headers
def load_and_preprocess(file_path, orientation, remap_config, reduce_slices=False,
remove_black=False,
return_weights=False):
volume, labelmap, header = load_data(file_path, orientation)
volume, labelmap, class_weights, weights = preprocess(volume, labelmap, remap_config=remap_config,
reduce_slices=reduce_slices,
remove_black=remove_black,
return_weights=return_weights)
return volume, labelmap, class_weights, weights, header
def load_and_preprocess_eval(file_path, orientation, notlabel=True):
volume_nifty = nb.load(file_path[0])
header = volume_nifty.header
volume = volume_nifty.get_fdata()
if notlabel:
volume = (volume - np.min(volume)) / (np.max(volume) - np.min(volume))
else:
volume = np.round(volume)
if orientation == "COR":
volume = volume.transpose((2, 0, 1))
elif orientation == "AXI":
volume = volume.transpose((1, 2, 0))
return volume, header
def load_data(file_path, orientation):
volume_nifty, labelmap_nifty = nb.load(file_path[0]), nb.load(file_path[1])
volume, labelmap = volume_nifty.get_fdata(), labelmap_nifty.get_fdata()
volume = (volume - np.min(volume)) / (np.max(volume) - np.min(volume))
volume, labelmap = preprocessor.rotate_orientation(volume, labelmap, orientation)
return volume, labelmap, volume_nifty.header
def preprocess(volume, labelmap, remap_config, reduce_slices=False, remove_black=False, return_weights=False):
if reduce_slices:
volume, labelmap = preprocessor.reduce_slices(volume, labelmap)
if remap_config:
labelmap = preprocessor.remap_labels(labelmap, remap_config)
if remove_black:
volume, labelmap = preprocessor.remove_black(volume, labelmap)
if return_weights:
class_weights, weights = preprocessor.estimate_weights_mfb(labelmap)
return volume, labelmap, class_weights, weights
else:
return volume, labelmap, None, None
# def load_file_paths(data_dir, label_dir, volumes_txt_file=None):
# """
# This function returns the file paths combined as a list where each element is a 2 element tuple, 0th being data and 1st being label.
# It should be modified to suit the need of the project
# :param data_dir: Directory which contains the data files
# :param label_dir: Directory which contains the label files
# :param volumes_txt_file: (Optional) Path to the a csv file, when provided only these data points will be read
# :return: list of file paths as string
# """
#
# volume_exclude_list = ['IXI290', 'IXI423']
# if volumes_txt_file:
# with open(volumes_txt_file) as file_handle:
# volumes_to_use = file_handle.read().splitlines()
# else:
# volumes_to_use = [name for name in os.listdir(data_dir) if
# name.startswith('IXI') and name not in volume_exclude_list]
#
# file_paths = [
# [os.path.join(data_dir, vol, 'mri/orig.mgz'), os.path.join(label_dir, vol, 'mri/aseg.auto_noCCseg.mgz')]
# for
# vol in volumes_to_use]
# return file_paths
def load_file_paths(data_dir, label_dir, data_id, volumes_txt_file=None):
"""
This function returns the file paths combined as a list where each element is a 2 element tuple, 0th being data and 1st being label.
It should be modified to suit the need of the project
:param data_dir: Directory which contains the data files
:param label_dir: Directory which contains the label files
    :param data_id: A flag indicating which dataset ("MALC", "ADNI", "CANDI", "IBSR" or "BORIS") is being read, so the matching file layout is used
    :param volumes_txt_file: (Optional) Path to a csv file; when provided, only these data points will be read
:return: list of file paths as string
"""
if volumes_txt_file:
with open(volumes_txt_file) as file_handle:
volumes_to_use = file_handle.read().splitlines()
else:
volumes_to_use = [name for name in os.listdir(data_dir)]
if data_id == "MALC":
file_paths = [
[os.path.join(data_dir, vol, 'mri/orig.mgz'), os.path.join(label_dir, vol + '_glm.mgz')]
for
vol in volumes_to_use]
elif data_id == "ADNI":
file_paths = [
[os.path.join(data_dir, vol, 'orig.mgz'), os.path.join(label_dir, vol, 'Lab_con.mgz')]
for
vol in volumes_to_use]
elif data_id == "CANDI":
file_paths = [
[os.path.join(data_dir, vol + '/' + vol + '_1.mgz'),
os.path.join(label_dir, vol + '/' + vol + '_1_seg.mgz')]
for
vol in volumes_to_use]
elif data_id == "IBSR":
file_paths = [
[os.path.join(data_dir, vol, 'mri/orig.mgz'), os.path.join(label_dir, vol + '_map.nii.gz')]
for
vol in volumes_to_use]
elif data_id == "BORIS": #BORIS
file_paths = [
[os.path.join(data_dir, vol), os.path.join(label_dir, vol.replace('.nii', '_seg.nii'))]
for
vol in volumes_to_use]
else:
raise ValueError("Invalid entry, valid options are MALC, ADNI, CANDI and IBSR")
return file_paths
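# Example (illustrative): for the MALC layout above, a call like the following pairs each
# volume's 'mri/orig.mgz' with its '<vol>_glm.mgz' label. The directories are placeholders,
# not paths from this project.
# file_paths = load_file_paths('/datasets/MALC/data', '/datasets/MALC/labels', 'MALC')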
def load_file_paths_eval(data_dir, volumes_txt_file, dir_struct):
"""
    This function returns the file paths as a list where each element is a single-element list containing the data file path (no label paths are collected for evaluation).
    It should be modified to suit the need of the project
    :param data_dir: Directory which contains the data files
    :param volumes_txt_file: Path to a csv file listing the volumes to read
    :param dir_struct: Directory structure of the data, one of "FS", "Linear" or "part_FS"
:return: list of file paths as string
"""
with open(volumes_txt_file) as file_handle:
volumes_to_use = file_handle.read().splitlines()
if dir_struct == "FS":
file_paths = [
[os.path.join(data_dir, vol, 'mri/orig.mgz')]
for
vol in volumes_to_use]
elif dir_struct == "Linear":
file_paths = [
[os.path.join(data_dir, vol)]
for
vol in volumes_to_use]
elif dir_struct == "part_FS":
file_paths = [
[os.path.join(data_dir, vol, 'orig.mgz')]
for
vol in volumes_to_use]
else:
raise ValueError("Invalid entry, valid options are FS and Linear")
return file_paths
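# Sketch of an evaluation flow combining the helpers above (directory, txt file and
# dir_struct values are illustrative assumptions):
# eval_paths = load_file_paths_eval('/datasets/eval', 'volumes.txt', 'FS')
# volume, header = load_and_preprocess_eval(eval_paths[0], 'AXI', notlabel=True)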
| [((4062, 4083), 'nibabel.load', 'nb.load', (['file_path[0]'], {}), '(file_path[0])\n', (4069, 4083), True, 'import nibabel as nb\n'), ((4766, 4828), 'utils.preprocessor.rotate_orientation', 'preprocessor.rotate_orientation', (['volume', 'labelmap', 'orientation'], {}), '(volume, labelmap, orientation)\n', (4797, 4828), True, 'import utils.preprocessor as preprocessor\n'), ((582, 613), 'torch.from_numpy', 'torch.from_numpy', (['self.X[index]'], {}), '(self.X[index])\n', (598, 613), False, 'import torch\n'), ((630, 661), 'torch.from_numpy', 'torch.from_numpy', (['self.y[index]'], {}), '(self.y[index])\n', (646, 661), False, 'import torch\n'), ((679, 710), 'torch.from_numpy', 'torch.from_numpy', (['self.w[index]'], {}), '(self.w[index])\n', (695, 710), False, 'import torch\n'), ((860, 929), 'os.path.join', 'os.path.join', (["data_params['data_dir']", "data_params['train_data_file']"], {}), "(data_params['data_dir'], data_params['train_data_file'])\n", (872, 929), False, 'import os\n'), ((964, 1034), 'os.path.join', 'os.path.join', (["data_params['data_dir']", "data_params['train_label_file']"], {}), "(data_params['data_dir'], data_params['train_label_file'])\n", (976, 1034), False, 'import os\n'), ((1076, 1154), 'os.path.join', 'os.path.join', (["data_params['data_dir']", "data_params['train_class_weights_file']"], {}), "(data_params['data_dir'], data_params['train_class_weights_file'])\n", (1088, 1154), False, 'import os\n'), ((1190, 1262), 'os.path.join', 'os.path.join', (["data_params['data_dir']", "data_params['train_weights_file']"], {}), "(data_params['data_dir'], data_params['train_weights_file'])\n", (1202, 1262), False, 'import os\n'), ((1296, 1364), 'os.path.join', 'os.path.join', (["data_params['data_dir']", "data_params['test_data_file']"], {}), "(data_params['data_dir'], data_params['test_data_file'])\n", (1308, 1364), False, 'import os\n'), ((1398, 1467), 'os.path.join', 'os.path.join', (["data_params['data_dir']", "data_params['test_label_file']"], {}), "(data_params['data_dir'], data_params['test_label_file'])\n", (1410, 1467), False, 'import os\n'), ((1508, 1585), 'os.path.join', 'os.path.join', (["data_params['data_dir']", "data_params['test_class_weights_file']"], {}), "(data_params['data_dir'], data_params['test_class_weights_file'])\n", (1520, 1585), False, 'import os\n'), ((1620, 1691), 'os.path.join', 'os.path.join', (["data_params['data_dir']", "data_params['test_weights_file']"], {}), "(data_params['data_dir'], data_params['test_weights_file'])\n", (1632, 1691), False, 'import os\n'), ((4278, 4294), 'numpy.round', 'np.round', (['volume'], {}), '(volume)\n', (4286, 4294), True, 'import numpy as np\n'), ((4547, 4568), 'nibabel.load', 'nb.load', (['file_path[0]'], {}), '(file_path[0])\n', (4554, 4568), True, 'import nibabel as nb\n'), ((4570, 4591), 'nibabel.load', 'nb.load', (['file_path[1]'], {}), '(file_path[1])\n', (4577, 4591), True, 'import nibabel as nb\n'), ((5040, 5084), 'utils.preprocessor.reduce_slices', 'preprocessor.reduce_slices', (['volume', 'labelmap'], {}), '(volume, labelmap)\n', (5066, 5084), True, 'import utils.preprocessor as preprocessor\n'), ((5126, 5175), 'utils.preprocessor.remap_labels', 'preprocessor.remap_labels', (['labelmap', 'remap_config'], {}), '(labelmap, remap_config)\n', (5151, 5175), True, 'import utils.preprocessor as preprocessor\n'), ((5225, 5268), 'utils.preprocessor.remove_black', 'preprocessor.remove_black', (['volume', 'labelmap'], {}), '(volume, labelmap)\n', (5250, 5268), True, 'import utils.preprocessor as 
preprocessor\n'), ((5326, 5369), 'utils.preprocessor.estimate_weights_mfb', 'preprocessor.estimate_weights_mfb', (['labelmap'], {}), '(labelmap)\n', (5359, 5369), True, 'import utils.preprocessor as preprocessor\n'), ((4691, 4705), 'numpy.min', 'np.min', (['volume'], {}), '(volume)\n', (4697, 4705), True, 'import numpy as np\n'), ((4710, 4724), 'numpy.max', 'np.max', (['volume'], {}), '(volume)\n', (4716, 4724), True, 'import numpy as np\n'), ((4727, 4741), 'numpy.min', 'np.min', (['volume'], {}), '(volume)\n', (4733, 4741), True, 'import numpy as np\n'), ((4199, 4213), 'numpy.min', 'np.min', (['volume'], {}), '(volume)\n', (4205, 4213), True, 'import numpy as np\n'), ((4218, 4232), 'numpy.max', 'np.max', (['volume'], {}), '(volume)\n', (4224, 4232), True, 'import numpy as np\n'), ((4235, 4249), 'numpy.min', 'np.min', (['volume'], {}), '(volume)\n', (4241, 4249), True, 'import numpy as np\n'), ((7469, 7489), 'os.listdir', 'os.listdir', (['data_dir'], {}), '(data_dir)\n', (7479, 7489), False, 'import os\n'), ((7554, 7597), 'os.path.join', 'os.path.join', (['data_dir', 'vol', '"""mri/orig.mgz"""'], {}), "(data_dir, vol, 'mri/orig.mgz')\n", (7566, 7597), False, 'import os\n'), ((7599, 7640), 'os.path.join', 'os.path.join', (['label_dir', "(vol + '_glm.mgz')"], {}), "(label_dir, vol + '_glm.mgz')\n", (7611, 7640), False, 'import os\n'), ((9395, 9438), 'os.path.join', 'os.path.join', (['data_dir', 'vol', '"""mri/orig.mgz"""'], {}), "(data_dir, vol, 'mri/orig.mgz')\n", (9407, 9438), False, 'import os\n'), ((7757, 7796), 'os.path.join', 'os.path.join', (['data_dir', 'vol', '"""orig.mgz"""'], {}), "(data_dir, vol, 'orig.mgz')\n", (7769, 7796), False, 'import os\n'), ((7798, 7841), 'os.path.join', 'os.path.join', (['label_dir', 'vol', '"""Lab_con.mgz"""'], {}), "(label_dir, vol, 'Lab_con.mgz')\n", (7810, 7841), False, 'import os\n'), ((9560, 9587), 'os.path.join', 'os.path.join', (['data_dir', 'vol'], {}), '(data_dir, vol)\n', (9572, 9587), False, 'import os\n'), ((7959, 8009), 'os.path.join', 'os.path.join', (['data_dir', "(vol + '/' + vol + '_1.mgz')"], {}), "(data_dir, vol + '/' + vol + '_1.mgz')\n", (7971, 8009), False, 'import os\n'), ((8024, 8079), 'os.path.join', 'os.path.join', (['label_dir', "(vol + '/' + vol + '_1_seg.mgz')"], {}), "(label_dir, vol + '/' + vol + '_1_seg.mgz')\n", (8036, 8079), False, 'import os\n'), ((9710, 9749), 'os.path.join', 'os.path.join', (['data_dir', 'vol', '"""orig.mgz"""'], {}), "(data_dir, vol, 'orig.mgz')\n", (9722, 9749), False, 'import os\n'), ((8196, 8239), 'os.path.join', 'os.path.join', (['data_dir', 'vol', '"""mri/orig.mgz"""'], {}), "(data_dir, vol, 'mri/orig.mgz')\n", (8208, 8239), False, 'import os\n'), ((8241, 8285), 'os.path.join', 'os.path.join', (['label_dir', "(vol + '_map.nii.gz')"], {}), "(label_dir, vol + '_map.nii.gz')\n", (8253, 8285), False, 'import os\n'), ((8410, 8437), 'os.path.join', 'os.path.join', (['data_dir', 'vol'], {}), '(data_dir, vol)\n', (8422, 8437), False, 'import os\n')] |
auho/python-ETL | lib/common/app.py | 761589814b04e076ba6fa1c0e64b83ce62ce8556 | import argparse
import yaml
import sys
from .conf import MysqlConf
from lib.db import mysql
parser = argparse.ArgumentParser()
parser.add_argument("--config", help="config file name", type=str, required=False, default='office')
input_args = parser.parse_args()
class PartConfig:
def __init__(self):
self._mysqlDbConf = MysqlConf()
self._yamlConfig = None
def parse(self, conf_name, module_path):
self._parse_yaml(conf_name, module_path)
self._mysqlDbConf.load(self.get('mysql'))
def get(self, name):
return self._yamlConfig[name]
def _parse_yaml(self, conf_name, module_path):
yaml_file = module_path + f"/conf/db_{conf_name}.yml"
        # Use a context manager so the config file handle is always closed.
        with open(yaml_file, 'r', encoding='utf-8') as f:
            self._yamlConfig = yaml.safe_load(f.read())
@property
def mysql_db_conf(self):
return self._mysqlDbConf
class App:
configName = None
moduleImport = None
moduleName = None
modulePath = None
mysqlDb = None
mysqlDbConf = None
ENV = 'dev'
DEBUG = True
ENV_DEBUG = False
def __init__(self, module_path, root_path):
self.configName = input_args.config
self.modulePath = module_path
self.moduleName = self.modulePath.replace(root_path + '/', '')
self.moduleImport = self.moduleName.replace('/', '.')
part_conf = PartConfig() # type:PartConfig
part_conf.parse(conf_name=input_args.config, module_path=module_path)
self.mysqlDbConf = part_conf.mysql_db_conf # type:MysqlConf
self.mysqlDb = mysql.Mysql(self.mysqlDbConf) # type: mysql.Mysql
self.mysqlDb.connect()
self.DEBUG = bool(part_conf.get('debug'))
self.ENV = part_conf.get('env')
if self.ENV == 'dev':
self.ENV_DEBUG = True
def get_db(self):
return self.mysqlDb
def get_sub_import(self, sub):
return self.moduleImport + '.' + sub
def get_sub_path(self, sub):
return self.modulePath + '/' + sub
def get_conf_path(self):
return self.get_sub_path(sub='conf')
def get_data_path(self):
return self.get_sub_path(sub='data')
def get_data_file_path(self, file):
return self.get_data_path() + '/' + file
def log(self):
self._init_info()
def _init_info(self):
print("=" * 50)
print("=" * 2 + f" MODULE PATH:: {self.modulePath}")
print("=" * 2 + f" FILE PATH:: {' '.join(sys.argv)}")
print(f" config file: {self.configName}")
print(f" db:: {self.mysqlDbConf.db}")
print(f" debug:: {str(int(self.DEBUG))}")
print(f" env_debug:: {str(int(self.ENV_DEBUG))}")
print("=" * 50)
print("\n")
| [((102, 127), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {}), '()\n', (125, 127), False, 'import argparse\n'), ((811, 839), 'yaml.safe_load', 'yaml.safe_load', (['yaml_content'], {}), '(yaml_content)\n', (825, 839), False, 'import yaml\n'), ((1606, 1635), 'lib.db.mysql.Mysql', 'mysql.Mysql', (['self.mysqlDbConf'], {}), '(self.mysqlDbConf)\n', (1617, 1635), False, 'from lib.db import mysql\n')] |
StrangeArcturus/QtAndRequestParser-Project | design.py | 5205420ff06c91917ce0c1d890da85e9d72a06ea | # -*- coding: utf-8 -*-
# Form implementation generated from reading ui file 'design.ui'
#
# Created by: PyQt5 UI code generator 5.15.4
#
# WARNING: Any manual changes made to this file will be lost when pyuic5 is
# run again. Do not edit this file unless you know what you are doing.
from PyQt5 import QtCore, QtGui, QtWidgets
class Ui_MainWindow(object):
def setupUi(self, MainWindow):
MainWindow.setObjectName("MainWindow")
MainWindow.resize(650, 550)
self.centralwidget = QtWidgets.QWidget(MainWindow)
self.centralwidget.setObjectName("centralwidget")
self.label = QtWidgets.QLabel(self.centralwidget)
self.label.setGeometry(QtCore.QRect(20, 10, 140, 13))
self.label.setObjectName("label")
self.song_title = QtWidgets.QLineEdit(self.centralwidget)
self.song_title.setGeometry(QtCore.QRect(90, 30, 113, 20))
self.song_title.setObjectName("song_title")
self.label_2 = QtWidgets.QLabel(self.centralwidget)
self.label_2.setGeometry(QtCore.QRect(20, 30, 60, 13))
self.label_2.setObjectName("label_2")
self.label_3 = QtWidgets.QLabel(self.centralwidget)
self.label_3.setGeometry(QtCore.QRect(220, 30, 80, 13))
self.label_3.setObjectName("label_3")
self.song_autor = QtWidgets.QLineEdit(self.centralwidget)
self.song_autor.setGeometry(QtCore.QRect(310, 30, 113, 20))
self.song_autor.setObjectName("song_autor")
self.label_4 = QtWidgets.QLabel(self.centralwidget)
self.label_4.setGeometry(QtCore.QRect(20, 90, 140, 13))
self.label_4.setObjectName("label_4")
self.orig_text = QtWidgets.QPlainTextEdit(self.centralwidget)
self.orig_text.setGeometry(QtCore.QRect(20, 150, 270, 340))
self.orig_text.setObjectName("orig_text")
self.label_5 = QtWidgets.QLabel(self.centralwidget)
self.label_5.setGeometry(QtCore.QRect(20, 120, 60, 13))
self.label_5.setObjectName("label_5")
self.trans_text = QtWidgets.QPlainTextEdit(self.centralwidget)
self.trans_text.setGeometry(QtCore.QRect(320, 150, 270, 340))
self.trans_text.setObjectName("trans_text")
self.label_6 = QtWidgets.QLabel(self.centralwidget)
self.label_6.setGeometry(QtCore.QRect(320, 120, 120, 13))
self.label_6.setObjectName("label_6")
self.get_text = QtWidgets.QPushButton(self.centralwidget)
self.get_text.setGeometry(QtCore.QRect(310, 70, 100, 23))
self.get_text.setObjectName("get_text")
self.pretty_flag = QtWidgets.QCheckBox(self.centralwidget)
self.pretty_flag.setGeometry(QtCore.QRect(20, 60, 250, 20))
self.pretty_flag.setObjectName("pretty_flag")
self.info = QtWidgets.QLabel(self.centralwidget)
self.info.setGeometry(QtCore.QRect(30, 500, 560, 13))
self.info.setText("")
self.info.setObjectName("info")
self.error_text = QtWidgets.QLabel(self.centralwidget)
self.error_text.setGeometry(QtCore.QRect(30, 520, 560, 20))
self.error_text.setText("")
self.error_text.setObjectName("error_text")
MainWindow.setCentralWidget(self.centralwidget)
self.retranslateUi(MainWindow)
QtCore.QMetaObject.connectSlotsByName(MainWindow)
def retranslateUi(self, MainWindow):
_translate = QtCore.QCoreApplication.translate
MainWindow.setWindowTitle(_translate("MainWindow", "Проект 1"))
self.label.setText(_translate("MainWindow", "Введите данные о песне:"))
self.label_2.setText(_translate("MainWindow", "Название:"))
self.label_3.setText(_translate("MainWindow", "Исполнитель:"))
self.label_4.setText(_translate("MainWindow", "Полученный текст песни:"))
self.label_5.setText(_translate("MainWindow", "Оригинал:"))
self.label_6.setText(_translate("MainWindow", "Перевод на русский:"))
self.get_text.setText(_translate("MainWindow", "Запрос текста"))
self.pretty_flag.setText(_translate("MainWindow", "Красивый текст (без указания на припев)"))
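

# Minimal launcher sketch (not emitted by pyuic5; added here for illustration only).
# It shows the conventional way to host the generated Ui_MainWindow class.
if __name__ == "__main__":
    import sys
    app = QtWidgets.QApplication(sys.argv)
    MainWindow = QtWidgets.QMainWindow()
    ui = Ui_MainWindow()
    ui.setupUi(MainWindow)
    MainWindow.show()
    sys.exit(app.exec_())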
| [((527, 556), 'PyQt5.QtWidgets.QWidget', 'QtWidgets.QWidget', (['MainWindow'], {}), '(MainWindow)\n', (544, 556), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((638, 674), 'PyQt5.QtWidgets.QLabel', 'QtWidgets.QLabel', (['self.centralwidget'], {}), '(self.centralwidget)\n', (654, 674), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((808, 847), 'PyQt5.QtWidgets.QLineEdit', 'QtWidgets.QLineEdit', (['self.centralwidget'], {}), '(self.centralwidget)\n', (827, 847), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((993, 1029), 'PyQt5.QtWidgets.QLabel', 'QtWidgets.QLabel', (['self.centralwidget'], {}), '(self.centralwidget)\n', (1009, 1029), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((1165, 1201), 'PyQt5.QtWidgets.QLabel', 'QtWidgets.QLabel', (['self.centralwidget'], {}), '(self.centralwidget)\n', (1181, 1201), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((1341, 1380), 'PyQt5.QtWidgets.QLineEdit', 'QtWidgets.QLineEdit', (['self.centralwidget'], {}), '(self.centralwidget)\n', (1360, 1380), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((1527, 1563), 'PyQt5.QtWidgets.QLabel', 'QtWidgets.QLabel', (['self.centralwidget'], {}), '(self.centralwidget)\n', (1543, 1563), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((1702, 1746), 'PyQt5.QtWidgets.QPlainTextEdit', 'QtWidgets.QPlainTextEdit', (['self.centralwidget'], {}), '(self.centralwidget)\n', (1726, 1746), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((1891, 1927), 'PyQt5.QtWidgets.QLabel', 'QtWidgets.QLabel', (['self.centralwidget'], {}), '(self.centralwidget)\n', (1907, 1927), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((2067, 2111), 'PyQt5.QtWidgets.QPlainTextEdit', 'QtWidgets.QPlainTextEdit', (['self.centralwidget'], {}), '(self.centralwidget)\n', (2091, 2111), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((2260, 2296), 'PyQt5.QtWidgets.QLabel', 'QtWidgets.QLabel', (['self.centralwidget'], {}), '(self.centralwidget)\n', (2276, 2296), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((2436, 2477), 'PyQt5.QtWidgets.QPushButton', 'QtWidgets.QPushButton', (['self.centralwidget'], {}), '(self.centralwidget)\n', (2457, 2477), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((2622, 2661), 'PyQt5.QtWidgets.QCheckBox', 'QtWidgets.QCheckBox', (['self.centralwidget'], {}), '(self.centralwidget)\n', (2641, 2661), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((2807, 2843), 'PyQt5.QtWidgets.QLabel', 'QtWidgets.QLabel', (['self.centralwidget'], {}), '(self.centralwidget)\n', (2823, 2843), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((3006, 3042), 'PyQt5.QtWidgets.QLabel', 'QtWidgets.QLabel', (['self.centralwidget'], {}), '(self.centralwidget)\n', (3022, 3042), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((3310, 3359), 'PyQt5.QtCore.QMetaObject.connectSlotsByName', 'QtCore.QMetaObject.connectSlotsByName', (['MainWindow'], {}), '(MainWindow)\n', (3347, 3359), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((707, 736), 'PyQt5.QtCore.QRect', 'QtCore.QRect', (['(20)', '(10)', '(140)', '(13)'], {}), '(20, 10, 140, 13)\n', (719, 736), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((885, 914), 'PyQt5.QtCore.QRect', 'QtCore.QRect', (['(90)', '(30)', '(113)', '(20)'], {}), '(90, 30, 113, 20)\n', (897, 914), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((1064, 1092), 'PyQt5.QtCore.QRect', 'QtCore.QRect', (['(20)', '(30)', '(60)', '(13)'], {}), '(20, 30, 60, 13)\n', (1076, 1092), 
False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((1236, 1265), 'PyQt5.QtCore.QRect', 'QtCore.QRect', (['(220)', '(30)', '(80)', '(13)'], {}), '(220, 30, 80, 13)\n', (1248, 1265), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((1418, 1448), 'PyQt5.QtCore.QRect', 'QtCore.QRect', (['(310)', '(30)', '(113)', '(20)'], {}), '(310, 30, 113, 20)\n', (1430, 1448), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((1598, 1627), 'PyQt5.QtCore.QRect', 'QtCore.QRect', (['(20)', '(90)', '(140)', '(13)'], {}), '(20, 90, 140, 13)\n', (1610, 1627), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((1783, 1814), 'PyQt5.QtCore.QRect', 'QtCore.QRect', (['(20)', '(150)', '(270)', '(340)'], {}), '(20, 150, 270, 340)\n', (1795, 1814), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((1962, 1991), 'PyQt5.QtCore.QRect', 'QtCore.QRect', (['(20)', '(120)', '(60)', '(13)'], {}), '(20, 120, 60, 13)\n', (1974, 1991), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((2149, 2181), 'PyQt5.QtCore.QRect', 'QtCore.QRect', (['(320)', '(150)', '(270)', '(340)'], {}), '(320, 150, 270, 340)\n', (2161, 2181), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((2331, 2362), 'PyQt5.QtCore.QRect', 'QtCore.QRect', (['(320)', '(120)', '(120)', '(13)'], {}), '(320, 120, 120, 13)\n', (2343, 2362), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((2513, 2543), 'PyQt5.QtCore.QRect', 'QtCore.QRect', (['(310)', '(70)', '(100)', '(23)'], {}), '(310, 70, 100, 23)\n', (2525, 2543), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((2700, 2729), 'PyQt5.QtCore.QRect', 'QtCore.QRect', (['(20)', '(60)', '(250)', '(20)'], {}), '(20, 60, 250, 20)\n', (2712, 2729), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((2875, 2905), 'PyQt5.QtCore.QRect', 'QtCore.QRect', (['(30)', '(500)', '(560)', '(13)'], {}), '(30, 500, 560, 13)\n', (2887, 2905), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n'), ((3080, 3110), 'PyQt5.QtCore.QRect', 'QtCore.QRect', (['(30)', '(520)', '(560)', '(20)'], {}), '(30, 520, 560, 20)\n', (3092, 3110), False, 'from PyQt5 import QtCore, QtGui, QtWidgets\n')] |
Alisa-lisa/conferences | EP_2019/py_impl/main.py | d93014747dc9d18493295dbc33fa51c8fb9467dc | from simulation.car import spawn_drivers
from simulation.passenger import spawn_passengers
from simulation.core import World, Clock
conf = {
"x": 100,
"y": 100,
"drivers": 200,
"users": 1000,
"start": "2019-07-08T00:00:00",
"end": "2019-07-08T00:01:00"
}
clock = Clock(conf["start"], conf["end"])
if __name__ == '__main__':
world = World([conf['x'], conf['y']], clock=clock)
world.register_drivers(spawn_drivers(conf["drivers"], conf['x'], conf['y']))
world.register_passengers(spawn_passengers(conf["users"], conf['x'], conf['y']))
world.run(log=False)
| [((289, 322), 'simulation.core.Clock', 'Clock', (["conf['start']", "conf['end']"], {}), "(conf['start'], conf['end'])\n", (294, 322), False, 'from simulation.core import World, Clock\n'), ((363, 405), 'simulation.core.World', 'World', (["[conf['x'], conf['y']]"], {'clock': 'clock'}), "([conf['x'], conf['y']], clock=clock)\n", (368, 405), False, 'from simulation.core import World, Clock\n'), ((433, 485), 'simulation.car.spawn_drivers', 'spawn_drivers', (["conf['drivers']", "conf['x']", "conf['y']"], {}), "(conf['drivers'], conf['x'], conf['y'])\n", (446, 485), False, 'from simulation.car import spawn_drivers\n'), ((517, 570), 'simulation.passenger.spawn_passengers', 'spawn_passengers', (["conf['users']", "conf['x']", "conf['y']"], {}), "(conf['users'], conf['x'], conf['y'])\n", (533, 570), False, 'from simulation.passenger import spawn_passengers\n')] |
avulaankith/Python | Python/reverse_with_swap.py | 71269b1a36b45150edb7834c559386a91618e723 | #!/bin/python3
import math
import os
import random
import re
import sys
#
# Complete the 'reverse_words_order_and_swap_cases' function below.
#
# The function is expected to return a STRING.
# The function accepts STRING sentence as parameter.
#
def reverse_words_order_and_swap_cases(sentence):
# Write your code here
l = []
st = ""
for i in sentence:
if i == " ":
l.append(st)
st = ""
else:
st += i.swapcase()
# continue
l.append(st)
st = ""
l.reverse()
news = ""
for i in range(len(l)):
if i != (len(l) - 1):
news += l[i] + " "
else:
news += l[i]
return news
sentence = input()
news = reverse_words_order_and_swap_cases(sentence)
print(news)
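# For comparison (illustrative, not part of the original solution): the loop above should be
# equivalent to splitting on single spaces, reversing the word order and swapping case:
# print(" ".join(word.swapcase() for word in reversed(sentence.split(" "))))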
| [] |
INK-USC/hypter | playground/check_equal.py | 732551e1e717b66ad26ba538593ed184957ecdea | import json
d1 = {}
with open("/home/qinyuan/zs/out/bart-large-with-description-grouped-1e-5-outerbsz4-innerbsz32-adapterdim4-unfreeze-dec29/test_predictions.jsonl") as fin:
for line in fin:
d = json.loads(line)
d1[d["id"]] = d["output"][0]["answer"]
d2 = {}
dq = {}
with open("/home/qinyuan/zs/out/bart-large-zsre-with-description-LR2e-5-FREQ32-dec27/test_predictions_submitted.jsonl") as fin:
for line in fin:
d = json.loads(line)
d2[d["id"]] = d["output"][0]["answer"]
dq[d["id"]] = d["input"]
d3 = {}
with open("/home/qinyuan/zs/data/structured_zeroshot-test.jsonl") as fin:
for line in fin:
d = json.loads(line)
d3[d["id"]] = [item["answer"] for item in d["output"]]
count = 0
win1 = 0
win2 = 0
for key in d1.keys():
    if d1[key] != d2[key]:
print("{}. {}. {}. {}. {}".format(key, dq[key], d1[key], d2[key], d3[key]))
count += 1
if d1[key] in d3[key] and d2[key] not in d3[key]:
win1 += 1
print(d1[key])
print(d2[key])
if d2[key] in d3[key] and d1[key] not in d3[key]:
win2 += 1
print(d1[key])
print(d2[key])
print(count)
print(win1)
print(win2)
| [((208, 224), 'json.loads', 'json.loads', (['line'], {}), '(line)\n', (218, 224), False, 'import json\n'), ((450, 466), 'json.loads', 'json.loads', (['line'], {}), '(line)\n', (460, 466), False, 'import json\n'), ((663, 679), 'json.loads', 'json.loads', (['line'], {}), '(line)\n', (673, 679), False, 'import json\n')] |
MaayanLab/creeds | creeds/static/api1.py | 7d580c91ca45c03e34bbc0d1928668f266ff13d9 | import json, requests
from pprint import pprint
CREEDS_URL = 'http://amp.pharm.mssm.edu/CREEDS/'
response = requests.get(CREEDS_URL + 'search', params={'q':'STAT3'})
if response.status_code == 200:
pprint(response.json())
	json.dump(response.json(), open('api1_result.json', 'w'), indent=4)
| [((109, 167), 'requests.get', 'requests.get', (["(CREEDS_URL + 'search')"], {'params': "{'q': 'STAT3'}"}), "(CREEDS_URL + 'search', params={'q': 'STAT3'})\n", (121, 167), False, 'import json, requests\n')] |
rodlukas/UP-admin | admin/migrations/0041_course_color.py | 08f36de0773f39c6222da82016bf1384af2cce18 | # Generated by Django 2.2.3 on 2019-07-31 13:54
from django.db import migrations, models
class Migration(migrations.Migration):
dependencies = [("admin", "0040_auto_20190718_0938")]
operations = [
migrations.AddField(
model_name="course", name="color", field=models.CharField(default="#000", max_length=7)
)
]
| [((292, 338), 'django.db.models.CharField', 'models.CharField', ([], {'default': '"""#000"""', 'max_length': '(7)'}), "(default='#000', max_length=7)\n", (308, 338), False, 'from django.db import migrations, models\n')] |
pedrosimoes-programmer/exercicios-python | exercicios-Python/ex083.py | 150de037496d63d76086678d87425a8ccfc74573 | # Forma sem bugs
expressao = (str(input('Digite a expressão: ')))
pilhaParenteses = []
for v in expressao:
if v == '(':
pilhaParenteses.append('(')
elif v == ')':
if len(pilhaParenteses) > 0:
pilhaParenteses.pop()
else:
pilhaParenteses.append(')')
break
if len(pilhaParenteses) == 0:
    print(f'The expression {expressao} is valid.')
else:
    print(f'The expression {expressao} is invalid!')
# Version with bugs (counting alone accepts wrongly ordered pairs such as ")(")
#expressao = (str(input('Enter the expression: ')))
#if expressao.count('(') == expressao.count(')'):
#    print('Your expression is valid.')
#else:
#    print('Your expression is invalid!')
| [] |
code-acrobat/InspectorTodo | src/inspectortodo/todo.py | 342bd0840d4f087cf2914f906ebc69bf2b21d9ce | # Copyright 2018 TNG Technology Consulting GmbH, Unterföhring, Germany
# Licensed under the Apache License, Version 2.0 - see LICENSE.md in project root directory
import logging
from xml.sax.saxutils import escape
log = logging.getLogger()
class Todo:
def __init__(self, file_path, line_number, content):
self.file_path = file_path
self.line_number = line_number
self.content = content
self.is_valid = True
self.error_reason = None
def __str__(self):
return 'Todo in file ' + self.file_path + ':' + str(self.line_number) + ' | ' + self.content
def mark_as_valid(self):
self.is_valid = True
self.error_reason = None
def mark_as_invalid(self, error_reason):
self.is_valid = False
self.error_reason = error_reason
def print(self, show_valid=False):
if not show_valid and self.is_valid:
return
log.error('[REASON] %s' % self.error_reason)
log.error('[FILE] %s' % self.file_path)
log.error('[LINE] %s' % self.line_number)
log.error('[CONTENT] %s' % self.content)
def print_xml(self, xml_file):
if self.is_valid:
xml_file.write('\t<testcase classname="{}" name="line {}" />\n'.format(self.file_path, self.line_number))
else:
xml_file.write('\t<testcase classname="{}" name="line {}" >\n'.format(self.file_path, self.line_number))
xml_file.write('\t\t<failure message="{}">{}</failure>\n'.format(self.error_reason, escape(self.content)))
xml_file.write('\t</testcase>\n')
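

# Illustrative usage sketch (not part of the original module; the file name, line number and
# content below are invented for demonstration):
#
#   todo = Todo('src/app.py', 42, '# TODO implement retry logic')
#   todo.mark_as_invalid('missing ticket reference')
#   with open('todos.xml', 'w') as xml_file:
#       todo.print_xml(xml_file)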
| [((223, 242), 'logging.getLogger', 'logging.getLogger', ([], {}), '()\n', (240, 242), False, 'import logging\n'), ((1538, 1558), 'xml.sax.saxutils.escape', 'escape', (['self.content'], {}), '(self.content)\n', (1544, 1558), False, 'from xml.sax.saxutils import escape\n')] |
FabLabUTFSM/fusm_usage_report | generators.py | 92b18ad81f97482d6e8428b6c7cbdfc23d0ca440 | import dash_core_components as dcc
import dash_html_components as html
import plotly.graph_objs as go
import plotly.express as px
from plotly.subplots import make_subplots
import pandas as pd
import math
from datetime import datetime, time
from utils import MONTH_NAMES, month_range
def section(title, content, gray=False):
return html.Section(className=f'hero is-fullheight is-medium {"has-background-grey-lighter" if gray else ""}', children=[
html.Div(className='hero-body', children=[
html.Div(className='container', children=[
html.Div(className='columns is-centered', children=[
html.Div(className='column is-four-fifths is-full-mobile', children=[
html.Div(className='level', children=[
html.H2(title, className='title')
]),
] + content)
])
])
])
])
def quality_index(df):
indexes = df.sort_values('Valor', ascending=False).fillna('?').values
return html.Div(className='columns is-multiline is-4 is-variable', children=[
html.Div(className=f'column is-one-quarter index-container {"unknown-data" if i[1] == "?" else ""}', children=[
html.H1(i[1], className='title'),
html.H2(i[0], className='subtitle')
]) for i in indexes
])
def month_selector(df, first_month=None):
current_month = datetime.now().month
return html.Div(dcc.RangeSlider(
id='month-range-slider',
marks={i+1: MONTH_NAMES[i] for i in range(first_month-1, current_month)},
min=first_month, max=current_month,
value=[current_month-2,current_month],
pushable=1
), className='slider-frame')
def point_list(items):
return html.Ul([html.Li(item) for item in items])
def first():
return html.Section(className='hero is-fullheight', children=[
html.Div(className='hero-body', children=[
html.Div(className='container', children=[
html.Div(className='columns is-vcentered is-centered', children=[
html.Div(className='column is-5', children=[
html.Figure(className='image is-4by4', children=[
html.Img(src='/indicadores/assets/logo.png', alt='FabLab UTFSM'),
]),
]),
html.Div(className='column is-5 main-title', children=[
html.H1('Informe de Gestión de Operaciones', className='title')
])
])
]),
])
])
def last():
return html.Footer(className='footer has-background-white', children=[
html.Div(className='content has-text-centered', children=[
html.Img(src='/indicadores/assets/footer.png', alt='FabLab UTFSM'),
html.P(className='is-size-7', children=[
'FabLab UTFSM 2019', html.Br(),
'UTFSM Campus San Joaquín, Edificio C', html.Br(),
'Av. Vicuña Mackenna 3939, Santiago de Chile', html.Br(),
'Desarrollado bajo licencia MIT'
])
])
])
def fig_records(df, months=None, stacked=False):
machine_list = df['Tipo Máquina'].unique()
months = month_range(months)
def create_frame(df, serie_name):
count = df['Tipo Máquina'].value_counts()
frame = pd.DataFrame({'Tipo de Máquina': machine_list})
frame[serie_name] = [count.get(machine, 0) for machine in machine_list]
return frame
extras = {'barmode': 'relative' if stacked else 'group'}
figure = go.Figure()
for m in months:
name = MONTH_NAMES[m-1]
frame = create_frame(df[df.index.month == m], name)
figure.add_trace(go.Bar(x=frame['Tipo de Máquina'], y=frame[name], name=name, hoverinfo='name+y'))
if stacked and months:
frame = create_frame(df[df.index.month.isin(months)], 'Total')
figure.add_trace(go.Scatter(
x=frame['Tipo de Máquina'],
y=frame['Total'],
text=frame['Total'],
textposition='top center',
mode='text',
showlegend=False,
hoverinfo='skip'
))
figure.update_layout(yaxis={ 'title': 'Número de registros'}, **extras)
return figure
def fig_hours(df, months=None, stacked=False):
machine_list = df['Tipo Máquina'].unique()
months=month_range(months)
def create_frame(df, serie_name):
count = df.groupby('Tipo Máquina').sum()['Tiempo de uso en minutos'].divide(60).round(0)
frame = pd.DataFrame({'Tipo de Máquina': machine_list})
frame[serie_name] = [count.get(machine, 0) for machine in machine_list]
return frame
if months and type(months) == list:
df = df[df.index.month.isin(months)]
frame = create_frame(df, 'Total')
figure = go.Figure()
extras = {'barmode': 'relative' if stacked else 'group'}
for m in months:
name = MONTH_NAMES[m-1]
frame = create_frame(df[df.index.month == m], name)
figure.add_trace(go.Bar(y=frame['Tipo de Máquina'], x=frame[name], name=name, hoverinfo='name+x', orientation='h'))
if stacked and months:
frame = create_frame(df[df.index.month.isin(months)], 'Total')
figure.add_trace(go.Scatter(
y=frame['Tipo de Máquina'],
x=frame['Total'],
text=frame['Total'],
textposition='middle right',
mode='text',
showlegend=False,
hoverinfo='skip'
))
figure.update_layout(xaxis={ 'title': f'Horas de uso {"total" if stacked else ""}'}, **extras)
return figure
def cap_per_machine_per_month(month_caps, machine, month):
this_month = month_caps[month_caps['Mes'] == month]
machine_count = {'Impresora 3D': 5, 'Cortadora Láser': 2, 'Router CNC': 3, 'Torno': 1, 'Cirqoid': 1}
return (this_month['Dias'] * this_month['Horas']).values[0] * 60 * machine_count[machine]
def fig_total_capacity_2(df, month_caps, months):
machine_list = df['Tipo Máquina'].unique()
months = month_range(months)
month_names = [MONTH_NAMES[m-1] for m in months]
figure = go.Figure()
for machine in machine_list:
texts = []
caps = []
for month in months:
total_cap = cap_per_machine_per_month(month_caps, machine, month)
hours = total_cap // 60
used_cap = df[df.index.month==month].groupby('Tipo Máquina')['Tiempo de uso en minutos'].sum().divide(total_cap).multiply(100).round(2).get(machine, 0)
caps.append(used_cap)
texts.append(f'{used_cap}% utilizado de una capacidad total de {hours} horas.')
figure.add_trace(go.Bar(x=month_names, y=caps, name=machine, hovertext=texts))
figure.update_layout(barmode='group', yaxis=dict(type='linear', ticksuffix='%', title='Capacidad Utilizada'))
return figure
"""
TODO: Finish the heatmap in some way...
def fig_uses(df, months):
dias = ['Lunes', 'Martes', 'Miércoles', 'Jueves', 'Viernes']
days = ['Monday', 'Tuesday', 'Wednesday', 'Thursday', 'Friday']
data = df[df.index.month.isin(month_range(months))]
figure = go.Figure()
times = data.groupby([data.index.weekday_name, pd.Grouper(freq='60min', key='Hora Inicio')]).fillna(0).sum().reset_index()
day_times = times[times['Marca temporal'] == 'Monday']['Hora Inicio'].dt.time
z_dict = dict()
for i, d in enumerate(days):
z_dict.update({dias[i]: times[times['Marca temporal'] == d]['Tiempo de uso en minutos'].fillna(0).values})
z_values = pd.DataFrame(z_dict).values
figure.add_trace(go.Heatmap(
x=dias,
y=day_times,
z=z_values))
return figure
"""
def trace_context_use(df, level=None, **kwargs):
grouped = None
if not level:
grouped = df.groupby('Contexto 1')
else:
grouped = df[df['Contexto 1'] == level].groupby('Contexto 2')
context_data = grouped.sum()['Tiempo de uso en minutos']
return go.Pie(labels=context_data.index, values=context_data.values, **kwargs)
def fig_contexts_use(df, months, level, **kwargs):
col_count = 3
row_count = math.ceil(len(month_range(months))/col_count)
figure = make_subplots(row_count, col_count, specs=[[{'type':'domain'} for c in range(col_count)] for r in range(row_count)],
subplot_titles=[MONTH_NAMES[m-1] for m in month_range(months)])
def take_month(months):
for m in month_range(months):
yield trace_context_use(df[df.index.month == m], level, name=MONTH_NAMES[m-1])
pie_factory = take_month(months)
try:
for r in range(row_count):
for c in range(col_count):
figure.add_trace(next(pie_factory), r+1, c+1)
except StopIteration as stop:
pass
return figure
def records_per_machine(df, months=None, stacked=False):
return dcc.Graph(figure=fig_records(df, months=months, stacked=stacked), style={'height': '80vh'})
def time_per_machine(df, months=None, stacked=False):
return dcc.Graph(figure=fig_hours(df, months=months, stacked=stacked), style={'height': '80vh'})
def machine_capacity(df, caps, months=None):
return dcc.Graph(figure=fig_total_capacity_2(df, caps, months), style={'height': '80vh'})
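# Illustrative composition sketch (assumes a records DataFrame `df` and a capacities frame
# `caps` are loaded elsewhere; the title and months are placeholders):
# layout = section('Uso de máquinas', [
#     records_per_machine(df, months=[3, 5], stacked=True),
#     machine_capacity(df, caps, months=[3, 5]),
# ])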
#def uses(df, months):
# return dcc.Graph(figure=fig_uses(df, months), style={'height': '80vh'})
def contexts(df, months, level=None):
return dcc.Graph(figure=fig_contexts_use(df, months, level), style={'height': '80vh'}) | [((3304, 3323), 'utils.month_range', 'month_range', (['months'], {}), '(months)\n', (3315, 3323), False, 'from utils import MONTH_NAMES, month_range\n'), ((3654, 3665), 'plotly.graph_objs.Figure', 'go.Figure', ([], {}), '()\n', (3663, 3665), True, 'import plotly.graph_objs as go\n'), ((4464, 4483), 'utils.month_range', 'month_range', (['months'], {}), '(months)\n', (4475, 4483), False, 'from utils import MONTH_NAMES, month_range\n'), ((4924, 4935), 'plotly.graph_objs.Figure', 'go.Figure', ([], {}), '()\n', (4933, 4935), True, 'import plotly.graph_objs as go\n'), ((6159, 6178), 'utils.month_range', 'month_range', (['months'], {}), '(months)\n', (6170, 6178), False, 'from utils import MONTH_NAMES, month_range\n'), ((6245, 6256), 'plotly.graph_objs.Figure', 'go.Figure', ([], {}), '()\n', (6254, 6256), True, 'import plotly.graph_objs as go\n'), ((8086, 8157), 'plotly.graph_objs.Pie', 'go.Pie', ([], {'labels': 'context_data.index', 'values': 'context_data.values'}), '(labels=context_data.index, values=context_data.values, **kwargs)\n', (8092, 8157), True, 'import plotly.graph_objs as go\n'), ((1447, 1461), 'datetime.datetime.now', 'datetime.now', ([], {}), '()\n', (1459, 1461), False, 'from datetime import datetime, time\n'), ((3429, 3476), 'pandas.DataFrame', 'pd.DataFrame', (["{'Tipo de Máquina': machine_list}"], {}), "({'Tipo de Máquina': machine_list})\n", (3441, 3476), True, 'import pandas as pd\n'), ((4636, 4683), 'pandas.DataFrame', 'pd.DataFrame', (["{'Tipo de Máquina': machine_list}"], {}), "({'Tipo de Máquina': machine_list})\n", (4648, 4683), True, 'import pandas as pd\n'), ((8549, 8568), 'utils.month_range', 'month_range', (['months'], {}), '(months)\n', (8560, 8568), False, 'from utils import MONTH_NAMES, month_range\n'), ((1807, 1820), 'dash_html_components.Li', 'html.Li', (['item'], {}), '(item)\n', (1814, 1820), True, 'import dash_html_components as html\n'), ((3804, 3889), 'plotly.graph_objs.Bar', 'go.Bar', ([], {'x': "frame['Tipo de Máquina']", 'y': 'frame[name]', 'name': 'name', 'hoverinfo': '"""name+y"""'}), "(x=frame['Tipo de Máquina'], y=frame[name], name=name, hoverinfo='name+y'\n )\n", (3810, 3889), True, 'import plotly.graph_objs as go\n'), ((4014, 4177), 'plotly.graph_objs.Scatter', 'go.Scatter', ([], {'x': "frame['Tipo de Máquina']", 'y': "frame['Total']", 'text': "frame['Total']", 'textposition': '"""top center"""', 'mode': '"""text"""', 'showlegend': '(False)', 'hoverinfo': '"""skip"""'}), "(x=frame['Tipo de Máquina'], y=frame['Total'], text=frame['Total'\n ], textposition='top center', mode='text', showlegend=False, hoverinfo=\n 'skip')\n", (4024, 4177), True, 'import plotly.graph_objs as go\n'), ((5137, 5239), 'plotly.graph_objs.Bar', 'go.Bar', ([], {'y': "frame['Tipo de Máquina']", 'x': 'frame[name]', 'name': 'name', 'hoverinfo': '"""name+x"""', 'orientation': '"""h"""'}), "(y=frame['Tipo de Máquina'], x=frame[name], name=name, hoverinfo=\n 'name+x', orientation='h')\n", (5143, 5239), True, 'import plotly.graph_objs as go\n'), ((5360, 5524), 'plotly.graph_objs.Scatter', 'go.Scatter', ([], {'y': "frame['Tipo de Máquina']", 'x': "frame['Total']", 'text': "frame['Total']", 'textposition': '"""middle right"""', 'mode': '"""text"""', 'showlegend': '(False)', 'hoverinfo': '"""skip"""'}), "(y=frame['Tipo de Máquina'], x=frame['Total'], text=frame['Total'\n ], textposition='middle right', mode='text', showlegend=False,\n hoverinfo='skip')\n", (5370, 5524), True, 'import 
plotly.graph_objs as go\n'), ((6785, 6845), 'plotly.graph_objs.Bar', 'go.Bar', ([], {'x': 'month_names', 'y': 'caps', 'name': 'machine', 'hovertext': 'texts'}), '(x=month_names, y=caps, name=machine, hovertext=texts)\n', (6791, 6845), True, 'import plotly.graph_objs as go\n'), ((8258, 8277), 'utils.month_range', 'month_range', (['months'], {}), '(months)\n', (8269, 8277), False, 'from utils import MONTH_NAMES, month_range\n'), ((8482, 8501), 'utils.month_range', 'month_range', (['months'], {}), '(months)\n', (8493, 8501), False, 'from utils import MONTH_NAMES, month_range\n'), ((1267, 1299), 'dash_html_components.H1', 'html.H1', (['i[1]'], {'className': '"""title"""'}), "(i[1], className='title')\n", (1274, 1299), True, 'import dash_html_components as html\n'), ((1313, 1348), 'dash_html_components.H2', 'html.H2', (['i[0]'], {'className': '"""subtitle"""'}), "(i[0], className='subtitle')\n", (1320, 1348), True, 'import dash_html_components as html\n'), ((2802, 2868), 'dash_html_components.Img', 'html.Img', ([], {'src': '"""/indicadores/assets/footer.png"""', 'alt': '"""FabLab UTFSM"""'}), "(src='/indicadores/assets/footer.png', alt='FabLab UTFSM')\n", (2810, 2868), True, 'import dash_html_components as html\n'), ((2960, 2969), 'dash_html_components.Br', 'html.Br', ([], {}), '()\n', (2967, 2969), True, 'import dash_html_components as html\n'), ((3028, 3037), 'dash_html_components.Br', 'html.Br', ([], {}), '()\n', (3035, 3037), True, 'import dash_html_components as html\n'), ((3102, 3111), 'dash_html_components.Br', 'html.Br', ([], {}), '()\n', (3109, 3111), True, 'import dash_html_components as html\n'), ((2495, 2558), 'dash_html_components.H1', 'html.H1', (['"""Informe de Gestión de Operaciones"""'], {'className': '"""title"""'}), "('Informe de Gestión de Operaciones', className='title')\n", (2502, 2558), True, 'import dash_html_components as html\n'), ((2277, 2341), 'dash_html_components.Img', 'html.Img', ([], {'src': '"""/indicadores/assets/logo.png"""', 'alt': '"""FabLab UTFSM"""'}), "(src='/indicadores/assets/logo.png', alt='FabLab UTFSM')\n", (2285, 2341), True, 'import dash_html_components as html\n'), ((808, 841), 'dash_html_components.H2', 'html.H2', (['title'], {'className': '"""title"""'}), "(title, className='title')\n", (815, 841), True, 'import dash_html_components as html\n')] |
greck2908/gamification-engine | gengine/app/tests_old/test_groups.py | 4a74086bde4505217e4b9ba36349a427a7042b4b | # -*- coding: utf-8 -*-
from gengine.app.tests.base import BaseDBTest
from gengine.app.tests.helpers import create_user, update_user, delete_user, get_or_create_language
from gengine.metadata import DBSession
from gengine.app.model import AuthUser
class TestUserCreation(BaseDBTest):
def test_user_creation(self):
lang = get_or_create_language("en")
user = create_user(
lat = 12.1,
lng = 12.2,
#country = "RO",
#region = "Transylvania",
#city = "Cluj-Napoca",
timezone = "Europe/Bukarest",
language = "en",
additional_public_data = {
"first_name" : "Rudolf",
"last_name" : "Red Nose"
}
)
self.assertTrue(user.lat == 12.1)
self.assertTrue(user.lng == 12.2)
#self.assertTrue(user.country == "RO")
#self.assertTrue(user.region == "Transylvania")
#self.assertTrue(user.city == "Cluj-Napoca")
self.assertTrue(user.timezone == "Europe/Bukarest")
self.assertTrue(user.language_id == lang.id)
self.assertTrue(user.additional_public_data["first_name"] == "Rudolf")
self.assertTrue(user.additional_public_data["last_name"] == "Red Nose")
def test_user_updation(self):
lang = get_or_create_language("en")
user = create_user()
user = update_user(
user_id = user.id,
lat = 14.2,
lng = 16.3,
#country = "EN",
#region = "Transylvania",
#city = "Cluj-Napoca",
timezone = "Europe/Bukarest",
language = "en",
additional_public_data = {
"first_name" : "Rudolf",
"last_name" : "Red Nose"
}
)
# Correct cases
self.assertTrue(user.lat == 14.2)
self.assertTrue(user.lng == 16.3)
#self.assertTrue(user.country == "EN")
#self.assertTrue(user.region == "Transylvania")
#self.assertTrue(user.city == "Cluj-Napoca")
self.assertTrue(user.timezone == "Europe/Bukarest")
self.assertTrue(user.language_id == lang.id)
def test_user_deletion(self):
user1 = create_user()
# Create Second user
user2 = create_user(
lat=85.59,
lng=65.75,
#country="DE",
#region="Niedersachsen",
#city="Osnabrück",
timezone="Europe/Berlin",
language="de",
additional_public_data={
"first_name": "Michael",
"last_name": "Clarke"
},
friends=[1]
)
remaining_users = delete_user(
user_id = user1.id
)
# Correct cases
self.assertNotIn(user1.id, remaining_users)
self.assertEqual(user2.id, remaining_users[0].id)
def test_verify_password(self):
auth_user = AuthUser()
auth_user.password = "test12345"
auth_user.active = True
auth_user.email = "[email protected]"
DBSession.add(auth_user)
iscorrect = auth_user.verify_password("test12345")
self.assertEqual(iscorrect, True)
def test_create_token(self):
user = create_user()
auth_user = AuthUser()
auth_user.user_id = user.id
auth_user.password = "test12345"
auth_user.active = True
auth_user.email = "[email protected]"
DBSession.add(auth_user)
if auth_user.verify_password("test12345"):
token = auth_user.get_or_create_token()
self.assertNotEqual(token, None)
| [((337, 365), 'gengine.app.tests.helpers.get_or_create_language', 'get_or_create_language', (['"""en"""'], {}), "('en')\n", (359, 365), False, 'from gengine.app.tests.helpers import create_user, update_user, delete_user, get_or_create_language\n'), ((381, 533), 'gengine.app.tests.helpers.create_user', 'create_user', ([], {'lat': '(12.1)', 'lng': '(12.2)', 'timezone': '"""Europe/Bukarest"""', 'language': '"""en"""', 'additional_public_data': "{'first_name': 'Rudolf', 'last_name': 'Red Nose'}"}), "(lat=12.1, lng=12.2, timezone='Europe/Bukarest', language='en',\n additional_public_data={'first_name': 'Rudolf', 'last_name': 'Red Nose'})\n", (392, 533), False, 'from gengine.app.tests.helpers import create_user, update_user, delete_user, get_or_create_language\n'), ((1324, 1352), 'gengine.app.tests.helpers.get_or_create_language', 'get_or_create_language', (['"""en"""'], {}), "('en')\n", (1346, 1352), False, 'from gengine.app.tests.helpers import create_user, update_user, delete_user, get_or_create_language\n'), ((1368, 1381), 'gengine.app.tests.helpers.create_user', 'create_user', ([], {}), '()\n', (1379, 1381), False, 'from gengine.app.tests.helpers import create_user, update_user, delete_user, get_or_create_language\n'), ((1397, 1570), 'gengine.app.tests.helpers.update_user', 'update_user', ([], {'user_id': 'user.id', 'lat': '(14.2)', 'lng': '(16.3)', 'timezone': '"""Europe/Bukarest"""', 'language': '"""en"""', 'additional_public_data': "{'first_name': 'Rudolf', 'last_name': 'Red Nose'}"}), "(user_id=user.id, lat=14.2, lng=16.3, timezone='Europe/Bukarest',\n language='en', additional_public_data={'first_name': 'Rudolf',\n 'last_name': 'Red Nose'})\n", (1408, 1570), False, 'from gengine.app.tests.helpers import create_user, update_user, delete_user, get_or_create_language\n'), ((2237, 2250), 'gengine.app.tests.helpers.create_user', 'create_user', ([], {}), '()\n', (2248, 2250), False, 'from gengine.app.tests.helpers import create_user, update_user, delete_user, get_or_create_language\n'), ((2297, 2465), 'gengine.app.tests.helpers.create_user', 'create_user', ([], {'lat': '(85.59)', 'lng': '(65.75)', 'timezone': '"""Europe/Berlin"""', 'language': '"""de"""', 'additional_public_data': "{'first_name': 'Michael', 'last_name': 'Clarke'}", 'friends': '[1]'}), "(lat=85.59, lng=65.75, timezone='Europe/Berlin', language='de',\n additional_public_data={'first_name': 'Michael', 'last_name': 'Clarke'},\n friends=[1])\n", (2308, 2465), False, 'from gengine.app.tests.helpers import create_user, update_user, delete_user, get_or_create_language\n'), ((2708, 2737), 'gengine.app.tests.helpers.delete_user', 'delete_user', ([], {'user_id': 'user1.id'}), '(user_id=user1.id)\n', (2719, 2737), False, 'from gengine.app.tests.helpers import create_user, update_user, delete_user, get_or_create_language\n'), ((2954, 2964), 'gengine.app.model.AuthUser', 'AuthUser', ([], {}), '()\n', (2962, 2964), False, 'from gengine.app.model import AuthUser\n'), ((3091, 3115), 'gengine.metadata.DBSession.add', 'DBSession.add', (['auth_user'], {}), '(auth_user)\n', (3104, 3115), False, 'from gengine.metadata import DBSession\n'), ((3268, 3281), 'gengine.app.tests.helpers.create_user', 'create_user', ([], {}), '()\n', (3279, 3281), False, 'from gengine.app.tests.helpers import create_user, update_user, delete_user, get_or_create_language\n'), ((3302, 3312), 'gengine.app.model.AuthUser', 'AuthUser', ([], {}), '()\n', (3310, 3312), False, 'from gengine.app.model import AuthUser\n'), ((3475, 3499), 'gengine.metadata.DBSession.add', 
'DBSession.add', (['auth_user'], {}), '(auth_user)\n', (3488, 3499), False, 'from gengine.metadata import DBSession\n')] |
guorenxi/fonttools | Lib/fontTools/designspaceLib/__init__.py | cefb41e6c261eeff0062a7b4017061982ed87aa7 | from __future__ import annotations
import collections
import copy
import itertools
import math
import os
import posixpath
from io import BytesIO, StringIO
from textwrap import indent
from typing import Any, Dict, List, MutableMapping, Optional, Tuple, Union
from fontTools.misc import etree as ET
from fontTools.misc import plistlib
from fontTools.misc.loggingTools import LogMixin
from fontTools.misc.textTools import tobytes, tostr
"""
designSpaceDocument
- read and write designspace files
"""
__all__ = [
'DesignSpaceDocumentError', 'DesignSpaceDocument', 'SourceDescriptor',
'InstanceDescriptor', 'AxisDescriptor', 'RuleDescriptor', 'BaseDocReader',
'BaseDocWriter'
]
# ElementTree allows to find namespace-prefixed elements, but not attributes
# so we have to do it ourselves for 'xml:lang'
XML_NS = "{http://www.w3.org/XML/1998/namespace}"
XML_LANG = XML_NS + "lang"
def posix(path):
"""Normalize paths using forward slash to work also on Windows."""
new_path = posixpath.join(*path.split(os.path.sep))
if path.startswith('/'):
# The above transformation loses absolute paths
new_path = '/' + new_path
elif path.startswith(r'\\'):
# The above transformation loses leading slashes of UNC path mounts
new_path = '//' + new_path
return new_path
def posixpath_property(private_name):
"""Generate a propery that holds a path always using forward slashes."""
def getter(self):
# Normal getter
return getattr(self, private_name)
def setter(self, value):
# The setter rewrites paths using forward slashes
if value is not None:
value = posix(value)
setattr(self, private_name, value)
return property(getter, setter)
class DesignSpaceDocumentError(Exception):
def __init__(self, msg, obj=None):
self.msg = msg
self.obj = obj
def __str__(self):
return str(self.msg) + (
": %r" % self.obj if self.obj is not None else "")
class AsDictMixin(object):
def asdict(self):
d = {}
for attr, value in self.__dict__.items():
if attr.startswith("_"):
continue
if hasattr(value, "asdict"):
value = value.asdict()
elif isinstance(value, list):
value = [
v.asdict() if hasattr(v, "asdict") else v for v in value
]
d[attr] = value
return d
class SimpleDescriptor(AsDictMixin):
""" Containers for a bunch of attributes"""
# XXX this is ugly. The 'print' is inappropriate here, and instead of
# assert, it should simply return True/False
def compare(self, other):
# test if this object contains the same data as the other
for attr in self._attrs:
try:
assert(getattr(self, attr) == getattr(other, attr))
except AssertionError:
print("failed attribute", attr, getattr(self, attr), "!=", getattr(other, attr))
def __repr__(self):
attrs = [f"{a}={repr(getattr(self, a))}," for a in self._attrs]
attrs = indent('\n'.join(attrs), ' ')
return f"{self.__class__.__name__}(\n{attrs}\n)"
class SourceDescriptor(SimpleDescriptor):
"""Simple container for data related to the source
.. code:: python
doc = DesignSpaceDocument()
s1 = SourceDescriptor()
s1.path = masterPath1
s1.name = "master.ufo1"
s1.font = defcon.Font("master.ufo1")
s1.location = dict(weight=0)
s1.familyName = "MasterFamilyName"
s1.styleName = "MasterStyleNameOne"
s1.localisedFamilyName = dict(fr="Caractère")
s1.mutedGlyphNames.append("A")
s1.mutedGlyphNames.append("Z")
doc.addSource(s1)
"""
flavor = "source"
_attrs = ['filename', 'path', 'name', 'layerName',
'location', 'copyLib',
'copyGroups', 'copyFeatures',
'muteKerning', 'muteInfo',
'mutedGlyphNames',
'familyName', 'styleName', 'localisedFamilyName']
filename = posixpath_property("_filename")
path = posixpath_property("_path")
def __init__(
self,
*,
filename=None,
path=None,
font=None,
name=None,
location=None,
designLocation=None,
layerName=None,
familyName=None,
styleName=None,
localisedFamilyName=None,
copyLib=False,
copyInfo=False,
copyGroups=False,
copyFeatures=False,
muteKerning=False,
muteInfo=False,
mutedGlyphNames=None,
):
self.filename = filename
"""string. A relative path to the source file, **as it is in the document**.
MutatorMath + VarLib.
"""
self.path = path
"""The absolute path, calculated from filename."""
self.font = font
"""Any Python object. Optional. Points to a representation of this
source font that is loaded in memory, as a Python object (e.g. a
``defcon.Font`` or a ``fontTools.ttFont.TTFont``).
        The default document reader will not fill in this attribute, and the
default writer will not use this attribute. It is up to the user of
``designspaceLib`` to either load the resource identified by
``filename`` and store it in this field, or write the contents of
        this field to the disk and make ``filename`` point to that.
"""
self.name = name
"""string. Optional. Unique identifier name for this source.
MutatorMath + Varlib.
"""
self.designLocation = designLocation if designLocation is not None else location or {}
"""dict. Axis values for this source, in design space coordinates.
MutatorMath + Varlib.
This may be only part of the full design location.
See :meth:`getFullDesignLocation()`
.. versionadded:: 5.0
"""
self.layerName = layerName
"""string. The name of the layer in the source to look for
outline data. Default ``None`` which means ``foreground``.
"""
self.familyName = familyName
"""string. Family name of this source. Though this data
can be extracted from the font, it can be efficient to have it right
here.
Varlib.
"""
self.styleName = styleName
"""string. Style name of this source. Though this data
can be extracted from the font, it can be efficient to have it right
here.
Varlib.
"""
self.localisedFamilyName = localisedFamilyName or {}
"""dict. A dictionary of localised family name strings, keyed by
language code.
If present, will be used to build localized names for all instances.
.. versionadded:: 5.0
"""
self.copyLib = copyLib
"""bool. Indicates if the contents of the font.lib need to
be copied to the instances.
MutatorMath.
.. deprecated:: 5.0
"""
self.copyInfo = copyInfo
"""bool. Indicates if the non-interpolating font.info needs
to be copied to the instances.
MutatorMath.
.. deprecated:: 5.0
"""
self.copyGroups = copyGroups
"""bool. Indicates if the groups need to be copied to the
instances.
MutatorMath.
.. deprecated:: 5.0
"""
self.copyFeatures = copyFeatures
"""bool. Indicates if the feature text needs to be
copied to the instances.
MutatorMath.
.. deprecated:: 5.0
"""
self.muteKerning = muteKerning
"""bool. Indicates if the kerning data from this source
needs to be muted (i.e. not be part of the calculations).
MutatorMath only.
"""
self.muteInfo = muteInfo
"""bool. Indicated if the interpolating font.info data for
this source needs to be muted.
MutatorMath only.
"""
self.mutedGlyphNames = mutedGlyphNames or []
"""list. Glyphnames that need to be muted in the
instances.
MutatorMath only.
"""
@property
def location(self):
"""dict. Axis values for this source, in design space coordinates.
MutatorMath + Varlib.
.. deprecated:: 5.0
Use the more explicit alias for this property :attr:`designLocation`.
"""
return self.designLocation
@location.setter
def location(self, location: Optional[AnisotropicLocationDict]):
self.designLocation = location or {}
def setFamilyName(self, familyName, languageCode="en"):
"""Setter for :attr:`localisedFamilyName`
.. versionadded:: 5.0
"""
self.localisedFamilyName[languageCode] = tostr(familyName)
def getFamilyName(self, languageCode="en"):
"""Getter for :attr:`localisedFamilyName`
.. versionadded:: 5.0
"""
return self.localisedFamilyName.get(languageCode)
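    # Example (illustrative): localised family names are stored as plain per-language strings.
    #
    #   s = SourceDescriptor(familyName="MasterFamilyName")
    #   s.setFamilyName("Caractère", "fr")
    #   assert s.getFamilyName("fr") == "Caractère"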
def getFullDesignLocation(self, doc: 'DesignSpaceDocument') -> AnisotropicLocationDict:
"""Get the complete design location of this source, from its
:attr:`designLocation` and the document's axis defaults.
.. versionadded:: 5.0
"""
result: AnisotropicLocationDict = {}
for axis in doc.axes:
if axis.name in self.designLocation:
result[axis.name] = self.designLocation[axis.name]
else:
result[axis.name] = axis.map_forward(axis.default)
return result
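# Illustrative sketch (editor's note, not part of the library): filling in a
# sparse source location with getFullDesignLocation(). The axes and map below
# are made up for the example.
#
#   doc = DesignSpaceDocument()
#   weight = AxisDescriptor()
#   weight.name, weight.tag = "weight", "wght"
#   weight.minimum, weight.default, weight.maximum = 200, 400, 700
#   weight.map = [(200, 50), (400, 100), (700, 200)]
#   doc.addAxis(weight)
#   width = AxisDescriptor()
#   width.name, width.tag = "width", "wdth"
#   width.minimum, width.default, width.maximum = 50, 100, 150
#   doc.addAxis(width)
#   s = SourceDescriptor()
#   s.designLocation = {"width": 125}
#   s.getFullDesignLocation(doc)  # -> {'weight': 100, 'width': 125}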
class RuleDescriptor(SimpleDescriptor):
"""Represents the rule descriptor element: a set of glyph substitutions to
trigger conditionally in some parts of the designspace.
.. code:: python
r1 = RuleDescriptor()
r1.name = "unique.rule.name"
r1.conditionSets.append([dict(name="weight", minimum=-10, maximum=10), dict(...)])
r1.conditionSets.append([dict(...), dict(...)])
r1.subs.append(("a", "a.alt"))
.. code:: xml
<!-- optional: list of substitution rules -->
<rules>
<rule name="vertical.bars">
<conditionset>
<condition minimum="250.000000" maximum="750.000000" name="weight"/>
<condition minimum="100" name="width"/>
<condition minimum="10" maximum="40" name="optical"/>
</conditionset>
<sub name="cent" with="cent.alt"/>
<sub name="dollar" with="dollar.alt"/>
</rule>
</rules>
"""
_attrs = ['name', 'conditionSets', 'subs'] # what do we need here
def __init__(self, *, name=None, conditionSets=None, subs=None):
self.name = name
"""string. Unique name for this rule. Can be used to reference this rule data."""
# list of lists of dict(name='aaaa', minimum=0, maximum=1000)
self.conditionSets = conditionSets or []
"""a list of conditionsets.
- Each conditionset is a list of conditions.
- Each condition is a dict with ``name``, ``minimum`` and ``maximum`` keys.
"""
# list of substitutions stored as tuples of glyphnames ("a", "a.alt")
self.subs = subs or []
"""list of substitutions.
- Each substitution is stored as a tuple of glyph names, e.g. ("a", "a.alt").
- Note: By default, rules are applied first, before other text
shaping/OpenType layout, as they are part of the
`Required Variation Alternates OpenType feature <https://docs.microsoft.com/en-us/typography/opentype/spec/features_pt#-tag-rvrn>`_.
See :ref:`rules-element` § Attributes.
"""
def evaluateRule(rule, location):
"""Return True if any of the rule's conditionsets matches the given location."""
return any(evaluateConditions(c, location) for c in rule.conditionSets)
def evaluateConditions(conditions, location):
"""Return True if all the conditions matches the given location.
- If a condition has no minimum, check for < maximum.
- If a condition has no maximum, check for > minimum.
"""
for cd in conditions:
value = location[cd['name']]
if cd.get('minimum') is None:
if value > cd['maximum']:
return False
elif cd.get('maximum') is None:
if cd['minimum'] > value:
return False
elif not cd['minimum'] <= value <= cd['maximum']:
return False
return True
def processRules(rules, location, glyphNames):
"""Apply these rules at this location to these glyphnames.
Return a new list of glyphNames with substitutions applied.
- rule order matters
"""
newNames = []
for rule in rules:
if evaluateRule(rule, location):
for name in glyphNames:
swap = False
for a, b in rule.subs:
if name == a:
swap = True
break
if swap:
newNames.append(b)
else:
newNames.append(name)
glyphNames = newNames
newNames = []
return glyphNames
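# Illustrative sketch (editor's note): applying a rule with the module-level
# helpers above. The rule and glyph names are made up.
#
#   rule = RuleDescriptor()
#   rule.name = "swap.cent"
#   rule.conditionSets.append([dict(name="weight", minimum=250, maximum=750)])
#   rule.subs.append(("cent", "cent.alt"))
#   processRules([rule], {"weight": 400}, ["cent", "dollar"])  # -> ['cent.alt', 'dollar']
#   processRules([rule], {"weight": 100}, ["cent", "dollar"])  # -> ['cent', 'dollar']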
AnisotropicLocationDict = Dict[str, Union[float, Tuple[float, float]]]
SimpleLocationDict = Dict[str, float]
class InstanceDescriptor(SimpleDescriptor):
"""Simple container for data related to the instance
.. code:: python
i2 = InstanceDescriptor()
i2.path = instancePath2
i2.familyName = "InstanceFamilyName"
i2.styleName = "InstanceStyleName"
i2.name = "instance.ufo2"
# anisotropic location
i2.designLocation = dict(weight=500, width=(400,300))
i2.postScriptFontName = "InstancePostscriptName"
i2.styleMapFamilyName = "InstanceStyleMapFamilyName"
i2.styleMapStyleName = "InstanceStyleMapStyleName"
i2.lib['com.coolDesignspaceApp.specimenText'] = 'Hamburgerwhatever'
doc.addInstance(i2)
"""
flavor = "instance"
_defaultLanguageCode = "en"
_attrs = ['filename',
'path',
'name',
'locationLabel',
'designLocation',
'userLocation',
'familyName',
'styleName',
'postScriptFontName',
'styleMapFamilyName',
'styleMapStyleName',
'localisedFamilyName',
'localisedStyleName',
'localisedStyleMapFamilyName',
'localisedStyleMapStyleName',
'glyphs',
'kerning',
'info',
'lib']
filename = posixpath_property("_filename")
path = posixpath_property("_path")
def __init__(
self,
*,
filename=None,
path=None,
font=None,
name=None,
location=None,
locationLabel=None,
designLocation=None,
userLocation=None,
familyName=None,
styleName=None,
postScriptFontName=None,
styleMapFamilyName=None,
styleMapStyleName=None,
localisedFamilyName=None,
localisedStyleName=None,
localisedStyleMapFamilyName=None,
localisedStyleMapStyleName=None,
glyphs=None,
kerning=True,
info=True,
lib=None,
):
self.filename = filename
"""string. Relative path to the instance file, **as it is
in the document**. The file may or may not exist.
MutatorMath + VarLib.
"""
self.path = path
"""string. Absolute path to the instance file, calculated from
the document path and the string in the filename attr. The file may
or may not exist.
MutatorMath.
"""
self.font = font
"""Same as :attr:`SourceDescriptor.font`
.. seealso:: :attr:`SourceDescriptor.font`
"""
self.name = name
"""string. Unique identifier name of the instance, used to
identify it if it needs to be referenced from elsewhere in the
document.
"""
self.locationLabel = locationLabel
"""Name of a :class:`LocationLabelDescriptor`. If
provided, the instance should have the same location as the
LocationLabel.
.. seealso::
:meth:`getFullDesignLocation`
:meth:`getFullUserLocation`
.. versionadded:: 5.0
"""
self.designLocation: AnisotropicLocationDict = designLocation if designLocation is not None else (location or {})
"""dict. Axis values for this instance, in design space coordinates.
MutatorMath + Varlib.
.. seealso:: This may be only part of the full location. See:
:meth:`getFullDesignLocation`
:meth:`getFullUserLocation`
.. versionadded:: 5.0
"""
self.userLocation: SimpleLocationDict = userLocation or {}
"""dict. Axis values for this instance, in user space coordinates.
MutatorMath + Varlib.
.. seealso:: This may be only part of the full location. See:
:meth:`getFullDesignLocation`
:meth:`getFullUserLocation`
.. versionadded:: 5.0
"""
self.familyName = familyName
"""string. Family name of this instance.
MutatorMath + Varlib.
"""
self.styleName = styleName
"""string. Style name of this instance.
MutatorMath + Varlib.
"""
self.postScriptFontName = postScriptFontName
"""string. Postscript fontname for this instance.
MutatorMath + Varlib.
"""
self.styleMapFamilyName = styleMapFamilyName
"""string. StyleMap familyname for this instance.
MutatorMath + Varlib.
"""
self.styleMapStyleName = styleMapStyleName
"""string. StyleMap stylename for this instance.
MutatorMath + Varlib.
"""
self.localisedFamilyName = localisedFamilyName or {}
"""dict. A dictionary of localised family name
strings, keyed by language code.
"""
self.localisedStyleName = localisedStyleName or {}
"""dict. A dictionary of localised stylename
strings, keyed by language code.
"""
self.localisedStyleMapFamilyName = localisedStyleMapFamilyName or {}
"""A dictionary of localised style map
familyname strings, keyed by language code.
"""
self.localisedStyleMapStyleName = localisedStyleMapStyleName or {}
"""A dictionary of localised style map
stylename strings, keyed by language code.
"""
self.glyphs = glyphs or {}
"""dict for special master definitions for glyphs. If glyphs
need special masters (to record the results of executed rules for
example).
MutatorMath.
.. deprecated:: 5.0
Use rules or sparse sources instead.
"""
self.kerning = kerning
""" bool. Indicates if this instance needs its kerning
calculated.
MutatorMath.
.. deprecated:: 5.0
"""
self.info = info
"""bool. Indicated if this instance needs the interpolating
font.info calculated.
.. deprecated:: 5.0
"""
self.lib = lib or {}
"""Custom data associated with this instance."""
@property
def location(self):
"""dict. Axis values for this instance.
MutatorMath + Varlib.
.. deprecated:: 5.0
Use the more explicit alias for this property :attr:`designLocation`.
"""
return self.designLocation
@location.setter
def location(self, location: Optional[AnisotropicLocationDict]):
self.designLocation = location or {}
def setStyleName(self, styleName, languageCode="en"):
"""These methods give easier access to the localised names."""
self.localisedStyleName[languageCode] = tostr(styleName)
def getStyleName(self, languageCode="en"):
return self.localisedStyleName.get(languageCode)
def setFamilyName(self, familyName, languageCode="en"):
self.localisedFamilyName[languageCode] = tostr(familyName)
def getFamilyName(self, languageCode="en"):
return self.localisedFamilyName.get(languageCode)
def setStyleMapStyleName(self, styleMapStyleName, languageCode="en"):
self.localisedStyleMapStyleName[languageCode] = tostr(styleMapStyleName)
def getStyleMapStyleName(self, languageCode="en"):
return self.localisedStyleMapStyleName.get(languageCode)
def setStyleMapFamilyName(self, styleMapFamilyName, languageCode="en"):
self.localisedStyleMapFamilyName[languageCode] = tostr(styleMapFamilyName)
def getStyleMapFamilyName(self, languageCode="en"):
return self.localisedStyleMapFamilyName.get(languageCode)
def clearLocation(self, axisName: Optional[str] = None):
"""Clear all location-related fields. Ensures that
:attr:`designLocation` and :attr:`userLocation` are dictionaries
(possibly empty if clearing everything).
In order to update the location of this instance wholesale, a user
should first clear all the fields, then change the field(s) for which
they have data.
.. code:: python
instance.clearLocation()
instance.designLocation = {'Weight': (34, 36.5), 'Width': 100}
instance.userLocation = {'Opsz': 16}
In order to update a single axis location, the user should only clear
that axis, then edit the values:
.. code:: python
instance.clearLocation('Weight')
instance.designLocation['Weight'] = (34, 36.5)
Args:
axisName: if provided, only clear the location for that axis.
.. versionadded:: 5.0
"""
self.locationLabel = None
if axisName is None:
self.designLocation = {}
self.userLocation = {}
else:
if self.designLocation is None:
self.designLocation = {}
if axisName in self.designLocation:
del self.designLocation[axisName]
if self.userLocation is None:
self.userLocation = {}
if axisName in self.userLocation:
del self.userLocation[axisName]
def getLocationLabelDescriptor(self, doc: 'DesignSpaceDocument') -> Optional[LocationLabelDescriptor]:
"""Get the :class:`LocationLabelDescriptor` instance that matches
this instance's :attr:`locationLabel`.
Raises a :class:`DesignSpaceDocumentError` if the named label can't be found.
.. versionadded:: 5.0
"""
if self.locationLabel is None:
return None
label = doc.getLocationLabel(self.locationLabel)
if label is None:
raise DesignSpaceDocumentError(
'InstanceDescriptor.getLocationLabelDescriptor(): '
f'unknown location label `{self.locationLabel}` in instance `{self.name}`.'
)
return label
def getFullDesignLocation(self, doc: 'DesignSpaceDocument') -> AnisotropicLocationDict:
"""Get the complete design location of this instance, by combining data
from the various location fields, default axis values and mappings, and
top-level location labels.
The source of truth for this instance's location is determined for each
axis independently by taking the first not-None field in this list:
- ``locationLabel``: the location along this axis is the same as the
matching STAT format 4 label. No anisotropy.
- ``designLocation[axisName]``: the explicit design location along this
axis, possibly anisotropic.
- ``userLocation[axisName]``: the explicit user location along this
axis. No anisotropy.
- ``axis.default``: default axis value. No anisotropy.
.. versionadded:: 5.0
"""
label = self.getLocationLabelDescriptor(doc)
if label is not None:
return doc.map_forward(label.userLocation) # type: ignore
result: AnisotropicLocationDict = {}
for axis in doc.axes:
if axis.name in self.designLocation:
result[axis.name] = self.designLocation[axis.name]
elif axis.name in self.userLocation:
result[axis.name] = axis.map_forward(self.userLocation[axis.name])
else:
result[axis.name] = axis.map_forward(axis.default)
return result
def getFullUserLocation(self, doc: 'DesignSpaceDocument') -> SimpleLocationDict:
"""Get the complete user location for this instance.
.. seealso:: :meth:`getFullDesignLocation`
.. versionadded:: 5.0
"""
return doc.map_backward(self.getFullDesignLocation(doc))
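# Editor's sketch of the per-axis resolution order documented in
# getFullDesignLocation() above. Assumes a document with a "weight" axis
# (user default 400, mapped into design space) and a "width" axis
# (default 100, no map):
#
#   instance = InstanceDescriptor()
#   instance.designLocation = {"weight": 150}
#   instance.userLocation = {"width": 75}
#   instance.getFullDesignLocation(doc)  # -> {'weight': 150, 'width': 75}
#   instance.getFullUserLocation(doc)    # maps the design values back to user space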
def tagForAxisName(name):
# try to find or make a tag name for this axis name
names = {
'weight': ('wght', dict(en = 'Weight')),
'width': ('wdth', dict(en = 'Width')),
'optical': ('opsz', dict(en = 'Optical Size')),
'slant': ('slnt', dict(en = 'Slant')),
'italic': ('ital', dict(en = 'Italic')),
}
if name.lower() in names:
return names[name.lower()]
if len(name) < 4:
tag = name + "*" * (4 - len(name))
else:
tag = name[:4]
return tag, dict(en=name)
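# Editor's illustration of tagForAxisName():
#
#   tagForAxisName("weight")  # -> ('wght', {'en': 'Weight'})
#   tagForAxisName("Grade")   # -> ('Grad', {'en': 'Grade'})  (first four characters)
#   tagForAxisName("wt")      # -> ('wt**', {'en': 'wt'})     (padded to four characters)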
class AbstractAxisDescriptor(SimpleDescriptor):
flavor = "axis"
def __init__(
self,
*,
tag=None,
name=None,
labelNames=None,
hidden=False,
map=None,
axisOrdering=None,
axisLabels=None,
):
# opentype tag for this axis
self.tag = tag
"""string. Four letter tag for this axis. Some might be
registered at the `OpenType
specification <https://www.microsoft.com/typography/otspec/fvar.htm#VAT>`__.
Privately-defined axis tags must begin with an uppercase letter and
use only uppercase letters or digits.
"""
# name of the axis used in locations
self.name = name
"""string. Name of the axis as it is used in the location dicts.
MutatorMath + Varlib.
"""
# names for UI purposes, if this is not a standard axis,
self.labelNames = labelNames or {}
"""dict. When defining a non-registered axis, it will be
necessary to define user-facing readable names for the axis. Keyed by
xml:lang code. Values are required to be ``unicode`` strings, even if
they only contain ASCII characters.
"""
self.hidden = hidden
"""bool. Whether this axis should be hidden in user interfaces.
"""
self.map = map or []
"""list of input / output values that can describe a warp of user space
to design space coordinates. If no map values are present, it is assumed
user space is the same as design space, as in [(minimum, minimum),
(maximum, maximum)].
Varlib.
"""
self.axisOrdering = axisOrdering
"""STAT table field ``axisOrdering``.
See: `OTSpec STAT Axis Record <https://docs.microsoft.com/en-us/typography/opentype/spec/stat#axis-records>`_
.. versionadded:: 5.0
"""
self.axisLabels: List[AxisLabelDescriptor] = axisLabels or []
"""STAT table entries for Axis Value Tables format 1, 2, 3.
See: `OTSpec STAT Axis Value Tables <https://docs.microsoft.com/en-us/typography/opentype/spec/stat#axis-value-tables>`_
.. versionadded:: 5.0
"""
class AxisDescriptor(AbstractAxisDescriptor):
""" Simple container for the axis data.
Add more localisations?
.. code:: python
a1 = AxisDescriptor()
a1.minimum = 1
a1.maximum = 1000
a1.default = 400
a1.name = "weight"
a1.tag = "wght"
a1.labelNames['fa-IR'] = "قطر"
a1.labelNames['en'] = "Wéíght"
a1.map = [(1.0, 10.0), (400.0, 66.0), (1000.0, 990.0)]
a1.axisOrdering = 1
a1.axisLabels = [
AxisLabelDescriptor(name="Regular", userValue=400, elidable=True)
]
doc.addAxis(a1)
"""
_attrs = ['tag', 'name', 'maximum', 'minimum', 'default', 'map', 'axisOrdering', 'axisLabels']
def __init__(
self,
*,
tag=None,
name=None,
labelNames=None,
minimum=None,
default=None,
maximum=None,
hidden=False,
map=None,
axisOrdering=None,
axisLabels=None,
):
super().__init__(
tag=tag,
name=name,
labelNames=labelNames,
hidden=hidden,
map=map,
axisOrdering=axisOrdering,
axisLabels=axisLabels,
)
self.minimum = minimum
"""number. The minimum value for this axis in user space.
MutatorMath + Varlib.
"""
self.maximum = maximum
"""number. The maximum value for this axis in user space.
MutatorMath + Varlib.
"""
self.default = default
"""number. The default value for this axis, i.e. when a new location is
created, this is the value this axis will get in user space.
MutatorMath + Varlib.
"""
def serialize(self):
# output to a dict, used in testing
return dict(
tag=self.tag,
name=self.name,
labelNames=self.labelNames,
maximum=self.maximum,
minimum=self.minimum,
default=self.default,
hidden=self.hidden,
map=self.map,
axisOrdering=self.axisOrdering,
axisLabels=self.axisLabels,
)
def map_forward(self, v):
"""Maps value from axis mapping's input (user) to output (design)."""
from fontTools.varLib.models import piecewiseLinearMap
if not self.map:
return v
return piecewiseLinearMap(v, {k: v for k, v in self.map})
def map_backward(self, v):
"""Maps value from axis mapping's output (design) to input (user)."""
from fontTools.varLib.models import piecewiseLinearMap
if isinstance(v, tuple):
v = v[0]
if not self.map:
return v
return piecewiseLinearMap(v, {v: k for k, v in self.map})
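# Editor's sketch of the user<->design mapping, reusing the map from the
# class docstring example above:
#
#   a1 = AxisDescriptor()
#   a1.map = [(1.0, 10.0), (400.0, 66.0), (1000.0, 990.0)]
#   a1.map_forward(400.0)          # -> 66.0
#   a1.map_forward(700.0)          # -> 528.0 (piecewise linear between 66 and 990)
#   a1.map_backward(66.0)          # -> 400.0
#   a1.map_backward((66.0, 80.0))  # -> 400.0 (only the x value of an anisotropic tuple is used)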
class DiscreteAxisDescriptor(AbstractAxisDescriptor):
"""Container for discrete axis data.
Use this for axes that do not interpolate. The main difference from a
continuous axis is that a continuous axis has a ``minimum`` and ``maximum``,
while a discrete axis has a list of ``values``.
Example: an Italic axis with 2 stops, Roman and Italic, that are not
compatible. The axis still allows binding together the full font family,
which is useful for the STAT table; however, it can't become a variation
axis in a VF.
.. code:: python
a2 = DiscreteAxisDescriptor()
a2.values = [0, 1]
a2.name = "Italic"
a2.tag = "ITAL"
a2.labelNames['fr'] = "Italique"
a2.map = [(0, 0), (1, -11)]
a2.axisOrdering = 2
a2.axisLabels = [
AxisLabelDescriptor(name="Roman", userValue=0, elidable=True)
]
doc.addAxis(a2)
.. versionadded:: 5.0
"""
flavor = "axis"
_attrs = ('tag', 'name', 'values', 'default', 'map', 'axisOrdering', 'axisLabels')
def __init__(
self,
*,
tag=None,
name=None,
labelNames=None,
values=None,
default=None,
hidden=False,
map=None,
axisOrdering=None,
axisLabels=None,
):
super().__init__(
tag=tag,
name=name,
labelNames=labelNames,
hidden=hidden,
map=map,
axisOrdering=axisOrdering,
axisLabels=axisLabels,
)
self.default: float = default
"""The default value for this axis, i.e. when a new location is
created, this is the value this axis will get in user space.
However, this default value is less important than in continuous axes:
- it doesn't define the "neutral" version of outlines from which
deltas would apply, as this axis does not interpolate.
- it doesn't provide the reference glyph set for the designspace, as
fonts at each value can have different glyph sets.
"""
self.values: List[float] = values or []
"""List of possible values for this axis. Contrary to continuous axes,
only the values in this list can be taken by the axis, nothing in-between.
"""
def map_forward(self, value):
"""Maps value from axis mapping's input to output.
Returns value unchanged if no mapping entry is found.
Note: for discrete axes, each value must have its mapping entry, if
you intend that value to be mapped.
"""
return next((v for k, v in self.map if k == value), value)
def map_backward(self, value):
"""Maps value from axis mapping's output to input.
Returns value unchanged if no mapping entry is found.
Note: for discrete axes, each value must have its mapping entry, if
you intend that value to be mapped.
"""
if isinstance(value, tuple):
value = value[0]
return next((k for k, v in self.map if v == value), value)
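# Editor's sketch: discrete axes translate only exact map entries; any other
# value is returned unchanged.
#
#   a2 = DiscreteAxisDescriptor()
#   a2.map = [(0, 0), (1, -11)]
#   a2.map_forward(1)     # -> -11
#   a2.map_forward(2)     # -> 2 (no entry)
#   a2.map_backward(-11)  # -> 1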
class AxisLabelDescriptor(SimpleDescriptor):
"""Container for axis label data.
Analogue of OpenType's STAT data for a single axis (formats 1, 2 and 3).
All values are user values.
See: `OTSpec STAT Axis value table, format 1, 2, 3 <https://docs.microsoft.com/en-us/typography/opentype/spec/stat#axis-value-table-format-1>`_
The STAT format of the Axis value depends on which fields are filled in;
see :meth:`getFormat`.
.. versionadded:: 5.0
"""
flavor = "label"
_attrs = ('userMinimum', 'userValue', 'userMaximum', 'name', 'elidable', 'olderSibling', 'linkedUserValue', 'labelNames')
def __init__(
self,
*,
name,
userValue,
userMinimum=None,
userMaximum=None,
elidable=False,
olderSibling=False,
linkedUserValue=None,
labelNames=None,
):
self.userMinimum: Optional[float] = userMinimum
"""STAT field ``rangeMinValue`` (format 2)."""
self.userValue: float = userValue
"""STAT field ``value`` (format 1, 3) or ``nominalValue`` (format 2)."""
self.userMaximum: Optional[float] = userMaximum
"""STAT field ``rangeMaxValue`` (format 2)."""
self.name: str = name
"""Label for this axis location, STAT field ``valueNameID``."""
self.elidable: bool = elidable
"""STAT flag ``ELIDABLE_AXIS_VALUE_NAME``.
See: `OTSpec STAT Flags <https://docs.microsoft.com/en-us/typography/opentype/spec/stat#flags>`_
"""
self.olderSibling: bool = olderSibling
"""STAT flag ``OLDER_SIBLING_FONT_ATTRIBUTE``.
See: `OTSpec STAT Flags <https://docs.microsoft.com/en-us/typography/opentype/spec/stat#flags>`_
"""
self.linkedUserValue: Optional[float] = linkedUserValue
"""STAT field ``linkedValue`` (format 3)."""
self.labelNames: MutableMapping[str, str] = labelNames or {}
"""User-facing translations of this location's label. Keyed by
``xml:lang`` code.
"""
def getFormat(self) -> int:
"""Determine which format of STAT Axis value to use to encode this label.
=========== ========= =========== =========== ===============
STAT Format userValue userMinimum userMaximum linkedUserValue
=========== ========= =========== =========== ===============
1 ✅ ❌ ❌ ❌
2 ✅ ✅ ✅ ❌
3 ✅ ❌ ❌ ✅
=========== ========= =========== =========== ===============
"""
if self.linkedUserValue is not None:
return 3
if self.userMinimum is not None or self.userMaximum is not None:
return 2
return 1
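# Editor's illustration of the format selection above:
#
#   AxisLabelDescriptor(name="Regular", userValue=400).getFormat()                       # -> 1
#   AxisLabelDescriptor(name="Light", userValue=300,
#                       userMinimum=250, userMaximum=349).getFormat()                    # -> 2
#   AxisLabelDescriptor(name="Regular", userValue=400, linkedUserValue=700).getFormat()  # -> 3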
@property
def defaultName(self) -> str:
"""Return the English name from :attr:`labelNames` or the :attr:`name`."""
return self.labelNames.get("en") or self.name
class LocationLabelDescriptor(SimpleDescriptor):
"""Container for location label data.
Analogue of OpenType's STAT data for a free-floating location (format 4).
All values are user values.
See: `OTSpec STAT Axis value table, format 4 <https://docs.microsoft.com/en-us/typography/opentype/spec/stat#axis-value-table-format-4>`_
.. versionadded:: 5.0
"""
flavor = "label"
_attrs = ('name', 'elidable', 'olderSibling', 'userLocation', 'labelNames')
def __init__(
self,
*,
name,
userLocation,
elidable=False,
olderSibling=False,
labelNames=None,
):
self.name: str = name
"""Label for this named location, STAT field ``valueNameID``."""
self.userLocation: SimpleLocationDict = userLocation or {}
"""Location in user coordinates along each axis.
If an axis is not mentioned, it is assumed to be at its default location.
.. seealso:: This may be only part of the full location. See:
:meth:`getFullUserLocation`
"""
self.elidable: bool = elidable
"""STAT flag ``ELIDABLE_AXIS_VALUE_NAME``.
See: `OTSpec STAT Flags <https://docs.microsoft.com/en-us/typography/opentype/spec/stat#flags>`_
"""
self.olderSibling: bool = olderSibling
"""STAT flag ``OLDER_SIBLING_FONT_ATTRIBUTE``.
See: `OTSpec STAT Flags <https://docs.microsoft.com/en-us/typography/opentype/spec/stat#flags>`_
"""
self.labelNames: Dict[str, str] = labelNames or {}
"""User-facing translations of this location's label. Keyed by
xml:lang code.
"""
@property
def defaultName(self) -> str:
"""Return the English name from :attr:`labelNames` or the :attr:`name`."""
return self.labelNames.get("en") or self.name
def getFullUserLocation(self, doc: 'DesignSpaceDocument') -> SimpleLocationDict:
"""Get the complete user location of this label, by combining data
from the explicit user location and default axis values.
.. versionadded:: 5.0
"""
return {
axis.name: self.userLocation.get(axis.name, axis.default)
for axis in doc.axes
}
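# Editor's sketch: axes missing from userLocation fall back to their default.
# Assuming a document whose axes are "weight" (default 400) and "width"
# (default 100):
#
#   label = LocationLabelDescriptor(name="Bold", userLocation={"weight": 700})
#   label.getFullUserLocation(doc)  # -> {'weight': 700, 'width': 100}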
class VariableFontDescriptor(SimpleDescriptor):
"""Container for variable fonts, sub-spaces of the Designspace.
Use-cases:
- From a single DesignSpace with discrete axes, define 1 variable font
per value on the discrete axes. Before version 5, you would have needed
1 DesignSpace per such variable font, and a lot of data duplication.
- From a big variable font with many axes, define subsets of that variable
font that only include some axes and freeze other axes at a given location.
.. versionadded:: 5.0
"""
flavor = "variable-font"
_attrs = ('filename', 'axisSubsets', 'lib')
filename = posixpath_property("_filename")
def __init__(self, *, name, filename=None, axisSubsets=None, lib=None):
self.name: str = name
"""string, required. Name of this variable to identify it during the
build process and from other parts of the document, and also as a
filename in case the filename property is empty.
VarLib.
"""
self.filename: str = filename
"""string, optional. Relative path to the variable font file, **as it is
in the document**. The file may or may not exist.
If not specified, the :attr:`name` will be used as a basename for the file.
"""
self.axisSubsets: List[Union[RangeAxisSubsetDescriptor, ValueAxisSubsetDescriptor]] = axisSubsets or []
"""Axis subsets to include in this variable font.
If an axis is not mentioned, assume that we only want the default
location of that axis (same as a :class:`ValueAxisSubsetDescriptor`).
"""
self.lib: MutableMapping[str, Any] = lib or {}
"""Custom data associated with this variable font."""
class RangeAxisSubsetDescriptor(SimpleDescriptor):
"""Subset of a continuous axis to include in a variable font.
.. versionadded:: 5.0
"""
flavor = "axis-subset"
_attrs = ('name', 'userMinimum', 'userDefault', 'userMaximum')
def __init__(self, *, name, userMinimum=-math.inf, userDefault=None, userMaximum=math.inf):
self.name: str = name
"""Name of the :class:`AxisDescriptor` to subset."""
self.userMinimum: float = userMinimum
"""New minimum value of the axis in the target variable font.
If not specified, assume the same minimum value as the full axis.
(default = ``-math.inf``)
"""
self.userDefault: Optional[float] = userDefault
"""New default value of the axis in the target variable font.
If not specified, assume the same default value as the full axis.
(default = ``None``)
"""
self.userMaximum: float = userMaximum
"""New maximum value of the axis in the target variable font.
If not specified, assume the same maximum value as the full axis.
(default = ``math.inf``)
"""
class ValueAxisSubsetDescriptor(SimpleDescriptor):
"""Single value of a discrete or continuous axis to use in a variable font.
.. versionadded:: 5.0
"""
flavor = "axis-subset"
_attrs = ('name', 'userValue')
def __init__(self, *, name, userValue):
self.name: str = name
"""Name of the :class:`AxisDescriptor` or :class:`DiscreteAxisDescriptor`
to "snapshot" or "freeze".
"""
self.userValue: float = userValue
"""Value in user coordinates at which to freeze the given axis."""
class BaseDocWriter(object):
_whiteSpace = " "
axisDescriptorClass = AxisDescriptor
discreteAxisDescriptorClass = DiscreteAxisDescriptor
axisLabelDescriptorClass = AxisLabelDescriptor
locationLabelDescriptorClass = LocationLabelDescriptor
ruleDescriptorClass = RuleDescriptor
sourceDescriptorClass = SourceDescriptor
variableFontDescriptorClass = VariableFontDescriptor
valueAxisSubsetDescriptorClass = ValueAxisSubsetDescriptor
rangeAxisSubsetDescriptorClass = RangeAxisSubsetDescriptor
instanceDescriptorClass = InstanceDescriptor
@classmethod
def getAxisDecriptor(cls):
return cls.axisDescriptorClass()
@classmethod
def getSourceDescriptor(cls):
return cls.sourceDescriptorClass()
@classmethod
def getInstanceDescriptor(cls):
return cls.instanceDescriptorClass()
@classmethod
def getRuleDescriptor(cls):
return cls.ruleDescriptorClass()
def __init__(self, documentPath, documentObject: DesignSpaceDocument):
self.path = documentPath
self.documentObject = documentObject
self.effectiveFormatTuple = self._getEffectiveFormatTuple()
self.root = ET.Element("designspace")
def write(self, pretty=True, encoding="UTF-8", xml_declaration=True):
self.root.attrib['format'] = ".".join(str(i) for i in self.effectiveFormatTuple)
if self.documentObject.axes or self.documentObject.elidedFallbackName is not None:
axesElement = ET.Element("axes")
if self.documentObject.elidedFallbackName is not None:
axesElement.attrib['elidedfallbackname'] = self.documentObject.elidedFallbackName
self.root.append(axesElement)
for axisObject in self.documentObject.axes:
self._addAxis(axisObject)
if self.documentObject.locationLabels:
labelsElement = ET.Element("labels")
for labelObject in self.documentObject.locationLabels:
self._addLocationLabel(labelsElement, labelObject)
self.root.append(labelsElement)
if self.documentObject.rules:
if getattr(self.documentObject, "rulesProcessingLast", False):
attributes = {"processing": "last"}
else:
attributes = {}
self.root.append(ET.Element("rules", attributes))
for ruleObject in self.documentObject.rules:
self._addRule(ruleObject)
if self.documentObject.sources:
self.root.append(ET.Element("sources"))
for sourceObject in self.documentObject.sources:
self._addSource(sourceObject)
if self.documentObject.variableFonts:
variableFontsElement = ET.Element("variable-fonts")
for variableFont in self.documentObject.variableFonts:
self._addVariableFont(variableFontsElement, variableFont)
self.root.append(variableFontsElement)
if self.documentObject.instances:
self.root.append(ET.Element("instances"))
for instanceObject in self.documentObject.instances:
self._addInstance(instanceObject)
if self.documentObject.lib:
self._addLib(self.root, self.documentObject.lib, 2)
tree = ET.ElementTree(self.root)
tree.write(
self.path,
encoding=encoding,
method='xml',
xml_declaration=xml_declaration,
pretty_print=pretty,
)
def _getEffectiveFormatTuple(self):
"""Try to use the version specified in the document, or a sufficiently
recent version to be able to encode what the document contains.
"""
minVersion = self.documentObject.formatTuple
if (
any(
isinstance(axis, DiscreteAxisDescriptor) or
axis.axisOrdering is not None or
axis.axisLabels
for axis in self.documentObject.axes
) or
self.documentObject.locationLabels or
any(
source.localisedFamilyName
for source in self.documentObject.sources
) or
self.documentObject.variableFonts or
any(
instance.locationLabel or
instance.userLocation
for instance in self.documentObject.instances
)
):
if minVersion < (5, 0):
minVersion = (5, 0)
return minVersion
def _makeLocationElement(self, locationObject, name=None):
""" Convert Location dict to a locationElement."""
locElement = ET.Element("location")
if name is not None:
locElement.attrib['name'] = name
validatedLocation = self.documentObject.newDefaultLocation()
for axisName, axisValue in locationObject.items():
if axisName in validatedLocation:
# only accept values we know
validatedLocation[axisName] = axisValue
for dimensionName, dimensionValue in validatedLocation.items():
dimElement = ET.Element('dimension')
dimElement.attrib['name'] = dimensionName
if isinstance(dimensionValue, tuple):
dimElement.attrib['xvalue'] = self.intOrFloat(dimensionValue[0])
dimElement.attrib['yvalue'] = self.intOrFloat(dimensionValue[1])
else:
dimElement.attrib['xvalue'] = self.intOrFloat(dimensionValue)
locElement.append(dimElement)
return locElement, validatedLocation
def intOrFloat(self, num):
if int(num) == num:
return "%d" % num
return ("%f" % num).rstrip('0').rstrip('.')
def _addRule(self, ruleObject):
# if none of the conditions have minimum or maximum values, do not add the rule.
ruleElement = ET.Element('rule')
if ruleObject.name is not None:
ruleElement.attrib['name'] = ruleObject.name
for conditions in ruleObject.conditionSets:
conditionsetElement = ET.Element('conditionset')
for cond in conditions:
if cond.get('minimum') is None and cond.get('maximum') is None:
# neither is defined, don't add this condition
continue
conditionElement = ET.Element('condition')
conditionElement.attrib['name'] = cond.get('name')
if cond.get('minimum') is not None:
conditionElement.attrib['minimum'] = self.intOrFloat(cond.get('minimum'))
if cond.get('maximum') is not None:
conditionElement.attrib['maximum'] = self.intOrFloat(cond.get('maximum'))
conditionsetElement.append(conditionElement)
if len(conditionsetElement):
ruleElement.append(conditionsetElement)
for sub in ruleObject.subs:
subElement = ET.Element('sub')
subElement.attrib['name'] = sub[0]
subElement.attrib['with'] = sub[1]
ruleElement.append(subElement)
if len(ruleElement):
self.root.findall('.rules')[0].append(ruleElement)
def _addAxis(self, axisObject):
axisElement = ET.Element('axis')
axisElement.attrib['tag'] = axisObject.tag
axisElement.attrib['name'] = axisObject.name
self._addLabelNames(axisElement, axisObject.labelNames)
if axisObject.map:
for inputValue, outputValue in axisObject.map:
mapElement = ET.Element('map')
mapElement.attrib['input'] = self.intOrFloat(inputValue)
mapElement.attrib['output'] = self.intOrFloat(outputValue)
axisElement.append(mapElement)
if axisObject.axisOrdering is not None or axisObject.axisLabels:
labelsElement = ET.Element('labels')
if axisObject.axisOrdering is not None:
labelsElement.attrib['ordering'] = str(axisObject.axisOrdering)
for label in axisObject.axisLabels:
self._addAxisLabel(labelsElement, label)
axisElement.append(labelsElement)
if isinstance(axisObject, AxisDescriptor):
axisElement.attrib['minimum'] = self.intOrFloat(axisObject.minimum)
axisElement.attrib['maximum'] = self.intOrFloat(axisObject.maximum)
elif isinstance(axisObject, DiscreteAxisDescriptor):
axisElement.attrib['values'] = " ".join(self.intOrFloat(v) for v in axisObject.values)
axisElement.attrib['default'] = self.intOrFloat(axisObject.default)
if axisObject.hidden:
axisElement.attrib['hidden'] = "1"
self.root.findall('.axes')[0].append(axisElement)
def _addAxisLabel(self, axisElement: ET.Element, label: AxisLabelDescriptor) -> None:
labelElement = ET.Element('label')
labelElement.attrib['uservalue'] = self.intOrFloat(label.userValue)
if label.userMinimum is not None:
labelElement.attrib['userminimum'] = self.intOrFloat(label.userMinimum)
if label.userMaximum is not None:
labelElement.attrib['usermaximum'] = self.intOrFloat(label.userMaximum)
labelElement.attrib['name'] = label.name
if label.elidable:
labelElement.attrib['elidable'] = "true"
if label.olderSibling:
labelElement.attrib['oldersibling'] = "true"
if label.linkedUserValue is not None:
labelElement.attrib['linkeduservalue'] = self.intOrFloat(label.linkedUserValue)
self._addLabelNames(labelElement, label.labelNames)
axisElement.append(labelElement)
def _addLabelNames(self, parentElement, labelNames):
for languageCode, labelName in sorted(labelNames.items()):
languageElement = ET.Element('labelname')
languageElement.attrib[XML_LANG] = languageCode
languageElement.text = labelName
parentElement.append(languageElement)
def _addLocationLabel(self, parentElement: ET.Element, label: LocationLabelDescriptor) -> None:
labelElement = ET.Element('label')
labelElement.attrib['name'] = label.name
if label.elidable:
labelElement.attrib['elidable'] = "true"
if label.olderSibling:
labelElement.attrib['oldersibling'] = "true"
self._addLabelNames(labelElement, label.labelNames)
self._addLocationElement(labelElement, userLocation=label.userLocation)
parentElement.append(labelElement)
def _addLocationElement(
self,
parentElement,
*,
designLocation: AnisotropicLocationDict = None,
userLocation: SimpleLocationDict = None
):
locElement = ET.Element("location")
for axis in self.documentObject.axes:
if designLocation is not None and axis.name in designLocation:
dimElement = ET.Element('dimension')
dimElement.attrib['name'] = axis.name
value = designLocation[axis.name]
if isinstance(value, tuple):
dimElement.attrib['xvalue'] = self.intOrFloat(value[0])
dimElement.attrib['yvalue'] = self.intOrFloat(value[1])
else:
dimElement.attrib['xvalue'] = self.intOrFloat(value)
locElement.append(dimElement)
elif userLocation is not None and axis.name in userLocation:
dimElement = ET.Element('dimension')
dimElement.attrib['name'] = axis.name
value = userLocation[axis.name]
dimElement.attrib['uservalue'] = self.intOrFloat(value)
locElement.append(dimElement)
if len(locElement) > 0:
parentElement.append(locElement)
def _addInstance(self, instanceObject):
instanceElement = ET.Element('instance')
if instanceObject.name is not None:
instanceElement.attrib['name'] = instanceObject.name
if instanceObject.locationLabel is not None:
instanceElement.attrib['location'] = instanceObject.locationLabel
if instanceObject.familyName is not None:
instanceElement.attrib['familyname'] = instanceObject.familyName
if instanceObject.styleName is not None:
instanceElement.attrib['stylename'] = instanceObject.styleName
# add localisations
if instanceObject.localisedStyleName:
languageCodes = list(instanceObject.localisedStyleName.keys())
languageCodes.sort()
for code in languageCodes:
if code == "en":
continue # already stored in the element attribute
localisedStyleNameElement = ET.Element('stylename')
localisedStyleNameElement.attrib[XML_LANG] = code
localisedStyleNameElement.text = instanceObject.getStyleName(code)
instanceElement.append(localisedStyleNameElement)
if instanceObject.localisedFamilyName:
languageCodes = list(instanceObject.localisedFamilyName.keys())
languageCodes.sort()
for code in languageCodes:
if code == "en":
continue # already stored in the element attribute
localisedFamilyNameElement = ET.Element('familyname')
localisedFamilyNameElement.attrib[XML_LANG] = code
localisedFamilyNameElement.text = instanceObject.getFamilyName(code)
instanceElement.append(localisedFamilyNameElement)
if instanceObject.localisedStyleMapStyleName:
languageCodes = list(instanceObject.localisedStyleMapStyleName.keys())
languageCodes.sort()
for code in languageCodes:
if code == "en":
continue
localisedStyleMapStyleNameElement = ET.Element('stylemapstylename')
localisedStyleMapStyleNameElement.attrib[XML_LANG] = code
localisedStyleMapStyleNameElement.text = instanceObject.getStyleMapStyleName(code)
instanceElement.append(localisedStyleMapStyleNameElement)
if instanceObject.localisedStyleMapFamilyName:
languageCodes = list(instanceObject.localisedStyleMapFamilyName.keys())
languageCodes.sort()
for code in languageCodes:
if code == "en":
continue
localisedStyleMapFamilyNameElement = ET.Element('stylemapfamilyname')
localisedStyleMapFamilyNameElement.attrib[XML_LANG] = code
localisedStyleMapFamilyNameElement.text = instanceObject.getStyleMapFamilyName(code)
instanceElement.append(localisedStyleMapFamilyNameElement)
if self.effectiveFormatTuple >= (5, 0):
if instanceObject.locationLabel is None:
self._addLocationElement(
instanceElement,
designLocation=instanceObject.designLocation,
userLocation=instanceObject.userLocation
)
else:
# Pre-version 5.0 code was validating and filling in the location
# dict while writing it out, as preserved below.
if instanceObject.location is not None:
locationElement, instanceObject.location = self._makeLocationElement(instanceObject.location)
instanceElement.append(locationElement)
if instanceObject.filename is not None:
instanceElement.attrib['filename'] = instanceObject.filename
if instanceObject.postScriptFontName is not None:
instanceElement.attrib['postscriptfontname'] = instanceObject.postScriptFontName
if instanceObject.styleMapFamilyName is not None:
instanceElement.attrib['stylemapfamilyname'] = instanceObject.styleMapFamilyName
if instanceObject.styleMapStyleName is not None:
instanceElement.attrib['stylemapstylename'] = instanceObject.styleMapStyleName
if self.effectiveFormatTuple < (5, 0):
# Deprecated members as of version 5.0
if instanceObject.glyphs:
if instanceElement.findall('.glyphs') == []:
glyphsElement = ET.Element('glyphs')
instanceElement.append(glyphsElement)
glyphsElement = instanceElement.findall('.glyphs')[0]
for glyphName, data in sorted(instanceObject.glyphs.items()):
glyphElement = self._writeGlyphElement(instanceElement, instanceObject, glyphName, data)
glyphsElement.append(glyphElement)
if instanceObject.kerning:
kerningElement = ET.Element('kerning')
instanceElement.append(kerningElement)
if instanceObject.info:
infoElement = ET.Element('info')
instanceElement.append(infoElement)
self._addLib(instanceElement, instanceObject.lib, 4)
self.root.findall('.instances')[0].append(instanceElement)
def _addSource(self, sourceObject):
sourceElement = ET.Element("source")
if sourceObject.filename is not None:
sourceElement.attrib['filename'] = sourceObject.filename
if sourceObject.name is not None:
if not sourceObject.name.startswith("temp_master"):
# do not save temporary source names
sourceElement.attrib['name'] = sourceObject.name
if sourceObject.familyName is not None:
sourceElement.attrib['familyname'] = sourceObject.familyName
if sourceObject.styleName is not None:
sourceElement.attrib['stylename'] = sourceObject.styleName
if sourceObject.layerName is not None:
sourceElement.attrib['layer'] = sourceObject.layerName
if sourceObject.localisedFamilyName:
languageCodes = list(sourceObject.localisedFamilyName.keys())
languageCodes.sort()
for code in languageCodes:
if code == "en":
continue # already stored in the element attribute
localisedFamilyNameElement = ET.Element('familyname')
localisedFamilyNameElement.attrib[XML_LANG] = code
localisedFamilyNameElement.text = sourceObject.getFamilyName(code)
sourceElement.append(localisedFamilyNameElement)
if sourceObject.copyLib:
libElement = ET.Element('lib')
libElement.attrib['copy'] = "1"
sourceElement.append(libElement)
if sourceObject.copyGroups:
groupsElement = ET.Element('groups')
groupsElement.attrib['copy'] = "1"
sourceElement.append(groupsElement)
if sourceObject.copyFeatures:
featuresElement = ET.Element('features')
featuresElement.attrib['copy'] = "1"
sourceElement.append(featuresElement)
if sourceObject.copyInfo or sourceObject.muteInfo:
infoElement = ET.Element('info')
if sourceObject.copyInfo:
infoElement.attrib['copy'] = "1"
if sourceObject.muteInfo:
infoElement.attrib['mute'] = "1"
sourceElement.append(infoElement)
if sourceObject.muteKerning:
kerningElement = ET.Element("kerning")
kerningElement.attrib["mute"] = '1'
sourceElement.append(kerningElement)
if sourceObject.mutedGlyphNames:
for name in sourceObject.mutedGlyphNames:
glyphElement = ET.Element("glyph")
glyphElement.attrib["name"] = name
glyphElement.attrib["mute"] = '1'
sourceElement.append(glyphElement)
if self.effectiveFormatTuple >= (5, 0):
self._addLocationElement(sourceElement, designLocation=sourceObject.location)
else:
# Pre-version 5.0 code was validating and filling in the location
# dict while writing it out, as preserved below.
locationElement, sourceObject.location = self._makeLocationElement(sourceObject.location)
sourceElement.append(locationElement)
self.root.findall('.sources')[0].append(sourceElement)
def _addVariableFont(self, parentElement: ET.Element, vf: VariableFontDescriptor) -> None:
vfElement = ET.Element('variable-font')
vfElement.attrib['name'] = vf.name
if vf.filename is not None:
vfElement.attrib['filename'] = vf.filename
if vf.axisSubsets:
subsetsElement = ET.Element('axis-subsets')
for subset in vf.axisSubsets:
subsetElement = ET.Element('axis-subset')
subsetElement.attrib['name'] = subset.name
if isinstance(subset, RangeAxisSubsetDescriptor):
if subset.userMinimum != -math.inf:
subsetElement.attrib['userminimum'] = self.intOrFloat(subset.userMinimum)
if subset.userMaximum != math.inf:
subsetElement.attrib['usermaximum'] = self.intOrFloat(subset.userMaximum)
if subset.userDefault is not None:
subsetElement.attrib['userdefault'] = self.intOrFloat(subset.userDefault)
elif isinstance(subset, ValueAxisSubsetDescriptor):
subsetElement.attrib['uservalue'] = self.intOrFloat(subset.userValue)
subsetsElement.append(subsetElement)
vfElement.append(subsetsElement)
self._addLib(vfElement, vf.lib, 4)
parentElement.append(vfElement)
def _addLib(self, parentElement: ET.Element, data: Any, indent_level: int) -> None:
if not data:
return
libElement = ET.Element('lib')
libElement.append(plistlib.totree(data, indent_level=indent_level))
parentElement.append(libElement)
def _writeGlyphElement(self, instanceElement, instanceObject, glyphName, data):
glyphElement = ET.Element('glyph')
if data.get('mute'):
glyphElement.attrib['mute'] = "1"
if data.get('unicodes') is not None:
glyphElement.attrib['unicode'] = " ".join([hex(u) for u in data.get('unicodes')])
if data.get('instanceLocation') is not None:
locationElement, data['instanceLocation'] = self._makeLocationElement(data.get('instanceLocation'))
glyphElement.append(locationElement)
if glyphName is not None:
glyphElement.attrib['name'] = glyphName
if data.get('note') is not None:
noteElement = ET.Element('note')
noteElement.text = data.get('note')
glyphElement.append(noteElement)
if data.get('masters') is not None:
mastersElement = ET.Element("masters")
for m in data.get('masters'):
masterElement = ET.Element("master")
if m.get('glyphName') is not None:
masterElement.attrib['glyphname'] = m.get('glyphName')
if m.get('font') is not None:
masterElement.attrib['source'] = m.get('font')
if m.get('location') is not None:
locationElement, m['location'] = self._makeLocationElement(m.get('location'))
masterElement.append(locationElement)
mastersElement.append(masterElement)
glyphElement.append(mastersElement)
return glyphElement
class BaseDocReader(LogMixin):
axisDescriptorClass = AxisDescriptor
discreteAxisDescriptorClass = DiscreteAxisDescriptor
axisLabelDescriptorClass = AxisLabelDescriptor
locationLabelDescriptorClass = LocationLabelDescriptor
ruleDescriptorClass = RuleDescriptor
sourceDescriptorClass = SourceDescriptor
variableFontsDescriptorClass = VariableFontDescriptor
valueAxisSubsetDescriptorClass = ValueAxisSubsetDescriptor
rangeAxisSubsetDescriptorClass = RangeAxisSubsetDescriptor
instanceDescriptorClass = InstanceDescriptor
def __init__(self, documentPath, documentObject):
self.path = documentPath
self.documentObject = documentObject
tree = ET.parse(self.path)
self.root = tree.getroot()
self.documentObject.formatVersion = self.root.attrib.get("format", "3.0")
self._axes = []
self.rules = []
self.sources = []
self.instances = []
self.axisDefaults = {}
self._strictAxisNames = True
@classmethod
def fromstring(cls, string, documentObject):
f = BytesIO(tobytes(string, encoding="utf-8"))
self = cls(f, documentObject)
self.path = None
return self
def read(self):
self.readAxes()
self.readLabels()
self.readRules()
self.readVariableFonts()
self.readSources()
self.readInstances()
self.readLib()
def readRules(self):
# we also need to read any conditions that are outside of a condition set.
rules = []
rulesElement = self.root.find(".rules")
if rulesElement is not None:
processingValue = rulesElement.attrib.get("processing", "first")
if processingValue not in {"first", "last"}:
raise DesignSpaceDocumentError(
"<rules> processing attribute value is not valid: %r, "
"expected 'first' or 'last'" % processingValue)
self.documentObject.rulesProcessingLast = processingValue == "last"
for ruleElement in self.root.findall(".rules/rule"):
ruleObject = self.ruleDescriptorClass()
ruleName = ruleObject.name = ruleElement.attrib.get("name")
# read any stray conditions outside a condition set
externalConditions = self._readConditionElements(
ruleElement,
ruleName,
)
if externalConditions:
ruleObject.conditionSets.append(externalConditions)
self.log.info(
"Found stray rule conditions outside a conditionset. "
"Wrapped them in a new conditionset."
)
# read the conditionsets
for conditionSetElement in ruleElement.findall('.conditionset'):
conditionSet = self._readConditionElements(
conditionSetElement,
ruleName,
)
if conditionSet is not None:
ruleObject.conditionSets.append(conditionSet)
for subElement in ruleElement.findall('.sub'):
a = subElement.attrib['name']
b = subElement.attrib['with']
ruleObject.subs.append((a, b))
rules.append(ruleObject)
self.documentObject.rules = rules
def _readConditionElements(self, parentElement, ruleName=None):
cds = []
for conditionElement in parentElement.findall('.condition'):
cd = {}
cdMin = conditionElement.attrib.get("minimum")
if cdMin is not None:
cd['minimum'] = float(cdMin)
else:
# will allow these to be None, assume axis.minimum
cd['minimum'] = None
cdMax = conditionElement.attrib.get("maximum")
if cdMax is not None:
cd['maximum'] = float(cdMax)
else:
# will allow these to be None, assume axis.maximum
cd['maximum'] = None
cd['name'] = conditionElement.attrib.get("name")
# a condition must specify at least one of minimum / maximum
if cd.get('minimum') is None and cd.get('maximum') is None:
raise DesignSpaceDocumentError(
"condition missing required minimum or maximum in rule" +
(" '%s'" % ruleName if ruleName is not None else ""))
cds.append(cd)
return cds
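# Editor's illustration: <condition name="weight" minimum="250"/> becomes
#   {'minimum': 250.0, 'maximum': None, 'name': 'weight'}
# while a condition with neither minimum nor maximum raises
# DesignSpaceDocumentError.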
def readAxes(self):
# read the axes elements, including the warp map.
axesElement = self.root.find(".axes")
if axesElement is not None and 'elidedfallbackname' in axesElement.attrib:
self.documentObject.elidedFallbackName = axesElement.attrib['elidedfallbackname']
axisElements = self.root.findall(".axes/axis")
if not axisElements:
return
for axisElement in axisElements:
if self.documentObject.formatTuple >= (5, 0) and "values" in axisElement.attrib:
axisObject = self.discreteAxisDescriptorClass()
axisObject.values = [float(s) for s in axisElement.attrib["values"].split(" ")]
else:
axisObject = self.axisDescriptorClass()
axisObject.minimum = float(axisElement.attrib.get("minimum"))
axisObject.maximum = float(axisElement.attrib.get("maximum"))
axisObject.default = float(axisElement.attrib.get("default"))
axisObject.name = axisElement.attrib.get("name")
if axisElement.attrib.get('hidden', False):
axisObject.hidden = True
axisObject.tag = axisElement.attrib.get("tag")
for mapElement in axisElement.findall('map'):
a = float(mapElement.attrib['input'])
b = float(mapElement.attrib['output'])
axisObject.map.append((a, b))
for labelNameElement in axisElement.findall('labelname'):
# Note: elementtree reads the "xml:lang" attribute name as
# '{http://www.w3.org/XML/1998/namespace}lang'
for key, lang in labelNameElement.items():
if key == XML_LANG:
axisObject.labelNames[lang] = tostr(labelNameElement.text)
labelElement = axisElement.find(".labels")
if labelElement is not None:
if "ordering" in labelElement.attrib:
axisObject.axisOrdering = int(labelElement.attrib["ordering"])
for label in labelElement.findall(".label"):
axisObject.axisLabels.append(self.readAxisLabel(label))
self.documentObject.axes.append(axisObject)
self.axisDefaults[axisObject.name] = axisObject.default
def readAxisLabel(self, element: ET.Element):
xml_attrs = {'userminimum', 'uservalue', 'usermaximum', 'name', 'elidable', 'oldersibling', 'linkeduservalue'}
unknown_attrs = set(element.attrib) - xml_attrs
if unknown_attrs:
raise DesignSpaceDocumentError(f"label element contains unknown attributes: {', '.join(unknown_attrs)}")
name = element.get("name")
if name is None:
raise DesignSpaceDocumentError("label element must have a name attribute.")
valueStr = element.get("uservalue")
if valueStr is None:
raise DesignSpaceDocumentError("label element must have a uservalue attribute.")
value = float(valueStr)
minimumStr = element.get("userminimum")
minimum = float(minimumStr) if minimumStr is not None else None
maximumStr = element.get("usermaximum")
maximum = float(maximumStr) if maximumStr is not None else None
linkedValueStr = element.get("linkeduservalue")
linkedValue = float(linkedValueStr) if linkedValueStr is not None else None
elidable = element.get("elidable") == "true"
olderSibling = element.get("oldersibling") == "true"
labelNames = {
lang: label_name.text or ""
for label_name in element.findall("labelname")
for attr, lang in label_name.items()
if attr == XML_LANG
# Note: elementtree reads the "xml:lang" attribute name as
# '{http://www.w3.org/XML/1998/namespace}lang'
}
return self.axisLabelDescriptorClass(
name=name,
userValue=value,
userMinimum=minimum,
userMaximum=maximum,
elidable=elidable,
olderSibling=olderSibling,
linkedUserValue=linkedValue,
labelNames=labelNames,
)
def readLabels(self):
if self.documentObject.formatTuple < (5, 0):
return
xml_attrs = {'name', 'elidable', 'oldersibling'}
for labelElement in self.root.findall(".labels/label"):
unknown_attrs = set(labelElement.attrib) - xml_attrs
if unknown_attrs:
raise DesignSpaceDocumentError(f"Label element contains unknown attributes: {', '.join(unknown_attrs)}")
name = labelElement.get("name")
if name is None:
raise DesignSpaceDocumentError("label element must have a name attribute.")
designLocation, userLocation = self.locationFromElement(labelElement)
if designLocation:
raise DesignSpaceDocumentError(f'<label> element "{name}" must only have user locations (using uservalue="").')
elidable = labelElement.get("elidable") == "true"
olderSibling = labelElement.get("oldersibling") == "true"
labelNames = {
lang: label_name.text or ""
for label_name in labelElement.findall("labelname")
for attr, lang in label_name.items()
if attr == XML_LANG
# Note: elementtree reads the "xml:lang" attribute name as
# '{http://www.w3.org/XML/1998/namespace}lang'
}
locationLabel = self.locationLabelDescriptorClass(
name=name,
userLocation=userLocation,
elidable=elidable,
olderSibling=olderSibling,
labelNames=labelNames,
)
self.documentObject.locationLabels.append(locationLabel)
def readVariableFonts(self):
if self.documentObject.formatTuple < (5, 0):
return
xml_attrs = {'name', 'filename'}
for variableFontElement in self.root.findall(".variable-fonts/variable-font"):
unknown_attrs = set(variableFontElement.attrib) - xml_attrs
if unknown_attrs:
raise DesignSpaceDocumentError(f"variable-font element contains unknown attributes: {', '.join(unknown_attrs)}")
name = variableFontElement.get("name")
if name is None:
raise DesignSpaceDocumentError("variable-font element must have a name attribute.")
filename = variableFontElement.get("filename")
axisSubsetsElement = variableFontElement.find(".axis-subsets")
if axisSubsetsElement is None:
raise DesignSpaceDocumentError("variable-font element must contain an axis-subsets element.")
axisSubsets = []
for axisSubset in axisSubsetsElement.iterfind(".axis-subset"):
axisSubsets.append(self.readAxisSubset(axisSubset))
lib = None
libElement = variableFontElement.find(".lib")
if libElement is not None:
lib = plistlib.fromtree(libElement[0])
variableFont = self.variableFontsDescriptorClass(
name=name,
filename=filename,
axisSubsets=axisSubsets,
lib=lib,
)
self.documentObject.variableFonts.append(variableFont)
def readAxisSubset(self, element: ET.Element):
if "uservalue" in element.attrib:
xml_attrs = {'name', 'uservalue'}
unknown_attrs = set(element.attrib) - xml_attrs
if unknown_attrs:
raise DesignSpaceDocumentError(f"axis-subset element contains unknown attributes: {', '.join(unknown_attrs)}")
name = element.get("name")
if name is None:
raise DesignSpaceDocumentError("axis-subset element must have a name attribute.")
userValueStr = element.get("uservalue")
if userValueStr is None:
raise DesignSpaceDocumentError(
"The axis-subset element for a discrete subset must have a uservalue attribute."
)
userValue = float(userValueStr)
return self.valueAxisSubsetDescriptorClass(name=name, userValue=userValue)
else:
xml_attrs = {'name', 'userminimum', 'userdefault', 'usermaximum'}
unknown_attrs = set(element.attrib) - xml_attrs
if unknown_attrs:
raise DesignSpaceDocumentError(f"axis-subset element contains unknown attributes: {', '.join(unknown_attrs)}")
name = element.get("name")
if name is None:
raise DesignSpaceDocumentError("axis-subset element must have a name attribute.")
userMinimum = element.get("userminimum")
userDefault = element.get("userdefault")
userMaximum = element.get("usermaximum")
if userMinimum is not None and userDefault is not None and userMaximum is not None:
return self.rangeAxisSubsetDescriptorClass(
name=name,
userMinimum=float(userMinimum),
userDefault=float(userDefault),
userMaximum=float(userMaximum),
)
if all(v is None for v in (userMinimum, userDefault, userMaximum)):
return self.rangeAxisSubsetDescriptorClass(name=name)
raise DesignSpaceDocumentError(
"axis-subset element must have min/max/default values or none at all."
)
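# Editor's illustration of the element shapes handled above (values are
# hypothetical):
#
#   <axis-subset name="Italic" uservalue="0"/>
#       -> ValueAxisSubsetDescriptor(name="Italic", userValue=0.0)
#   <axis-subset name="weight" userminimum="400" userdefault="400" usermaximum="700"/>
#       -> RangeAxisSubsetDescriptor(name="weight", userMinimum=400.0,
#                                    userDefault=400.0, userMaximum=700.0)
#   <axis-subset name="width"/>
#       -> RangeAxisSubsetDescriptor(name="width")  (keep the axis's full range)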
def readSources(self):
for sourceCount, sourceElement in enumerate(self.root.findall(".sources/source")):
filename = sourceElement.attrib.get('filename')
if filename is not None and self.path is not None:
sourcePath = os.path.abspath(os.path.join(os.path.dirname(self.path), filename))
else:
sourcePath = None
sourceName = sourceElement.attrib.get('name')
if sourceName is None:
# add a temporary source name
sourceName = "temp_master.%d" % (sourceCount)
sourceObject = self.sourceDescriptorClass()
sourceObject.path = sourcePath # absolute path to the ufo source
sourceObject.filename = filename # path as it is stored in the document
sourceObject.name = sourceName
familyName = sourceElement.attrib.get("familyname")
if familyName is not None:
sourceObject.familyName = familyName
styleName = sourceElement.attrib.get("stylename")
if styleName is not None:
sourceObject.styleName = styleName
for familyNameElement in sourceElement.findall('familyname'):
for key, lang in familyNameElement.items():
if key == XML_LANG:
familyName = familyNameElement.text
sourceObject.setFamilyName(familyName, lang)
designLocation, userLocation = self.locationFromElement(sourceElement)
if userLocation:
raise DesignSpaceDocumentError(f'<source> element "{sourceName}" must only have design locations (using xvalue="").')
sourceObject.location = designLocation
layerName = sourceElement.attrib.get('layer')
if layerName is not None:
sourceObject.layerName = layerName
for libElement in sourceElement.findall('.lib'):
if libElement.attrib.get('copy') == '1':
sourceObject.copyLib = True
for groupsElement in sourceElement.findall('.groups'):
if groupsElement.attrib.get('copy') == '1':
sourceObject.copyGroups = True
for infoElement in sourceElement.findall(".info"):
if infoElement.attrib.get('copy') == '1':
sourceObject.copyInfo = True
if infoElement.attrib.get('mute') == '1':
sourceObject.muteInfo = True
for featuresElement in sourceElement.findall(".features"):
if featuresElement.attrib.get('copy') == '1':
sourceObject.copyFeatures = True
for glyphElement in sourceElement.findall(".glyph"):
glyphName = glyphElement.attrib.get('name')
if glyphName is None:
continue
if glyphElement.attrib.get('mute') == '1':
sourceObject.mutedGlyphNames.append(glyphName)
for kerningElement in sourceElement.findall(".kerning"):
if kerningElement.attrib.get('mute') == '1':
sourceObject.muteKerning = True
self.documentObject.sources.append(sourceObject)
def locationFromElement(self, element):
"""Read a nested ``<location>`` element inside the given ``element``.
.. versionchanged:: 5.0
Return a tuple of (designLocation, userLocation)
"""
elementLocation = (None, None)
for locationElement in element.findall('.location'):
elementLocation = self.readLocationElement(locationElement)
break
return elementLocation
def readLocationElement(self, locationElement):
"""Read a ``<location>`` element.
.. versionchanged:: 5.0
Return a tuple of (designLocation, userLocation)
"""
if self._strictAxisNames and not self.documentObject.axes:
raise DesignSpaceDocumentError("No axes defined")
userLoc = {}
designLoc = {}
for dimensionElement in locationElement.findall(".dimension"):
dimName = dimensionElement.attrib.get("name")
if self._strictAxisNames and dimName not in self.axisDefaults:
                # The dimension references an axis that is not defined in the document; warn and skip it.
self.log.warning("Location with undefined axis: \"%s\".", dimName)
continue
userValue = xValue = yValue = None
try:
userValue = dimensionElement.attrib.get('uservalue')
if userValue is not None:
userValue = float(userValue)
except ValueError:
                self.log.warning("ValueError in readLocation userValue %r", userValue)
try:
xValue = dimensionElement.attrib.get('xvalue')
if xValue is not None:
xValue = float(xValue)
except ValueError:
                self.log.warning("ValueError in readLocation xValue %r", xValue)
try:
yValue = dimensionElement.attrib.get('yvalue')
if yValue is not None:
yValue = float(yValue)
except ValueError:
                self.log.warning("ValueError in readLocation yValue %r", yValue)
            if (userValue is None) == (xValue is None):
raise DesignSpaceDocumentError(f'Exactly one of uservalue="" or xvalue="" must be provided for location dimension "{dimName}"')
if yValue is not None:
if xValue is None:
                    raise DesignSpaceDocumentError(f'Missing xvalue="" for the location dimension "{dimName}" with yvalue="{yValue}"')
designLoc[dimName] = (xValue, yValue)
elif xValue is not None:
designLoc[dimName] = xValue
else:
userLoc[dimName] = userValue
return designLoc, userLoc
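    # A minimal illustrative sketch (the axis names are examples only): for a location element like
    #
    #   <location>
    #     <dimension name="weight" xvalue="350"/>
    #     <dimension name="width" xvalue="100" yvalue="110"/>
    #     <dimension name="optical" uservalue="14"/>
    #   </location>
    #
    # readLocationElement() returns
    #   designLoc == {"weight": 350.0, "width": (100.0, 110.0)}
    #   userLoc   == {"optical": 14.0}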
def readInstances(self, makeGlyphs=True, makeKerning=True, makeInfo=True):
instanceElements = self.root.findall('.instances/instance')
for instanceElement in instanceElements:
self._readSingleInstanceElement(instanceElement, makeGlyphs=makeGlyphs, makeKerning=makeKerning, makeInfo=makeInfo)
def _readSingleInstanceElement(self, instanceElement, makeGlyphs=True, makeKerning=True, makeInfo=True):
filename = instanceElement.attrib.get('filename')
if filename is not None and self.documentObject.path is not None:
instancePath = os.path.join(os.path.dirname(self.documentObject.path), filename)
else:
instancePath = None
instanceObject = self.instanceDescriptorClass()
instanceObject.path = instancePath # absolute path to the instance
instanceObject.filename = filename # path as it is stored in the document
name = instanceElement.attrib.get("name")
if name is not None:
instanceObject.name = name
familyname = instanceElement.attrib.get('familyname')
if familyname is not None:
instanceObject.familyName = familyname
stylename = instanceElement.attrib.get('stylename')
if stylename is not None:
instanceObject.styleName = stylename
postScriptFontName = instanceElement.attrib.get('postscriptfontname')
if postScriptFontName is not None:
instanceObject.postScriptFontName = postScriptFontName
styleMapFamilyName = instanceElement.attrib.get('stylemapfamilyname')
if styleMapFamilyName is not None:
instanceObject.styleMapFamilyName = styleMapFamilyName
styleMapStyleName = instanceElement.attrib.get('stylemapstylename')
if styleMapStyleName is not None:
instanceObject.styleMapStyleName = styleMapStyleName
# read localised names
for styleNameElement in instanceElement.findall('stylename'):
for key, lang in styleNameElement.items():
if key == XML_LANG:
styleName = styleNameElement.text
instanceObject.setStyleName(styleName, lang)
for familyNameElement in instanceElement.findall('familyname'):
for key, lang in familyNameElement.items():
if key == XML_LANG:
familyName = familyNameElement.text
instanceObject.setFamilyName(familyName, lang)
for styleMapStyleNameElement in instanceElement.findall('stylemapstylename'):
for key, lang in styleMapStyleNameElement.items():
if key == XML_LANG:
styleMapStyleName = styleMapStyleNameElement.text
instanceObject.setStyleMapStyleName(styleMapStyleName, lang)
for styleMapFamilyNameElement in instanceElement.findall('stylemapfamilyname'):
for key, lang in styleMapFamilyNameElement.items():
if key == XML_LANG:
styleMapFamilyName = styleMapFamilyNameElement.text
instanceObject.setStyleMapFamilyName(styleMapFamilyName, lang)
designLocation, userLocation = self.locationFromElement(instanceElement)
locationLabel = instanceElement.attrib.get('location')
if (designLocation or userLocation) and locationLabel is not None:
raise DesignSpaceDocumentError('instance element must have at most one of the location="..." attribute or the nested location element')
instanceObject.locationLabel = locationLabel
instanceObject.userLocation = userLocation or {}
instanceObject.designLocation = designLocation or {}
for glyphElement in instanceElement.findall('.glyphs/glyph'):
self.readGlyphElement(glyphElement, instanceObject)
for infoElement in instanceElement.findall("info"):
self.readInfoElement(infoElement, instanceObject)
for libElement in instanceElement.findall('lib'):
self.readLibElement(libElement, instanceObject)
self.documentObject.instances.append(instanceObject)
def readLibElement(self, libElement, instanceObject):
"""Read the lib element for the given instance."""
instanceObject.lib = plistlib.fromtree(libElement[0])
def readInfoElement(self, infoElement, instanceObject):
""" Read the info element."""
instanceObject.info = True
def readGlyphElement(self, glyphElement, instanceObject):
"""
Read the glyph element, which could look like either one of these:
.. code-block:: xml
<glyph name="b" unicode="0x62"/>
<glyph name="b"/>
<glyph name="b">
<master location="location-token-bbb" source="master-token-aaa2"/>
<master glyphname="b.alt1" location="location-token-ccc" source="master-token-aaa3"/>
<note>
This is an instance from an anisotropic interpolation.
</note>
</glyph>
"""
glyphData = {}
glyphName = glyphElement.attrib.get('name')
if glyphName is None:
raise DesignSpaceDocumentError("Glyph object without name attribute")
mute = glyphElement.attrib.get("mute")
if mute == "1":
glyphData['mute'] = True
# unicode
unicodes = glyphElement.attrib.get('unicode')
if unicodes is not None:
try:
unicodes = [int(u, 16) for u in unicodes.split(" ")]
glyphData['unicodes'] = unicodes
except ValueError:
raise DesignSpaceDocumentError("unicode values %s are not integers" % unicodes)
for noteElement in glyphElement.findall('.note'):
glyphData['note'] = noteElement.text
break
designLocation, userLocation = self.locationFromElement(glyphElement)
if userLocation:
raise DesignSpaceDocumentError(f'<glyph> element "{glyphName}" must only have design locations (using xvalue="").')
if designLocation is not None:
glyphData['instanceLocation'] = designLocation
glyphSources = None
for masterElement in glyphElement.findall('.masters/master'):
fontSourceName = masterElement.attrib.get('source')
designLocation, userLocation = self.locationFromElement(masterElement)
if userLocation:
raise DesignSpaceDocumentError(f'<master> element "{fontSourceName}" must only have design locations (using xvalue="").')
masterGlyphName = masterElement.attrib.get('glyphname')
if masterGlyphName is None:
# if we don't read a glyphname, use the one we have
masterGlyphName = glyphName
d = dict(font=fontSourceName,
location=designLocation,
glyphName=masterGlyphName)
if glyphSources is None:
glyphSources = []
glyphSources.append(d)
if glyphSources is not None:
glyphData['masters'] = glyphSources
instanceObject.glyphs[glyphName] = glyphData
def readLib(self):
"""Read the lib element for the whole document."""
for libElement in self.root.findall(".lib"):
self.documentObject.lib = plistlib.fromtree(libElement[0])
class DesignSpaceDocument(LogMixin, AsDictMixin):
"""The DesignSpaceDocument object can read and write ``.designspace`` data.
It imports the axes, sources, variable fonts and instances to very basic
**descriptor** objects that store the data in attributes. Data is added to
the document by creating such descriptor objects, filling them with data
and then adding them to the document. This makes it easy to integrate this
object in different contexts.
The **DesignSpaceDocument** object can be subclassed to work with
different objects, as long as they have the same attributes. Reader and
Writer objects can be subclassed as well.
**Note:** Python attribute names are usually camelCased, the
corresponding `XML <document-xml-structure>`_ attributes are usually
all lowercase.
.. code:: python
from fontTools.designspaceLib import DesignSpaceDocument
doc = DesignSpaceDocument.fromfile("some/path/to/my.designspace")
doc.formatVersion
doc.elidedFallbackName
doc.axes
doc.locationLabels
doc.rules
doc.rulesProcessingLast
doc.sources
doc.variableFonts
doc.instances
doc.lib
"""
def __init__(self, readerClass=None, writerClass=None):
self.path = None
"""String, optional. When the document is read from the disk, this is
the full path that was given to :meth:`read` or :meth:`fromfile`.
"""
self.filename = None
"""String, optional. When the document is read from the disk, this is
its original file name, i.e. the last part of its path.
When the document is produced by a Python script and still only exists
in memory, the producing script can write here an indication of a
possible "good" filename, in case one wants to save the file somewhere.
"""
self.formatVersion: Optional[str] = None
"""Format version for this document, as a string. E.g. "4.0" """
self.elidedFallbackName: Optional[str] = None
"""STAT Style Attributes Header field ``elidedFallbackNameID``.
See: `OTSpec STAT Style Attributes Header <https://docs.microsoft.com/en-us/typography/opentype/spec/stat#style-attributes-header>`_
.. versionadded:: 5.0
"""
self.axes: List[Union[AxisDescriptor, DiscreteAxisDescriptor]] = []
"""List of this document's axes."""
self.locationLabels: List[LocationLabelDescriptor] = []
"""List of this document's STAT format 4 labels.
.. versionadded:: 5.0"""
self.rules: List[RuleDescriptor] = []
"""List of this document's rules."""
self.rulesProcessingLast: bool = False
"""This flag indicates whether the substitution rules should be applied
before or after other glyph substitution features.
- False: before
- True: after.
Default is False. For new projects, you probably want True. See
the following issues for more information:
`fontTools#1371 <https://github.com/fonttools/fonttools/issues/1371#issuecomment-590214572>`__
`fontTools#2050 <https://github.com/fonttools/fonttools/issues/2050#issuecomment-678691020>`__
If you want to use a different feature altogether, e.g. ``calt``,
use the lib key ``com.github.fonttools.varLib.featureVarsFeatureTag``
.. code:: xml
<lib>
<dict>
<key>com.github.fonttools.varLib.featureVarsFeatureTag</key>
<string>calt</string>
</dict>
</lib>
"""
self.sources: List[SourceDescriptor] = []
"""List of this document's sources."""
self.variableFonts: List[VariableFontDescriptor] = []
"""List of this document's variable fonts.
.. versionadded:: 5.0"""
self.instances: List[InstanceDescriptor] = []
"""List of this document's instances."""
self.lib: Dict = {}
"""User defined, custom data associated with the whole document.
Use reverse-DNS notation to identify your own data.
Respect the data stored by others.
"""
self.default: Optional[str] = None
"""Name of the default master.
This attribute is updated by the :meth:`findDefault`
"""
if readerClass is not None:
self.readerClass = readerClass
else:
self.readerClass = BaseDocReader
if writerClass is not None:
self.writerClass = writerClass
else:
self.writerClass = BaseDocWriter
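    # Minimal usage sketch of building a document programmatically with the convenience
    # methods defined further below (the axis, source and instance values are made up for
    # illustration):
    #
    #   doc = DesignSpaceDocument()
    #   doc.addAxisDescriptor(name="weight", tag="wght", minimum=200, default=400, maximum=900)
    #   doc.addSourceDescriptor(filename="masters/Light.ufo", location=dict(weight=200))
    #   doc.addInstanceDescriptor(styleName="Medium", userLocation=dict(weight=500))
    #   doc.write("MyFamily.designspace")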
@classmethod
def fromfile(cls, path, readerClass=None, writerClass=None):
"""Read a designspace file from ``path`` and return a new instance of
        :class:`DesignSpaceDocument`.
"""
self = cls(readerClass=readerClass, writerClass=writerClass)
self.read(path)
return self
@classmethod
def fromstring(cls, string, readerClass=None, writerClass=None):
self = cls(readerClass=readerClass, writerClass=writerClass)
reader = self.readerClass.fromstring(string, self)
reader.read()
if self.sources:
self.findDefault()
return self
def tostring(self, encoding=None):
"""Returns the designspace as a string. Default encoding ``utf-8``."""
if encoding is str or (
encoding is not None and encoding.lower() == "unicode"
):
f = StringIO()
xml_declaration = False
elif encoding is None or encoding == "utf-8":
f = BytesIO()
encoding = "UTF-8"
xml_declaration = True
else:
raise ValueError("unsupported encoding: '%s'" % encoding)
writer = self.writerClass(f, self)
writer.write(encoding=encoding, xml_declaration=xml_declaration)
return f.getvalue()
def read(self, path):
"""Read a designspace file from ``path`` and populates the fields of
``self`` with the data.
"""
if hasattr(path, "__fspath__"): # support os.PathLike objects
path = path.__fspath__()
self.path = path
self.filename = os.path.basename(path)
reader = self.readerClass(path, self)
reader.read()
if self.sources:
self.findDefault()
def write(self, path):
"""Write this designspace to ``path``."""
if hasattr(path, "__fspath__"): # support os.PathLike objects
path = path.__fspath__()
self.path = path
self.filename = os.path.basename(path)
self.updatePaths()
writer = self.writerClass(path, self)
writer.write()
def _posixRelativePath(self, otherPath):
relative = os.path.relpath(otherPath, os.path.dirname(self.path))
return posix(relative)
def updatePaths(self):
"""
Right before we save we need to identify and respond to the following situations:
In each descriptor, we have to do the right thing for the filename attribute.
::
case 1.
descriptor.filename == None
descriptor.path == None
-- action:
write as is, descriptors will not have a filename attr.
useless, but no reason to interfere.
case 2.
descriptor.filename == "../something"
descriptor.path == None
-- action:
write as is. The filename attr should not be touched.
case 3.
descriptor.filename == None
descriptor.path == "~/absolute/path/there"
-- action:
calculate the relative path for filename.
We're not overwriting some other value for filename, it should be fine
case 4.
descriptor.filename == '../somewhere'
descriptor.path == "~/absolute/path/there"
-- action:
there is a conflict between the given filename, and the path.
So we know where the file is relative to the document.
Can't guess why they're different, we just choose for path to be correct and update filename.
"""
assert self.path is not None
for descriptor in self.sources + self.instances:
if descriptor.path is not None:
# case 3 and 4: filename gets updated and relativized
descriptor.filename = self._posixRelativePath(descriptor.path)
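    # Small illustration of case 3 above (paths are hypothetical): a source descriptor with
    #   descriptor.path == "/abs/project/masters/Regular.ufo" and descriptor.filename == None,
    # saved via write("/abs/project/MyFamily.designspace"), ends up with
    #   descriptor.filename == "masters/Regular.ufo"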
def addSource(self, sourceDescriptor: SourceDescriptor):
"""Add the given ``sourceDescriptor`` to ``doc.sources``."""
self.sources.append(sourceDescriptor)
def addSourceDescriptor(self, **kwargs):
"""Instantiate a new :class:`SourceDescriptor` using the given
``kwargs`` and add it to ``doc.sources``.
"""
source = self.writerClass.sourceDescriptorClass(**kwargs)
self.addSource(source)
return source
def addInstance(self, instanceDescriptor: InstanceDescriptor):
"""Add the given ``instanceDescriptor`` to :attr:`instances`."""
self.instances.append(instanceDescriptor)
def addInstanceDescriptor(self, **kwargs):
"""Instantiate a new :class:`InstanceDescriptor` using the given
``kwargs`` and add it to :attr:`instances`.
"""
instance = self.writerClass.instanceDescriptorClass(**kwargs)
self.addInstance(instance)
return instance
def addAxis(self, axisDescriptor: Union[AxisDescriptor, DiscreteAxisDescriptor]):
"""Add the given ``axisDescriptor`` to :attr:`axes`."""
self.axes.append(axisDescriptor)
def addAxisDescriptor(self, **kwargs):
"""Instantiate a new :class:`AxisDescriptor` using the given
``kwargs`` and add it to :attr:`axes`.
        The axis will be an instance of :class:`DiscreteAxisDescriptor` if
        the ``kwargs`` provide a ``values`` list, or an :class:`AxisDescriptor` otherwise.
"""
if "values" in kwargs:
axis = self.writerClass.discreteAxisDescriptorClass(**kwargs)
else:
axis = self.writerClass.axisDescriptorClass(**kwargs)
self.addAxis(axis)
return axis
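    # Sketch of the two call styles handled above (names, tags and values are examples):
    #   doc.addAxisDescriptor(name="weight", tag="wght", minimum=100, default=400, maximum=900)
    #       -> AxisDescriptor, a continuous axis
    #   doc.addAxisDescriptor(name="italic", tag="ital", values=[0, 1], default=0)
    #       -> DiscreteAxisDescriptor, because "values" is present in the kwargs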
def addRule(self, ruleDescriptor: RuleDescriptor):
"""Add the given ``ruleDescriptor`` to :attr:`rules`."""
self.rules.append(ruleDescriptor)
def addRuleDescriptor(self, **kwargs):
"""Instantiate a new :class:`RuleDescriptor` using the given
``kwargs`` and add it to :attr:`rules`.
"""
rule = self.writerClass.ruleDescriptorClass(**kwargs)
self.addRule(rule)
return rule
def addVariableFont(self, variableFontDescriptor: VariableFontDescriptor):
"""Add the given ``variableFontDescriptor`` to :attr:`variableFonts`.
.. versionadded:: 5.0
"""
self.variableFonts.append(variableFontDescriptor)
def addVariableFontDescriptor(self, **kwargs):
"""Instantiate a new :class:`VariableFontDescriptor` using the given
``kwargs`` and add it to :attr:`variableFonts`.
.. versionadded:: 5.0
"""
variableFont = self.writerClass.variableFontDescriptorClass(**kwargs)
self.addVariableFont(variableFont)
return variableFont
def addLocationLabel(self, locationLabelDescriptor: LocationLabelDescriptor):
"""Add the given ``locationLabelDescriptor`` to :attr:`locationLabels`.
.. versionadded:: 5.0
"""
self.locationLabels.append(locationLabelDescriptor)
def addLocationLabelDescriptor(self, **kwargs):
"""Instantiate a new :class:`LocationLabelDescriptor` using the given
``kwargs`` and add it to :attr:`locationLabels`.
.. versionadded:: 5.0
"""
locationLabel = self.writerClass.locationLabelDescriptorClass(**kwargs)
self.addLocationLabel(locationLabel)
return locationLabel
def newDefaultLocation(self):
"""Return a dict with the default location in design space coordinates."""
# Without OrderedDict, output XML would be non-deterministic.
# https://github.com/LettError/designSpaceDocument/issues/10
loc = collections.OrderedDict()
for axisDescriptor in self.axes:
loc[axisDescriptor.name] = axisDescriptor.map_forward(
axisDescriptor.default
)
return loc
def labelForUserLocation(self, userLocation: SimpleLocationDict) -> Optional[LocationLabelDescriptor]:
"""Return the :class:`LocationLabel` that matches the given
``userLocation``, or ``None`` if no such label exists.
.. versionadded:: 5.0
"""
return next(
(label for label in self.locationLabels if label.userLocation == userLocation), None
)
def updateFilenameFromPath(self, masters=True, instances=True, force=False):
"""Set a descriptor filename attr from the path and this document path.
        Unless ``force`` is True, descriptors whose filename attribute is already set are skipped.
"""
if masters:
for descriptor in self.sources:
if descriptor.filename is not None and not force:
continue
if self.path is not None:
descriptor.filename = self._posixRelativePath(descriptor.path)
if instances:
for descriptor in self.instances:
if descriptor.filename is not None and not force:
continue
if self.path is not None:
descriptor.filename = self._posixRelativePath(descriptor.path)
def newAxisDescriptor(self):
"""Ask the writer class to make us a new axisDescriptor."""
return self.writerClass.getAxisDecriptor()
def newSourceDescriptor(self):
"""Ask the writer class to make us a new sourceDescriptor."""
return self.writerClass.getSourceDescriptor()
def newInstanceDescriptor(self):
"""Ask the writer class to make us a new instanceDescriptor."""
return self.writerClass.getInstanceDescriptor()
def getAxisOrder(self):
"""Return a list of axis names, in the same order as defined in the document."""
names = []
for axisDescriptor in self.axes:
names.append(axisDescriptor.name)
return names
def getAxis(self, name):
"""Return the axis with the given ``name``, or ``None`` if no such axis exists."""
for axisDescriptor in self.axes:
if axisDescriptor.name == name:
return axisDescriptor
return None
def getLocationLabel(self, name: str) -> Optional[LocationLabelDescriptor]:
"""Return the top-level location label with the given ``name``, or
``None`` if no such label exists.
.. versionadded:: 5.0
"""
for label in self.locationLabels:
if label.name == name:
return label
return None
def map_forward(self, userLocation: SimpleLocationDict) -> SimpleLocationDict:
"""Map a user location to a design location.
Assume that missing coordinates are at the default location for that axis.
Note: the output won't be anisotropic, only the xvalue is set.
.. versionadded:: 5.0
"""
return {
axis.name: axis.map_forward(userLocation.get(axis.name, axis.default))
for axis in self.axes
}
def map_backward(self, designLocation: AnisotropicLocationDict) -> SimpleLocationDict:
"""Map a design location to a user location.
Assume that missing coordinates are at the default location for that axis.
When the input has anisotropic locations, only the xvalue is used.
.. versionadded:: 5.0
"""
return {
axis.name: (
axis.map_backward(designLocation[axis.name])
if axis.name in designLocation
else axis.default
)
for axis in self.axes
}
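    # Worked example, assuming a single "weight" axis with map [(200, 42), (400, 50), (900, 90)]:
    #   doc.map_forward({"weight": 400})   -> {"weight": 50}    (user space -> design space)
    #   doc.map_backward({"weight": 90})   -> {"weight": 900}   (design space -> user space)
    # Axes missing from the input fall back to their default, as the docstrings above state.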
def findDefault(self):
"""Set and return SourceDescriptor at the default location or None.
The default location is the set of all `default` values in user space
of all axes.
This function updates the document's :attr:`default` value.
.. versionchanged:: 5.0
Allow the default source to not specify some of the axis values, and
they are assumed to be the default.
See :meth:`SourceDescriptor.getFullDesignLocation()`
"""
self.default = None
# Convert the default location from user space to design space before comparing
# it against the SourceDescriptor locations (always in design space).
defaultDesignLocation = self.newDefaultLocation()
for sourceDescriptor in self.sources:
if sourceDescriptor.getFullDesignLocation(self) == defaultDesignLocation:
self.default = sourceDescriptor
return sourceDescriptor
return None
def normalizeLocation(self, location):
"""Return a dict with normalized axis values."""
from fontTools.varLib.models import normalizeValue
new = {}
for axis in self.axes:
if axis.name not in location:
# skipping this dimension it seems
continue
value = location[axis.name]
# 'anisotropic' location, take first coord only
if isinstance(value, tuple):
value = value[0]
triple = [
axis.map_forward(v) for v in (axis.minimum, axis.default, axis.maximum)
]
new[axis.name] = normalizeValue(value, triple)
return new
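    # Worked example, assuming one "weight" axis with minimum=200, default=400, maximum=1000
    # and no map: normalizeValue(700, (200, 400, 1000)) == 0.5, so
    #   doc.normalizeLocation({"weight": 700}) -> {"weight": 0.5}
    # The default normalizes to 0.0 and the extremes to -1.0 / +1.0.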
def normalize(self):
"""
Normalise the geometry of this designspace:
        - scale all the locations of all masters and instances to the normalized -1..0..+1 range.
- we need the axis data to do the scaling, so we do those last.
"""
# masters
for item in self.sources:
item.location = self.normalizeLocation(item.location)
# instances
for item in self.instances:
# glyph masters for this instance
for _, glyphData in item.glyphs.items():
glyphData['instanceLocation'] = self.normalizeLocation(glyphData['instanceLocation'])
for glyphMaster in glyphData['masters']:
glyphMaster['location'] = self.normalizeLocation(glyphMaster['location'])
item.location = self.normalizeLocation(item.location)
# the axes
for axis in self.axes:
# scale the map first
newMap = []
for inputValue, outputValue in axis.map:
newOutputValue = self.normalizeLocation({axis.name: outputValue}).get(axis.name)
newMap.append((inputValue, newOutputValue))
if newMap:
axis.map = newMap
# finally the axis values
minimum = self.normalizeLocation({axis.name: axis.minimum}).get(axis.name)
maximum = self.normalizeLocation({axis.name: axis.maximum}).get(axis.name)
default = self.normalizeLocation({axis.name: axis.default}).get(axis.name)
            # and set them on the axis
axis.minimum = minimum
axis.maximum = maximum
axis.default = default
# now the rules
for rule in self.rules:
newConditionSets = []
for conditions in rule.conditionSets:
newConditions = []
for cond in conditions:
if cond.get('minimum') is not None:
minimum = self.normalizeLocation({cond['name']: cond['minimum']}).get(cond['name'])
else:
minimum = None
if cond.get('maximum') is not None:
maximum = self.normalizeLocation({cond['name']: cond['maximum']}).get(cond['name'])
else:
maximum = None
newConditions.append(dict(name=cond['name'], minimum=minimum, maximum=maximum))
newConditionSets.append(newConditions)
rule.conditionSets = newConditionSets
def loadSourceFonts(self, opener, **kwargs):
"""Ensure SourceDescriptor.font attributes are loaded, and return list of fonts.
Takes a callable which initializes a new font object (e.g. TTFont, or
defcon.Font, etc.) from the SourceDescriptor.path, and sets the
SourceDescriptor.font attribute.
        If the font attribute is already set (not None), the font is not loaded again.
Fonts with the same path are only loaded once and shared among SourceDescriptors.
For example, to load UFO sources using defcon:
designspace = DesignSpaceDocument.fromfile("path/to/my.designspace")
designspace.loadSourceFonts(defcon.Font)
Or to load masters as FontTools binary fonts, including extra options:
designspace.loadSourceFonts(ttLib.TTFont, recalcBBoxes=False)
Args:
opener (Callable): takes one required positional argument, the source.path,
and an optional list of keyword arguments, and returns a new font object
loaded from the path.
**kwargs: extra options passed on to the opener function.
Returns:
List of font objects in the order they appear in the sources list.
"""
# we load fonts with the same source.path only once
loaded = {}
fonts = []
for source in self.sources:
if source.font is not None: # font already loaded
fonts.append(source.font)
continue
if source.path in loaded:
source.font = loaded[source.path]
else:
if source.path is None:
raise DesignSpaceDocumentError(
"Designspace source '%s' has no 'path' attribute"
% (source.name or "<Unknown>")
)
source.font = opener(source.path, **kwargs)
loaded[source.path] = source.font
fonts.append(source.font)
return fonts
@property
def formatTuple(self):
"""Return the formatVersion as a tuple of (major, minor).
.. versionadded:: 5.0
"""
if self.formatVersion is None:
return (5, 0)
numbers = (int(i) for i in self.formatVersion.split("."))
major = next(numbers)
minor = next(numbers, 0)
return (major, minor)
def getVariableFonts(self) -> List[VariableFontDescriptor]:
"""Return all variable fonts defined in this document, or implicit
variable fonts that can be built from the document's continuous axes.
In the case of Designspace documents before version 5, the whole
document was implicitly describing a variable font that covers the
whole space.
In version 5 and above documents, there can be as many variable fonts
as there are locations on discrete axes.
.. seealso:: :func:`splitInterpolable`
.. versionadded:: 5.0
"""
if self.variableFonts:
return self.variableFonts
variableFonts = []
discreteAxes = []
rangeAxisSubsets: List[Union[RangeAxisSubsetDescriptor, ValueAxisSubsetDescriptor]] = []
for axis in self.axes:
if isinstance(axis, DiscreteAxisDescriptor):
discreteAxes.append(axis)
else:
rangeAxisSubsets.append(RangeAxisSubsetDescriptor(name=axis.name))
valueCombinations = itertools.product(*[axis.values for axis in discreteAxes])
for values in valueCombinations:
basename = None
if self.filename is not None:
basename = os.path.splitext(self.filename)[0] + "-VF"
if self.path is not None:
basename = os.path.splitext(os.path.basename(self.path))[0] + "-VF"
if basename is None:
basename = "VF"
axisNames = "".join([f"-{axis.tag}{value}" for axis, value in zip(discreteAxes, values)])
variableFonts.append(VariableFontDescriptor(
name=f"{basename}{axisNames}",
axisSubsets=rangeAxisSubsets + [
ValueAxisSubsetDescriptor(name=axis.name, userValue=value)
for axis, value in zip(discreteAxes, values)
]
))
return variableFonts
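    # Illustrative sketch: for "MyFamily.designspace" with continuous wght/wdth axes, one
    # discrete axis tagged "ital" with values [0, 1] and no explicit <variable-fonts> element,
    # this returns two implicit descriptors, one per ital value, named by appending the
    # discrete tag and value to the basename (e.g. "MyFamily-VF-ital0" and "MyFamily-VF-ital1"),
    # each subsetting the continuous axes fully and pinning ital to one value.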
def deepcopyExceptFonts(self):
"""Allow deep-copying a DesignSpace document without deep-copying
attached UFO fonts or TTFont objects. The :attr:`font` attribute
is shared by reference between the original and the copy.
.. versionadded:: 5.0
"""
fonts = [source.font for source in self.sources]
try:
for source in self.sources:
source.font = None
res = copy.deepcopy(self)
for source, font in zip(res.sources, fonts):
                source.font = font
return res
finally:
for source, font in zip(self.sources, fonts):
source.font = font
| [((8905, 8922), 'fontTools.misc.textTools.tostr', 'tostr', (['familyName'], {}), '(familyName)\n', (8910, 8922), False, 'from fontTools.misc.textTools import tobytes, tostr\n'), ((20148, 20164), 'fontTools.misc.textTools.tostr', 'tostr', (['styleName'], {}), '(styleName)\n', (20153, 20164), False, 'from fontTools.misc.textTools import tobytes, tostr\n'), ((20380, 20397), 'fontTools.misc.textTools.tostr', 'tostr', (['familyName'], {}), '(familyName)\n', (20385, 20397), False, 'from fontTools.misc.textTools import tobytes, tostr\n'), ((20636, 20660), 'fontTools.misc.textTools.tostr', 'tostr', (['styleMapStyleName'], {}), '(styleMapStyleName)\n', (20641, 20660), False, 'from fontTools.misc.textTools import tobytes, tostr\n'), ((20916, 20941), 'fontTools.misc.textTools.tostr', 'tostr', (['styleMapFamilyName'], {}), '(styleMapFamilyName)\n', (20921, 20941), False, 'from fontTools.misc.textTools import tobytes, tostr\n'), ((30249, 30299), 'fontTools.varLib.models.piecewiseLinearMap', 'piecewiseLinearMap', (['v', '{k: v for k, v in self.map}'], {}), '(v, {k: v for k, v in self.map})\n', (30267, 30299), False, 'from fontTools.varLib.models import piecewiseLinearMap\n'), ((30589, 30639), 'fontTools.varLib.models.piecewiseLinearMap', 'piecewiseLinearMap', (['v', '{v: k for k, v in self.map}'], {}), '(v, {v: k for k, v in self.map})\n', (30607, 30639), False, 'from fontTools.varLib.models import piecewiseLinearMap\n'), ((43672, 43697), 'fontTools.misc.etree.Element', 'ET.Element', (['"""designspace"""'], {}), "('designspace')\n", (43682, 43697), True, 'from fontTools.misc import etree as ET\n'), ((45756, 45781), 'fontTools.misc.etree.ElementTree', 'ET.ElementTree', (['self.root'], {}), '(self.root)\n', (45770, 45781), True, 'from fontTools.misc import etree as ET\n'), ((47128, 47150), 'fontTools.misc.etree.Element', 'ET.Element', (['"""location"""'], {}), "('location')\n", (47138, 47150), True, 'from fontTools.misc import etree as ET\n'), ((48356, 48374), 'fontTools.misc.etree.Element', 'ET.Element', (['"""rule"""'], {}), "('rule')\n", (48366, 48374), True, 'from fontTools.misc import etree as ET\n'), ((49740, 49758), 'fontTools.misc.etree.Element', 'ET.Element', (['"""axis"""'], {}), "('axis')\n", (49750, 49758), True, 'from fontTools.misc import etree as ET\n'), ((51344, 51363), 'fontTools.misc.etree.Element', 'ET.Element', (['"""label"""'], {}), "('label')\n", (51354, 51363), True, 'from fontTools.misc import etree as ET\n'), ((52606, 52625), 'fontTools.misc.etree.Element', 'ET.Element', (['"""label"""'], {}), "('label')\n", (52616, 52625), True, 'from fontTools.misc import etree as ET\n'), ((53236, 53258), 'fontTools.misc.etree.Element', 'ET.Element', (['"""location"""'], {}), "('location')\n", (53246, 53258), True, 'from fontTools.misc import etree as ET\n'), ((54369, 54391), 'fontTools.misc.etree.Element', 'ET.Element', (['"""instance"""'], {}), "('instance')\n", (54379, 54391), True, 'from fontTools.misc import etree as ET\n'), ((59664, 59684), 'fontTools.misc.etree.Element', 'ET.Element', (['"""source"""'], {}), "('source')\n", (59674, 59684), True, 'from fontTools.misc import etree as ET\n'), ((62917, 62944), 'fontTools.misc.etree.Element', 'ET.Element', (['"""variable-font"""'], {}), "('variable-font')\n", (62927, 62944), True, 'from fontTools.misc import etree as ET\n'), ((64336, 64353), 'fontTools.misc.etree.Element', 'ET.Element', (['"""lib"""'], {}), "('lib')\n", (64346, 64353), True, 'from fontTools.misc import etree as ET\n'), ((64579, 64598), 'fontTools.misc.etree.Element', 
'ET.Element', (['"""glyph"""'], {}), "('glyph')\n", (64589, 64598), True, 'from fontTools.misc import etree as ET\n'), ((66764, 66783), 'fontTools.misc.etree.parse', 'ET.parse', (['self.path'], {}), '(self.path)\n', (66772, 66783), True, 'from fontTools.misc import etree as ET\n'), ((90558, 90590), 'fontTools.misc.plistlib.fromtree', 'plistlib.fromtree', (['libElement[0]'], {}), '(libElement[0])\n', (90575, 90590), False, 'from fontTools.misc import plistlib\n'), ((99936, 99958), 'os.path.basename', 'os.path.basename', (['path'], {}), '(path)\n', (99952, 99958), False, 'import os\n'), ((100318, 100340), 'os.path.basename', 'os.path.basename', (['path'], {}), '(path)\n', (100334, 100340), False, 'import os\n'), ((105967, 105992), 'collections.OrderedDict', 'collections.OrderedDict', ([], {}), '()\n', (105990, 105992), False, 'import collections\n'), ((117573, 117631), 'itertools.product', 'itertools.product', (['*[axis.values for axis in discreteAxes]'], {}), '(*[axis.values for axis in discreteAxes])\n', (117590, 117631), False, 'import itertools\n'), ((43980, 43998), 'fontTools.misc.etree.Element', 'ET.Element', (['"""axes"""'], {}), "('axes')\n", (43990, 43998), True, 'from fontTools.misc import etree as ET\n'), ((44372, 44392), 'fontTools.misc.etree.Element', 'ET.Element', (['"""labels"""'], {}), "('labels')\n", (44382, 44392), True, 'from fontTools.misc import etree as ET\n'), ((45214, 45242), 'fontTools.misc.etree.Element', 'ET.Element', (['"""variable-fonts"""'], {}), "('variable-fonts')\n", (45224, 45242), True, 'from fontTools.misc import etree as ET\n'), ((47597, 47620), 'fontTools.misc.etree.Element', 'ET.Element', (['"""dimension"""'], {}), "('dimension')\n", (47607, 47620), True, 'from fontTools.misc import etree as ET\n'), ((48558, 48584), 'fontTools.misc.etree.Element', 'ET.Element', (['"""conditionset"""'], {}), "('conditionset')\n", (48568, 48584), True, 'from fontTools.misc import etree as ET\n'), ((49434, 49451), 'fontTools.misc.etree.Element', 'ET.Element', (['"""sub"""'], {}), "('sub')\n", (49444, 49451), True, 'from fontTools.misc import etree as ET\n'), ((50344, 50364), 'fontTools.misc.etree.Element', 'ET.Element', (['"""labels"""'], {}), "('labels')\n", (50354, 50364), True, 'from fontTools.misc import etree as ET\n'), ((52303, 52326), 'fontTools.misc.etree.Element', 'ET.Element', (['"""labelname"""'], {}), "('labelname')\n", (52313, 52326), True, 'from fontTools.misc import etree as ET\n'), ((61011, 61028), 'fontTools.misc.etree.Element', 'ET.Element', (['"""lib"""'], {}), "('lib')\n", (61021, 61028), True, 'from fontTools.misc import etree as ET\n'), ((61182, 61202), 'fontTools.misc.etree.Element', 'ET.Element', (['"""groups"""'], {}), "('groups')\n", (61192, 61202), True, 'from fontTools.misc import etree as ET\n'), ((61366, 61388), 'fontTools.misc.etree.Element', 'ET.Element', (['"""features"""'], {}), "('features')\n", (61376, 61388), True, 'from fontTools.misc import etree as ET\n'), ((61573, 61591), 'fontTools.misc.etree.Element', 'ET.Element', (['"""info"""'], {}), "('info')\n", (61583, 61591), True, 'from fontTools.misc import etree as ET\n'), ((61878, 61899), 'fontTools.misc.etree.Element', 'ET.Element', (['"""kerning"""'], {}), "('kerning')\n", (61888, 61899), True, 'from fontTools.misc import etree as ET\n'), ((63135, 63161), 'fontTools.misc.etree.Element', 'ET.Element', (['"""axis-subsets"""'], {}), "('axis-subsets')\n", (63145, 63161), True, 'from fontTools.misc import etree as ET\n'), ((64380, 64428), 'fontTools.misc.plistlib.totree', 
'plistlib.totree', (['data'], {'indent_level': 'indent_level'}), '(data, indent_level=indent_level)\n', (64395, 64428), False, 'from fontTools.misc import plistlib\n'), ((65180, 65198), 'fontTools.misc.etree.Element', 'ET.Element', (['"""note"""'], {}), "('note')\n", (65190, 65198), True, 'from fontTools.misc import etree as ET\n'), ((65365, 65386), 'fontTools.misc.etree.Element', 'ET.Element', (['"""masters"""'], {}), "('masters')\n", (65375, 65386), True, 'from fontTools.misc import etree as ET\n'), ((67158, 67191), 'fontTools.misc.textTools.tobytes', 'tobytes', (['string'], {'encoding': '"""utf-8"""'}), "(string, encoding='utf-8')\n", (67165, 67191), False, 'from fontTools.misc.textTools import tobytes, tostr\n'), ((93654, 93686), 'fontTools.misc.plistlib.fromtree', 'plistlib.fromtree', (['libElement[0]'], {}), '(libElement[0])\n', (93671, 93686), False, 'from fontTools.misc import plistlib\n'), ((99210, 99220), 'io.StringIO', 'StringIO', ([], {}), '()\n', (99218, 99220), False, 'from io import BytesIO, StringIO\n'), ((100529, 100555), 'os.path.dirname', 'os.path.dirname', (['self.path'], {}), '(self.path)\n', (100544, 100555), False, 'import os\n'), ((111470, 111499), 'fontTools.varLib.models.normalizeValue', 'normalizeValue', (['value', 'triple'], {}), '(value, triple)\n', (111484, 111499), False, 'from fontTools.varLib.models import normalizeValue\n'), ((118916, 118935), 'copy.deepcopy', 'copy.deepcopy', (['self'], {}), '(self)\n', (118929, 118935), False, 'import copy\n'), ((44816, 44847), 'fontTools.misc.etree.Element', 'ET.Element', (['"""rules"""', 'attributes'], {}), "('rules', attributes)\n", (44826, 44847), True, 'from fontTools.misc import etree as ET\n'), ((45010, 45031), 'fontTools.misc.etree.Element', 'ET.Element', (['"""sources"""'], {}), "('sources')\n", (45020, 45031), True, 'from fontTools.misc import etree as ET\n'), ((45507, 45530), 'fontTools.misc.etree.Element', 'ET.Element', (['"""instances"""'], {}), "('instances')\n", (45517, 45530), True, 'from fontTools.misc import etree as ET\n'), ((48832, 48855), 'fontTools.misc.etree.Element', 'ET.Element', (['"""condition"""'], {}), "('condition')\n", (48842, 48855), True, 'from fontTools.misc import etree as ET\n'), ((50042, 50059), 'fontTools.misc.etree.Element', 'ET.Element', (['"""map"""'], {}), "('map')\n", (50052, 50059), True, 'from fontTools.misc import etree as ET\n'), ((53409, 53432), 'fontTools.misc.etree.Element', 'ET.Element', (['"""dimension"""'], {}), "('dimension')\n", (53419, 53432), True, 'from fontTools.misc import etree as ET\n'), ((55253, 55276), 'fontTools.misc.etree.Element', 'ET.Element', (['"""stylename"""'], {}), "('stylename')\n", (55263, 55276), True, 'from fontTools.misc import etree as ET\n'), ((55837, 55861), 'fontTools.misc.etree.Element', 'ET.Element', (['"""familyname"""'], {}), "('familyname')\n", (55847, 55861), True, 'from fontTools.misc import etree as ET\n'), ((56404, 56435), 'fontTools.misc.etree.Element', 'ET.Element', (['"""stylemapstylename"""'], {}), "('stylemapstylename')\n", (56414, 56435), True, 'from fontTools.misc import etree as ET\n'), ((57009, 57041), 'fontTools.misc.etree.Element', 'ET.Element', (['"""stylemapfamilyname"""'], {}), "('stylemapfamilyname')\n", (57019, 57041), True, 'from fontTools.misc import etree as ET\n'), ((59257, 59278), 'fontTools.misc.etree.Element', 'ET.Element', (['"""kerning"""'], {}), "('kerning')\n", (59267, 59278), True, 'from fontTools.misc import etree as ET\n'), ((59400, 59418), 'fontTools.misc.etree.Element', 'ET.Element', (['"""info"""'], 
{}), "('info')\n", (59410, 59418), True, 'from fontTools.misc import etree as ET\n'), ((60713, 60737), 'fontTools.misc.etree.Element', 'ET.Element', (['"""familyname"""'], {}), "('familyname')\n", (60723, 60737), True, 'from fontTools.misc import etree as ET\n'), ((62123, 62142), 'fontTools.misc.etree.Element', 'ET.Element', (['"""glyph"""'], {}), "('glyph')\n", (62133, 62142), True, 'from fontTools.misc import etree as ET\n'), ((63236, 63261), 'fontTools.misc.etree.Element', 'ET.Element', (['"""axis-subset"""'], {}), "('axis-subset')\n", (63246, 63261), True, 'from fontTools.misc import etree as ET\n'), ((65461, 65481), 'fontTools.misc.etree.Element', 'ET.Element', (['"""master"""'], {}), "('master')\n", (65471, 65481), True, 'from fontTools.misc import etree as ET\n'), ((77735, 77767), 'fontTools.misc.plistlib.fromtree', 'plistlib.fromtree', (['libElement[0]'], {}), '(libElement[0])\n', (77752, 77767), False, 'from fontTools.misc import plistlib\n'), ((86875, 86916), 'os.path.dirname', 'os.path.dirname', (['self.documentObject.path'], {}), '(self.documentObject.path)\n', (86890, 86916), False, 'import os\n'), ((99327, 99336), 'io.BytesIO', 'BytesIO', ([], {}), '()\n', (99334, 99336), False, 'from io import BytesIO, StringIO\n'), ((53977, 54000), 'fontTools.misc.etree.Element', 'ET.Element', (['"""dimension"""'], {}), "('dimension')\n", (53987, 54000), True, 'from fontTools.misc import etree as ET\n'), ((58794, 58814), 'fontTools.misc.etree.Element', 'ET.Element', (['"""glyphs"""'], {}), "('glyphs')\n", (58804, 58814), True, 'from fontTools.misc import etree as ET\n'), ((72323, 72351), 'fontTools.misc.textTools.tostr', 'tostr', (['labelNameElement.text'], {}), '(labelNameElement.text)\n', (72328, 72351), False, 'from fontTools.misc.textTools import tobytes, tostr\n'), ((80546, 80572), 'os.path.dirname', 'os.path.dirname', (['self.path'], {}), '(self.path)\n', (80561, 80572), False, 'import os\n'), ((117770, 117801), 'os.path.splitext', 'os.path.splitext', (['self.filename'], {}), '(self.filename)\n', (117786, 117801), False, 'import os\n'), ((117895, 117922), 'os.path.basename', 'os.path.basename', (['self.path'], {}), '(self.path)\n', (117911, 117922), False, 'import os\n')] |
dme65/Ax | ax/models/torch/posterior_mean.py | c460eab90d464df87e6478b5765fd02fb5126adb | #!/usr/bin/env python3
# Copyright (c) Facebook, Inc. and its affiliates.
#
# This source code is licensed under the MIT license found in the
# LICENSE file in the root directory of this source tree.
from typing import Any, Optional, Tuple
import torch
from botorch.acquisition.acquisition import AcquisitionFunction
from botorch.acquisition.monte_carlo import qSimpleRegret
from botorch.acquisition.objective import ConstrainedMCObjective, GenericMCObjective
from botorch.acquisition.utils import get_infeasible_cost
from botorch.models.model import Model
from botorch.utils import (
get_objective_weights_transform,
get_outcome_constraint_transforms,
)
from botorch.utils.multi_objective.scalarization import get_chebyshev_scalarization
from botorch.utils.transforms import squeeze_last_dim
from torch import Tensor
def get_PosteriorMean(
model: Model,
objective_weights: Tensor,
outcome_constraints: Optional[Tuple[Tensor, Tensor]] = None,
X_observed: Optional[Tensor] = None,
X_pending: Optional[Tensor] = None,
**kwargs: Any,
) -> AcquisitionFunction:
r"""Instantiates a PosteriorMean acquisition function.
Note: If no OutcomeConstraints given, return an analytic acquisition
function. This requires {optimizer_kwargs: {joint_optimization: True}} or an
optimizer that does not assume pending point support.
Args:
objective_weights: The objective is to maximize a weighted sum of
the columns of f(x). These are the weights.
outcome_constraints: A tuple of (A, b). For k outcome constraints
and m outputs at f(x), A is (k x m) and b is (k x 1) such that
A f(x) <= b. (Not used by single task models)
X_observed: A tensor containing points observed for all objective
outcomes and outcomes that appear in the outcome constraints (if
there are any).
X_pending: A tensor containing points whose evaluation is pending (i.e.
that have been submitted for evaluation) present for all objective
outcomes and outcomes that appear in the outcome constraints (if
there are any).
Returns:
PosteriorMean: The instantiated acquisition function.
"""
if X_observed is None:
raise ValueError("There are no feasible observed points.")
# construct Objective module
if kwargs.get("chebyshev_scalarization", False):
obj_tf = get_chebyshev_scalarization(
weights=objective_weights,
Y=squeeze_last_dim(torch.stack(kwargs.get("Ys")).transpose(0, 1)),
)
else:
obj_tf = get_objective_weights_transform(objective_weights)
if outcome_constraints is None:
objective = GenericMCObjective(objective=obj_tf)
else:
con_tfs = get_outcome_constraint_transforms(outcome_constraints)
inf_cost = get_infeasible_cost(X=X_observed, model=model, objective=obj_tf)
objective = ConstrainedMCObjective(
objective=obj_tf, constraints=con_tfs or [], infeasible_cost=inf_cost
)
# Use qSimpleRegret, not analytic posterior, to handle arbitrary objective fns.
acq_func = qSimpleRegret(model, objective=objective)
return acq_func
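# Minimal usage sketch (model choice and shapes are illustrative, not prescribed by this module):
#
#   from botorch.models import SingleTaskGP
#
#   X = torch.rand(20, 3, dtype=torch.double)              # 20 observed points in 3 dimensions
#   Y = X.sum(dim=-1, keepdim=True)                        # a single synthetic outcome
#   model = SingleTaskGP(X, Y)
#   acq_func = get_PosteriorMean(
#       model=model,
#       objective_weights=torch.tensor([1.0], dtype=torch.double),
#       X_observed=X,
#   )
#   values = acq_func(X[:5].unsqueeze(1))                  # evaluate on five q=1 candidate batches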
| [((3168, 3209), 'botorch.acquisition.monte_carlo.qSimpleRegret', 'qSimpleRegret', (['model'], {'objective': 'objective'}), '(model, objective=objective)\n', (3181, 3209), False, 'from botorch.acquisition.monte_carlo import qSimpleRegret\n'), ((2622, 2672), 'botorch.utils.get_objective_weights_transform', 'get_objective_weights_transform', (['objective_weights'], {}), '(objective_weights)\n', (2653, 2672), False, 'from botorch.utils import get_objective_weights_transform, get_outcome_constraint_transforms\n'), ((2729, 2765), 'botorch.acquisition.objective.GenericMCObjective', 'GenericMCObjective', ([], {'objective': 'obj_tf'}), '(objective=obj_tf)\n', (2747, 2765), False, 'from botorch.acquisition.objective import ConstrainedMCObjective, GenericMCObjective\n'), ((2794, 2848), 'botorch.utils.get_outcome_constraint_transforms', 'get_outcome_constraint_transforms', (['outcome_constraints'], {}), '(outcome_constraints)\n', (2827, 2848), False, 'from botorch.utils import get_objective_weights_transform, get_outcome_constraint_transforms\n'), ((2868, 2932), 'botorch.acquisition.utils.get_infeasible_cost', 'get_infeasible_cost', ([], {'X': 'X_observed', 'model': 'model', 'objective': 'obj_tf'}), '(X=X_observed, model=model, objective=obj_tf)\n', (2887, 2932), False, 'from botorch.acquisition.utils import get_infeasible_cost\n'), ((2953, 3050), 'botorch.acquisition.objective.ConstrainedMCObjective', 'ConstrainedMCObjective', ([], {'objective': 'obj_tf', 'constraints': '(con_tfs or [])', 'infeasible_cost': 'inf_cost'}), '(objective=obj_tf, constraints=con_tfs or [],\n infeasible_cost=inf_cost)\n', (2975, 3050), False, 'from botorch.acquisition.objective import ConstrainedMCObjective, GenericMCObjective\n')] |
fanyu2021/fyAutowareAuto | src/drivers/velodyne_nodes/test/velodyne_node.test.py | 073661c0634de671ff01bda8a316a5ce10c96ca9 | # Copyright 2018 the Autoware Foundation
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
# Co-developed by Tier IV, Inc. and Apex.AI, Inc.
import ament_index_python
import launch
import launch.actions
import launch_ros.actions
import lidar_integration
def generate_test_description(ready_fn):
PORT = lidar_integration.get_open_port()
# The node under test and the checker node that will pass/fail our tests:
    test_topic = "velodyne_cloud_node_test_topic"
velodyne_cloud_node = launch_ros.actions.Node(
package="velodyne_nodes",
node_executable="velodyne_cloud_node_exe",
node_name="vlp16_driver_node",
node_namespace="lidar_front",
parameters=[
"{}/param/vlp16_test.param.yaml".format(
ament_index_python.get_package_share_directory("velodyne_nodes")
),
{
"port": PORT,
"expected_num_subscribers": 1,
}
],
remappings=[("points_raw", test_topic)],
arguments=["--model", "vlp16"]
)
pcl_checker = lidar_integration.make_pcl_checker(
topic=test_topic,
size=55000,
period=100,
period_tolerance=2.2,
size_tolerance=1.4,
)
return lidar_integration.get_lidar_launch_description(
test_nodes=[velodyne_cloud_node],
checkers=[pcl_checker],
other_actions=[
launch.actions.OpaqueFunction(function=lambda context: ready_fn())
],
port=PORT
)
# Test cases are created automatically by the lidar_integration package. We just need to
# instantiate them
active = lidar_integration.make_active_tests()
after_shutdown = lidar_integration.make_post_shutdown_tests()
| [((2139, 2176), 'lidar_integration.make_active_tests', 'lidar_integration.make_active_tests', ([], {}), '()\n', (2174, 2176), False, 'import lidar_integration\n'), ((2195, 2239), 'lidar_integration.make_post_shutdown_tests', 'lidar_integration.make_post_shutdown_tests', ([], {}), '()\n', (2237, 2239), False, 'import lidar_integration\n'), ((807, 840), 'lidar_integration.get_open_port', 'lidar_integration.get_open_port', ([], {}), '()\n', (838, 840), False, 'import lidar_integration\n'), ((1581, 1703), 'lidar_integration.make_pcl_checker', 'lidar_integration.make_pcl_checker', ([], {'topic': 'test_topic', 'size': '(55000)', 'period': '(100)', 'period_tolerance': '(2.2)', 'size_tolerance': '(1.4)'}), '(topic=test_topic, size=55000, period=100,\n period_tolerance=2.2, size_tolerance=1.4)\n', (1615, 1703), False, 'import lidar_integration\n'), ((1272, 1336), 'ament_index_python.get_package_share_directory', 'ament_index_python.get_package_share_directory', (['"""velodyne_nodes"""'], {}), "('velodyne_nodes')\n", (1318, 1336), False, 'import ament_index_python\n')] |
manhcuogntin4/Color-transfer | example.py | 14b139efa86bb49a07a118c905d9d82cd7ad10d3 | # USAGE
# python example.py --source images/ocean_sunset.jpg --target images/ocean_day.jpg
# import the necessary packages
from color_transfer import color_transfer
import numpy as np
import argparse
import cv2
def show_image(title, image, width = 300):
# resize the image to have a constant width, just to
# make displaying the images take up less screen real
# estate
r = width / float(image.shape[1])
dim = (width, int(image.shape[0] * r))
resized = cv2.resize(image, dim, interpolation = cv2.INTER_AREA)
# show the resized image
cv2.imshow(title, resized)
# construct the argument parser and parse the arguments
ap = argparse.ArgumentParser()
ap.add_argument("-s", "--source", required = True,
help = "Path to the source image")
ap.add_argument("-t", "--target", required = True,
help = "Path to the target image")
ap.add_argument("-o", "--output", help = "Path to the output image (optional)")
args = vars(ap.parse_args())
# load the images
source = cv2.imread(args["source"])
target = cv2.imread(args["target"])
# transfer the color distribution from the source image
# to the target image
transfer = color_transfer(source, target)
# check to see if the output image should be saved
if args["output"] is not None:
cv2.imwrite(args["output"], transfer)
# show the images and wait for a key press
show_image("Source", source)
show_image("Target", target)
show_image("Transfer", transfer)
cv2.waitKey(0) | [((633, 658), 'argparse.ArgumentParser', 'argparse.ArgumentParser', ([], {}), '()\n', (656, 658), False, 'import argparse\n'), ((970, 996), 'cv2.imread', 'cv2.imread', (["args['source']"], {}), "(args['source'])\n", (980, 996), False, 'import cv2\n'), ((1006, 1032), 'cv2.imread', 'cv2.imread', (["args['target']"], {}), "(args['target'])\n", (1016, 1032), False, 'import cv2\n'), ((1123, 1153), 'color_transfer.color_transfer', 'color_transfer', (['source', 'target'], {}), '(source, target)\n', (1137, 1153), False, 'from color_transfer import color_transfer\n'), ((1411, 1425), 'cv2.waitKey', 'cv2.waitKey', (['(0)'], {}), '(0)\n', (1422, 1425), False, 'import cv2\n'), ((461, 513), 'cv2.resize', 'cv2.resize', (['image', 'dim'], {'interpolation': 'cv2.INTER_AREA'}), '(image, dim, interpolation=cv2.INTER_AREA)\n', (471, 513), False, 'import cv2\n'), ((544, 570), 'cv2.imshow', 'cv2.imshow', (['title', 'resized'], {}), '(title, resized)\n', (554, 570), False, 'import cv2\n'), ((1238, 1275), 'cv2.imwrite', 'cv2.imwrite', (["args['output']", 'transfer'], {}), "(args['output'], transfer)\n", (1249, 1275), False, 'import cv2\n')] |
heethesh/Argoverse-HDMap-Update | scripts/registration_pipeline.py | 61e9bf965a1fa7a0c74a2671457a2778d849bfe5 | import copy
import numpy as np
import open3d as o3d
from tqdm import tqdm
from scipy import stats
import utils_o3d as utils
def remove_ground_plane(pcd, z_thresh=-2.7):
cropped = copy.deepcopy(pcd)
cropped_points = np.array(cropped.points)
cropped_points = cropped_points[cropped_points[:, -1] > z_thresh]
pcd_final = o3d.geometry.PointCloud()
pcd_final.points = o3d.utility.Vector3dVector(cropped_points)
return pcd_final
def remove_y_plane(pcd, y_thresh=5):
cropped = copy.deepcopy(pcd)
cropped_points = np.array(cropped.points)
cropped_points = cropped_points[cropped_points[:, 0] < y_thresh]
cropped_points[:, -1] = -cropped_points[:, -1]
pcd_final = o3d.geometry.PointCloud()
pcd_final.points = o3d.utility.Vector3dVector(cropped_points)
return pcd_final
def compute_features(pcd, voxel_size, normals_nn=100, features_nn=120, downsample=True):
normals_radius = voxel_size * 2
features_radius = voxel_size * 4
# Downsample the point cloud using Voxel grids
if downsample:
print(':: Input size:', np.array(pcd.points).shape)
pcd_down = utils.downsample_point_cloud(pcd, voxel_size)
print(':: Downsample with a voxel size %.3f' % voxel_size)
print(':: Downsample size', np.array(pcd_down.points).shape)
else: pcd_down = copy.deepcopy(pcd)
# Estimate normals
print(':: Estimate normal with search radius %.3f' % normals_radius)
pcd_down.estimate_normals(
o3d.geometry.KDTreeSearchParamHybrid(radius=normals_radius, max_nn=normals_nn))
# Compute FPFH features
print(':: Compute FPFH feature with search radius %.3f' % features_radius)
features = o3d.registration.compute_fpfh_feature(pcd_down,
o3d.geometry.KDTreeSearchParamHybrid(radius=features_radius, max_nn=features_nn))
return pcd_down, features
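# Typical call (the voxel size is scene-dependent; run() below uses 0.2 for the lidar map):
#   pcd_down, fpfh = compute_features(pcd, voxel_size=0.2)
# which returns the voxel-downsampled cloud with estimated normals plus its 33-dimensional
# FPFH descriptors for feature matching.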
def match_features(pcd0, pcd1, feature0, feature1, thresh=None, display=False):
pcd0, pcd1 = copy.deepcopy(pcd0), copy.deepcopy(pcd1)
print(':: Input size 0:', np.array(pcd0.points).shape)
print(':: Input size 1:', np.array(pcd1.points).shape)
print(':: Features size 0:', np.array(feature0.data).shape)
print(':: Features size 1:', np.array(feature1.data).shape)
utils.paint_uniform_color(pcd0, color=[1, 0.706, 0])
utils.paint_uniform_color(pcd1, color=[0, 0.651, 0.929])
scores, indices = [], []
fpfh_tree = o3d.geometry.KDTreeFlann(feature1)
for i in tqdm(range(len(pcd0.points)), desc=':: Feature Matching'):
[_, idx, _] = fpfh_tree.search_knn_vector_xd(feature0.data[:, i], 1)
scores.append(np.linalg.norm(pcd0.points[i] - pcd1.points[idx[0]]))
indices.append([i, idx[0]])
scores, indices = np.array(scores), np.array(indices)
median = np.median(scores)
if thresh is None: thresh = median
inliers_idx = np.where(scores <= thresh)[0]
pcd0_idx = indices[inliers_idx, 0]
pcd1_idx = indices[inliers_idx, 1]
print(':: Score stats: Min=%0.3f, Max=%0.3f, Median=%0.3f, N<Thresh=%d' % (
np.min(scores), np.max(scores), median, len(inliers_idx)))
if display:
for i, j in zip(pcd0_idx, pcd1_idx):
pcd0.colors[i] = [1, 0, 0]
pcd1.colors[j] = [1, 0, 0]
utils.display([pcd0, pcd1])
return pcd0_idx, pcd1_idx
def estimate_scale(pcd0, pcd1, pcd0_idx, pcd1_idx, top_percent=1.0,
ransac_iters=5000, sample_size=50):
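    # RANSAC-style scale estimation: repeatedly sample matched point pairs, compare the mean
    # squared distance of each sampled subset to its cloud centroid, take the square root of
    # the ratio as a scale hypothesis, and return the most frequent hypothesis (the mode).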
points0 = np.asarray(pcd0.points)[pcd0_idx]
points1 = np.asarray(pcd1.points)[pcd1_idx]
mean0 = np.mean(points0, axis=0)
mean1 = np.mean(points1, axis=0)
top_count = int(top_percent * len(pcd0_idx))
assert top_count > sample_size, 'top_count <= sample_size'
scales = []
for i in tqdm(range(ransac_iters), desc=':: Scale Estimation RANSAC'):
args = np.random.choice(top_count, sample_size, replace=False)
points0_r = points0[args]
points1_r = points1[args]
score0 = np.sum((points0_r - mean0) ** 2, axis=1)
score1 = np.sum((points1_r - mean1) ** 2, axis=1)
scale = np.sqrt(np.mean(score1) / np.mean(score0))
scales.append(scale)
best_scale = stats.mode(scales)[0][0]
print(':: Estimated scale:', best_scale)
return best_scale
def global_registration(source_down, target_down, source_fpfh, target_fpfh, voxel_size,
distance_threshold=1.0, num_iters=4000000, num_val_iters=500):
print(':: Distance threshold %.3f' % distance_threshold)
result = o3d.registration.registration_ransac_based_on_feature_matching(
source_down, target_down, source_fpfh, target_fpfh, distance_threshold,
o3d.registration.TransformationEstimationPointToPoint(False), 4, [
o3d.registration.CorrespondenceCheckerBasedOnEdgeLength(0.9),
o3d.registration.CorrespondenceCheckerBasedOnDistance(
distance_threshold)
], o3d.registration.RANSACConvergenceCriteria(num_iters, num_val_iters))
return result
def fast_global_registration(source_down, target_down,
source_fpfh, target_fpfh, voxel_size):
distance_threshold = 1.0
result = o3d.registration.registration_fast_based_on_feature_matching(
source_down, target_down, source_fpfh, target_fpfh,
o3d.registration.FastGlobalRegistrationOption(
maximum_correspondence_distance=distance_threshold))
return result
def refine_registration(source, target, source_fpfh, target_fpfh, initial_result, voxel_size):
distance_threshold = 0.1
print(':: Distance threshold %.3f' % distance_threshold)
result = o3d.registration.registration_icp(
source, target, distance_threshold, initial_result.transformation,
o3d.registration.TransformationEstimationPointToPlane())
return result
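# Note on refine_registration: point-to-plane ICP needs normals on the target
# cloud; in this script they are estimated beforehand in compute_features().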
def registration(pcd0, pcd1, feature1, feature2, voxel_size, method='global'):
if method == 'global':
print('\nRANSAC global registration on scaled point clouds...')
initial_result = global_registration(pcd0, pcd1, feature1, feature2, voxel_size)
elif method == 'fast_global':
print('\nFast global registration on scaled point clouds...')
initial_result = fast_global_registration(pcd0, pcd1, feature1, feature2, voxel_size)
else:
print(':: Registration method not supported')
return
print(':: Initial registration results:')
print(initial_result)
print('\nDisplaying initial result...')
draw_registration_result(pcd0, pcd1, initial_result.transformation)
print('\nRefine registration...')
result = refine_registration(pcd0, pcd1, feature1, feature2, initial_result, voxel_size)
print(':: Final registration results:')
print(result)
return result
def draw_registration_result(source, target, transformation):
source_temp = copy.deepcopy(source)
target_temp = copy.deepcopy(target)
source_temp.paint_uniform_color([1, 0.706, 0])
target_temp.paint_uniform_color([0, 0.651, 0.929])
source_temp.transform(transformation)
o3d.visualization.draw_geometries([source_temp, target_temp])
def run():
voxel_size = 0.2
dso_scale = 0.03
pcd_lidar = o3d.io.read_point_cloud('../maps/scans/scan_050.pcd')
pcd_lidar = remove_ground_plane(pcd_lidar)
pcd_dso = o3d.io.read_point_cloud('../maps/dso_map_cleaned.pcd')
pcd_dso = remove_ground_plane(pcd_dso, z_thresh=4.5)
pcd_dso = remove_y_plane(pcd_dso, y_thresh=0.2)
# pcd_dso = utils.scale_point_cloud(pcd_dso, dso_scale).rotate([0.5, 0.5, 0.5]).translate([10, 20, 30])
# Ground plane removal results
# utils.display(pcds=[pcd_lidar, pcd_dso], colors=[[1, 0.706, 0], [0, 0.651, 0.929]])
# utils.display(pcds=[pcd_dso], colors=[[0, 0.651, 0.929]])
# return
print('\nComputing FPFH features for lidar point cloud...')
pcd_lidar_down, features_lidar = compute_features(pcd_lidar, voxel_size=voxel_size)
print('\nComputing FPFH features for DSO point cloud...')
pcd_dso_down, features_dso = compute_features(pcd_dso, voxel_size=voxel_size * (dso_scale if dso_scale < 1 else 1))
print('\nMatching FPFH features...')
pcd_lidar_idx, pcd_dso_idx = match_features(pcd_lidar_down, pcd_dso_down,
features_lidar, features_dso, thresh=None)
print('\nEstimating scale using matches...')
scale = estimate_scale(pcd_lidar_down, pcd_dso_down, pcd_lidar_idx, pcd_dso_idx)
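    # NOTE: the hard-coded value on the next line overrides the RANSAC scale
    # estimate computed above; it appears to be a manual/debugging override.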
scale = 0.06
print('\nCorrecting scale...')
pcd_dso_scaled = utils.scale_point_cloud(pcd_dso, 1.0 / scale)
utils.display(pcds=[pcd_lidar, pcd_dso_scaled], colors=[[1, 0.706, 0], [0, 0.651, 0.929]])
# return
# Registration
pcd_dso_scaled_down, features_dso_scaled = compute_features(
pcd_dso_scaled, voxel_size=voxel_size)
result = registration(pcd_lidar_down, pcd_dso_scaled_down, features_lidar,
features_dso_scaled, voxel_size, method='global')
print('\nDisplaying result...')
draw_registration_result(pcd_lidar, pcd_dso_scaled, result.transformation)
if __name__ == '__main__':
run()
| [((187, 205), 'copy.deepcopy', 'copy.deepcopy', (['pcd'], {}), '(pcd)\n', (200, 205), False, 'import copy\n'), ((227, 251), 'numpy.array', 'np.array', (['cropped.points'], {}), '(cropped.points)\n', (235, 251), True, 'import numpy as np\n'), ((338, 363), 'open3d.geometry.PointCloud', 'o3d.geometry.PointCloud', ([], {}), '()\n', (361, 363), True, 'import open3d as o3d\n'), ((387, 429), 'open3d.utility.Vector3dVector', 'o3d.utility.Vector3dVector', (['cropped_points'], {}), '(cropped_points)\n', (413, 429), True, 'import open3d as o3d\n'), ((504, 522), 'copy.deepcopy', 'copy.deepcopy', (['pcd'], {}), '(pcd)\n', (517, 522), False, 'import copy\n'), ((544, 568), 'numpy.array', 'np.array', (['cropped.points'], {}), '(cropped.points)\n', (552, 568), True, 'import numpy as np\n'), ((705, 730), 'open3d.geometry.PointCloud', 'o3d.geometry.PointCloud', ([], {}), '()\n', (728, 730), True, 'import open3d as o3d\n'), ((754, 796), 'open3d.utility.Vector3dVector', 'o3d.utility.Vector3dVector', (['cropped_points'], {}), '(cropped_points)\n', (780, 796), True, 'import open3d as o3d\n'), ((2258, 2310), 'utils_o3d.paint_uniform_color', 'utils.paint_uniform_color', (['pcd0'], {'color': '[1, 0.706, 0]'}), '(pcd0, color=[1, 0.706, 0])\n', (2283, 2310), True, 'import utils_o3d as utils\n'), ((2315, 2371), 'utils_o3d.paint_uniform_color', 'utils.paint_uniform_color', (['pcd1'], {'color': '[0, 0.651, 0.929]'}), '(pcd1, color=[0, 0.651, 0.929])\n', (2340, 2371), True, 'import utils_o3d as utils\n'), ((2418, 2452), 'open3d.geometry.KDTreeFlann', 'o3d.geometry.KDTreeFlann', (['feature1'], {}), '(feature1)\n', (2442, 2452), True, 'import open3d as o3d\n'), ((2786, 2803), 'numpy.median', 'np.median', (['scores'], {}), '(scores)\n', (2795, 2803), True, 'import numpy as np\n'), ((3542, 3566), 'numpy.mean', 'np.mean', (['points0'], {'axis': '(0)'}), '(points0, axis=0)\n', (3549, 3566), True, 'import numpy as np\n'), ((3579, 3603), 'numpy.mean', 'np.mean', (['points1'], {'axis': '(0)'}), '(points1, axis=0)\n', (3586, 3603), True, 'import numpy as np\n'), ((6823, 6844), 'copy.deepcopy', 'copy.deepcopy', (['source'], {}), '(source)\n', (6836, 6844), False, 'import copy\n'), ((6863, 6884), 'copy.deepcopy', 'copy.deepcopy', (['target'], {}), '(target)\n', (6876, 6884), False, 'import copy\n'), ((7037, 7098), 'open3d.visualization.draw_geometries', 'o3d.visualization.draw_geometries', (['[source_temp, target_temp]'], {}), '([source_temp, target_temp])\n', (7070, 7098), True, 'import open3d as o3d\n'), ((7171, 7224), 'open3d.io.read_point_cloud', 'o3d.io.read_point_cloud', (['"""../maps/scans/scan_050.pcd"""'], {}), "('../maps/scans/scan_050.pcd')\n", (7194, 7224), True, 'import open3d as o3d\n'), ((7287, 7341), 'open3d.io.read_point_cloud', 'o3d.io.read_point_cloud', (['"""../maps/dso_map_cleaned.pcd"""'], {}), "('../maps/dso_map_cleaned.pcd')\n", (7310, 7341), True, 'import open3d as o3d\n'), ((8478, 8523), 'utils_o3d.scale_point_cloud', 'utils.scale_point_cloud', (['pcd_dso', '(1.0 / scale)'], {}), '(pcd_dso, 1.0 / scale)\n', (8501, 8523), True, 'import utils_o3d as utils\n'), ((8528, 8623), 'utils_o3d.display', 'utils.display', ([], {'pcds': '[pcd_lidar, pcd_dso_scaled]', 'colors': '[[1, 0.706, 0], [0, 0.651, 0.929]]'}), '(pcds=[pcd_lidar, pcd_dso_scaled], colors=[[1, 0.706, 0], [0, \n 0.651, 0.929]])\n', (8541, 8623), True, 'import utils_o3d as utils\n'), ((1134, 1179), 'utils_o3d.downsample_point_cloud', 'utils.downsample_point_cloud', (['pcd', 'voxel_size'], {}), '(pcd, voxel_size)\n', (1162, 1179), True, 'import 
utils_o3d as utils\n'), ((1337, 1355), 'copy.deepcopy', 'copy.deepcopy', (['pcd'], {}), '(pcd)\n', (1350, 1355), False, 'import copy\n'), ((1494, 1572), 'open3d.geometry.KDTreeSearchParamHybrid', 'o3d.geometry.KDTreeSearchParamHybrid', ([], {'radius': 'normals_radius', 'max_nn': 'normals_nn'}), '(radius=normals_radius, max_nn=normals_nn)\n', (1530, 1572), True, 'import open3d as o3d\n'), ((1754, 1839), 'open3d.geometry.KDTreeSearchParamHybrid', 'o3d.geometry.KDTreeSearchParamHybrid', ([], {'radius': 'features_radius', 'max_nn': 'features_nn'}), '(radius=features_radius, max_nn=features_nn\n )\n', (1790, 1839), True, 'import open3d as o3d\n'), ((1966, 1985), 'copy.deepcopy', 'copy.deepcopy', (['pcd0'], {}), '(pcd0)\n', (1979, 1985), False, 'import copy\n'), ((1987, 2006), 'copy.deepcopy', 'copy.deepcopy', (['pcd1'], {}), '(pcd1)\n', (2000, 2006), False, 'import copy\n'), ((2737, 2753), 'numpy.array', 'np.array', (['scores'], {}), '(scores)\n', (2745, 2753), True, 'import numpy as np\n'), ((2755, 2772), 'numpy.array', 'np.array', (['indices'], {}), '(indices)\n', (2763, 2772), True, 'import numpy as np\n'), ((2861, 2887), 'numpy.where', 'np.where', (['(scores <= thresh)'], {}), '(scores <= thresh)\n', (2869, 2887), True, 'import numpy as np\n'), ((3265, 3292), 'utils_o3d.display', 'utils.display', (['[pcd0, pcd1]'], {}), '([pcd0, pcd1])\n', (3278, 3292), True, 'import utils_o3d as utils\n'), ((3448, 3471), 'numpy.asarray', 'np.asarray', (['pcd0.points'], {}), '(pcd0.points)\n', (3458, 3471), True, 'import numpy as np\n'), ((3496, 3519), 'numpy.asarray', 'np.asarray', (['pcd1.points'], {}), '(pcd1.points)\n', (3506, 3519), True, 'import numpy as np\n'), ((3823, 3878), 'numpy.random.choice', 'np.random.choice', (['top_count', 'sample_size'], {'replace': '(False)'}), '(top_count, sample_size, replace=False)\n', (3839, 3878), True, 'import numpy as np\n'), ((3965, 4005), 'numpy.sum', 'np.sum', (['((points0_r - mean0) ** 2)'], {'axis': '(1)'}), '((points0_r - mean0) ** 2, axis=1)\n', (3971, 4005), True, 'import numpy as np\n'), ((4023, 4063), 'numpy.sum', 'np.sum', (['((points1_r - mean1) ** 2)'], {'axis': '(1)'}), '((points1_r - mean1) ** 2, axis=1)\n', (4029, 4063), True, 'import numpy as np\n'), ((4647, 4707), 'open3d.registration.TransformationEstimationPointToPoint', 'o3d.registration.TransformationEstimationPointToPoint', (['(False)'], {}), '(False)\n', (4700, 4707), True, 'import open3d as o3d\n'), ((4902, 4970), 'open3d.registration.RANSACConvergenceCriteria', 'o3d.registration.RANSACConvergenceCriteria', (['num_iters', 'num_val_iters'], {}), '(num_iters, num_val_iters)\n', (4944, 4970), True, 'import open3d as o3d\n'), ((5263, 5365), 'open3d.registration.FastGlobalRegistrationOption', 'o3d.registration.FastGlobalRegistrationOption', ([], {'maximum_correspondence_distance': 'distance_threshold'}), '(maximum_correspondence_distance\n =distance_threshold)\n', (5308, 5365), True, 'import open3d as o3d\n'), ((5713, 5768), 'open3d.registration.TransformationEstimationPointToPlane', 'o3d.registration.TransformationEstimationPointToPlane', ([], {}), '()\n', (5766, 5768), True, 'import open3d as o3d\n'), ((2037, 2058), 'numpy.array', 'np.array', (['pcd0.points'], {}), '(pcd0.points)\n', (2045, 2058), True, 'import numpy as np\n'), ((2096, 2117), 'numpy.array', 'np.array', (['pcd1.points'], {}), '(pcd1.points)\n', (2104, 2117), True, 'import numpy as np\n'), ((2158, 2181), 'numpy.array', 'np.array', (['feature0.data'], {}), '(feature0.data)\n', (2166, 2181), True, 'import numpy as np\n'), ((2222, 
2245), 'numpy.array', 'np.array', (['feature1.data'], {}), '(feature1.data)\n', (2230, 2245), True, 'import numpy as np\n'), ((2624, 2676), 'numpy.linalg.norm', 'np.linalg.norm', (['(pcd0.points[i] - pcd1.points[idx[0]])'], {}), '(pcd0.points[i] - pcd1.points[idx[0]])\n', (2638, 2676), True, 'import numpy as np\n'), ((4170, 4188), 'scipy.stats.mode', 'stats.mode', (['scales'], {}), '(scales)\n', (4180, 4188), False, 'from scipy import stats\n'), ((4726, 4786), 'open3d.registration.CorrespondenceCheckerBasedOnEdgeLength', 'o3d.registration.CorrespondenceCheckerBasedOnEdgeLength', (['(0.9)'], {}), '(0.9)\n', (4781, 4786), True, 'import open3d as o3d\n'), ((4800, 4873), 'open3d.registration.CorrespondenceCheckerBasedOnDistance', 'o3d.registration.CorrespondenceCheckerBasedOnDistance', (['distance_threshold'], {}), '(distance_threshold)\n', (4853, 4873), True, 'import open3d as o3d\n'), ((1087, 1107), 'numpy.array', 'np.array', (['pcd.points'], {}), '(pcd.points)\n', (1095, 1107), True, 'import numpy as np\n'), ((1283, 1308), 'numpy.array', 'np.array', (['pcd_down.points'], {}), '(pcd_down.points)\n', (1291, 1308), True, 'import numpy as np\n'), ((3058, 3072), 'numpy.min', 'np.min', (['scores'], {}), '(scores)\n', (3064, 3072), True, 'import numpy as np\n'), ((3074, 3088), 'numpy.max', 'np.max', (['scores'], {}), '(scores)\n', (3080, 3088), True, 'import numpy as np\n'), ((4088, 4103), 'numpy.mean', 'np.mean', (['score1'], {}), '(score1)\n', (4095, 4103), True, 'import numpy as np\n'), ((4106, 4121), 'numpy.mean', 'np.mean', (['score0'], {}), '(score0)\n', (4113, 4121), True, 'import numpy as np\n')] |
michaelcraige/neo4j-python-driver | neo4j/aio/__init__.py | 27d0ce3f1941c4b29d0f050c6186a4f48ae4d30a | #!/usr/bin/env python
# -*- encoding: utf-8 -*-
# Copyright (c) 2002-2019 "Neo4j,"
# Neo4j Sweden AB [http://neo4j.com]
#
# This file is part of Neo4j.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from asyncio import (
IncompleteReadError,
Lock,
StreamReader,
StreamReaderProtocol,
StreamWriter,
get_event_loop,
wait,
)
from collections import deque
from logging import getLogger
from os import strerror
from random import choice
from ssl import SSLError
from sys import platform, version_info
from time import perf_counter
from neo4j.addressing import Address
from neo4j.aio._collections import WaitingList
from neo4j.aio._mixins import Addressable, Breakable
from neo4j.errors import (
BoltError,
BoltConnectionError,
BoltSecurityError,
BoltConnectionBroken,
BoltHandshakeError,
Neo4jAvailabilityError,
)
from neo4j.api import Version
from neo4j.conf import Config, PoolConfig
from neo4j.meta import version as neo4j_version
from neo4j.routing import RoutingTable
log = getLogger(__name__)
MAGIC = b"\x60\x60\xB0\x17"
class Bolt(Addressable, object):
#: True if this instance uses secure communication, false
#: otherwise.
secure = None
#: As a class attribute, this denotes the version of Bolt handled
#: by that subclass. As an instance attribute, this represents the
#: version of the protocol in use.
protocol_version = ()
# Record of the time at which this connection was opened.
__t_opened = None
# Handle to the StreamReader object.
__reader = None
# Handle to the StreamWriter object, which can be used on close.
__writer = None
# Flag to indicate that the connection is closed
__closed = False
@classmethod
def default_user_agent(cls):
""" Return the default user agent string for a connection.
"""
template = "neo4j-python/{} Python/{}.{}.{}-{}-{} ({})"
fields = (neo4j_version,) + tuple(version_info) + (platform,)
return template.format(*fields)
@classmethod
def protocol_handlers(cls, protocol_version=None):
""" Return a dictionary of available Bolt protocol handlers,
keyed by version tuple. If an explicit protocol version is
provided, the dictionary will contain either zero or one items,
depending on whether that version is supported. If no protocol
version is provided, all available versions will be returned.
:param protocol_version: tuple identifying a specific protocol
version (e.g. (3, 5)) or None
:return: dictionary of version tuple to handler class for all
relevant and supported protocol versions
:raise TypeError: if protocol version is not passed in a tuple
"""
# Carry out subclass imports locally to avoid circular
# dependency issues.
from neo4j.aio.bolt3 import Bolt3
handlers = {bolt.protocol_version: bolt for bolt in [
# This list can be updated as protocol
# versions are added and removed.
Bolt3,
]}
if protocol_version is None:
return handlers
if not isinstance(protocol_version, tuple):
raise TypeError("Protocol version must be specified as a tuple")
return {version: handler
for version, handler in handlers.items()
if version == protocol_version}
@classmethod
def opener(cls, auth=None, **config):
""" Create and return an opener function for a given set of
configuration parameters. This is useful when multiple servers share
the same configuration details, such as within a connection pool.
"""
async def f(address, *, loop=None):
return await Bolt.open(address, auth=auth, loop=loop, **config)
return f
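    # Illustrative use of the opener (address and credentials are assumptions,
    # not part of this module):
    #
    #     opener = Bolt.opener(auth=("neo4j", "password"))
    #     cx = await opener(("localhost", 7687))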
@classmethod
async def open(cls, address, *, auth=None, loop=None, **config):
""" Open a socket connection and perform protocol version
negotiation, in order to construct and return a Bolt client
instance for a supported Bolt protocol version.
:param address: tuples of host and port, such as
("127.0.0.1", 7687)
:param auth:
:param loop:
:param config:
:return: instance of a Bolt subclass
:raise BoltConnectionError: if a connection could not be
established
:raise BoltConnectionLost: if an I/O error occurs on the
underlying socket connection
:raise BoltHandshakeError: if handshake completes without a
successful negotiation
:raise TypeError: if any of the arguments provided are passed
as incompatible types
:raise ValueError: if any of the arguments provided are passed
with unsupported values
"""
# Args
address = Address(address)
if loop is None:
loop = get_event_loop()
config = PoolConfig.consume(config)
# Connect
reader, writer = await cls._connect(address, loop, config)
try:
# Handshake
subclass = await cls._handshake(reader, writer, config.protocol_version)
# Instantiation
obj = subclass(reader, writer)
obj.secure = bool(config.secure)
assert hasattr(obj, "__ainit__")
await obj.__ainit__(auth)
return obj
except BoltError:
writer.write_eof()
writer.close()
raise
@classmethod
async def _connect(cls, address, loop, config):
""" Attempt to establish a TCP connection to the address
provided.
:param address:
:param loop:
:param config:
:return: a 3-tuple of reader, writer and security settings for
the new connection
:raise BoltConnectionError: if a connection could not be
established
"""
assert isinstance(address, Address)
assert loop is not None
assert isinstance(config, Config)
connection_args = {
"host": address.host,
"port": address.port,
"family": address.family,
# TODO: other args
}
ssl_context = config.get_ssl_context()
if ssl_context:
connection_args["ssl"] = ssl_context
connection_args["server_hostname"] = address.host
log.debug("[#0000] C: <DIAL> %s", address)
try:
reader = BoltStreamReader(loop=loop)
protocol = StreamReaderProtocol(reader, loop=loop)
transport, _ = await loop.create_connection(lambda: protocol, **connection_args)
writer = BoltStreamWriter(transport, protocol, reader, loop)
except SSLError as err:
log.debug("[#%04X] S: <REJECT> %s (%d %s)", 0, address,
err.errno, strerror(err.errno))
raise BoltSecurityError("Failed to establish a secure connection", address) from err
except OSError as err:
log.debug("[#%04X] S: <REJECT> %s (%d %s)", 0, address,
err.errno, strerror(err.errno))
raise BoltConnectionError("Failed to establish a connection", address) from err
else:
local_address = Address(transport.get_extra_info("sockname"))
remote_address = Address(transport.get_extra_info("peername"))
log.debug("[#%04X] S: <ACCEPT> %s -> %s",
local_address.port_number, local_address, remote_address)
return reader, writer
@classmethod
async def _handshake(cls, reader, writer, protocol_version):
""" Carry out a Bolt handshake, optionally requesting a
specific protocol version.
:param reader:
:param writer:
:param protocol_version:
:return:
:raise BoltConnectionLost: if an I/O error occurs on the
underlying socket connection
:raise BoltHandshakeError: if handshake completes without a
successful negotiation
"""
local_address = Address(writer.transport.get_extra_info("sockname"))
remote_address = Address(writer.transport.get_extra_info("peername"))
handlers = cls.protocol_handlers(protocol_version)
if not handlers:
raise ValueError("No protocol handlers available (requested Bolt %r)", protocol_version)
offered_versions = sorted(handlers.keys(), reverse=True)[:4]
request_data = MAGIC + b"".join(
v.to_bytes() for v in offered_versions).ljust(16, b"\x00")
log.debug("[#%04X] C: <HANDSHAKE> %r", local_address.port_number, request_data)
writer.write(request_data)
await writer.drain()
response_data = await reader.readexactly(4)
log.debug("[#%04X] S: <HANDSHAKE> %r", local_address.port_number, response_data)
try:
agreed_version = Version.from_bytes(response_data)
except ValueError as err:
writer.close()
raise BoltHandshakeError("Unexpected handshake response %r" % response_data,
remote_address, request_data, response_data) from err
try:
subclass = handlers[agreed_version]
except KeyError:
log.debug("Unsupported Bolt protocol version %s", agreed_version)
raise BoltHandshakeError("Unsupported Bolt protocol version",
remote_address, request_data, response_data)
else:
return subclass
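    # Illustrative handshake exchange, assuming a client that only offers
    # Bolt 3.0 (the 20-byte request is MAGIC followed by four 4-byte version
    # slots, unused slots zero-padded):
    #
    #   C: 60 60 B0 17  00 00 00 03  00 00 00 00  00 00 00 00  00 00 00 00
    #   S: 00 00 00 03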
def __new__(cls, reader, writer):
obj = super().__new__(cls)
obj.__t_opened = perf_counter()
obj.__reader = reader
obj.__writer = writer
Addressable.set_transport(obj, writer.transport)
return obj
def __repr__(self):
return "<Bolt address=%r protocol_version=%r>" % (self.remote_address,
self.protocol_version)
async def __ainit__(self, auth):
""" Asynchronous initializer for implementation by subclasses.
:param auth:
"""
@property
def age(self):
""" The age of this connection in seconds.
"""
return perf_counter() - self.__t_opened
@property
def broken(self):
""" Flag to indicate whether this connection has been broken
by the network or remote peer.
"""
return self.__reader.broken or self.__writer.broken
@property
def closed(self):
""" Flag to indicate whether this connection has been closed
locally."""
return self.__closed
async def close(self):
""" Close the connection.
"""
if self.closed:
return
if not self.broken:
log.debug("[#%04X] S: <HANGUP>", self.local_address.port_number)
self.__writer.write_eof()
self.__writer.close()
try:
await self.__writer.wait_closed()
except BoltConnectionBroken:
pass
self.__closed = True
async def reset(self, force=False):
""" Reset the connection to a clean state.
By default, a RESET message will only be sent if required, i.e.
if the connection is not already in a clean state. If forced,
this check will be overridden and a RESET will be sent
regardless.
"""
async def run(self, cypher, parameters=None, discard=False, readonly=False,
bookmarks=None, timeout=None, metadata=None):
""" Run an auto-commit transaction.
:param cypher:
:param parameters:
:param discard:
:param readonly:
:param bookmarks:
:param timeout:
:param metadata:
:raise BoltTransactionError: if a transaction cannot be carried
out at this time
"""
async def begin(self, readonly=False, bookmarks=None,
timeout=None, metadata=None):
""" Begin an explicit transaction.
:param readonly:
:param bookmarks:
:param timeout:
:param metadata:
:return:
"""
async def run_tx(self, f, args=None, kwargs=None, readonly=False,
bookmarks=None, timeout=None, metadata=None):
""" Run a transaction function and return the return value from
that function.
"""
async def get_routing_table(self, context=None):
""" Fetch a new routing table.
:param context: the routing context to use for this call
:return: a new RoutingTable instance or None if the given router is
currently unable to provide routing information
:raise ServiceUnavailable: if no writers are available
:raise ProtocolError: if the routing information received is unusable
"""
class BoltStreamReader(Addressable, Breakable, StreamReader):
""" Wrapper for asyncio.streams.StreamReader
"""
def set_transport(self, transport):
Addressable.set_transport(self, transport)
StreamReader.set_transport(self, transport)
async def readuntil(self, separator=b'\n'): # pragma: no cover
assert False # not used by current implementation
async def read(self, n=-1): # pragma: no cover
assert False # not used by current implementation
async def readexactly(self, n):
try:
return await super().readexactly(n)
except IncompleteReadError as err:
message = ("Network read incomplete (received {} of {} "
"bytes)".format(len(err.partial), err.expected))
log.debug("[#%04X] S: <CLOSE>", self.local_address.port_number)
Breakable.set_broken(self)
raise BoltConnectionBroken(message, self.remote_address) from err
except OSError as err:
log.debug("[#%04X] S: <CLOSE> %d %s", err.errno, strerror(err.errno))
Breakable.set_broken(self)
raise BoltConnectionBroken("Network read failed", self.remote_address) from err
class BoltStreamWriter(Addressable, Breakable, StreamWriter):
""" Wrapper for asyncio.streams.StreamWriter
"""
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
Addressable.set_transport(self, self.transport)
async def drain(self):
try:
await super().drain()
except OSError as err:
log.debug("[#%04X] S: <CLOSE> (%s)", self.local_address.port_number, err)
Breakable.set_broken(self)
raise BoltConnectionBroken("Network write failed", self.remote_address) from err
async def wait_closed(self):
try:
await super().wait_closed()
except AttributeError: # pragma: no cover
# This is a dirty hack for Python 3.6, which didn't include
# 'wait_closed'. The code polls waiting for the stream
# reader inside the protocol to go away which, by the
# implementation of 3.6, occurs on 'connection_lost'. This
# hack is likely safe unless the implementation of 3.6
# changes in a subsequent patch, and can be removed when
# Python 3.6 support is no longer required.
#
from asyncio import sleep
try:
while self._protocol._stream_reader is not None:
await sleep(0.1)
except AttributeError:
pass
class Pool:
def acquire(self, *, force_reset=False, timeout=None):
raise NotImplementedError
def release(self, *connections, force_reset=False):
raise NotImplementedError
def close(self, *, force=False):
raise NotImplementedError
class BoltPool:
""" A pool of connections to a single address.
:param opener: a function to which an address can be passed that
returns an open and ready Bolt connection
:param address: the remote address for which this pool operates
:param max_size: the maximum permitted number of simultaneous
connections that may be owned by this pool, both in-use and
free
:param max_age: the maximum permitted age, in seconds, for
connections to be retained in this pool
"""
@classmethod
async def open(cls, address, *, auth=None, loop=None, **config):
""" Create a new connection pool, with an option to seed one
or more initial connections.
"""
pool_config = PoolConfig.consume(config)
def opener(addr):
return Bolt.open(addr, auth=auth, loop=loop, **pool_config)
pool = cls(loop, opener, pool_config, address)
seeds = [await pool.acquire() for _ in range(pool_config.init_size)]
for seed in seeds:
await pool.release(seed)
return pool
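    # Illustrative usage sketch (address and credentials are assumptions, not
    # part of this module):
    #
    #     pool = await BoltPool.open(("localhost", 7687), auth=("neo4j", "password"))
    #     cx = await pool.acquire()
    #     try:
    #         ...  # use the connection
    #     finally:
    #         await pool.release(cx)
    #     await pool.close()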
def __init__(self, loop, opener, config, address):
if loop is None:
self._loop = get_event_loop()
else:
self._loop = loop
self._opener = opener
self._address = Address(address)
self._max_size = config.max_size
self._max_age = config.max_age
self._in_use_list = deque()
self._free_list = deque()
self._waiting_list = WaitingList(loop=self._loop)
def __repr__(self):
return "<{} addr'{}' [{}{}{}]>".format(
self.__class__.__name__,
self.address,
"|" * len(self._in_use_list),
"." * len(self._free_list),
" " * (self.max_size - self.size),
)
def __contains__(self, cx):
return cx in self._in_use_list or cx in self._free_list
def __len__(self):
return self.size
@property
def address(self):
""" The remote address for which this pool operates.
"""
return self._address
@property
def max_size(self):
""" The maximum permitted number of simultaneous connections
that may be owned by this pool, both in-use and free.
"""
return self._max_size
@max_size.setter
def max_size(self, value):
old_value = self._max_size
self._max_size = value
if value > old_value:
# The maximum size has grown, so new slots have become
# available. Notify any waiting acquirers of this extra
# capacity.
self._waiting_list.notify()
@property
def max_age(self):
""" The maximum permitted age, in seconds, for connections to
be retained in this pool.
"""
return self._max_age
@property
def in_use(self):
""" The number of connections in this pool that are currently
in use.
"""
return len(self._in_use_list)
@property
def size(self):
""" The total number of connections (both in-use and free)
currently owned by this connection pool.
"""
return len(self._in_use_list) + len(self._free_list)
async def _sanitize(self, cx, *, force_reset):
""" Attempt to clean up a connection, such that it can be
reused.
If the connection is broken or closed, it can be discarded.
Otherwise, the age of the connection is checked against the
maximum age permitted by this pool, consequently closing it
on expiry.
Should the connection be neither broken, closed nor expired,
it will be reset (optionally forcibly so) and the connection
object will be returned, indicating success.
"""
if cx.broken or cx.closed:
return None
expired = self.max_age is not None and cx.age > self.max_age
if expired:
await cx.close()
return None
await cx.reset(force=force_reset)
return cx
async def acquire(self, *, force_reset=False):
""" Acquire a connection from the pool.
In the simplest case, this will return an existing open
connection, if one is free. If not, and the pool is not full,
a new connection will be created. If the pool is full and no
free connections are available, this will block until a
connection is released, or until the acquire call is cancelled.
:param force_reset: if true, the connection will be forcibly
reset before being returned; if false, this will only occur
if the connection is not already in a clean state
:return: a Bolt connection object
"""
log.debug("Acquiring connection from pool %r", self)
cx = None
while cx is None or cx.broken or cx.closed:
try:
# Plan A: select a free connection from the pool
cx = self._free_list.popleft()
except IndexError:
if self.size < self.max_size:
# Plan B: if the pool isn't full, open
# a new connection
cx = await self._opener(self.address)
else:
# Plan C: wait for more capacity to become
# available, then try again
log.debug("Joining waiting list")
await self._waiting_list.join()
else:
cx = await self._sanitize(cx, force_reset=force_reset)
self._in_use_list.append(cx)
return cx
async def release(self, cx, *, force_reset=False):
""" Release a Bolt connection, putting it back into the pool
if the connection is healthy and the pool is not already at
capacity.
:param cx: the connection to release
:param force_reset: if true, the connection will be forcibly
reset before being released back into the pool; if false,
this will only occur if the connection is not already in a
clean state
:raise ValueError: if the connection is not currently in use,
or if it does not belong to this pool
"""
log.debug("Releasing connection %r", cx)
if cx in self._in_use_list:
self._in_use_list.remove(cx)
if self.size < self.max_size:
# If there is spare capacity in the pool, attempt to
# sanitize the connection and return it to the pool.
cx = await self._sanitize(cx, force_reset=force_reset)
if cx:
# Carry on only if sanitation succeeded.
if self.size < self.max_size:
# Check again if there is still capacity.
self._free_list.append(cx)
self._waiting_list.notify()
else:
# Otherwise, close the connection.
await cx.close()
else:
# If the pool is full, simply close the connection.
await cx.close()
elif cx in self._free_list:
raise ValueError("Connection is not in use")
else:
raise ValueError("Connection does not belong to this pool")
async def prune(self):
""" Close all free connections.
"""
await self.__close(self._free_list)
async def close(self):
""" Close all connections immediately.
This does not permanently disable the connection pool, it
merely shuts down all open connections, including those in
        use. Depending on the application, it may be perfectly
        acceptable to re-acquire connections after pool closure,
        which will have the implicit effect of reopening the pool.
To close gracefully, allowing work in progress to continue
until connections are released, use the following sequence
instead:
pool.max_size = 0
pool.prune()
This will force all future connection acquisitions onto the
waiting list, and released connections will be closed instead
of being returned to the pool.
"""
await self.prune()
await self.__close(self._in_use_list)
async def __close(self, connections):
""" Close all connections in the given list.
"""
closers = deque()
while True:
try:
cx = connections.popleft()
except IndexError:
break
else:
closers.append(cx.close())
if closers:
await wait(closers, loop=self._loop)
class Neo4jPool:
""" Connection pool with routing table.
"""
@classmethod
async def open(cls, *addresses, auth=None, routing_context=None, loop=None, **config):
pool_config = PoolConfig.consume(config)
def opener(addr):
return Bolt.open(addr, auth=auth, **pool_config)
        obj = cls(loop, opener, pool_config, addresses, routing_context)
# TODO: get initial routing table and construct
await obj._ensure_routing_table_is_fresh()
return obj
def __init__(self, loop, opener, config, addresses, routing_context):
if loop is None:
self._loop = get_event_loop()
else:
self._loop = loop
self._opener = opener
self._config = config
self._pools = {}
self._missing_writer = False
self._refresh_lock = Lock(loop=self._loop)
self._routing_context = routing_context
self._max_size_per_host = config.max_size
self._initial_routers = addresses
self._routing_table = RoutingTable(addresses)
self._activate_new_pools_in(self._routing_table)
def _activate_new_pools_in(self, routing_table):
""" Add pools for addresses that exist in the given routing
table but which don't already have pools.
"""
for address in routing_table.servers():
if address not in self._pools:
self._pools[address] = BoltPool(self._loop, self._opener, self._config, address)
async def _deactivate_pools_not_in(self, routing_table):
""" Deactivate any pools that aren't represented in the given
routing table.
"""
for address in self._pools:
if address not in routing_table:
await self._deactivate(address)
async def _get_routing_table_from(self, *routers):
""" Try to update routing tables with the given routers.
:return: True if the routing table is successfully updated,
otherwise False
"""
log.debug("Attempting to update routing table from "
"{}".format(", ".join(map(repr, routers))))
for router in routers:
pool = self._pools[router]
cx = await pool.acquire()
try:
new_routing_table = await cx.get_routing_table(self._routing_context)
except BoltError:
await self._deactivate(router)
else:
num_routers = len(new_routing_table.routers)
num_readers = len(new_routing_table.readers)
num_writers = len(new_routing_table.writers)
                # If no writers are available, this likely indicates a temporary
                # state such as a leader switch, so we should not signal an error.
                # Instead, flag that we are reading in the absence of a writer.
self._missing_writer = (num_writers == 0)
# No routers
if num_routers == 0:
continue
# No readers
if num_readers == 0:
continue
log.debug("Successfully updated routing table from "
"{!r} ({!r})".format(router, self._routing_table))
return new_routing_table
finally:
await pool.release(cx)
return None
async def _get_routing_table(self):
""" Update the routing table from the first router able to provide
valid routing information.
"""
# copied because it can be modified
existing_routers = list(self._routing_table.routers)
has_tried_initial_routers = False
if self._missing_writer:
has_tried_initial_routers = True
            rt = await self._get_routing_table_from(*self._initial_routers)
if rt:
return rt
rt = await self._get_routing_table_from(*existing_routers)
if rt:
return rt
        if not has_tried_initial_routers and any(
                r not in existing_routers for r in self._initial_routers):
            rt = await self._get_routing_table_from(*self._initial_routers)
if rt:
return rt
# None of the routers have been successful, so just fail
log.error("Unable to retrieve routing information")
raise Neo4jAvailabilityError("Unable to retrieve routing information")
async def _ensure_routing_table_is_fresh(self, readonly=False):
""" Update the routing table if stale.
This method performs two freshness checks, before and after acquiring
the refresh lock. If the routing table is already fresh on entry, the
method exits immediately; otherwise, the refresh lock is acquired and
the second freshness check that follows determines whether an update
is still required.
"""
if self._routing_table.is_fresh(readonly=readonly):
return
async with self._refresh_lock:
if self._routing_table.is_fresh(readonly=readonly):
if readonly:
# if reader is fresh but writers are not, then
# we are reading in absence of writer
self._missing_writer = not self._routing_table.is_fresh(readonly=False)
else:
rt = await self._get_routing_table()
self._activate_new_pools_in(rt)
self._routing_table.update(rt)
await self._deactivate_pools_not_in(rt)
async def _select_pool(self, readonly=False):
""" Selects the pool with the fewest in-use connections.
"""
await self._ensure_routing_table_is_fresh(readonly=readonly)
if readonly:
addresses = self._routing_table.readers
else:
addresses = self._routing_table.writers
pools = [pool for address, pool in self._pools.items() if address in addresses]
pools_by_usage = {}
for pool in pools:
pools_by_usage.setdefault(pool.in_use, []).append(pool)
if not pools_by_usage:
raise Neo4jAvailabilityError("No {} service currently "
"available".format("read" if readonly else "write"))
return choice(pools_by_usage[min(pools_by_usage)])
async def acquire(self, *, readonly=False, force_reset=False):
""" Acquire a connection to a server that can satisfy a set of parameters.
:param readonly: true if a readonly connection is required,
otherwise false
:param force_reset:
"""
while True:
pool = await self._select_pool(readonly=readonly)
try:
cx = await pool.acquire(force_reset=force_reset)
except BoltError:
await self._deactivate(pool.address)
else:
if not readonly:
# If we're not acquiring a connection as
# readonly, then intercept NotALeader and
# ForbiddenOnReadOnlyDatabase errors to
# invalidate the routing table.
from neo4j.errors import (
NotALeader,
ForbiddenOnReadOnlyDatabase,
)
def handler(failure):
""" Invalidate the routing table before raising the failure.
"""
log.debug("[#0000] C: <ROUTING> Invalidating routing table")
self._routing_table.ttl = 0
raise failure
cx.set_failure_handler(NotALeader, handler)
cx.set_failure_handler(ForbiddenOnReadOnlyDatabase, handler)
return cx
async def release(self, connection, *, force_reset=False):
""" Release a connection back into the pool.
This method is thread safe.
"""
for pool in self._pools.values():
try:
await pool.release(connection, force_reset=force_reset)
except ValueError:
pass
else:
# Unhook any custom error handling and exit.
from neo4j.errors import (
NotALeader,
ForbiddenOnReadOnlyDatabase,
)
connection.del_failure_handler(NotALeader)
connection.del_failure_handler(ForbiddenOnReadOnlyDatabase)
break
else:
raise ValueError("Connection does not belong to this pool")
async def _deactivate(self, address):
""" Deactivate an address from the connection pool,
if present, remove from the routing table and also closing
all idle connections to that address.
"""
log.debug("[#0000] C: <ROUTING> Deactivating address %r", address)
# We use `discard` instead of `remove` here since the former
# will not fail if the address has already been removed.
self._routing_table.routers.discard(address)
self._routing_table.readers.discard(address)
self._routing_table.writers.discard(address)
log.debug("[#0000] C: <ROUTING> table=%r", self._routing_table)
try:
pool = self._pools.pop(address)
except KeyError:
pass # assume the address has already been removed
else:
pool.max_size = 0
await pool.prune()
async def close(self, force=False):
""" Close all connections and empty the pool. If forced, in-use
connections will be closed immediately; if not, they will
remain open until released.
"""
pools = dict(self._pools)
self._pools.clear()
for address, pool in pools.items():
if force:
await pool.close()
else:
pool.max_size = 0
await pool.prune()
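# Illustrative usage of Neo4jPool (address and credentials are assumptions,
# not part of this module):
#
#     pool = await Neo4jPool.open(("localhost", 7687), auth=("neo4j", "password"))
#     cx = await pool.acquire(readonly=True)
#     try:
#         ...  # run queries over cx
#     finally:
#         await pool.release(cx)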
class Neo4j:
# The default router address list to use if no addresses are specified.
default_router_addresses = Address.parse_list(":7687 :17601 :17687")
# TODO
# @classmethod
# async def open(cls, *addresses, auth=None, security=False, protocol_version=None, loop=None):
# opener = Bolt.opener(auth=auth, security=security, protocol_version=protocol_version)
# router_addresses = Address.parse_list(" ".join(addresses), default_port=7687)
# return cls(opener, router_addresses, loop=loop)
#
# def __init__(self, opener, router_addresses, loop=None):
# self._routers = Neo4jPool(opener, router_addresses or self.default_router_addresses)
# self._writers = Neo4jPool(opener)
# self._readers = Neo4jPool(opener)
# self._routing_table = None
#
# @property
# def routing_table(self):
# return self._routing_table
#
# async def update_routing_table(self):
# cx = await self._routers.acquire()
# try:
# result = await cx.run("CALL dbms.cluster.routing.getRoutingTable($context)", {"context": {}})
# record = await result.single()
# self._routing_table = RoutingTable.parse_routing_info([record]) # TODO: handle ValueError?
# return self._routing_table
# finally:
# self._routers.release(cx)
# async def main():
# from neo4j.debug import watch; watch("neo4j")
# neo4j = await Neo4j.open(":17601 :17602 :17603", auth=("neo4j", "password"))
# await neo4j.update_routing_table()
# print(neo4j.routing_table)
#
#
# if __name__ == "__main__":
# run(main())
| [((1531, 1550), 'logging.getLogger', 'getLogger', (['__name__'], {}), '(__name__)\n', (1540, 1550), False, 'from logging import getLogger\n'), ((35280, 35321), 'neo4j.addressing.Address.parse_list', 'Address.parse_list', (['""":7687 :17601 :17687"""'], {}), "(':7687 :17601 :17687')\n", (35298, 35321), False, 'from neo4j.addressing import Address\n'), ((5412, 5428), 'neo4j.addressing.Address', 'Address', (['address'], {}), '(address)\n', (5419, 5428), False, 'from neo4j.addressing import Address\n'), ((5507, 5533), 'neo4j.conf.PoolConfig.consume', 'PoolConfig.consume', (['config'], {}), '(config)\n', (5525, 5533), False, 'from neo4j.conf import Config, PoolConfig\n'), ((10233, 10247), 'time.perf_counter', 'perf_counter', ([], {}), '()\n', (10245, 10247), False, 'from time import perf_counter\n'), ((10316, 10364), 'neo4j.aio._mixins.Addressable.set_transport', 'Addressable.set_transport', (['obj', 'writer.transport'], {}), '(obj, writer.transport)\n', (10341, 10364), False, 'from neo4j.aio._mixins import Addressable, Breakable\n'), ((13638, 13680), 'neo4j.aio._mixins.Addressable.set_transport', 'Addressable.set_transport', (['self', 'transport'], {}), '(self, transport)\n', (13663, 13680), False, 'from neo4j.aio._mixins import Addressable, Breakable\n'), ((13689, 13732), 'asyncio.StreamReader.set_transport', 'StreamReader.set_transport', (['self', 'transport'], {}), '(self, transport)\n', (13715, 13732), False, 'from asyncio import IncompleteReadError, Lock, StreamReader, StreamReaderProtocol, StreamWriter, get_event_loop, wait\n'), ((14905, 14952), 'neo4j.aio._mixins.Addressable.set_transport', 'Addressable.set_transport', (['self', 'self.transport'], {}), '(self, self.transport)\n', (14930, 14952), False, 'from neo4j.aio._mixins import Addressable, Breakable\n'), ((17131, 17157), 'neo4j.conf.PoolConfig.consume', 'PoolConfig.consume', (['config'], {}), '(config)\n', (17149, 17157), False, 'from neo4j.conf import Config, PoolConfig\n'), ((17695, 17711), 'neo4j.addressing.Address', 'Address', (['address'], {}), '(address)\n', (17702, 17711), False, 'from neo4j.addressing import Address\n'), ((17820, 17827), 'collections.deque', 'deque', ([], {}), '()\n', (17825, 17827), False, 'from collections import deque\n'), ((17854, 17861), 'collections.deque', 'deque', ([], {}), '()\n', (17859, 17861), False, 'from collections import deque\n'), ((17891, 17919), 'neo4j.aio._collections.WaitingList', 'WaitingList', ([], {'loop': 'self._loop'}), '(loop=self._loop)\n', (17902, 17919), False, 'from neo4j.aio._collections import WaitingList\n'), ((24871, 24878), 'collections.deque', 'deque', ([], {}), '()\n', (24876, 24878), False, 'from collections import deque\n'), ((25344, 25370), 'neo4j.conf.PoolConfig.consume', 'PoolConfig.consume', (['config'], {}), '(config)\n', (25362, 25370), False, 'from neo4j.conf import Config, PoolConfig\n'), ((25991, 26012), 'asyncio.Lock', 'Lock', ([], {'loop': 'self._loop'}), '(loop=self._loop)\n', (25995, 26012), False, 'from asyncio import IncompleteReadError, Lock, StreamReader, StreamReaderProtocol, StreamWriter, get_event_loop, wait\n'), ((26183, 26206), 'neo4j.routing.RoutingTable', 'RoutingTable', (['addresses'], {}), '(addresses)\n', (26195, 26206), False, 'from neo4j.routing import RoutingTable\n'), ((29513, 29577), 'neo4j.errors.Neo4jAvailabilityError', 'Neo4jAvailabilityError', (['"""Unable to retrieve routing information"""'], {}), "('Unable to retrieve routing information')\n", (29535, 29577), False, 'from neo4j.errors import BoltError, BoltConnectionError, 
BoltSecurityError, BoltConnectionBroken, BoltHandshakeError, Neo4jAvailabilityError\n'), ((5473, 5489), 'asyncio.get_event_loop', 'get_event_loop', ([], {}), '()\n', (5487, 5489), False, 'from asyncio import IncompleteReadError, Lock, StreamReader, StreamReaderProtocol, StreamWriter, get_event_loop, wait\n'), ((7106, 7145), 'asyncio.StreamReaderProtocol', 'StreamReaderProtocol', (['reader'], {'loop': 'loop'}), '(reader, loop=loop)\n', (7126, 7145), False, 'from asyncio import IncompleteReadError, Lock, StreamReader, StreamReaderProtocol, StreamWriter, get_event_loop, wait\n'), ((9497, 9530), 'neo4j.api.Version.from_bytes', 'Version.from_bytes', (['response_data'], {}), '(response_data)\n', (9515, 9530), False, 'from neo4j.api import Version\n'), ((10824, 10838), 'time.perf_counter', 'perf_counter', ([], {}), '()\n', (10836, 10838), False, 'from time import perf_counter\n'), ((17580, 17596), 'asyncio.get_event_loop', 'get_event_loop', ([], {}), '()\n', (17594, 17596), False, 'from asyncio import IncompleteReadError, Lock, StreamReader, StreamReaderProtocol, StreamWriter, get_event_loop, wait\n'), ((25779, 25795), 'asyncio.get_event_loop', 'get_event_loop', ([], {}), '()\n', (25793, 25795), False, 'from asyncio import IncompleteReadError, Lock, StreamReader, StreamReaderProtocol, StreamWriter, get_event_loop, wait\n'), ((7484, 7553), 'neo4j.errors.BoltSecurityError', 'BoltSecurityError', (['"""Failed to establish a secure connection"""', 'address'], {}), "('Failed to establish a secure connection', address)\n", (7501, 7553), False, 'from neo4j.errors import BoltError, BoltConnectionError, BoltSecurityError, BoltConnectionBroken, BoltHandshakeError, Neo4jAvailabilityError\n'), ((7734, 7798), 'neo4j.errors.BoltConnectionError', 'BoltConnectionError', (['"""Failed to establish a connection"""', 'address'], {}), "('Failed to establish a connection', address)\n", (7753, 7798), False, 'from neo4j.errors import BoltError, BoltConnectionError, BoltSecurityError, BoltConnectionBroken, BoltHandshakeError, Neo4jAvailabilityError\n'), ((9610, 9729), 'neo4j.errors.BoltHandshakeError', 'BoltHandshakeError', (["('Unexpected handshake response %r' % response_data)", 'remote_address', 'request_data', 'response_data'], {}), "('Unexpected handshake response %r' % response_data,\n remote_address, request_data, response_data)\n", (9628, 9729), False, 'from neo4j.errors import BoltError, BoltConnectionError, BoltSecurityError, BoltConnectionBroken, BoltHandshakeError, Neo4jAvailabilityError\n'), ((9954, 10058), 'neo4j.errors.BoltHandshakeError', 'BoltHandshakeError', (['"""Unsupported Bolt protocol version"""', 'remote_address', 'request_data', 'response_data'], {}), "('Unsupported Bolt protocol version', remote_address,\n request_data, response_data)\n", (9972, 10058), False, 'from neo4j.errors import BoltError, BoltConnectionError, BoltSecurityError, BoltConnectionBroken, BoltHandshakeError, Neo4jAvailabilityError\n'), ((14343, 14369), 'neo4j.aio._mixins.Breakable.set_broken', 'Breakable.set_broken', (['self'], {}), '(self)\n', (14363, 14369), False, 'from neo4j.aio._mixins import Addressable, Breakable\n'), ((14388, 14438), 'neo4j.errors.BoltConnectionBroken', 'BoltConnectionBroken', (['message', 'self.remote_address'], {}), '(message, self.remote_address)\n', (14408, 14438), False, 'from neo4j.errors import BoltError, BoltConnectionError, BoltSecurityError, BoltConnectionBroken, BoltHandshakeError, Neo4jAvailabilityError\n'), ((14573, 14599), 'neo4j.aio._mixins.Breakable.set_broken', 'Breakable.set_broken', 
(['self'], {}), '(self)\n', (14593, 14599), False, 'from neo4j.aio._mixins import Addressable, Breakable\n'), ((14618, 14682), 'neo4j.errors.BoltConnectionBroken', 'BoltConnectionBroken', (['"""Network read failed"""', 'self.remote_address'], {}), "('Network read failed', self.remote_address)\n", (14638, 14682), False, 'from neo4j.errors import BoltError, BoltConnectionError, BoltSecurityError, BoltConnectionBroken, BoltHandshakeError, Neo4jAvailabilityError\n'), ((15157, 15183), 'neo4j.aio._mixins.Breakable.set_broken', 'Breakable.set_broken', (['self'], {}), '(self)\n', (15177, 15183), False, 'from neo4j.aio._mixins import Addressable, Breakable\n'), ((15202, 15267), 'neo4j.errors.BoltConnectionBroken', 'BoltConnectionBroken', (['"""Network write failed"""', 'self.remote_address'], {}), "('Network write failed', self.remote_address)\n", (15222, 15267), False, 'from neo4j.errors import BoltError, BoltConnectionError, BoltSecurityError, BoltConnectionBroken, BoltHandshakeError, Neo4jAvailabilityError\n'), ((25111, 25141), 'asyncio.wait', 'wait', (['closers'], {'loop': 'self._loop'}), '(closers, loop=self._loop)\n', (25115, 25141), False, 'from asyncio import IncompleteReadError, Lock, StreamReader, StreamReaderProtocol, StreamWriter, get_event_loop, wait\n'), ((7445, 7464), 'os.strerror', 'strerror', (['err.errno'], {}), '(err.errno)\n', (7453, 7464), False, 'from os import strerror\n'), ((7695, 7714), 'os.strerror', 'strerror', (['err.errno'], {}), '(err.errno)\n', (7703, 7714), False, 'from os import strerror\n'), ((14540, 14559), 'os.strerror', 'strerror', (['err.errno'], {}), '(err.errno)\n', (14548, 14559), False, 'from os import strerror\n'), ((16043, 16053), 'asyncio.sleep', 'sleep', (['(0.1)'], {}), '(0.1)\n', (16048, 16053), False, 'from asyncio import sleep\n')] |
bubriks/feature-store-api | python/setup.py | fa286f257b87a09c081e86811b853b3e564ce197 | import os
import imp
from setuptools import setup, find_packages
__version__ = imp.load_source(
"hsfs.version", os.path.join("hsfs", "version.py")
).__version__
def read(fname):
return open(os.path.join(os.path.dirname(__file__), fname)).read()
setup(
name="hsfs",
version=__version__,
install_requires=[
"pyhumps==1.6.1",
"requests",
"furl",
"boto3",
"pandas",
"numpy",
"pyjks",
"mock",
"avro==1.10.2",
"sqlalchemy",
"PyMySQL",
],
extras_require={
"dev": [
"pytest",
"flake8",
"black"],
"docs": [
"mkdocs==1.1.2",
"mkdocs-material==6.2.2",
"mike==0.5.5",
"sphinx==3.5.4",
"keras_autodoc @ git+https://[email protected]/moritzmeister/keras-autodoc@split-tags-properties",
"markdown-include"],
"hive": ["pyhopshive[thrift]"]
},
author="Logical Clocks AB",
author_email="[email protected]",
description="HSFS: An environment independent client to interact with the Hopsworks Featurestore",
license="Apache License 2.0",
keywords="Hopsworks, Feature Store, Spark, Machine Learning, MLOps, DataOps",
url="https://github.com/logicalclocks/feature-store-api",
download_url="https://github.com/logicalclocks/feature-store-api/releases/tag/"
+ __version__,
packages=find_packages(),
long_description=read("../README.md"),
long_description_content_type="text/markdown",
classifiers=[
"Development Status :: 5 - Production/Stable",
"Topic :: Utilities",
"License :: OSI Approved :: Apache Software License",
"Programming Language :: Python :: 3",
"Intended Audience :: Developers",
],
)
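# Illustrative local install from the directory containing this setup.py; the
# extras named below are the ones declared in extras_require above:
#     pip install -e ".[dev,docs]"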
| [((118, 152), 'os.path.join', 'os.path.join', (['"""hsfs"""', '"""version.py"""'], {}), "('hsfs', 'version.py')\n", (130, 152), False, 'import os\n'), ((1456, 1471), 'setuptools.find_packages', 'find_packages', ([], {}), '()\n', (1469, 1471), False, 'from setuptools import setup, find_packages\n'), ((215, 240), 'os.path.dirname', 'os.path.dirname', (['__file__'], {}), '(__file__)\n', (230, 240), False, 'import os\n')] |
kfrime/yonder | src/server_py3/aps/src/wes/api/v1/users/__init__.py | cd2f491c24f8552aeadd6ee48c601e1194a2e082 | #!/usr/bin/env python3
from . import signup, signin, signout, update, info, detail
| [] |
jamesmcclain/pytorch-multi-class-focal-loss | hubconf.py | de74657769e07dc40be838a6277dea269bfddad0 | # Optional list of dependencies required by the package
dependencies = ['torch']
from focal_loss import FocalLoss, focal_loss
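# Illustrative torch.hub usage; the repository reference is an assumption
# based on this repo's name, and keyword arguments are omitted:
#     import torch
#     loss_fn = torch.hub.load('jamesmcclain/pytorch-multi-class-focal-loss', 'focal_loss')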
| [] |
HongqiangWei/gdal | autotest/ogr/ogr_gpx.py | f7c427926438cc39d31e4459fa6401321f8e62f0 | #!/usr/bin/env python
###############################################################################
# $Id$
#
# Project: GDAL/OGR Test Suite
# Purpose: Test GPX driver functionality.
# Author: Even Rouault <even dot rouault at mines dash paris dot org>
#
###############################################################################
# Copyright (c) 2007, Even Rouault <even dot rouault at mines dash paris dot org>
#
# Permission is hereby granted, free of charge, to any person obtaining a
# copy of this software and associated documentation files (the "Software"),
# to deal in the Software without restriction, including without limitation
# the rights to use, copy, modify, merge, publish, distribute, sublicense,
# and/or sell copies of the Software, and to permit persons to whom the
# Software is furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included
# in all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS
# OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL
# THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
# DEALINGS IN THE SOFTWARE.
###############################################################################
import os
import sys
import string
sys.path.append( '../pymod' )
import gdaltest
import ogrtest
import ogr
import osr
import gdal
def ogr_gpx_init():
gdaltest.gpx_ds = None
try:
gdaltest.gpx_ds = ogr.Open( 'data/test.gpx' )
except:
gdaltest.gpx_ds = None
if gdaltest.gpx_ds is None:
gdaltest.have_gpx = 0
else:
gdaltest.have_gpx = 1
if not gdaltest.have_gpx:
return 'skip'
if gdaltest.gpx_ds.GetLayerCount() != 5:
gdaltest.post_reason( 'wrong number of layers' )
return 'fail'
return 'success'
###############################################################################
# Test waypoints gpx layer.
def ogr_gpx_1():
if not gdaltest.have_gpx:
return 'skip'
if gdaltest.gpx_ds is None:
return 'fail'
lyr = gdaltest.gpx_ds.GetLayerByName( 'waypoints' )
expect = [2, None]
tr = ogrtest.check_features_against_list( lyr, 'ele', expect )
if not tr:
return 'fail'
lyr.ResetReading()
expect = ['waypoint name', None]
tr = ogrtest.check_features_against_list( lyr, 'name', expect )
if not tr:
return 'fail'
lyr.ResetReading()
expect = ['href', None]
tr = ogrtest.check_features_against_list( lyr, 'link1_href', expect )
if not tr:
return 'fail'
lyr.ResetReading()
expect = ['text', None]
tr = ogrtest.check_features_against_list( lyr, 'link1_text', expect )
if not tr:
return 'fail'
lyr.ResetReading()
expect = ['type', None]
tr = ogrtest.check_features_against_list( lyr, 'link1_type', expect )
if not tr:
return 'fail'
lyr.ResetReading()
expect = ['href2', None]
tr = ogrtest.check_features_against_list( lyr, 'link2_href', expect )
if not tr:
return 'fail'
lyr.ResetReading()
expect = ['text2', None]
tr = ogrtest.check_features_against_list( lyr, 'link2_text', expect )
if not tr:
return 'fail'
lyr.ResetReading()
expect = ['type2', None]
tr = ogrtest.check_features_against_list( lyr, 'link2_type', expect )
if not tr:
return 'fail'
lyr.ResetReading()
expect = ['2007/11/25 17:58:00+01', None]
tr = ogrtest.check_features_against_list( lyr, 'time', expect )
if not tr:
return 'fail'
lyr.ResetReading()
feat = lyr.GetNextFeature()
if ogrtest.check_feature_geometry( feat, 'POINT (1 0)',
max_error = 0.0001 ) != 0:
return 'fail'
feat.Destroy()
feat = lyr.GetNextFeature()
if ogrtest.check_feature_geometry( feat, 'POINT (4 3)',
max_error = 0.0001 ) != 0:
return 'fail'
feat.Destroy()
return 'success'
###############################################################################
# Test routes gpx layer.
def ogr_gpx_2():
if not gdaltest.have_gpx:
return 'skip'
if gdaltest.gpx_ds is None:
return 'fail'
lyr = gdaltest.gpx_ds.GetLayerByName( 'routes' )
lyr.ResetReading()
feat = lyr.GetNextFeature()
if ogrtest.check_feature_geometry( feat, 'LINESTRING (6 5,9 8,12 11)', max_error = 0.0001 ) != 0:
return 'fail'
feat.Destroy()
feat = lyr.GetNextFeature()
if ogrtest.check_feature_geometry( feat, 'LINESTRING EMPTY', max_error = 0.0001 ) != 0:
return 'fail'
feat.Destroy()
return 'success'
###############################################################################
# Test route_points gpx layer.
def ogr_gpx_3():
if not gdaltest.have_gpx:
return 'skip'
if gdaltest.gpx_ds is None:
return 'fail'
lyr = gdaltest.gpx_ds.GetLayerByName( 'route_points' )
expect = ['route point name', None, None]
tr = ogrtest.check_features_against_list( lyr, 'name', expect )
lyr.ResetReading()
feat = lyr.GetNextFeature()
if ogrtest.check_feature_geometry( feat, 'POINT (6 5)', max_error = 0.0001 ) != 0:
return 'fail'
feat.Destroy()
return 'success'
###############################################################################
# Test tracks gpx layer.
def ogr_gpx_4():
if not gdaltest.have_gpx:
return 'skip'
if gdaltest.gpx_ds is None:
return 'fail'
lyr = gdaltest.gpx_ds.GetLayerByName( 'tracks' )
lyr.ResetReading()
feat = lyr.GetNextFeature()
if ogrtest.check_feature_geometry( feat, 'MULTILINESTRING ((15 14,18 17),(21 20,24 23))', max_error = 0.0001 ) != 0:
return 'fail'
feat.Destroy()
feat = lyr.GetNextFeature()
if ogrtest.check_feature_geometry( feat, 'MULTILINESTRING EMPTY', max_error = 0.0001 ) != 0:
return 'fail'
feat.Destroy()
feat = lyr.GetNextFeature()
f_geom = feat.GetGeometryRef()
    if f_geom.ExportToWkt() != 'MULTILINESTRING EMPTY':
return 'fail'
feat.Destroy()
return 'success'
###############################################################################
# Test track_points gpx layer.
def ogr_gpx_5():
if not gdaltest.have_gpx:
return 'skip'
if gdaltest.gpx_ds is None:
return 'fail'
lyr = gdaltest.gpx_ds.GetLayerByName( 'track_points' )
expect = ['track point name', None, None, None]
tr = ogrtest.check_features_against_list( lyr, 'name', expect )
lyr.ResetReading()
feat = lyr.GetNextFeature()
if ogrtest.check_feature_geometry( feat, 'POINT (15 14)', max_error = 0.0001 ) != 0:
return 'fail'
feat.Destroy()
return 'success'
###############################################################################
# Copy our small gpx file to a new gpx file.
def ogr_gpx_6():
if not gdaltest.have_gpx:
return 'skip'
if gdaltest.gpx_ds is None:
return 'skip'
try:
gdal.PushErrorHandler( 'CPLQuietErrorHandler' )
ogr.GetDriverByName('CSV').DeleteDataSource( 'tmp/gpx.gpx' )
gdal.PopErrorHandler()
except:
pass
co_opts = [ ]
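    # The same copy pattern is used for the waypoints, routes and tracks layers
    # below: reuse a single destination feature, SetFrom() each source feature
    # and CreateFeature() it into the corresponding layer of the new datasource.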
# Duplicate waypoints
gpx_lyr = gdaltest.gpx_ds.GetLayerByName( 'waypoints' )
gpx2_ds = ogr.GetDriverByName('GPX').CreateDataSource('tmp/gpx.gpx',
options = co_opts )
gpx2_lyr = gpx2_ds.CreateLayer( 'waypoints', geom_type = ogr.wkbPoint )
gpx_lyr.ResetReading()
dst_feat = ogr.Feature( feature_def = gpx2_lyr.GetLayerDefn() )
feat = gpx_lyr.GetNextFeature()
while feat is not None:
dst_feat.SetFrom( feat )
if gpx2_lyr.CreateFeature( dst_feat ) != 0:
gdaltest.post_reason('CreateFeature failed.')
return 'fail'
feat = gpx_lyr.GetNextFeature()
dst_feat.Destroy()
# Duplicate routes
gpx_lyr = gdaltest.gpx_ds.GetLayerByName( 'routes' )
gpx2_lyr = gpx2_ds.CreateLayer( 'routes', geom_type = ogr.wkbLineString )
gpx_lyr.ResetReading()
dst_feat = ogr.Feature( feature_def = gpx2_lyr.GetLayerDefn() )
feat = gpx_lyr.GetNextFeature()
while feat is not None:
dst_feat.SetFrom( feat )
if gpx2_lyr.CreateFeature( dst_feat ) != 0:
gdaltest.post_reason('CreateFeature failed.')
return 'fail'
feat = gpx_lyr.GetNextFeature()
dst_feat.Destroy()
# Duplicate tracks
gpx_lyr = gdaltest.gpx_ds.GetLayerByName( 'tracks' )
gpx2_lyr = gpx2_ds.CreateLayer( 'tracks', geom_type = ogr.wkbMultiLineString )
gpx_lyr.ResetReading()
dst_feat = ogr.Feature( feature_def = gpx2_lyr.GetLayerDefn() )
feat = gpx_lyr.GetNextFeature()
while feat is not None:
dst_feat.SetFrom( feat )
if gpx2_lyr.CreateFeature( dst_feat ) != 0:
gdaltest.post_reason('CreateFeature failed.')
return 'fail'
feat = gpx_lyr.GetNextFeature()
dst_feat.Destroy()
gpx_lyr = None
gpx2_lyr = None
# Explicit destroy is required for old-gen python bindings
gpx2_ds.Destroy()
gdaltest.gpx_ds.Destroy()
gdaltest.gpx_ds = ogr.Open( 'tmp/gpx.gpx' )
return 'success'
###############################################################################
# Output extra fields as <extensions>.
def ogr_gpx_7():
if not gdaltest.have_gpx:
return 'skip'
if gdaltest.gpx_ds is not None:
gdaltest.gpx_ds.Destroy()
gdaltest.gpx_ds = None
bna_ds = ogr.Open( 'data/bna_for_gpx.bna' )
try:
os.remove ('tmp/gpx.gpx')
except:
pass
co_opts = [ 'GPX_USE_EXTENSIONS=yes' ]
# Duplicate waypoints
bna_lyr = bna_ds.GetLayerByName( 'bna_for_gpx_points' )
gdaltest.gpx_ds = ogr.GetDriverByName('GPX').CreateDataSource('tmp/gpx.gpx',
options = co_opts )
gpx_lyr = gdaltest.gpx_ds.CreateLayer( 'waypoints', geom_type = ogr.wkbPoint )
bna_lyr.ResetReading()
for i in range(bna_lyr.GetLayerDefn().GetFieldCount()):
field_defn = bna_lyr.GetLayerDefn().GetFieldDefn(i)
gpx_lyr.CreateField( field_defn )
dst_feat = ogr.Feature( feature_def = gpx_lyr.GetLayerDefn() )
feat = bna_lyr.GetNextFeature()
while feat is not None:
dst_feat.SetFrom( feat )
if gpx_lyr.CreateFeature( dst_feat ) != 0:
gdaltest.post_reason('CreateFeature failed.')
return 'fail'
feat = bna_lyr.GetNextFeature()
dst_feat.Destroy()
bna_ds.Destroy()
gdaltest.gpx_ds.Destroy()
gdaltest.gpx_ds = None
    # Now check that the extension fields have been written correctly
gdaltest.gpx_ds = ogr.Open('tmp/gpx.gpx')
gpx_lyr = gdaltest.gpx_ds.GetLayerByName( 'waypoints' )
expect = ['PID1', 'PID2']
tr = ogrtest.check_features_against_list( gpx_lyr, 'ogr_Primary_ID', expect )
if not tr:
return 'fail'
gpx_lyr.ResetReading()
expect = ['SID1', 'SID2']
tr = ogrtest.check_features_against_list( gpx_lyr, 'ogr_Secondary_ID', expect )
if not tr:
return 'fail'
gpx_lyr.ResetReading()
expect = ['TID1', None]
tr = ogrtest.check_features_against_list( gpx_lyr, 'ogr_Third_ID', expect )
if not tr:
return 'fail'
return 'success'
###############################################################################
# Write route_points and track_points layers (grouped by route_fid / track_fid)
# and compare the output against a reference file.
def ogr_gpx_8():
if not gdaltest.have_gpx:
return 'skip'
if gdaltest.gpx_ds is not None:
gdaltest.gpx_ds.Destroy()
gdaltest.gpx_ds = None
try:
os.remove ('tmp/gpx.gpx')
except:
pass
gdaltest.gpx_ds = ogr.GetDriverByName('GPX').CreateDataSource('tmp/gpx.gpx', options = ['LINEFORMAT=LF'])
lyr = gdaltest.gpx_ds.CreateLayer( 'route_points', geom_type = ogr.wkbPoint )
feat = ogr.Feature(lyr.GetLayerDefn())
geom = ogr.CreateGeometryFromWkt('POINT(2 49)')
feat.SetField('route_name', 'ROUTE_NAME')
feat.SetField('route_fid', 0)
feat.SetGeometry(geom)
lyr.CreateFeature(feat)
feat = ogr.Feature(lyr.GetLayerDefn())
geom = ogr.CreateGeometryFromWkt('POINT(3 50)')
feat.SetField('route_name', '--ignored--')
feat.SetField('route_fid', 0)
feat.SetGeometry(geom)
lyr.CreateFeature(feat)
feat = ogr.Feature(lyr.GetLayerDefn())
geom = ogr.CreateGeometryFromWkt('POINT(3 51)')
feat.SetField('route_name', 'ROUTE_NAME2')
feat.SetField('route_fid', 1)
feat.SetGeometry(geom)
lyr.CreateFeature(feat)
feat = ogr.Feature(lyr.GetLayerDefn())
geom = ogr.CreateGeometryFromWkt('POINT(3 49)')
feat.SetField('route_fid', 1)
feat.SetGeometry(geom)
lyr.CreateFeature(feat)
lyr = gdaltest.gpx_ds.CreateLayer( 'track_points', geom_type = ogr.wkbPoint )
feat = ogr.Feature(lyr.GetLayerDefn())
geom = ogr.CreateGeometryFromWkt('POINT(2 49)')
feat.SetField('track_name', 'TRACK_NAME')
feat.SetField('track_fid', 0)
feat.SetField('track_seg_id', 0)
feat.SetGeometry(geom)
lyr.CreateFeature(feat)
feat = ogr.Feature(lyr.GetLayerDefn())
geom = ogr.CreateGeometryFromWkt('POINT(3 50)')
feat.SetField('track_name', '--ignored--')
feat.SetField('track_fid', 0)
feat.SetField('track_seg_id', 0)
feat.SetGeometry(geom)
lyr.CreateFeature(feat)
feat = ogr.Feature(lyr.GetLayerDefn())
geom = ogr.CreateGeometryFromWkt('POINT(3 51)')
feat.SetField('track_fid', 0)
feat.SetField('track_seg_id', 1)
feat.SetGeometry(geom)
lyr.CreateFeature(feat)
feat = ogr.Feature(lyr.GetLayerDefn())
geom = ogr.CreateGeometryFromWkt('POINT(3 49)')
feat.SetField('track_name', 'TRACK_NAME2')
feat.SetField('track_fid', 1)
feat.SetField('track_seg_id', 0)
feat.SetGeometry(geom)
lyr.CreateFeature(feat)
gdaltest.gpx_ds.Destroy()
gdaltest.gpx_ds = None
f = open('tmp/gpx.gpx','rb')
f_ref = open('data/ogr_gpx_8_ref.txt','rb')
f_content = f.read()
f_ref_content = f_ref.read()
f.close()
f_ref.close()
if f_content.find(f_ref_content) == -1:
gdaltest.post_reason('did not get expected result')
print(f_content)
return 'fail'
return 'success'
###############################################################################
#
def ogr_gpx_cleanup():
if gdaltest.gpx_ds is not None:
gdaltest.gpx_ds.Destroy()
gdaltest.gpx_ds = None
try:
os.remove ('tmp/gpx.gpx')
except:
pass
return 'success'
gdaltest_list = [
ogr_gpx_init,
ogr_gpx_1,
ogr_gpx_2,
ogr_gpx_3,
ogr_gpx_4,
ogr_gpx_5,
ogr_gpx_6,
    # Rerun tests 1, 2 and 4 against the generated tmp/gpx.gpx
ogr_gpx_1,
ogr_gpx_2,
ogr_gpx_4,
ogr_gpx_7,
ogr_gpx_8,
ogr_gpx_cleanup ]
if __name__ == '__main__':
gdaltest.setup_run( 'ogr_gpx' )
gdaltest.run_tests( gdaltest_list )
gdaltest.summarize()
max-stack/MWP-SS-Metrics | mwp_solver/models/sausolver.py | 01268f2d6da716596216b04de4197e345b96c219 | # Code Taken from https://github.com/LYH-YF/MWPToolkit
# -*- encoding: utf-8 -*-
# @Author: Yihuai Lan
# @Time: 2021/08/21 04:59:55
# @File: sausolver.py
import random
import torch
from torch import nn
import copy
from module.Encoder.rnn_encoder import BasicRNNEncoder
from module.Embedder.basic_embedder import BasicEmbedder
from module.Decoder.tree_decoder import SARTreeDecoder
from module.Layer.tree_layers import NodeGenerater, SubTreeMerger, TreeNode, TreeEmbedding
from module.Layer.tree_layers import Prediction, GenerateNode, Merge, SemanticAlignmentModule
from module.Strategy.beam_search import TreeBeam
from loss.masked_cross_entropy_loss import MaskedCrossEntropyLoss, masked_cross_entropy
from loss.mse_loss import MSELoss
from utils.utils import copy_list
from utils.enum_type import NumMask, SpecialTokens
class SAUSolver(nn.Module):
"""
Reference:
Qin et al. "Semantically-Aligned Universal Tree-Structured Solver for Math Word Problems" in EMNLP 2020.
"""
def __init__(self, config, dataset):
super(SAUSolver, self).__init__()
# parameter
self.hidden_size = config["hidden_size"]
self.device = config["device"]
self.USE_CUDA = True if self.device == torch.device('cuda') else False
self.beam_size = config['beam_size']
self.max_out_len = config['max_output_len']
self.embedding_size = config["embedding_size"]
self.dropout_ratio = config["dropout_ratio"]
self.num_layers = config["num_layers"]
self.rnn_cell_type = config["rnn_cell_type"]
self.loss_weight = config['loss_weight']
self.vocab_size = len(dataset.in_idx2word)
self.out_symbol2idx = dataset.out_symbol2idx
self.out_idx2symbol = dataset.out_idx2symbol
generate_list = dataset.generate_list
self.generate_nums = [self.out_symbol2idx[symbol] for symbol in generate_list]
self.mask_list = NumMask.number
self.num_start = dataset.num_start
self.operator_nums = dataset.operator_nums
self.generate_size = len(generate_list)
self.unk_token = self.out_symbol2idx[SpecialTokens.UNK_TOKEN]
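        # SOS / EOS / PAD may be missing from the output vocabulary depending on the
        # equation representation; fall back to None when a token is not present.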
try:
self.out_sos_token = self.out_symbol2idx[SpecialTokens.SOS_TOKEN]
except:
self.out_sos_token = None
try:
self.out_eos_token = self.out_symbol2idx[SpecialTokens.EOS_TOKEN]
except:
self.out_eos_token = None
try:
self.out_pad_token = self.out_symbol2idx[SpecialTokens.PAD_TOKEN]
except:
self.out_pad_token = None
# module
self.embedder = BasicEmbedder(self.vocab_size, self.embedding_size, self.dropout_ratio)
# self.t_encoder = BasicRNNEncoder(self.embedding_size, self.hidden_size, self.num_layers, self.rnn_cell_type, self.dropout_ratio)
self.encoder = BasicRNNEncoder(self.embedding_size, self.hidden_size, self.num_layers, self.rnn_cell_type,
self.dropout_ratio, batch_first=False)
#self.decoder = SARTreeDecoder(self.hidden_size, self.operator_nums, self.generate_size, self.dropout_ratio)
self.decoder = Prediction(self.hidden_size,self.operator_nums,self.generate_size,self.dropout_ratio)
self.node_generater = GenerateNode(self.hidden_size, self.operator_nums, self.embedding_size,
self.dropout_ratio)
self.merge = Merge(self.hidden_size, self.embedding_size, self.dropout_ratio)
self.sa = SemanticAlignmentModule(self.hidden_size,self.hidden_size,self.hidden_size)
self.loss1 = MaskedCrossEntropyLoss()
#
def calculate_loss(self, batch_data:dict) -> float:
"""Finish forward-propagating, calculating loss and back-propagation.
:param batch_data: one batch data.
:return: loss value.
batch_data should include keywords 'question', 'ques len', 'equation', 'equ len',
'num stack', 'num size', 'num pos'
"""
seq = torch.tensor(batch_data["question"]).to(self.device)
seq_length = torch.tensor(batch_data["ques len"]).long()
target = torch.tensor(batch_data["equation"]).to(self.device)
target_length = torch.LongTensor(batch_data["equ len"]).to(self.device)
nums_stack = copy.deepcopy(batch_data["num stack"])
num_size = batch_data["num size"]
num_pos = batch_data["num pos"]
generate_nums = self.generate_nums
num_start = self.num_start
# sequence mask for attention
unk = self.unk_token
loss = self.train_tree(seq, seq_length, target, target_length, nums_stack, num_size, generate_nums, num_pos, unk, num_start)
return loss
def model_test(self, batch_data:dict) -> tuple:
"""Model test.
:param batch_data: one batch data.
:return: predicted equation, target equation.
batch_data should include keywords 'question', 'ques len', 'equation',
'num stack', 'num pos', 'num list'
"""
seq = torch.tensor(batch_data["question"]).to(self.device)
seq_length = torch.tensor(batch_data["ques len"]).long()
target = torch.tensor(batch_data["equation"]).to(self.device)
nums_stack = copy.deepcopy(batch_data["num stack"])
num_pos = batch_data["num pos"]
num_list = batch_data['num list']
generate_nums = self.generate_nums
num_start = self.num_start
# sequence mask for attention
all_node_output = self.evaluate_tree(seq, seq_length, generate_nums, num_pos, num_start, self.beam_size,
self.max_out_len)
all_output = self.convert_idx2symbol(all_node_output, num_list[0], copy_list(nums_stack[0]))
targets = self.convert_idx2symbol(target[0], num_list[0], copy_list(nums_stack[0]))
return all_output, targets
def train_tree(self,input_batch, input_length, target_batch, target_length, nums_stack_batch, num_size_batch, generate_nums, num_pos, unk, num_start,
english=False,var_nums=[], batch_first=False):
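        # Teacher-forced training of the tree decoder:
        #   1) encode the question,
        #   2) decode the equation tree top-down, predicting one node per step,
        #   3) collect (encoder, decoder) pairs for completed subtrees via the
        #      semantic alignment module,
        #   4) loss = masked cross-entropy over node predictions
        #             + 0.01 * mean MSE semantic-alignment loss.
        # backward() is called inside; the scalar loss value is returned.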
# sequence mask for attention
seq_mask = []
max_len = max(input_length)
for i in input_length:
seq_mask.append([0 for _ in range(i)] + [1 for _ in range(i, max_len)])
seq_mask = torch.ByteTensor(seq_mask)
num_mask = []
        max_num_size = max(num_size_batch) + len(generate_nums) + len(var_nums)  # max count of number positions + constant numbers + unknown variables
for i in num_size_batch:
d = i + len(generate_nums) + len(var_nums)
num_mask.append([0] * d + [1] * (max_num_size - d))
num_mask = torch.ByteTensor(num_mask) # 用于屏蔽无关数字,防止生成错误的Nx
#unk = output_lang.word2index["UNK"]
# Turn padded arrays into (batch_size x max_len) tensors, transpose into (max_len x batch_size)
input_var = input_batch.transpose(0, 1)
target = target_batch.transpose(0, 1)
padding_hidden = torch.FloatTensor([0.0 for _ in range(self.decoder.hidden_size)]).unsqueeze(0)
batch_size = len(input_length)
if self.USE_CUDA:
input_var = input_var.cuda()
seq_mask = seq_mask.cuda()
padding_hidden = padding_hidden.cuda()
num_mask = num_mask.cuda()
# Zero gradients of both optimizers
# Run words through encoder
#encoder_outputs, problem_output = self.encoder(input_var, input_length)
seq_emb = self.embedder(input_var)
pade_outputs, _ = self.encoder(seq_emb, input_length)
problem_output = pade_outputs[-1, :, :self.hidden_size] + pade_outputs[0, :, self.hidden_size:]
encoder_outputs = pade_outputs[:, :, :self.hidden_size] + pade_outputs[:, :, self.hidden_size:]
# Prepare input and output variables
node_stacks = [[TreeNode(_)] for _ in problem_output.split(1, dim=0)] # root embedding B x 1
max_target_length = max(target_length)
all_node_outputs = []
all_sa_outputs = []
# all_leafs = []
copy_num_len = [len(_) for _ in num_pos]
num_size = max(copy_num_len)
        # Extract the embeddings of the numbers that appear in the problem
all_nums_encoder_outputs = self.get_all_number_encoder_outputs(encoder_outputs, num_pos, batch_size, num_size,
self.encoder.hidden_size)
        embeddings_stacks = [[] for _ in range(batch_size)]  # B x 1, current tree state / subtree embedding / output
left_childs = [None for _ in range(batch_size)] # B x 1
for t in range(max_target_length):
num_score, op, current_embeddings, current_context, current_nums_embeddings = self.decoder(
node_stacks, left_childs, encoder_outputs, all_nums_encoder_outputs, padding_hidden, seq_mask, num_mask)
# all_leafs.append(p_leaf)
outputs = torch.cat((op, num_score), 1)
all_node_outputs.append(outputs)
target_t, generate_input = self.generate_tree_input(target[t].tolist(), outputs, nums_stack_batch, num_start,
unk)
target[t] = target_t
if self.USE_CUDA:
generate_input = generate_input.cuda()
left_child, right_child, node_label = self.node_generater(current_embeddings, generate_input, current_context)
left_childs = []
for idx, l, r, node_stack, i, o in zip(range(batch_size), left_child.split(1), right_child.split(1),
node_stacks, target[t].tolist(), embeddings_stacks):
if len(node_stack) != 0:
node = node_stack.pop()
else:
left_childs.append(None)
continue
                # Unknown variables are handled as numbers, SEP as an operator
                if i < num_start:  # operator (not a number)
node_stack.append(TreeNode(r))
node_stack.append(TreeNode(l, left_flag=True))
o.append(TreeEmbedding(node_label[idx].unsqueeze(0), terminal=False))
# print(o[-1].embedding.size())
# print(encoder_outputs[idx].size())
                else:  # number
current_num = current_nums_embeddings[idx, i - num_start].unsqueeze(0)
while len(o) > 0 and o[-1].terminal:
sub_stree = o.pop()
op = o.pop()
current_num = self.merge(op.embedding, sub_stree.embedding, current_num) # Subtree embedding
if batch_first:
encoder_mapping, decoder_mapping = self.sa(current_num, encoder_outputs[idx])
else:
temp_encoder_outputs = encoder_outputs.transpose(0, 1)
encoder_mapping, decoder_mapping = self.sa(current_num,temp_encoder_outputs[idx])
all_sa_outputs.append((encoder_mapping, decoder_mapping))
o.append(TreeEmbedding(current_num, terminal=True))
if len(o) > 0 and o[-1].terminal:
left_childs.append(o[-1].embedding)
else:
left_childs.append(None)
# all_leafs = torch.stack(all_leafs, dim=1) # B x S x 2
all_node_outputs = torch.stack(all_node_outputs, dim=1) # B x S x N
target = target.transpose(0, 1).contiguous() # B x S
if self.USE_CUDA:
# all_leafs = all_leafs.cuda()
all_node_outputs = all_node_outputs.cuda()
target = target.cuda()
new_all_sa_outputs = []
for sa_pair in all_sa_outputs:
new_all_sa_outputs.append((sa_pair[0].cuda(), sa_pair[1].cuda()))
all_sa_outputs = new_all_sa_outputs
# target_length = torch.LongTensor(target_length).cuda()
else:
pass
# target_length = torch.LongTensor(target_length)
semantic_alignment_loss = nn.MSELoss()
        total_semantic_alignment_loss = 0
        sa_len = len(all_sa_outputs)
        for sa_pair in all_sa_outputs:
            total_semantic_alignment_loss += semantic_alignment_loss(sa_pair[0], sa_pair[1])
        # print(total_semantic_alignment_loss)
        total_semantic_alignment_loss = total_semantic_alignment_loss / sa_len
        # print(total_semantic_alignment_loss)
        # op_target = target < num_start
        # loss_0 = masked_cross_entropy_without_logit(all_leafs, op_target.long(), target_length)
        loss = masked_cross_entropy(all_node_outputs, target, target_length) + 0.01 * total_semantic_alignment_loss
# loss = loss_0 + loss_1
loss.backward()
# clip the grad
# torch.nn.utils.clip_grad_norm_(encoder.parameters(), 5)
# torch.nn.utils.clip_grad_norm_(predict.parameters(), 5)
# torch.nn.utils.clip_grad_norm_(generate.parameters(), 5)
# Update parameters with optimizers
return loss.item() # , loss_0.item(), loss_1.item()
def evaluate_tree(self, input_batch, input_length, generate_nums, num_pos, num_start, beam_size=5, max_length=30):
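        # Beam-search decoding for a single example (batch_size is 1): every beam
        # keeps its own node stack and subtree-embedding stack, and decoding stops
        # when all surviving beams have empty node stacks or max_length is reached.
        # Returns the output token index sequence of the best-scoring beam.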
seq_mask = torch.BoolTensor(1, input_length).fill_(0)
# Turn padded arrays into (batch_size x max_len) tensors, transpose into (max_len x batch_size)
input_var = input_batch.transpose(0, 1)
num_mask = torch.BoolTensor(1, len(num_pos[0]) + len(generate_nums)).fill_(0)
padding_hidden = torch.FloatTensor([0.0 for _ in range(self.hidden_size)]).unsqueeze(0)
batch_size = 1
if self.USE_CUDA:
input_var = input_var.cuda()
seq_mask = seq_mask.cuda()
padding_hidden = padding_hidden.cuda()
num_mask = num_mask.cuda()
# Run words through encoder
seq_emb = self.embedder(input_var)
pade_outputs, _ = self.encoder(seq_emb, input_length)
problem_output = pade_outputs[-1, :, :self.hidden_size] + pade_outputs[0, :, self.hidden_size:]
encoder_outputs = pade_outputs[:, :, :self.hidden_size] + pade_outputs[:, :, self.hidden_size:]
# Prepare input and output variables
node_stacks = [[TreeNode(_)] for _ in problem_output.split(1, dim=0)]
num_size = len(num_pos[0])
all_nums_encoder_outputs = self.get_all_number_encoder_outputs(encoder_outputs, num_pos, batch_size, num_size,
self.hidden_size)
# B x P x N
embeddings_stacks = [[] for _ in range(batch_size)]
left_childs = [None for _ in range(batch_size)]
beams = [TreeBeam(0.0, node_stacks, embeddings_stacks, left_childs, [])]
for t in range(max_length):
current_beams = []
while len(beams) > 0:
b = beams.pop()
if len(b.node_stack[0]) == 0:
current_beams.append(b)
continue
# left_childs = torch.stack(b.left_childs)
left_childs = b.left_childs
num_score, op, current_embeddings, current_context, current_nums_embeddings = self.decoder(b.node_stack,
left_childs,
encoder_outputs,
all_nums_encoder_outputs,
padding_hidden,
seq_mask,
num_mask)
out_score = nn.functional.log_softmax(torch.cat((op, num_score), dim=1), dim=1)
# out_score = p_leaf * out_score
topv, topi = out_score.topk(beam_size)
for tv, ti in zip(topv.split(1, dim=1), topi.split(1, dim=1)):
current_node_stack = copy_list(b.node_stack)
current_left_childs = []
current_embeddings_stacks = copy_list(b.embedding_stack)
current_out = copy.deepcopy(b.out)
out_token = int(ti)
current_out.append(out_token)
node = current_node_stack[0].pop()
if out_token < num_start:
generate_input = torch.LongTensor([out_token])
if self.USE_CUDA:
generate_input = generate_input.cuda()
left_child, right_child, node_label = self.node_generater(current_embeddings, generate_input,
current_context)
current_node_stack[0].append(TreeNode(right_child))
current_node_stack[0].append(TreeNode(left_child, left_flag=True))
current_embeddings_stacks[0].append(TreeEmbedding(node_label[0].unsqueeze(0), False))
else:
current_num = current_nums_embeddings[0, out_token - num_start].unsqueeze(0)
while len(current_embeddings_stacks[0]) > 0 and current_embeddings_stacks[0][-1].terminal:
sub_stree = current_embeddings_stacks[0].pop()
op = current_embeddings_stacks[0].pop()
current_num = self.merge(op.embedding, sub_stree.embedding, current_num)
current_embeddings_stacks[0].append(TreeEmbedding(current_num, True))
if len(current_embeddings_stacks[0]) > 0 and current_embeddings_stacks[0][-1].terminal:
current_left_childs.append(current_embeddings_stacks[0][-1].embedding)
else:
current_left_childs.append(None)
current_beams.append(TreeBeam(b.score + float(tv), current_node_stack, current_embeddings_stacks,
current_left_childs, current_out))
beams = sorted(current_beams, key=lambda x: x.score, reverse=True)
beams = beams[:beam_size]
flag = True
for b in beams:
if len(b.node_stack[0]) != 0:
flag = False
if flag:
break
return beams[0].out
def get_all_number_encoder_outputs(self, encoder_outputs, num_pos, batch_size, num_size, hidden_size):
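        # Gather, for each example, the encoder hidden states at the positions where
        # numbers occur (num_pos); missing slots are padded and masked to zero.
        # Result shape: (batch_size, num_size, hidden_size).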
indices = list()
sen_len = encoder_outputs.size(0)
masked_index = []
temp_1 = [1 for _ in range(hidden_size)]
temp_0 = [0 for _ in range(hidden_size)]
for b in range(batch_size):
for i in num_pos[b]:
indices.append(i + b * sen_len)
masked_index.append(temp_0)
indices += [0 for _ in range(len(num_pos[b]), num_size)]
masked_index += [temp_1 for _ in range(len(num_pos[b]), num_size)]
indices = torch.LongTensor(indices)
masked_index = torch.BoolTensor(masked_index)
masked_index = masked_index.view(batch_size, num_size, hidden_size)
if self.USE_CUDA:
indices = indices.cuda()
masked_index = masked_index.cuda()
all_outputs = encoder_outputs.transpose(0, 1).contiguous()
all_embedding = all_outputs.view(-1, encoder_outputs.size(2)) # S x B x H -> (B x S) x H
all_num = all_embedding.index_select(0, indices)
all_num = all_num.view(batch_size, num_size, hidden_size)
return all_num.masked_fill_(masked_index, 0.0)
def generate_tree_input(self, target, decoder_output, nums_stack_batch, num_start, unk):
        # When the target is a copied number (UNK) that occurs at several positions,
        # choose the candidate position with the highest predicted score
target_input = copy.deepcopy(target)
for i in range(len(target)):
if target[i] == unk:
num_stack = nums_stack_batch[i].pop()
max_score = -float("1e12")
for num in num_stack:
if decoder_output[i, num_start + num] > max_score:
target[i] = num + num_start
max_score = decoder_output[i, num_start + num]
if target_input[i] >= num_start:
target_input[i] = 0
return torch.LongTensor(target), torch.LongTensor(target_input)
def mse_loss(self, outputs, targets, mask=None):
# outputs : [batch_size,output_len,hidden_size]
# targets : [batch_size,output_len,hidden_size]
# mask : [batch_size,output_len]
mask = mask.to(self.device)
x = torch.sqrt(torch.sum(torch.square((outputs - targets)), dim=-1)) # [batch_size,output_len]
y = torch.sum(x * mask, dim=-1) / torch.sum(mask, dim=-1) # [batch_size]
return torch.sum(y)
def convert_idx2symbol(self, output, num_list, num_stack):
# batch_size=output.size(0)
'''batch_size=1'''
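        # Map predicted indices back to symbols: NUM_x placeholders are replaced by
        # the corresponding entries of num_list, UNK is resolved from num_stack, and
        # decoding stops at the first SOS / EOS / PAD token.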
seq_len = len(output)
num_len = len(num_list)
output_list = []
res = []
for s_i in range(seq_len):
idx = output[s_i]
if idx in [self.out_sos_token, self.out_eos_token, self.out_pad_token]:
break
symbol = self.out_idx2symbol[idx]
if "NUM" in symbol:
num_idx = self.mask_list.index(symbol)
if num_idx >= num_len:
res = []
break
res.append(num_list[num_idx])
elif symbol == SpecialTokens.UNK_TOKEN:
try:
pos_list = num_stack.pop()
c = num_list[pos_list[0]]
res.append(c)
except:
return None
else:
res.append(symbol)
output_list.append(res)
return output_list
rafacarrascosa/rosetta | rosetta/tests/test_parallel.py | d5a964756b4f51e1032df40ee24f18398e3193b7 | import unittest
from functools import partial
import pandas as pd
from pandas.util.testing import assert_frame_equal, assert_series_equal
import numpy as np
import threading
from StringIO import StringIO
from rosetta.parallel import parallel_easy, pandas_easy
from rosetta.parallel.threading_easy import threading_easy, LockIterateApply
# A couple of functions for testing parallel_easy.
# They must be defined at module level (not inside the test class) so that the
# multiprocessing backend can pickle them.
def _abfunc(x, a, b=1):
return x * a * b
abfunc = partial(_abfunc, 2, 3)
def frame_to_series(frame):
x = frame.iloc[0, 0]
return pd.Series([x] * len(frame.columns), index=frame.columns)
def rightmax(mylist):
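    # For each position i, the max over the window mylist[i:i+2]
    # (the element itself and, when present, its right neighbour).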
return [max(mylist[i: i+2]) for i in range(len(mylist))]
def leftmax(mylist):
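    # For each position i, the max over the element itself and, when present,
    # its left neighbour.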
for i in range(len(mylist)):
if i == 0:
result = [mylist[0]]
else:
result.append(max(mylist[i - 1: i+1]))
return result
class TestBase(unittest.TestCase):
"""
Tests the parallel_easy module.
"""
def setUp(self):
self.numbers = range(5)
self.benchmark = [0, 6, 12, 18, 24]
def test_map_easy_1job(self):
result = parallel_easy.map_easy(abfunc, self.numbers, 1)
self.assertEqual(result, self.benchmark)
def test_map_easy_3job(self):
result = parallel_easy.map_easy(abfunc, self.numbers, 3)
self.assertEqual(result, self.benchmark)
def test_imap_easy_1job(self):
result_iterator = parallel_easy.imap_easy(abfunc, self.numbers, 1, 1)
result = []
for number in result_iterator:
result.append(number)
self.assertEqual(result, self.benchmark)
def test_imap_easy_3job(self):
result_iterator = parallel_easy.imap_easy(abfunc, self.numbers, 3, 1)
result = []
for number in result_iterator:
result.append(number)
self.assertEqual(result, self.benchmark)
def test_n_jobs_wrap_positive(self):
"""
For n_jobs positive, the wrap should return n_jobs.
"""
for n_jobs in range(1, 5):
result = parallel_easy._n_jobs_wrap(n_jobs)
self.assertEqual(result, n_jobs)
def test_n_jobs_wrap_zero(self):
"""
For n_jobs zero, the wrap should raise a ValueError
"""
self.assertRaises(ValueError, parallel_easy._n_jobs_wrap, 0)
class TestMapEasyPaddedBlock(unittest.TestCase):
"""
Tests the parallel_easy.map_easy_padded_blocks function.
"""
def setUp(self):
#self.numbers_1 = [
# 0, 0, 2, -1, 4, 2, 6, 7, 6, 9, 12, 11, 11, 14, 55, 55, 44, 33, 33]
self.numbers_10 = np.random.randint(0, 5, 10)
self.numbers_101 = np.random.randint(0, 5, 101)
        self.numbers_51 = np.random.randint(0, 5, 51)
#self.numbers_1 = [0, 1, 2, 0, 3, 2, 4, 3, 2, 3, 3]
self.n_jobs = 1
def lefttest(self, numbers, buffer_len, blocksize):
result = parallel_easy.map_easy_padded_blocks(
leftmax, numbers, self.n_jobs, buffer_len, blocksize=blocksize)
benchmark = leftmax(numbers)
self.assertEqual(result, benchmark)
def righttest(self, numbers, buffer_len, blocksize):
result = parallel_easy.map_easy_padded_blocks(
rightmax, numbers, self.n_jobs, buffer_len, blocksize=blocksize)
benchmark = rightmax(numbers)
self.assertEqual(result, benchmark)
def test_map_easy_padded_blocks_14(self):
buffer_len = 1
blocksize = 4
self.lefttest(self.numbers_10, buffer_len, blocksize)
self.lefttest(self.numbers_101, buffer_len, blocksize)
self.lefttest(self.numbers_51, buffer_len, blocksize)
self.righttest(self.numbers_10, buffer_len, blocksize)
self.righttest(self.numbers_101, buffer_len, blocksize)
self.righttest(self.numbers_51, buffer_len, blocksize)
def test_map_easy_padded_blocks_24(self):
buffer_len = 2
blocksize = 4
self.lefttest(self.numbers_10, buffer_len, blocksize)
self.lefttest(self.numbers_101, buffer_len, blocksize)
self.lefttest(self.numbers_51, buffer_len, blocksize)
self.righttest(self.numbers_10, buffer_len, blocksize)
self.righttest(self.numbers_101, buffer_len, blocksize)
self.righttest(self.numbers_51, buffer_len, blocksize)
def test_map_easy_padded_blocks_37(self):
buffer_len = 3
blocksize = 7
self.lefttest(self.numbers_101, buffer_len, blocksize)
self.lefttest(self.numbers_51, buffer_len, blocksize)
self.righttest(self.numbers_101, buffer_len, blocksize)
self.righttest(self.numbers_51, buffer_len, blocksize)
def test_map_easy_padded_blocks_17(self):
buffer_len = 1
blocksize = 7
self.lefttest(self.numbers_10, buffer_len, blocksize)
self.lefttest(self.numbers_101, buffer_len, blocksize)
self.lefttest(self.numbers_51, buffer_len, blocksize)
self.righttest(self.numbers_10, buffer_len, blocksize)
self.righttest(self.numbers_101, buffer_len, blocksize)
self.righttest(self.numbers_51, buffer_len, blocksize)
class TestPandasEasy(unittest.TestCase):
"""
Tests the pandas_easy module.
"""
def setUp(self):
pass
def test_groupby_to_scalar_to_series_1(self):
df = pd.DataFrame({'a': [6, 2, 2], 'b': [4, 5, 6]})
benchmark = df.groupby('a').apply(max)
result = pandas_easy.groupby_to_scalar_to_series(df, max, 1, by='a')
assert_series_equal(result, benchmark)
def test_groupby_to_scalar_to_series_2(self):
s = pd.Series([1, 2, 3, 4])
labels = ['a', 'a', 'b', 'b']
benchmark = s.groupby(labels).apply(max)
result = pandas_easy.groupby_to_scalar_to_series(
s, max, 1, by=labels)
assert_series_equal(result, benchmark)
def test_groupby_to_series_to_frame_1(self):
df = pd.DataFrame({'a': [6, 2, 2], 'b': [4, 5, 6]})
labels = ['g1', 'g1', 'g2']
benchmark = df.groupby(labels).mean()
result = pandas_easy.groupby_to_series_to_frame(
df, np.mean, 1, use_apply=True, by=labels)
assert_frame_equal(result, benchmark)
def test_groupby_to_series_to_frame_2(self):
df = pd.DataFrame({'a': [6, 2, 2], 'b': [4, 5, 6]})
labels = ['g1', 'g1', 'g2']
benchmark = df.groupby(labels).apply(frame_to_series)
result = pandas_easy.groupby_to_series_to_frame(
df, frame_to_series, 1, use_apply=False, by=labels)
assert_frame_equal(result, benchmark)
class TestLockIterateApply(unittest.TestCase):
"""
Test the Locked Iterator Class
"""
def setUp(self):
self.data = ['my', 'name', 'is', 'daniel']
self.num_threads = 4
def bytwo(x):
return 2 * x
self.func = bytwo
def it():
for i in self.data:
yield i
self.myiter = it()
def test_locked_iterator(self):
threads = []
lock = threading.Lock()
out = StringIO()
for i in range(self.num_threads):
t = LockIterateApply(self.func, self.myiter, lock, ',', out)
threads.append(t)
for t in threads:
t.start()
for t in threads:
t.join()
benchmark = set(['mymy', 'namename', 'danieldaniel', 'isis', ''])
results = set(out.getvalue().split(','))
self.assertEqual(results, benchmark)
def test_threading_easy(self):
out = StringIO()
threading_easy(self.func, self.myiter, self.num_threads, ',', out)
benchmark = set(['mymy', 'namename', 'danieldaniel', 'isis', ''])
results = set(out.getvalue().split(','))
self.assertEqual(results, benchmark)
def test_threading_easy_single(self):
out = StringIO()
threading_easy(self.func, self.myiter, 1, ',', out)
benchmark = set(['mymy', 'namename', 'danieldaniel', 'isis', ''])
results = set(out.getvalue().split(','))
self.assertEqual(results, benchmark)
sdelcore/video-event-notifier-old | modules/helper/subtitles/subtitles.py | 16bd322f2b81efbb3e08e63ed407ab098d610c88 | import time
import srt
import re
import datetime
import copy
from mqtthandler import MQTTHandler
INIT_STATUS={
"video": {
"title": None,
"series_title": None,
"season": None,
"episode": None
},
"time": None,
"events": None
}
class SubtitleHandler:
subtitles = []
phrases = []
def __init__(self, broker):
self.mqtt = MQTTHandler(broker)
def parseSRT(self, srt_filename):
f=open(srt_filename, "r")
subtitle_generate = srt.parse(f.read())
f.close()
self.subtitles = list(subtitle_generate)
return self.subtitles
def parsePhrases(self, phrase_filename):
f=open(phrase_filename, "r")
lines = f.readlines()
for line in lines:
phrase = line.rstrip("\n\r").split("/")
self.phrases.append(phrase)
return self.phrases
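    # A hypothetical sketch of the expected phrase-file format, inferred from the
    # split("/") above and from how matchEventToMovie reads phrase[0] / phrase[1]:
    # each line pairs a phrase with an event count, separated by "/", e.g.
    #
    #   cheers/1
    #   take a drink/2
    #
    # which parsePhrases turns into [['cheers', '1'], ['take a drink', '2']].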
def isPhraseInLine(self,phrase, sub, content):
        # raw strings avoid the invalid "\s" escape warning in newer Python versions
        sub_line = re.sub(r'[^A-Za-z0-9\s]+', '', str(content)).lower()
        phrase = re.sub(r'[^A-Za-z0-9\s]+', '', str(phrase)).lower()
count = 0
while bool(re.search(phrase, sub_line)):
count += 1
sub_line = sub_line.replace(phrase, '', 1)
return count
def getEventTime(self,sub):
middle = sub.end - sub.start
between_sec = datetime.timedelta.total_seconds(middle) / 2
sec = between_sec + datetime.timedelta.total_seconds(sub.start)
return int(sec)
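    # Worked example of the computation above (values are illustrative): for a
    # subtitle shown from 0:01:00 to 0:01:04, middle is a 4-second timedelta,
    # between_sec is 2.0, and the returned event time is int(2.0 + 60.0) = 62,
    # i.e. the midpoint of the subtitle in whole seconds.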
def matchEventToMovie(self, movie, subtitles, phrases, time_offset):
        # work on a deep copy so repeated calls do not mutate the module-level template
        status = copy.deepcopy(INIT_STATUS)
status["video"]["title"] = movie
#TODO determine how to set up phrase data
for sub in subtitles:
c = sub.content.replace('\n', ' ')
c = c.split(" ")
firstpart, secondpart = " ".join(c[:len(c)//2]), " ".join(c[len(c)//2:])
mult = 0
for phrase in phrases:
line = phrase[0]
events = phrase[1]
mult += self.isPhraseInLine(line,sub,sub.content)
#f = self.isPhraseInLine(line,sub, firstpart)
#s = self.isPhraseInLine(line,sub, secondpart)
#if f + s == 0:
# mult += self.isPhraseInLine(line,sub,sub.content )
#else:
# mult += f+s
            ## NOTE to future self:
            # this currently adds up the number of events over the entire subtitle.
            # To improve it, split each subtitle into two parts: the first part is
            # the half with the first bit of text, which gets the correct
            # time-to-event for that half; the second half gets the correct
            # time-to-event for the second half. Three if statements, each ending
            # in a sendMessage() call, could check for and handle these cases.
            if mult > 0:  # won't work properly if events is greater than 1
status["time"] = self.getEventTime(sub) + time_offset
status["events"] = int(events) * mult
self.sendMessage(status)
#print(sub.content)
def sendMessage(self, msg):
self.mqtt.send(msg)
print(msg)
return msg
def isDone(self):
return True | [((380, 399), 'mqtthandler.MQTTHandler', 'MQTTHandler', (['broker'], {}), '(broker)\n', (391, 399), False, 'from mqtthandler import MQTTHandler\n'), ((1112, 1139), 're.search', 're.search', (['phrase', 'sub_line'], {}), '(phrase, sub_line)\n', (1121, 1139), False, 'import re\n'), ((1333, 1373), 'datetime.timedelta.total_seconds', 'datetime.timedelta.total_seconds', (['middle'], {}), '(middle)\n', (1365, 1373), False, 'import datetime\n'), ((1406, 1449), 'datetime.timedelta.total_seconds', 'datetime.timedelta.total_seconds', (['sub.start'], {}), '(sub.start)\n', (1438, 1449), False, 'import datetime\n')] |
rbago/CEBD1160_Class4_hwk | thecsvparser.py | 1012c81663dc60ea9d139d96f368f8289d4b363e | #!/usr/bin/env python
import os
import numpy as np
import pandas as pd
os.getcwd()
# Ask the user for the filename
# The current version of this script only works with whitespace-delimited (TSV-style) files
mainFilename = input('Input your file name (diabetes.tab.txt or housing.data.txt): ')
print()
# Load the file with numpy first,
# then convert the result into a pandas DataFrame
filenameData = np.genfromtxt(mainFilename, dtype='str')
filenameData = pd.DataFrame(filenameData)
# Grab the first row to determine whether the header is string or numeric
headers = filenameData.iloc[0]
try:
pd.to_numeric(headers)
except:
filenameData = pd.DataFrame(filenameData.values[1:], columns=headers)
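# For example, diabetes.tab.txt presumably begins with a row of column names,
# so pd.to_numeric raises and that row is promoted to the header, while
# housing.data.txt is all numeric and is left unchanged (assumed file
# contents, inferred from the input prompt above).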
# Convert strings to numbers (pandas infers float or integer automatically)
filenameData = filenameData.apply(pd.to_numeric)
# Obtains the mean and standard deviation of the columns
listMean = filenameData.mean()
listStd = filenameData.std()
print(filenameData)
# Prints out the results
print('Mean for each column:')
for idx in filenameData.columns:
print(idx,':',listMean[idx])
print()
print('Standard deviation for each column:')
for idx in filenameData.columns:
print(idx,':',listStd[idx])
| [((73, 84), 'os.getcwd', 'os.getcwd', ([], {}), '()\n', (82, 84), False, 'import os\n'), ((375, 415), 'numpy.genfromtxt', 'np.genfromtxt', (['mainFilename'], {'dtype': '"""str"""'}), "(mainFilename, dtype='str')\n", (388, 415), True, 'import numpy as np\n'), ((432, 458), 'pandas.DataFrame', 'pd.DataFrame', (['filenameData'], {}), '(filenameData)\n', (444, 458), True, 'import pandas as pd\n'), ((561, 583), 'pandas.to_numeric', 'pd.to_numeric', (['headers'], {}), '(headers)\n', (574, 583), True, 'import pandas as pd\n'), ((611, 665), 'pandas.DataFrame', 'pd.DataFrame', (['filenameData.values[1:]'], {'columns': 'headers'}), '(filenameData.values[1:], columns=headers)\n', (623, 665), True, 'import pandas as pd\n')] |
wenxichen/donkeycar | donkeycar/tests/test_web_socket.py | d70ee60d35d7e0e004b885e6f6062fb51916dad1 |
from donkeycar.parts.web_controller.web import WebSocketCalibrateAPI
from functools import partial
from tornado import testing
import tornado.websocket
import tornado.web
import tornado.ioloop
import json
from unittest.mock import Mock
from donkeycar.parts.actuator import PWMSteering, PWMThrottle
class WebSocketCalibrateTest(testing.AsyncHTTPTestCase):
"""
Example of WebSocket usage as a client
in AsyncHTTPTestCase-based unit tests.
"""
def get_app(self):
app = tornado.web.Application([('/', WebSocketCalibrateAPI)])
self.app = app
return app
def get_ws_url(self):
return "ws://localhost:" + str(self.get_http_port()) + "/"
@tornado.testing.gen_test
def test_calibrate_servo_esc_1(self):
ws_client = yield tornado.websocket.websocket_connect(self.get_ws_url())
# Now we can run a test on the WebSocket.
self.app.drive_train = dict()
self.app.drive_train['steering'] = Mock()
self.app.drive_train_type = "SERVO_ESC"
data = {"config": {"STEERING_LEFT_PWM": 444}}
yield ws_client.write_message(json.dumps(data))
yield ws_client.close()
assert self.app.drive_train['steering'].left_pulse == 444
assert isinstance(self.app.drive_train['steering'].right_pulse, Mock)
@tornado.testing.gen_test
def test_calibrate_servo_esc_2(self):
ws_client = yield tornado.websocket.websocket_connect(self.get_ws_url())
# Now we can run a test on the WebSocket.
self.app.drive_train = dict()
self.app.drive_train['steering'] = Mock()
self.app.drive_train_type = "SERVO_ESC"
data = {"config": {"STEERING_RIGHT_PWM": 555}}
yield ws_client.write_message(json.dumps(data))
yield ws_client.close()
assert self.app.drive_train['steering'].right_pulse == 555
assert isinstance(self.app.drive_train['steering'].left_pulse, Mock)
@tornado.testing.gen_test
def test_calibrate_servo_esc_3(self):
ws_client = yield tornado.websocket.websocket_connect(self.get_ws_url())
# Now we can run a test on the WebSocket.
self.app.drive_train = dict()
self.app.drive_train['throttle'] = Mock()
self.app.drive_train_type = "SERVO_ESC"
data = {"config": {"THROTTLE_FORWARD_PWM": 666}}
yield ws_client.write_message(json.dumps(data))
yield ws_client.close()
assert self.app.drive_train['throttle'].max_pulse == 666
assert isinstance(self.app.drive_train['throttle'].min_pulse, Mock)
@tornado.testing.gen_test
def test_calibrate_mm1(self):
ws_client = yield tornado.websocket.websocket_connect(self.get_ws_url())
# Now we can run a test on the WebSocket.
self.app.drive_train = Mock()
self.app.drive_train_type = "MM1"
data = {"config": {"MM1_STEERING_MID": 1234}}
yield ws_client.write_message(json.dumps(data))
yield ws_client.close()
assert self.app.drive_train.STEERING_MID == 1234
| [((978, 984), 'unittest.mock.Mock', 'Mock', ([], {}), '()\n', (982, 984), False, 'from unittest.mock import Mock\n'), ((1607, 1613), 'unittest.mock.Mock', 'Mock', ([], {}), '()\n', (1611, 1613), False, 'from unittest.mock import Mock\n'), ((2237, 2243), 'unittest.mock.Mock', 'Mock', ([], {}), '()\n', (2241, 2243), False, 'from unittest.mock import Mock\n'), ((2808, 2814), 'unittest.mock.Mock', 'Mock', ([], {}), '()\n', (2812, 2814), False, 'from unittest.mock import Mock\n'), ((1126, 1142), 'json.dumps', 'json.dumps', (['data'], {}), '(data)\n', (1136, 1142), False, 'import json\n'), ((1756, 1772), 'json.dumps', 'json.dumps', (['data'], {}), '(data)\n', (1766, 1772), False, 'import json\n'), ((2388, 2404), 'json.dumps', 'json.dumps', (['data'], {}), '(data)\n', (2398, 2404), False, 'import json\n'), ((2949, 2965), 'json.dumps', 'json.dumps', (['data'], {}), '(data)\n', (2959, 2965), False, 'import json\n')] |
weese/seqan | misc/trac_plugins/IncludeMacro/includemacro/macros.py | 1acb1688969c7b61497f2328af54b4d11228a484 | # TracIncludeMacro macros
import re
import urllib2
from StringIO import StringIO
from trac.core import *
from trac.wiki.macros import WikiMacroBase
from trac.wiki.formatter import system_message
from trac.wiki.model import WikiPage
from trac.mimeview.api import Mimeview, get_mimetype, Context
from trac.perm import IPermissionRequestor
from genshi.core import escape
from genshi.input import HTMLParser, ParseError
from genshi.filters.html import HTMLSanitizer
__all__ = ['IncludeMacro']
class IncludeMacro(WikiMacroBase):
"""A macro to include other resources in wiki pages.
More documentation to follow.
"""
implements(IPermissionRequestor)
# Default output formats for sources that need them
default_formats = {
'wiki': 'text/x-trac-wiki',
}
# IWikiMacroProvider methods
def expand_macro(self, formatter, name, content):
req = formatter.req # Shortcut.
safe_content = False # Whether or not to disable cleaning HTML.
args = [x.strip() for x in content.split(',')]
if len(args) == 1:
args.append(None)
elif len(args) == 3:
if not args[2].startswith('fragment='):
msg = ('If three arguments are given, the last one must'
' start with fragment=, but tag content was %s')
return system_message(msg % content)
elif len(args) != 2:
return system_message('Invalid arguments "%s"'%content)
# Parse out fragment name.
fragment_name = None
if args[-1] and args[-1].startswith('fragment='):
fragment_name = args[-1][len('fragment='):]
args.pop()
if len(args) == 1:
args.append(None)
# Pull out the arguments
source, dest_format = args
try:
source_format, source_obj = source.split(':', 1)
except ValueError: # If no : is present, assume its a wiki page
source_format, source_obj = 'wiki', source
# Apply a default format if needed
if dest_format is None:
try:
dest_format = self.default_formats[source_format]
except KeyError:
pass
if source_format in ('http', 'https', 'ftp'):
# Since I can't really do recursion checking, and because this
# could be a source of abuse allow selectively blocking it.
# RFE: Allow blacklist/whitelist patterns for URLS. <NPK>
# RFE: Track page edits and prevent unauthorized users from ever entering a URL include. <NPK>
if not req.perm.has_permission('INCLUDE_URL'):
self.log.info('IncludeMacro: Blocking attempt by %s to include URL %s on page %s', req.authname, source, req.path_info)
return ''
try:
urlf = urllib2.urlopen(source)
out = urlf.read()
except urllib2.URLError, e:
return system_message('Error while retrieving file', str(e))
except TracError, e:
return system_message('Error while previewing', str(e))
ctxt = Context.from_request(req)
elif source_format == 'wiki':
# XXX: Check for recursion in page includes. <NPK>
if not req.perm.has_permission('WIKI_VIEW'):
return ''
page = WikiPage(self.env, source_obj)
if not page.exists:
return system_message('Wiki page %s does not exist'%source_obj)
out = page.text
ctxt = Context.from_request(req, 'wiki', source_obj)
elif source_format == 'source':
if not req.perm.has_permission('FILE_VIEW'):
return ''
repo = self.env.get_repository(authname=req.authname)
node = repo.get_node(source_obj)
out = node.get_content().read()
if dest_format is None:
dest_format = node.content_type or get_mimetype(source_obj, out)
ctxt = Context.from_request(req, 'source', source_obj)
# RFE: Add ticket: and comment: sources. <NPK>
# RFE: Add attachment: source. <NPK>
else:
return system_message('Unsupported include source %s'%source)
# If there was a fragment name given then find the fragment.
fragment = []
current_fragment_name = None
if fragment_name:
for line in out.splitlines():
res = re.search(r'FRAGMENT\(([^)]*)\)', line)
if res:
current_fragment_name = res.groups()[0]
else:
if current_fragment_name == fragment_name:
fragment.append(line)
out = '\n'.join(fragment)
# If we have a preview format, use it
if dest_format:
# We can trust the output and do not need to call the HTML sanitizer
# below. The HTML sanitization leads to whitespace being stripped.
safe_content = True
out = Mimeview(self.env).render(ctxt, dest_format, out, force_source=True)
# Escape if needed
if not safe_content and not self.config.getbool('wiki', 'render_unsafe_content', False):
try:
out = HTMLParser(StringIO(out)).parse() | HTMLSanitizer()
except ParseError:
out = escape(out)
return out
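    # Illustrative wiki usage of this macro (arguments are examples only):
    #
    #   [[Include(wiki:SomeOtherPage)]]
    #   [[Include(source:trunk/setup.py, text/x-python)]]
    #   [[Include(http://example.com/notes.txt, text/x-trac-wiki, fragment=intro)]]
    #
    # With fragment=, only the lines following a FRAGMENT(<name>) marker (up to
    # the next marker) are kept, as implemented above.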
# IPermissionRequestor methods
def get_permission_actions(self):
yield 'INCLUDE_URL'
| [] |
rjcuevas/Email-Frontend-AngularJS- | packages/google/cloud/logging/client.py | 753dbd190582ed953058c9e15c2be920716c7985 | # Copyright 2016 Google Inc.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Client for interacting with the Google Stackdriver Logging API."""
import os
try:
from google.cloud.gapic.logging.v2.config_service_v2_api import (
ConfigServiceV2Api as GeneratedSinksAPI)
from google.cloud.gapic.logging.v2.logging_service_v2_api import (
LoggingServiceV2Api as GeneratedLoggingAPI)
from google.cloud.gapic.logging.v2.metrics_service_v2_api import (
MetricsServiceV2Api as GeneratedMetricsAPI)
from google.cloud.logging._gax import _LoggingAPI as GAXLoggingAPI
from google.cloud.logging._gax import _MetricsAPI as GAXMetricsAPI
from google.cloud.logging._gax import _SinksAPI as GAXSinksAPI
except ImportError: # pragma: NO COVER
_HAVE_GAX = False
GeneratedLoggingAPI = GAXLoggingAPI = None
GeneratedMetricsAPI = GAXMetricsAPI = None
GeneratedSinksAPI = GAXSinksAPI = None
else:
_HAVE_GAX = True
from google.cloud.client import JSONClient
from google.cloud.environment_vars import DISABLE_GRPC
from google.cloud.logging.connection import Connection
from google.cloud.logging.connection import _LoggingAPI as JSONLoggingAPI
from google.cloud.logging.connection import _MetricsAPI as JSONMetricsAPI
from google.cloud.logging.connection import _SinksAPI as JSONSinksAPI
from google.cloud.logging.entries import ProtobufEntry
from google.cloud.logging.entries import StructEntry
from google.cloud.logging.entries import TextEntry
from google.cloud.logging.logger import Logger
from google.cloud.logging.metric import Metric
from google.cloud.logging.sink import Sink
_DISABLE_GAX = os.getenv(DISABLE_GRPC, False)
_USE_GAX = _HAVE_GAX and not _DISABLE_GAX
class Client(JSONClient):
"""Client to bundle configuration needed for API requests.
:type project: str
:param project: the project which the client acts on behalf of.
If not passed, falls back to the default inferred
from the environment.
:type credentials: :class:`oauth2client.client.OAuth2Credentials` or
:class:`NoneType`
:param credentials: The OAuth2 Credentials to use for the connection
owned by this client. If not passed (and if no ``http``
object is passed), falls back to the default inferred
from the environment.
:type http: :class:`httplib2.Http` or class that defines ``request()``.
:param http: An optional HTTP object to make requests. If not passed, an
``http`` object is created that is bound to the
``credentials`` for the current object.
"""
_connection_class = Connection
_logging_api = _sinks_api = _metrics_api = None
@property
def logging_api(self):
"""Helper for logging-related API calls.
See:
https://cloud.google.com/logging/docs/api/ref_v2beta1/rest/v2beta1/entries
https://cloud.google.com/logging/docs/api/ref_v2beta1/rest/v2beta1/projects.logs
"""
if self._logging_api is None:
if _USE_GAX:
generated = GeneratedLoggingAPI()
self._logging_api = GAXLoggingAPI(generated)
else:
self._logging_api = JSONLoggingAPI(self.connection)
return self._logging_api
@property
def sinks_api(self):
"""Helper for log sink-related API calls.
See:
https://cloud.google.com/logging/docs/api/ref_v2beta1/rest/v2beta1/projects.sinks
"""
if self._sinks_api is None:
if _USE_GAX:
generated = GeneratedSinksAPI()
self._sinks_api = GAXSinksAPI(generated)
else:
self._sinks_api = JSONSinksAPI(self.connection)
return self._sinks_api
@property
def metrics_api(self):
"""Helper for log metric-related API calls.
See:
https://cloud.google.com/logging/docs/api/ref_v2beta1/rest/v2beta1/projects.metrics
"""
if self._metrics_api is None:
if _USE_GAX:
generated = GeneratedMetricsAPI()
self._metrics_api = GAXMetricsAPI(generated)
else:
self._metrics_api = JSONMetricsAPI(self.connection)
return self._metrics_api
def logger(self, name):
"""Creates a logger bound to the current client.
:type name: str
:param name: the name of the logger to be constructed.
:rtype: :class:`google.cloud.logging.logger.Logger`
:returns: Logger created with the current client.
"""
return Logger(name, client=self)
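    # A minimal usage sketch (the project and logger names are hypothetical):
    #
    #   client = Client(project='my-project')
    #   logger = client.logger('my-log')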
def _entry_from_resource(self, resource, loggers):
"""Detect correct entry type from resource and instantiate.
:type resource: dict
:param resource: one entry resource from API response
:type loggers: dict or None
:param loggers: A mapping of logger fullnames -> loggers. If not
passed, the entry will have a newly-created logger.
:rtype: One of:
:class:`google.cloud.logging.entries.TextEntry`,
:class:`google.cloud.logging.entries.StructEntry`,
:class:`google.cloud.logging.entries.ProtobufEntry`
:returns: the entry instance, constructed via the resource
"""
if 'textPayload' in resource:
return TextEntry.from_api_repr(resource, self, loggers)
elif 'jsonPayload' in resource:
return StructEntry.from_api_repr(resource, self, loggers)
elif 'protoPayload' in resource:
return ProtobufEntry.from_api_repr(resource, self, loggers)
raise ValueError('Cannot parse log entry resource')
def list_entries(self, projects=None, filter_=None, order_by=None,
page_size=None, page_token=None):
"""Return a page of log entries.
See:
https://cloud.google.com/logging/docs/api/ref_v2beta1/rest/v2beta1/entries/list
:type projects: list of strings
:param projects: project IDs to include. If not passed,
defaults to the project bound to the client.
:type filter_: str
:param filter_: a filter expression. See:
https://cloud.google.com/logging/docs/view/advanced_filters
:type order_by: str
:param order_by: One of :data:`~google.cloud.logging.ASCENDING`
or :data:`~google.cloud.logging.DESCENDING`.
:type page_size: int
:param page_size: maximum number of entries to return, If not passed,
defaults to a value set by the API.
:type page_token: str
:param page_token: opaque marker for the next "page" of entries. If not
passed, the API will return the first page of
entries.
:rtype: tuple, (list, str)
:returns: list of :class:`google.cloud.logging.entry.TextEntry`, plus a
"next page token" string: if not None, indicates that
more entries can be retrieved with another call (pass that
value as ``page_token``).
"""
if projects is None:
projects = [self.project]
resources, token = self.logging_api.list_entries(
projects=projects, filter_=filter_, order_by=order_by,
page_size=page_size, page_token=page_token)
loggers = {}
entries = [self._entry_from_resource(resource, loggers)
for resource in resources]
return entries, token
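    # A minimal pagination sketch (the filter string is illustrative only):
    #
    #   entries, token = client.list_entries(filter_='severity>=ERROR')
    #   while token is not None:
    #       more, token = client.list_entries(filter_='severity>=ERROR',
    #                                         page_token=token)
    #       entries.extend(more)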
def sink(self, name, filter_=None, destination=None):
"""Creates a sink bound to the current client.
:type name: str
:param name: the name of the sink to be constructed.
:type filter_: str
:param filter_: (optional) the advanced logs filter expression
defining the entries exported by the sink. If not
passed, the instance should already exist, to be
refreshed via :meth:`Sink.reload`.
:type destination: str
:param destination: destination URI for the entries exported by
the sink. If not passed, the instance should
already exist, to be refreshed via
:meth:`Sink.reload`.
:rtype: :class:`google.cloud.logging.sink.Sink`
:returns: Sink created with the current client.
"""
return Sink(name, filter_, destination, client=self)
def list_sinks(self, page_size=None, page_token=None):
"""List sinks for the project associated with this client.
See:
https://cloud.google.com/logging/docs/api/ref_v2beta1/rest/v2beta1/projects.sinks/list
:type page_size: int
:param page_size: maximum number of sinks to return, If not passed,
defaults to a value set by the API.
:type page_token: str
:param page_token: opaque marker for the next "page" of sinks. If not
passed, the API will return the first page of
sinks.
:rtype: tuple, (list, str)
:returns: list of :class:`google.cloud.logging.sink.Sink`, plus a
"next page token" string: if not None, indicates that
more sinks can be retrieved with another call (pass that
value as ``page_token``).
"""
resources, token = self.sinks_api.list_sinks(
self.project, page_size, page_token)
sinks = [Sink.from_api_repr(resource, self)
for resource in resources]
return sinks, token
def metric(self, name, filter_=None, description=''):
"""Creates a metric bound to the current client.
:type name: str
:param name: the name of the metric to be constructed.
:type filter_: str
:param filter_: the advanced logs filter expression defining the
entries tracked by the metric. If not
passed, the instance should already exist, to be
refreshed via :meth:`Metric.reload`.
:type description: str
:param description: the description of the metric to be constructed.
If not passed, the instance should already exist,
to be refreshed via :meth:`Metric.reload`.
:rtype: :class:`google.cloud.logging.metric.Metric`
:returns: Metric created with the current client.
"""
return Metric(name, filter_, client=self, description=description)
def list_metrics(self, page_size=None, page_token=None):
"""List metrics for the project associated with this client.
See:
https://cloud.google.com/logging/docs/api/ref_v2beta1/rest/v2beta1/projects.metrics/list
:type page_size: int
:param page_size: maximum number of metrics to return, If not passed,
defaults to a value set by the API.
:type page_token: str
:param page_token: opaque marker for the next "page" of metrics. If not
passed, the API will return the first page of
metrics.
:rtype: tuple, (list, str)
:returns: list of :class:`google.cloud.logging.metric.Metric`, plus a
"next page token" string: if not None, indicates that
more metrics can be retrieved with another call (pass that
value as ``page_token``).
"""
resources, token = self.metrics_api.list_metrics(
self.project, page_size, page_token)
metrics = [Metric.from_api_repr(resource, self)
for resource in resources]
return metrics, token
| [((2148, 2178), 'os.getenv', 'os.getenv', (['DISABLE_GRPC', '(False)'], {}), '(DISABLE_GRPC, False)\n', (2157, 2178), False, 'import os\n'), ((5172, 5197), 'google.cloud.logging.logger.Logger', 'Logger', (['name'], {'client': 'self'}), '(name, client=self)\n', (5178, 5197), False, 'from google.cloud.logging.logger import Logger\n'), ((9133, 9178), 'google.cloud.logging.sink.Sink', 'Sink', (['name', 'filter_', 'destination'], {'client': 'self'}), '(name, filter_, destination, client=self)\n', (9137, 9178), False, 'from google.cloud.logging.sink import Sink\n'), ((11246, 11305), 'google.cloud.logging.metric.Metric', 'Metric', (['name', 'filter_'], {'client': 'self', 'description': 'description'}), '(name, filter_, client=self, description=description)\n', (11252, 11305), False, 'from google.cloud.logging.metric import Metric\n'), ((5962, 6010), 'google.cloud.logging.entries.TextEntry.from_api_repr', 'TextEntry.from_api_repr', (['resource', 'self', 'loggers'], {}), '(resource, self, loggers)\n', (5985, 6010), False, 'from google.cloud.logging.entries import TextEntry\n'), ((10233, 10267), 'google.cloud.logging.sink.Sink.from_api_repr', 'Sink.from_api_repr', (['resource', 'self'], {}), '(resource, self)\n', (10251, 10267), False, 'from google.cloud.logging.sink import Sink\n'), ((12384, 12420), 'google.cloud.logging.metric.Metric.from_api_repr', 'Metric.from_api_repr', (['resource', 'self'], {}), '(resource, self)\n', (12404, 12420), False, 'from google.cloud.logging.metric import Metric\n'), ((3660, 3681), 'google.cloud.gapic.logging.v2.logging_service_v2_api.LoggingServiceV2Api', 'GeneratedLoggingAPI', ([], {}), '()\n', (3679, 3681), True, 'from google.cloud.gapic.logging.v2.logging_service_v2_api import LoggingServiceV2Api as GeneratedLoggingAPI\n'), ((3718, 3742), 'google.cloud.logging._gax._LoggingAPI', 'GAXLoggingAPI', (['generated'], {}), '(generated)\n', (3731, 3742), True, 'from google.cloud.logging._gax import _LoggingAPI as GAXLoggingAPI\n'), ((3797, 3828), 'google.cloud.logging.connection._LoggingAPI', 'JSONLoggingAPI', (['self.connection'], {}), '(self.connection)\n', (3811, 3828), True, 'from google.cloud.logging.connection import _LoggingAPI as JSONLoggingAPI\n'), ((4157, 4176), 'google.cloud.gapic.logging.v2.config_service_v2_api.ConfigServiceV2Api', 'GeneratedSinksAPI', ([], {}), '()\n', (4174, 4176), True, 'from google.cloud.gapic.logging.v2.config_service_v2_api import ConfigServiceV2Api as GeneratedSinksAPI\n'), ((4211, 4233), 'google.cloud.logging._gax._SinksAPI', 'GAXSinksAPI', (['generated'], {}), '(generated)\n', (4222, 4233), True, 'from google.cloud.logging._gax import _SinksAPI as GAXSinksAPI\n'), ((4286, 4315), 'google.cloud.logging.connection._SinksAPI', 'JSONSinksAPI', (['self.connection'], {}), '(self.connection)\n', (4298, 4315), True, 'from google.cloud.logging.connection import _SinksAPI as JSONSinksAPI\n'), ((4650, 4671), 'google.cloud.gapic.logging.v2.metrics_service_v2_api.MetricsServiceV2Api', 'GeneratedMetricsAPI', ([], {}), '()\n', (4669, 4671), True, 'from google.cloud.gapic.logging.v2.metrics_service_v2_api import MetricsServiceV2Api as GeneratedMetricsAPI\n'), ((4708, 4732), 'google.cloud.logging._gax._MetricsAPI', 'GAXMetricsAPI', (['generated'], {}), '(generated)\n', (4721, 4732), True, 'from google.cloud.logging._gax import _MetricsAPI as GAXMetricsAPI\n'), ((4787, 4818), 'google.cloud.logging.connection._MetricsAPI', 'JSONMetricsAPI', (['self.connection'], {}), '(self.connection)\n', (4801, 4818), True, 'from google.cloud.logging.connection 
import _MetricsAPI as JSONMetricsAPI\n'), ((6070, 6120), 'google.cloud.logging.entries.StructEntry.from_api_repr', 'StructEntry.from_api_repr', (['resource', 'self', 'loggers'], {}), '(resource, self, loggers)\n', (6095, 6120), False, 'from google.cloud.logging.entries import StructEntry\n'), ((6181, 6233), 'google.cloud.logging.entries.ProtobufEntry.from_api_repr', 'ProtobufEntry.from_api_repr', (['resource', 'self', 'loggers'], {}), '(resource, self, loggers)\n', (6208, 6233), False, 'from google.cloud.logging.entries import ProtobufEntry\n')] |
wwwidonja/changed_plotly | tests/test_core/test_graph_objs/test_instantiate_hierarchy.py | 1bda35a438539a97c84a3ab3952e95e8848467bd | from __future__ import absolute_import
from unittest import TestCase
import os
import importlib
import inspect
from plotly.basedatatypes import BasePlotlyType, BaseFigure
datatypes_root = "new_plotly/graph_objs"
datatype_modules = [
dirpath.replace("/", ".")
for dirpath, _, _ in os.walk(datatypes_root)
if not dirpath.endswith("__pycache__")
]
class HierarchyTest(TestCase):
def test_construct_datatypes(self):
for datatypes_module in datatype_modules:
module = importlib.import_module(datatypes_module)
for name in getattr(module, "__all__", []):
if name.startswith("_") or name[0].islower() or name == "FigureWidget":
continue
obj = getattr(module, name)
try:
v = obj()
except Exception:
print(
"Failed to construct {obj} in module {module}".format(
obj=obj, module=datatypes_module
)
)
raise
if obj.__module__ == "new_plotly.graph_objs._deprecations":
self.assertTrue(isinstance(v, list) or isinstance(v, dict))
obj()
elif name in ("Figure", "FigureWidget"):
self.assertIsInstance(v, BaseFigure)
else:
self.assertIsInstance(v, BasePlotlyType)
| [((290, 313), 'os.walk', 'os.walk', (['datatypes_root'], {}), '(datatypes_root)\n', (297, 313), False, 'import os\n'), ((503, 544), 'importlib.import_module', 'importlib.import_module', (['datatypes_module'], {}), '(datatypes_module)\n', (526, 544), False, 'import importlib\n')] |
lyrl/mycli | mycli/packages/special/main.py | d62eefdc819a11ecdb97d93dd7ad1922d28a3795 | import logging
from collections import namedtuple
from . import export
log = logging.getLogger(__name__)
NO_QUERY = 0
PARSED_QUERY = 1
RAW_QUERY = 2
SpecialCommand = namedtuple('SpecialCommand',
['handler', 'command', 'shortcut', 'description', 'arg_type', 'hidden',
'case_sensitive'])
COMMANDS = {}
@export
class CommandNotFound(Exception):
pass
@export
def parse_special_command(sql):
command, _, arg = sql.partition(' ')
verbose = '+' in command
command = command.strip().replace('+', '')
return (command, verbose, arg.strip())
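# Worked example of the parsing above:
#   parse_special_command(r'\d+ mytable') -> (r'\d', True, 'mytable')
# ('+' marks the verbose variant and is stripped from the command name).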
@export
def special_command(command, shortcut, description, arg_type=PARSED_QUERY,
hidden=False, case_sensitive=False, aliases=()):
def wrapper(wrapped):
register_special_command(wrapped, command, shortcut, description,
arg_type, hidden, case_sensitive, aliases)
return wrapped
return wrapper
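# A minimal sketch of how the decorator is used to register a command (the
# command below is hypothetical, not part of mycli):
#
#   @special_command('\\up', '\\up', 'Uppercase the argument.',
#                    arg_type=PARSED_QUERY)
#   def uppercase(cur, arg, verbose):
#       return [(None, None, None, arg.upper())]
#
# After registration, execute(cur, '\\up hello') dispatches to it with
# arg='hello' and verbose=False, and show_help() lists it.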
@export
def register_special_command(handler, command, shortcut, description,
arg_type=PARSED_QUERY, hidden=False, case_sensitive=False, aliases=()):
cmd = command.lower() if not case_sensitive else command
COMMANDS[cmd] = SpecialCommand(handler, command, shortcut, description,
arg_type, hidden, case_sensitive)
for alias in aliases:
cmd = alias.lower() if not case_sensitive else alias
COMMANDS[cmd] = SpecialCommand(handler, command, shortcut, description,
arg_type, case_sensitive=case_sensitive,
hidden=True)
@export
def execute(cur, sql):
"""Execute a special command and return the results. If the special command
is not supported a KeyError will be raised.
"""
command, verbose, arg = parse_special_command(sql)
if (command not in COMMANDS) and (command.lower() not in COMMANDS):
raise CommandNotFound
try:
special_cmd = COMMANDS[command]
except KeyError:
special_cmd = COMMANDS[command.lower()]
if special_cmd.case_sensitive:
raise CommandNotFound('Command not found: %s' % command)
# "help <SQL KEYWORD> is a special case. We want built-in help, not
# mycli help here.
if command == 'help' and arg:
return show_keyword_help(cur=cur, arg=arg)
if special_cmd.arg_type == NO_QUERY:
return special_cmd.handler()
elif special_cmd.arg_type == PARSED_QUERY:
return special_cmd.handler(cur=cur, arg=arg, verbose=verbose)
elif special_cmd.arg_type == RAW_QUERY:
return special_cmd.handler(cur=cur, query=sql)
@special_command('help', '\\?', 'Show this help.', arg_type=NO_QUERY, aliases=('\\?', '?'))
def show_help(): # All the parameters are ignored.
headers = ['Command', 'Shortcut', 'Description']
result = []
for _, value in sorted(COMMANDS.items()):
if not value.hidden:
result.append((value.command, value.shortcut, value.description))
return [(None, result, headers, None)]
def show_keyword_help(cur, arg):
"""
Call the built-in "show <command>", to display help for an SQL keyword.
:param cur: cursor
:param arg: string
:return: list
"""
keyword = arg.strip('"').strip("'")
query = "help '{0}'".format(keyword)
log.debug(query)
cur.execute(query)
if cur.description and cur.rowcount > 0:
headers = [x[0] for x in cur.description]
return [(None, cur, headers, '')]
else:
return [(None, None, None, 'No help found for {0}.'.format(keyword))]
@special_command('exit', '\\q', 'Exit.', arg_type=NO_QUERY, aliases=('\\q', ))
@special_command('quit', '\\q', 'Quit.', arg_type=NO_QUERY)
def quit(*_args):
raise EOFError
@special_command('\\e', '\\e', 'Edit command with editor (uses $EDITOR).',
arg_type=NO_QUERY, case_sensitive=True)
@special_command('\\clip', '\\clip', 'Copy query to the system clipboard.',
arg_type=NO_QUERY, case_sensitive=True)
@special_command('\\G', '\\G', 'Display current query results vertically.',
arg_type=NO_QUERY, case_sensitive=True)
def stub():
raise NotImplementedError
| [((79, 106), 'logging.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (96, 106), False, 'import logging\n'), ((170, 293), 'collections.namedtuple', 'namedtuple', (['"""SpecialCommand"""', "['handler', 'command', 'shortcut', 'description', 'arg_type', 'hidden',\n 'case_sensitive']"], {}), "('SpecialCommand', ['handler', 'command', 'shortcut',\n 'description', 'arg_type', 'hidden', 'case_sensitive'])\n", (180, 293), False, 'from collections import namedtuple\n')] |
ShreyasTheOne/Super-Duper-Fuzzer | core/sample_fuzzer/data_generators/base.py | b667e2dca3e49a370634ad4b0bd826aca06136b7 | from abc import ABC, abstractmethod
class BaseDataGenerator(ABC):
def __init__(self):
pass
@staticmethod
@abstractmethod
def generate(cls):
pass
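# A minimal sketch of a concrete generator (hypothetical, not part of this
# repo), mirroring the staticmethod signature declared above:
#
#   import os
#
#   class RandomBytesGenerator(BaseDataGenerator):
#       @staticmethod
#       def generate(cls):
#           # return a fixed-size chunk of random bytes as the sample payload
#           return os.urandom(64)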
| [] |
TungTNg/itc110_python | Mon_08_06/convert2.py | 589ca1398f26d39b05a0b798100df0b05e556e3c | # convert2.py
# A program to convert Celsius temps to Fahrenheit.
# This version issues heat and cold warnings.
def main():
celsius = float(input("What is the Celsius temperature? "))
fahrenheit = 9 / 5 * celsius + 32
print("The temperature is", fahrenheit, "degrees fahrenheit.")
if fahrenheit >= 90:
print("It's really hot out there, be careful!")
if fahrenheit <= 30:
print("Brrrrr. Be sure to dress warmly")
main() | [] |
basicpail/core | homeassistant/components/wolflink/__init__.py | 5cc54618c5af3f75c08314bf2375cc7ac40d2b7e | """The Wolf SmartSet Service integration."""
from datetime import timedelta
import logging
from httpx import ConnectError, ConnectTimeout
from wolf_smartset.token_auth import InvalidAuth
from wolf_smartset.wolf_client import FetchFailed, ParameterReadError, WolfClient
from homeassistant.config_entries import ConfigEntry
from homeassistant.const import CONF_PASSWORD, CONF_USERNAME
from homeassistant.core import HomeAssistant
from homeassistant.exceptions import ConfigEntryNotReady
from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed
from .const import (
COORDINATOR,
DEVICE_GATEWAY,
DEVICE_ID,
DEVICE_NAME,
DOMAIN,
PARAMETERS,
)
_LOGGER = logging.getLogger(__name__)
PLATFORMS = ["sensor"]
async def async_setup_entry(hass: HomeAssistant, entry: ConfigEntry) -> bool:
"""Set up Wolf SmartSet Service from a config entry."""
username = entry.data[CONF_USERNAME]
password = entry.data[CONF_PASSWORD]
device_name = entry.data[DEVICE_NAME]
device_id = entry.data[DEVICE_ID]
gateway_id = entry.data[DEVICE_GATEWAY]
refetch_parameters = False
_LOGGER.debug(
"Setting up wolflink integration for device: %s (ID: %s, gateway: %s)",
device_name,
device_id,
gateway_id,
)
wolf_client = WolfClient(username, password)
parameters = await fetch_parameters_init(wolf_client, gateway_id, device_id)
async def async_update_data():
"""Update all stored entities for Wolf SmartSet."""
try:
nonlocal refetch_parameters
nonlocal parameters
await wolf_client.update_session()
if not wolf_client.fetch_system_state_list(device_id, gateway_id):
refetch_parameters = True
raise UpdateFailed(
"Could not fetch values from server because device is Offline."
)
if refetch_parameters:
parameters = await fetch_parameters(wolf_client, gateway_id, device_id)
hass.data[DOMAIN][entry.entry_id][PARAMETERS] = parameters
refetch_parameters = False
values = {
v.value_id: v.value
for v in await wolf_client.fetch_value(
gateway_id, device_id, parameters
)
}
return {
parameter.parameter_id: (
parameter.value_id,
values[parameter.value_id],
)
for parameter in parameters
if parameter.value_id in values
}
except ConnectError as exception:
raise UpdateFailed(
f"Error communicating with API: {exception}"
) from exception
except FetchFailed as exception:
raise UpdateFailed(
f"Could not fetch values from server due to: {exception}"
) from exception
except ParameterReadError as exception:
refetch_parameters = True
raise UpdateFailed(
"Could not fetch values for parameter. Refreshing value IDs."
) from exception
except InvalidAuth as exception:
raise UpdateFailed("Invalid authentication during update.") from exception
coordinator = DataUpdateCoordinator(
hass,
_LOGGER,
name=DOMAIN,
update_method=async_update_data,
update_interval=timedelta(minutes=1),
)
await coordinator.async_refresh()
hass.data.setdefault(DOMAIN, {})
hass.data[DOMAIN][entry.entry_id] = {}
hass.data[DOMAIN][entry.entry_id][PARAMETERS] = parameters
hass.data[DOMAIN][entry.entry_id][COORDINATOR] = coordinator
hass.data[DOMAIN][entry.entry_id][DEVICE_ID] = device_id
hass.config_entries.async_setup_platforms(entry, PLATFORMS)
return True
async def async_unload_entry(hass: HomeAssistant, entry: ConfigEntry):
"""Unload a config entry."""
unload_ok = await hass.config_entries.async_unload_platforms(entry, PLATFORMS)
if unload_ok:
hass.data[DOMAIN].pop(entry.entry_id)
return unload_ok
async def fetch_parameters(client: WolfClient, gateway_id: int, device_id: int):
"""
    Fetch all available parameters using WolfClient.
    The Reglertyp entity is removed by default because the API does not provide a value for it.
"""
fetched_parameters = await client.fetch_parameters(gateway_id, device_id)
return [param for param in fetched_parameters if param.name != "Reglertyp"]
async def fetch_parameters_init(client: WolfClient, gateway_id: int, device_id: int):
"""Fetch all available parameters with usage of WolfClient but handles all exceptions and results in ConfigEntryNotReady."""
try:
return await fetch_parameters(client, gateway_id, device_id)
except (ConnectError, ConnectTimeout, FetchFailed) as exception:
raise ConfigEntryNotReady(
f"Error communicating with API: {exception}"
) from exception
| [((708, 735), 'logging.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (725, 735), False, 'import logging\n'), ((1321, 1351), 'wolf_smartset.wolf_client.WolfClient', 'WolfClient', (['username', 'password'], {}), '(username, password)\n', (1331, 1351), False, 'from wolf_smartset.wolf_client import FetchFailed, ParameterReadError, WolfClient\n'), ((3490, 3510), 'datetime.timedelta', 'timedelta', ([], {'minutes': '(1)'}), '(minutes=1)\n', (3499, 3510), False, 'from datetime import timedelta\n'), ((4979, 5044), 'homeassistant.exceptions.ConfigEntryNotReady', 'ConfigEntryNotReady', (['f"""Error communicating with API: {exception}"""'], {}), "(f'Error communicating with API: {exception}')\n", (4998, 5044), False, 'from homeassistant.exceptions import ConfigEntryNotReady\n'), ((1805, 1882), 'homeassistant.helpers.update_coordinator.UpdateFailed', 'UpdateFailed', (['"""Could not fetch values from server because device is Offline."""'], {}), "('Could not fetch values from server because device is Offline.')\n", (1817, 1882), False, 'from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed\n'), ((2698, 2756), 'homeassistant.helpers.update_coordinator.UpdateFailed', 'UpdateFailed', (['f"""Error communicating with API: {exception}"""'], {}), "(f'Error communicating with API: {exception}')\n", (2710, 2756), False, 'from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed\n'), ((2861, 2932), 'homeassistant.helpers.update_coordinator.UpdateFailed', 'UpdateFailed', (['f"""Could not fetch values from server due to: {exception}"""'], {}), "(f'Could not fetch values from server due to: {exception}')\n", (2873, 2932), False, 'from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed\n'), ((3082, 3157), 'homeassistant.helpers.update_coordinator.UpdateFailed', 'UpdateFailed', (['"""Could not fetch values for parameter. Refreshing value IDs."""'], {}), "('Could not fetch values for parameter. Refreshing value IDs.')\n", (3094, 3157), False, 'from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed\n'), ((3262, 3315), 'homeassistant.helpers.update_coordinator.UpdateFailed', 'UpdateFailed', (['"""Invalid authentication during update."""'], {}), "('Invalid authentication during update.')\n", (3274, 3315), False, 'from homeassistant.helpers.update_coordinator import DataUpdateCoordinator, UpdateFailed\n')] |
chunribu/python-algorithms | src/levenshtein_distance.py | 0483df09b5b4f93bd96712d78e3ad34bcb7e57cc | class LevenshteinDistance:
def solve(self, str_a, str_b):
a, b = str_a, str_b
dist = {(x,y):0 for x in range(len(a)) for y in range(len(b))}
for x in range(len(a)): dist[(x,-1)] = x+1
for y in range(len(b)): dist[(-1,y)] = y+1
dist[(-1,-1)] = 0
for i in range(len(a)):
for j in range(len(b)):
need_edit = a[i]!=b[j]
last_edits = min(dist[(i,j-1)], dist[(i-1,j)], dist[(i-1,j-1)])
dist[(i,j)] = last_edits + int(need_edit)
        self.distance = dist
        self.a, self.b = a, b  # kept so show() can print the full matrix
        return dist[(len(a) - 1, len(b) - 1)]
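    # Worked example: solve('kitten', 'sitting') returns 3
    # (substitute k->s, substitute e->i, append g), which is the distance the
    # test at the bottom of this file computes.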
def show(self):
if hasattr(self, 'distance'):
dist = self.distance
            a, b = self.a, self.b  # strings stored by the last solve() call
            for x in range(-1, len(a)):
row = []
for y in range(-1, len(b)):
row.append(dist[(x,y)])
print(row)
# test
ld = LevenshteinDistance()
ld.solve('kitten','sitting')
ld.show() | [] |
ConnectedSystems/pyapprox | pyapprox/benchmarks/test_spectral_diffusion.py | 4f405654c707cba83d211f327c0f0fdbc95efa29 | import numpy as np
import unittest
from pyapprox.benchmarks.spectral_diffusion import (
kronecker_product_2d, chebyshev_derivative_matrix,
SteadyStateDiffusionEquation2D, SteadyStateDiffusionEquation1D
)
from pyapprox.univariate_polynomials.quadrature import gauss_jacobi_pts_wts_1D
import pyapprox as pya
class TestSpectralDiffusion2D(unittest.TestCase):
def setUp(self):
np.random.seed(1)
self.eps = 2 * np.finfo(np.float).eps
def test_derivative_matrix(self):
order = 4
model = SteadyStateDiffusionEquation1D()
bndry_cond = [0., 0.0]
xlim = [-1, 1]
model.initialize(order, bndry_cond, xlim)
derivative_matrix = model.get_derivative_matrix()
true_matrix = \
[[5.5, -6.82842712, 2., -1.17157288, 0.5],
[1.70710678, -0.70710678, -1.41421356, 0.70710678, -0.29289322],
[-0.5, 1.41421356, -0., -1.41421356, 0.5],
[0.29289322, -0.70710678, 1.41421356, 0.70710678, -1.70710678],
[-0.5, 1.17157288, -2., 6.82842712, -5.5]]
        # The points (and derivatives) are returned in the reverse order of the
        # ones used by the Matlab cheb function, so the derivative matrix
        # returned here is the negative of the Matlab version
assert np.allclose(-derivative_matrix, true_matrix)
def test_homogeneous_possion_equation(self):
"""
solve u(x)'' = 0, u(0) = 0, u(1) = 0.5
"""
order = 4
model = SteadyStateDiffusionEquation1D()
bndry_cond = [0.0, 0.5]
xlim = [0, 1]
model.initialize(order, bndry_cond, xlim)
mesh_pts = model.get_collocation_points()
diff_vals = 0*mesh_pts.squeeze()+1
forcing_vals = 0*mesh_pts.squeeze()
solution = model.solve(diff_vals, forcing_vals)
def exact_sol(x): return 0.5*x
assert np.linalg.norm(exact_sol(mesh_pts.squeeze())-solution) < 20*self.eps
def test_inhomogeneous_possion_equation(self):
"""
solve u(x)'' = -1, u(0) = 0, u(1) = 1
solution u(x) = -0.5*(x-3.)*x
"""
order = 4
model = SteadyStateDiffusionEquation1D()
bndry_cond = [0.0, 1.0]
xlim = [0, 1]
model.initialize(order, bndry_cond, xlim)
mesh_pts = model.get_collocation_points()
diff_vals = 0*mesh_pts.squeeze()+1
forcing_vals = 0*mesh_pts.squeeze()-1
solution = model.solve(diff_vals, forcing_vals)
def exact_sol(x): return -0.5*(x-3.)*x
assert np.linalg.norm(
exact_sol(mesh_pts.squeeze())-solution) < 30*self.eps
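    # Sanity check of the quoted exact solution: u(x) = -0.5*(x - 3)*x gives
    # u'(x) = -x + 1.5 and u''(x) = -1, with u(0) = 0 and u(1) = 1, matching
    # the forcing and boundary conditions used above.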
def test_inhomogeneous_diffusion_equation_with_variable_coefficient(self):
"""
solve ((1+x)*u(x)')' = -1, u(0) = 0, u(1) = 0
solution u(x) = log(x+1)/log(2) - x
"""
order = 20
model = SteadyStateDiffusionEquation1D()
bndry_cond = [0.0, 0.0]
xlim = [0, 1]
model.initialize(order, bndry_cond, xlim)
mesh_pts = model.get_collocation_points()
def diffusivity_function(x): return x + 1
diff_vals = diffusivity_function(mesh_pts.squeeze())
forcing_vals = 0*mesh_pts.squeeze()-1
solution = model.solve(diff_vals, forcing_vals)
def exact_sol(x): return np.log(x+1.) / np.log(2.) - x
assert np.linalg.norm(exact_sol(mesh_pts.squeeze())-solution) < 3e-13
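    # Sanity check of the quoted exact solution: for u(x) = log(x+1)/log(2) - x,
    # (1+x)*u'(x) = 1/log(2) - (1+x), whose derivative is -1, and
    # u(0) = u(1) = 0, matching the forcing and boundary conditions above.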
def test_integrate_1d(self):
order = 4
model = SteadyStateDiffusionEquation1D()
bndry_cond = [0.0, 0.0]
xlim = [0, 1]
model.initialize(order, bndry_cond, xlim)
mesh_pts = model.get_collocation_points()
assert np.allclose(model.integrate(mesh_pts.T**2), 1./3.)
assert np.allclose(model.integrate(mesh_pts.T**3), 1./4.)
order = 4
model = SteadyStateDiffusionEquation1D()
bndry_cond = [0.0, 0.0]
xlim = [-1, 1]
model.initialize(order, bndry_cond, xlim)
mesh_pts = model.get_collocation_points()
assert np.allclose(model.integrate(mesh_pts.T**2), 2./3.)
assert np.allclose(model.integrate(mesh_pts.T**3), 0.)
def test_evaluate(self):
"""
for the PDE ((1+z*x)*u(x)')' = -1, u(0) = 0, u(1) = 0
use model.evaluate to extract QoI
"""
order = 20
model = SteadyStateDiffusionEquation1D()
bndry_cond = [0.0, 0.0]
xlim = [0, 1]
model.initialize(order, bndry_cond, xlim)
model.diffusivity_function = lambda x, z: z*x + 1.
model.forcing_function = lambda x, z: 0*x-1
qoi_coords = np.array([0.05, 0.5, 0.95])
model.qoi_functional = lambda x: model.interpolate(x, qoi_coords)[:, 0]
sample = np.ones((1, 1), float)
qoi = model(sample)
assert np.allclose(np.log(qoi_coords+1.)/np.log(2.)-qoi_coords, qoi)
sample = 0.5*np.ones((1, 1), float)
qoi = model(sample)
assert np.allclose(
-(qoi_coords*np.log(9./4.)-2.*np.log(qoi_coords+2.) +
np.log(4.))/np.log(3./2.), qoi)
def test_evaluate_gradient_1d(self):
"""
        for the PDE ((1+sum(z^2)*x)*u(x)')' = -2, u(0) = 0, u(1) = 0
use model.evaluate_gradient to evaluate the gradient of the QoI
with respect to the random parameter vector z.
The QoI is the intergral of the solution over the entire domain
The adjoint rhs is then just 1.
"""
order = 20
model = SteadyStateDiffusionEquation1D()
bndry_cond = [0.0, 0.0]
xlim = [0, 1]
model.initialize(order, bndry_cond, xlim)
model.diffusivity_function = lambda x, z: (z[0]**2+z[1]**2)*x + 1.
model.forcing_function = lambda x, z: 0*x-2
sample = np.random.RandomState(2).uniform(-1, 1, (2, 1))
model.diffusivity_derivs_function = \
lambda x, z, i: np.array([2.*x*z[i]]).T
model.forcing_derivs_function = \
lambda x, z, i: np.array([0.*x]).T
model(sample)
# evaluate_gradient has to be called before any more calls to
# model.solve with different parameters, because we need to
# access self.fwd_solution, which will change with any subsuquent calls
errors = pya.check_gradients(
model, lambda x: model.evaluate_gradient(x[:, 0]), sample)
errors = errors[np.isfinite(errors)]
assert errors.max() > 0.1 and errors.min() <= 6e-7
@unittest.skip("Not fully implemented")
def test_compute_error_estimate(self):
"""
for the PDE ((1+z*x)*u(x)')' = -1, u(0) = 0, u(1) = 0
use model.compute_error_estomate to compute an error estimate of
the deterministic error in the foward solution.
The QoI is the intergral of the solution over the entire domain
The adjoint rhs is then just 1.
"""
order = 5
model = SteadyStateDiffusionEquation1D()
bndry_cond = [0.0, 0.0]
xlim = [0, 1]
model.initialize(order, bndry_cond, xlim)
model.diffusivity_function = lambda x, z: z[0]*x + 1.
model.forcing_function = lambda x, z: 0.*x-1.
sample = np.ones((1, 1), float)
qoi = model(sample)
error_estimate = model.compute_error_estimate(sample)
solution = model.run(sample[:, 0])
def exact_solution(x): return np.log(x+1.)/np.log(2.)-x
gl_pts, gl_wts = gauss_jacobi_pts_wts_1D(50, 0, 0)
x_range = model.xlim[1]-model.xlim[0]
gl_pts = x_range*(gl_pts+1.)/2.+model.xlim[0]
gl_wts *= x_range
gl_vals = exact_solution(gl_pts)
exact_qoi = np.dot(gl_vals, gl_wts)
exact_error = abs(exact_qoi-qoi)
print('err estimate', error_estimate)
print('exact err', exact_error)
print('effectivity ratio', error_estimate / exact_error)
# should be very close to 1. As adjoint order is increased
# it will converge to 1
sample = 0.5*np.ones((1), float)
qoi = model.evaluate(sample)
exact_solution = -(model.mesh_pts*np.log(9./4.) -
2.*np.log(model.mesh_pts+2.) +
np.log(4.))/np.log(3./2.)
exact_qoi = model.qoi_functional(exact_solution)
error = abs(exact_qoi-qoi)
error_estimate = model.compute_error_estimate(sample)
print(error_estimate, error)
# print model.integrate( (exact_solution - solution )**2 )
assert np.allclose(error_estimate, error)
def test_timestepping_without_forcing(self):
r"""
solve u_t(x,t) = u_xx(x,t), u(-1,t) = 0, u(1,t) = 0,
u(x,0) = \sin(\pi*x)
Exact solution
u(x,t) = \exp(-\pi^2t)*sin(\pi*x)
"""
order = 16
model = SteadyStateDiffusionEquation1D()
bndry_cond = [0.0, 0.0]
xlim = [-1, 1]
model.initialize(order, bndry_cond, xlim)
model.diffusivity_function = lambda x, z: 0*x + 1.
model.forcing_function = lambda x, t, z: 0*x
sample = np.ones((1), float) # dummy argument for this example
model.num_time_steps = 1000
model.initial_sol = np.sin(np.pi*model.mesh_pts)
model.time_step_size = 1e-4
model.time_step_method = 'adams-moulton-3'
# model.time_step_method = 'crank-nicholson'
model.time_step_method = 'backward-euler'
model.num_stored_timesteps = 100
solution = model.transient_solve(sample)
def exact_sol(x, t): return np.exp(-np.pi**2*t)*np.sin(np.pi*x)
test_mesh_pts = np.linspace(xlim[0], xlim[1], 100)
plot = False # True
for i, t in enumerate(model.times):
if plot:
exact_sol_t = exact_sol(test_mesh_pts, t)
model_sol_t = model.interpolate(solution[:, i], test_mesh_pts)
pya.plt.plot(test_mesh_pts, model_sol_t, 'k',
label='collocation', linewidth=2)
pya.plt.plot(test_mesh_pts, exact_sol_t,
'r--', label='exact', linewidth=2)
pya.plt.legend(loc=0)
pya.plt.title('$t=%1.2f$' % t)
pya.plt.show()
L2_error = np.sqrt(model.integrate(
(exact_sol(model.mesh_pts, t)-solution[:, i])**2))
factor = np.sqrt(
model.integrate(exact_sol(model.mesh_pts, t)**2))
# print L2_error, 1e-3*factor
assert L2_error < 1e-3*factor
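    # Sanity check of the exact solution used above: u(x,t) = exp(-pi^2 t)*sin(pi x)
    # satisfies u_t = -pi^2 u = u_xx, vanishes at x = -1 and x = 1, and reduces
    # to the initial condition sin(pi x) at t = 0.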
def test_timestepping_with_time_independent_forcing(self):
r"""
solve u_t(x,t) = u_xx(x,t)+sin(3\pi x), u(0,t) = 0, u(1,t) = 0,
u(x,0) = 5\sin(2\pi x)+2\sin(3\pi x)
Exact solution
u(x,t) = 5\exp(-4\pi^2t)*sin(2\pi*x)+(2\exp(-9\pi^2t)+(1-\exp(-9\pi^2t))/(9\pi^2))*\sin(3\pi x)
"""
order = 32
model = SteadyStateDiffusionEquation1D()
bndry_cond = [0.0, 0.0]
xlim = [0, 1]
model.initialize(order, bndry_cond, xlim)
model.diffusivity_function = lambda x, z: 0*x + 1.
model.forcing_function = lambda x, t, z: np.sin(3*np.pi*x)
sample = np.ones((1), float) # dummy argument for this example
model.num_time_steps = 10000
model.initial_sol = 5*np.sin(2*np.pi*model.mesh_pts) + \
2*np.sin(3*np.pi*model.mesh_pts)
model.time_step_size = 1e-4
# model.time_step_method = 'adams-moulton-3'
model.time_step_method = 'crank-nicholson'
# model.time_step_method = 'backward-euler'
model.num_stored_timesteps = 100
solution = model.transient_solve(sample)
def exact_sol(x, t): return 5.*np.exp(-4.*np.pi**2*t)*np.sin(2.*np.pi*x) + \
(2.*np.exp(-9.*np.pi**2*t)+(1.-np.exp(-9.*np.pi**2*t))/(9.*np.pi**2))*np.sin(3.*np.pi*x)
# test_mesh_pts = np.linspace(xlim[0], xlim[1], 100)
for i, t in enumerate(model.times):
# exact_sol_t = exact_sol(test_mesh_pts,t)
# model_sol_t = model.interpolate(solution[:,i],test_mesh_pts)
# pya.plt.plot(test_mesh_pts,model_sol_t,'k',label='collocation',linewidth=2)
# pya.plt.plot(test_mesh_pts,exact_sol_t,'r--',label='exact',linewidth=2)
# pya.plt.legend(loc=0)
# pya.plt.title('$t=%1.2f$'%t)
# pya.plt.show()
L2_error = np.sqrt(model.integrate(
(exact_sol(model.mesh_pts, t)-solution[:, i])**2))
factor = np.sqrt(
model.integrate(exact_sol(model.mesh_pts, t)**2))
# print(L2_error, 1e-4*factor)
assert L2_error < 1e-4*factor
def test_timestepping_with_time_dependent_forcing(self):
r"""
solve u_t(x,t) = u_xx(x,t)+np.sin(3\pi x)*np.sin(t), u(0,t) = 0, u(1,t) = 0,
u(x,0) = 5sin(2\pi x)+2sin(3\pi x)
Exact solution
u(x,t) = 5\exp(-4\pi^2t)*np.sin(2\pi*x)+(2\exp(-9\pi^2t)+\exp(-9\pi^2t)(9\pi^2sin(t)-cos(t)+\exp(-9\pi^2t))/(1+81\pi^4))*sin(3\pi x)
"""
order = 32
model = SteadyStateDiffusionEquation1D()
bndry_cond = [0.0, 0.0]
xlim = [0, 1]
model.initialize(order, bndry_cond, xlim)
model.diffusivity_function = lambda x, z: 0*x + 1.
model.forcing_function = lambda x, t, z: np.sin(3*np.pi*x)*np.sin(t)
sample = np.ones((1), float) # dummy argument for this example
model.num_time_steps = int(1e4)
model.initial_sol = 5*np.sin(2*np.pi*model.mesh_pts) + \
2*np.sin(3*np.pi*model.mesh_pts)
model.time_step_size = 1e-4
model.num_stored_timesteps = 100
# model.time_step_method = 'adams-moulton-3'
model.time_step_method = 'crank-nicholson'
# model.time_step_method = 'backward-euler'
# model.time_step_method = 'RK4'
solution = model.transient_solve(sample)
def exact_sol(x, t): return 5.*np.exp(
-4.*np.pi**2*t)*np.sin(2.*np.pi*x)+(
2.*np.exp(-9.*np.pi**2*t)+(
9.*np.pi**2*np.sin(t)-np.cos(t) +
np.exp(-9.*np.pi**2*t))/(1+81.*np.pi**4))*np.sin(
3.*np.pi*x)
test_mesh_pts = np.linspace(xlim[0], xlim[1], 100)
plot = False
for i, t in enumerate(model.times):
if plot:
exact_sol_t = exact_sol(test_mesh_pts, t)
model_sol_t = model.interpolate(solution[:, i], test_mesh_pts)
pya.plt.plot(test_mesh_pts, model_sol_t, 'k',
label='collocation', linewidth=2)
pya.plt.plot(test_mesh_pts, exact_sol_t, 'r--', label='exact',
linewidth=2)
pya.plt.legend(loc=0)
pya.plt.title('$t=%1.3f$' % t)
pya.plt.show()
L2_error = np.sqrt(model.integrate(
(exact_sol(model.mesh_pts, t)-solution[:, i])**2))
factor = np.sqrt(
model.integrate(exact_sol(model.mesh_pts, t)**2))
# print(L2_error, 1e-4*factor)
assert L2_error < 1e-4*factor
# print('time %1.2e: L2 error %1.2e' % (t, L2_error))
def test_convergence(self):
        # order = 8   # L2 error ~1e-5
        # order = 16  # L2 error ~1e-11
        order = 20  # L2 error ~2e-15
model = SteadyStateDiffusionEquation1D()
bndry_cond = [0.0, 0.0]
xlim = [0, 1]
model.initialize(order, bndry_cond, xlim)
model.diffusivity_function = lambda x, z: 0*x + 1.
model.forcing_function = lambda x, t, z: np.sin(3*np.pi*x)*np.sin(t)
sample = np.ones((1), float) # dummy argument for this example
model.initial_sol = 5*np.sin(2*np.pi*model.mesh_pts) + \
2*np.sin(3*np.pi*model.mesh_pts)
final_time = 1.
model.time_step_size = 1e-2
model.num_stored_timesteps = 1
# model.time_step_method = 'crank-nicholson'
# model.time_step_method = 'backward-euler'
# model.time_step_method = 'RK4' needs bug fixes and testing
def exact_sol(x, t): return 5.*np.exp(
-4.*np.pi**2*t)*np.sin(2.*np.pi*x)+(2.*np.exp(-9.*np.pi**2*t) + (
9.*np.pi**2*np.sin(t)-np.cos(t)+np.exp(-9.*np.pi**2*t))/(1+81.*np.pi**4))*np.sin(3.*np.pi*x)
# test_mesh_pts = np.linspace(xlim[0], xlim[1], 1000)
num_convergence_steps = 4
errors = np.empty((num_convergence_steps), float)
time_step_sizes = np.empty((num_convergence_steps), float)
num_time_steps = np.empty((num_convergence_steps), float)
for i in range(num_convergence_steps):
model.num_time_steps = int(
np.ceil(final_time/model.time_step_size))
solution = model.transient_solve(sample)
assert np.allclose(model.times[0], final_time, atol=1e-15)
L2_error = np.sqrt(model.integrate(
(exact_sol(model.mesh_pts, final_time)-solution[:, 0])**2))
# interpolated_sol = model.interpolate(exact_sol(model.mesh_pts,final_time),test_mesh_pts)
# print(np.linalg.norm(exact_sol(test_mesh_pts,final_time)-interpolated_sol)/np.sqrt(interpolated_sol.shape[0]))
# print(model.num_time_steps, L2_error)
errors[i] = L2_error
time_step_sizes[i] = model.time_step_size
num_time_steps[i] = model.num_time_steps
model.time_step_size /= 2
# print(errors)
conv_rate = -np.log10(errors[-1]/errors[0])/np.log10(
num_time_steps[-1]/num_time_steps[0])
assert np.allclose(conv_rate, 2, atol=1e-4)
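        # Note (added): the observed rate of ~2 is consistent with a
        # second-order accurate time integrator (e.g. Crank-Nicolson), for
        # which the error scales like O(dt^2).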
# pya.plt.loglog(
# num_time_steps, errors, 'o-r',
# label=r'$\lVert u(x,T)-\hat{u}(x,T)\\rVert_{\ell_2(D)}$',
# linewidth=2)
# # print errors[0]*num_time_steps[0]/num_time_steps
# order = 1
# pya.plt.loglog(
# num_time_steps,
# errors[0]*num_time_steps[0]**order/num_time_steps**order,
# 'o--', label=r'$(\Delta t)^{-%d}$' % order, linewidth=2)
# order = 2
# pya.plt.loglog(
# num_time_steps,
# errors[0]*num_time_steps[0]**order/num_time_steps**order,
# 'o--', label=r'$(\Delta t)^{-%d}$' % order, linewidth=2)
# pya.plt.legend(loc=0)
# pya.plt.show()
def test_inhomogeneous_diffusion_equation_2d_variable_coefficient(self):
"""
        Derivatives checked with Wolfram Alpha; z is the random variable and
        x, w are the spatial dimensions:
        d/dx 16*exp(-z^2)*(x^2-1/4)*(w^2-1/4)
        d/dx (1+sigma/pi^2*z*cos(pi/2*(x^2+w^2)))*32*(w^2-1/4)*x*exp(-z^2)
        Note: Peter Zaspel's thesis is wrong; the coefficient is
        1 + sigma*(...) not 1 + sigma + (...)
"""
sigma = 1
num_dims = 1
order = 16
model = SteadyStateDiffusionEquation2D()
lims = [-0.5, 0.5, -0.5, 0.5]
bndry_cond = [0., 0.]
model.initialize(order, bndry_cond, lims)
def forcing_function(x, y): return \
32.*(1.+sigma*y[0]*sigma*np.cos(np.pi/2.*(x[0, :]**2+x[1, :]**2))/np.pi**2) * \
np.exp(-y[0]**2)*(x[0, :]**2+x[1, :]**2-0.5) -\
32./np.pi*y[0]*sigma*np.sin(np.pi/2.*(x[0, :]**2+x[1, :]**2)) *\
(x[0, :]**2 * np.exp(-y[0]**2)*(x[1, :]**2-0.25)+x[1, :]**2 *
np.exp(-y[0]**2)*(x[0, :]**2-0.25))
def diffusivity_function(x, y):
return 1.+sigma/np.pi**2*y[0]*np.cos(
np.pi/2.*(x[0, :]**2+x[1, :]**2))
# only well posed if |y| < pi^2/sigma
def exact_sol(x, y): return 16.*np.exp(-y**2) * \
(x[0, :]**2-0.25)*(x[1, :]**2-0.25)
rng = np.random.RandomState(1)
sample = rng.uniform(-np.sqrt(3), np.sqrt(3), (num_dims))
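        # Added check (sketch): the diffusivity above is positive, and the
        # problem well posed, only when |sample| < pi^2/sigma; the uniform
        # draw on [-sqrt(3), sqrt(3)] stays safely inside that range.
        assert np.all(np.abs(sample) < np.pi**2/sigma)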
mesh_pts = model.get_collocation_points()
diff_vals = diffusivity_function(mesh_pts, sample)
forcing_vals = forcing_function(mesh_pts, sample)
solution = model.solve(diff_vals, forcing_vals)
# print np.linalg.norm(exact_sol( mesh_pts, sample )- solution )
assert np.linalg.norm(exact_sol(mesh_pts, sample) - solution) < 2.e-12
def test_2d_matlab_example(self):
"""
        Example from Spectral Methods in MATLAB, specifically Program 16 on
        page 70 (page 90 of the PDF).
        Solve the Poisson equation on [-1,1]x[-1,1] with u=0 on the boundary
        and forcing 10*sin(8*xx.*(yy-1)).
        The true solution at (xx,yy)=(1/sqrt(2),1/sqrt(2)) is 0.32071594511.
"""
num_dims = 10
order = 24
model = SteadyStateDiffusionEquation2D()
lims = [-1, 1, -1, 1]
bndry_cond = [0., 0.]
model.initialize(order, bndry_cond, lims)
def diffusivity(x, y): return np.ones(x.shape[1])
def forcing(x, y): return 10.*np.sin(8.*(x[0, :])*(x[1, :]-1))
rng = np.random.RandomState(1)
sample = rng.uniform(-1, 1., (num_dims))
mesh_pts = model.get_collocation_points()
diff_vals = diffusivity(mesh_pts, sample)
forcing_vals = forcing(mesh_pts, sample)
solution = model.solve(diff_vals, forcing_vals)
        # Because the Chebyshev points are used in reverse order (so the
        # derivative matrix has the opposite sign), the solution returned here
        # is ordered differently from the MATLAB example. The MATLAB ordering
        # can be recovered by applying flipud(fliplr(x)). For example, the
        # coordinates used in the book's example can be obtained with
        # index = np.arange((order+1)**2).reshape(
        #     (order+1, order+1))[3*order//4, 3*order//4]
        # print(mesh_pts[:, index])
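        # Added sketch (hypothetical helper, not required by this test): the
        # MATLAB ordering of the nodal values could be recovered with
        #     matlab_ordered = np.flipud(np.fliplr(
        #         solution.reshape((order + 1, order + 1))))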
eval_samples = np.array([[1./np.sqrt(2), 1./np.sqrt(2)]]).T
qoi = model.interpolate(solution, eval_samples)
assert np.allclose(qoi, 0.32071594511)
def test_integrate_2d(self):
order = 4
model = SteadyStateDiffusionEquation2D()
bndry_cond = [0.0, 0.0]
lims = [0., 1., 0., 1.]
model.initialize(order, bndry_cond, lims)
mesh_pts = model.get_collocation_points()
assert np.allclose(
model.integrate(np.sum(mesh_pts**2, axis=0)[:, None]), 2./3.)
order = 4
model = SteadyStateDiffusionEquation2D()
bndry_cond = [0.0, 0.0]
lims = [-1., 1., -1., 1.]
model.initialize(order, bndry_cond, lims)
mesh_pts = model.get_collocation_points()
assert np.allclose(
model.integrate(np.sum(mesh_pts**2, axis=0)[:, None]), 8./3.)
def test_evaluate_gradient_2d(self):
"""
        For the PDE div((1+(z_1^2+z_2^2)(x_1+x_2)) grad u) = -2 on [0,1]^2
        with homogeneous Dirichlet boundary conditions, use
        model.evaluate_gradient to evaluate the gradient of the QoI with
        respect to the random parameter vector z.
        The QoI is the integral of the solution over the entire domain,
        so the adjoint rhs is just 1.
"""
order = 20
model = SteadyStateDiffusionEquation2D()
lims = [0., 1., 0., 1.]
bndry_cond = [0., 0.]
model.initialize(order, bndry_cond, lims)
model.diffusivity_function = \
lambda x, z: (z[0]**2+z[1]**2)*(x[0]+x[1]) + 1.
model.forcing_function = lambda x, z: 0*x[0]-2
sample = np.random.RandomState(2).uniform(-1, 1, (2, 1))
model.diffusivity_derivs_function = \
lambda x, z, i: np.array([2.*(x[0]+x[1])*z[i]]).T
model.forcing_derivs_function = \
lambda x, z, i: np.array([0.*x[0]]).T
model(sample)
# evaluate_gradient has to be called before any more calls to
# model.solve with different parameters, because we need to
        # access self.fwd_solution, which will change with any subsequent calls
errors = pya.check_gradients(
model, lambda x: model.evaluate_gradient(x[:, 0]), sample)
errors = errors[np.isfinite(errors)]
assert errors.max() > 0.1 and errors.min() <= 4e-6
if __name__ == "__main__":
spectral_diffusion_test_suite = \
unittest.TestLoader().loadTestsFromTestCase(TestSpectralDiffusion2D)
unittest.TextTestRunner(verbosity=2).run(spectral_diffusion_test_suite)
| [((6532, 6570), 'unittest.skip', 'unittest.skip', (['"""Not fully implemented"""'], {}), "('Not fully implemented')\n", (6545, 6570), False, 'import unittest\n'), ((396, 413), 'numpy.random.seed', 'np.random.seed', (['(1)'], {}), '(1)\n', (410, 413), True, 'import numpy as np\n'), ((533, 565), 'pyapprox.benchmarks.spectral_diffusion.SteadyStateDiffusionEquation1D', 'SteadyStateDiffusionEquation1D', ([], {}), '()\n', (563, 565), False, 'from pyapprox.benchmarks.spectral_diffusion import kronecker_product_2d, chebyshev_derivative_matrix, SteadyStateDiffusionEquation2D, SteadyStateDiffusionEquation1D\n'), ((1373, 1417), 'numpy.allclose', 'np.allclose', (['(-derivative_matrix)', 'true_matrix'], {}), '(-derivative_matrix, true_matrix)\n', (1384, 1417), True, 'import numpy as np\n'), ((1574, 1606), 'pyapprox.benchmarks.spectral_diffusion.SteadyStateDiffusionEquation1D', 'SteadyStateDiffusionEquation1D', ([], {}), '()\n', (1604, 1606), False, 'from pyapprox.benchmarks.spectral_diffusion import kronecker_product_2d, chebyshev_derivative_matrix, SteadyStateDiffusionEquation2D, SteadyStateDiffusionEquation1D\n'), ((2223, 2255), 'pyapprox.benchmarks.spectral_diffusion.SteadyStateDiffusionEquation1D', 'SteadyStateDiffusionEquation1D', ([], {}), '()\n', (2253, 2255), False, 'from pyapprox.benchmarks.spectral_diffusion import kronecker_product_2d, chebyshev_derivative_matrix, SteadyStateDiffusionEquation2D, SteadyStateDiffusionEquation1D\n'), ((2936, 2968), 'pyapprox.benchmarks.spectral_diffusion.SteadyStateDiffusionEquation1D', 'SteadyStateDiffusionEquation1D', ([], {}), '()\n', (2966, 2968), False, 'from pyapprox.benchmarks.spectral_diffusion import kronecker_product_2d, chebyshev_derivative_matrix, SteadyStateDiffusionEquation2D, SteadyStateDiffusionEquation1D\n'), ((3545, 3577), 'pyapprox.benchmarks.spectral_diffusion.SteadyStateDiffusionEquation1D', 'SteadyStateDiffusionEquation1D', ([], {}), '()\n', (3575, 3577), False, 'from pyapprox.benchmarks.spectral_diffusion import kronecker_product_2d, chebyshev_derivative_matrix, SteadyStateDiffusionEquation2D, SteadyStateDiffusionEquation1D\n'), ((3899, 3931), 'pyapprox.benchmarks.spectral_diffusion.SteadyStateDiffusionEquation1D', 'SteadyStateDiffusionEquation1D', ([], {}), '()\n', (3929, 3931), False, 'from pyapprox.benchmarks.spectral_diffusion import kronecker_product_2d, chebyshev_derivative_matrix, SteadyStateDiffusionEquation2D, SteadyStateDiffusionEquation1D\n'), ((4409, 4441), 'pyapprox.benchmarks.spectral_diffusion.SteadyStateDiffusionEquation1D', 'SteadyStateDiffusionEquation1D', ([], {}), '()\n', (4439, 4441), False, 'from pyapprox.benchmarks.spectral_diffusion import kronecker_product_2d, chebyshev_derivative_matrix, SteadyStateDiffusionEquation2D, SteadyStateDiffusionEquation1D\n'), ((4679, 4706), 'numpy.array', 'np.array', (['[0.05, 0.5, 0.95]'], {}), '([0.05, 0.5, 0.95])\n', (4687, 4706), True, 'import numpy as np\n'), ((4805, 4827), 'numpy.ones', 'np.ones', (['(1, 1)', 'float'], {}), '((1, 1), float)\n', (4812, 4827), True, 'import numpy as np\n'), ((5555, 5587), 'pyapprox.benchmarks.spectral_diffusion.SteadyStateDiffusionEquation1D', 'SteadyStateDiffusionEquation1D', ([], {}), '()\n', (5585, 5587), False, 'from pyapprox.benchmarks.spectral_diffusion import kronecker_product_2d, chebyshev_derivative_matrix, SteadyStateDiffusionEquation2D, SteadyStateDiffusionEquation1D\n'), ((6975, 7007), 'pyapprox.benchmarks.spectral_diffusion.SteadyStateDiffusionEquation1D', 'SteadyStateDiffusionEquation1D', ([], {}), '()\n', (7005, 7007), False, 
'from pyapprox.benchmarks.spectral_diffusion import kronecker_product_2d, chebyshev_derivative_matrix, SteadyStateDiffusionEquation2D, SteadyStateDiffusionEquation1D\n'), ((7247, 7269), 'numpy.ones', 'np.ones', (['(1, 1)', 'float'], {}), '((1, 1), float)\n', (7254, 7269), True, 'import numpy as np\n'), ((7493, 7526), 'pyapprox.univariate_polynomials.quadrature.gauss_jacobi_pts_wts_1D', 'gauss_jacobi_pts_wts_1D', (['(50)', '(0)', '(0)'], {}), '(50, 0, 0)\n', (7516, 7526), False, 'from pyapprox.univariate_polynomials.quadrature import gauss_jacobi_pts_wts_1D\n'), ((7714, 7737), 'numpy.dot', 'np.dot', (['gl_vals', 'gl_wts'], {}), '(gl_vals, gl_wts)\n', (7720, 7737), True, 'import numpy as np\n'), ((8553, 8587), 'numpy.allclose', 'np.allclose', (['error_estimate', 'error'], {}), '(error_estimate, error)\n', (8564, 8587), True, 'import numpy as np\n'), ((8854, 8886), 'pyapprox.benchmarks.spectral_diffusion.SteadyStateDiffusionEquation1D', 'SteadyStateDiffusionEquation1D', ([], {}), '()\n', (8884, 8886), False, 'from pyapprox.benchmarks.spectral_diffusion import kronecker_product_2d, chebyshev_derivative_matrix, SteadyStateDiffusionEquation2D, SteadyStateDiffusionEquation1D\n'), ((9121, 9138), 'numpy.ones', 'np.ones', (['(1)', 'float'], {}), '(1, float)\n', (9128, 9138), True, 'import numpy as np\n'), ((9240, 9270), 'numpy.sin', 'np.sin', (['(np.pi * model.mesh_pts)'], {}), '(np.pi * model.mesh_pts)\n', (9246, 9270), True, 'import numpy as np\n'), ((9645, 9679), 'numpy.linspace', 'np.linspace', (['xlim[0]', 'xlim[1]', '(100)'], {}), '(xlim[0], xlim[1], 100)\n', (9656, 9679), True, 'import numpy as np\n'), ((10937, 10969), 'pyapprox.benchmarks.spectral_diffusion.SteadyStateDiffusionEquation1D', 'SteadyStateDiffusionEquation1D', ([], {}), '()\n', (10967, 10969), False, 'from pyapprox.benchmarks.spectral_diffusion import kronecker_product_2d, chebyshev_derivative_matrix, SteadyStateDiffusionEquation2D, SteadyStateDiffusionEquation1D\n'), ((11217, 11234), 'numpy.ones', 'np.ones', (['(1)', 'float'], {}), '(1, float)\n', (11224, 11234), True, 'import numpy as np\n'), ((13117, 13149), 'pyapprox.benchmarks.spectral_diffusion.SteadyStateDiffusionEquation1D', 'SteadyStateDiffusionEquation1D', ([], {}), '()\n', (13147, 13149), False, 'from pyapprox.benchmarks.spectral_diffusion import kronecker_product_2d, chebyshev_derivative_matrix, SteadyStateDiffusionEquation2D, SteadyStateDiffusionEquation1D\n'), ((13407, 13424), 'numpy.ones', 'np.ones', (['(1)', 'float'], {}), '(1, float)\n', (13414, 13424), True, 'import numpy as np\n'), ((14280, 14314), 'numpy.linspace', 'np.linspace', (['xlim[0]', 'xlim[1]', '(100)'], {}), '(xlim[0], xlim[1], 100)\n', (14291, 14314), True, 'import numpy as np\n'), ((15393, 15425), 'pyapprox.benchmarks.spectral_diffusion.SteadyStateDiffusionEquation1D', 'SteadyStateDiffusionEquation1D', ([], {}), '()\n', (15423, 15425), False, 'from pyapprox.benchmarks.spectral_diffusion import kronecker_product_2d, chebyshev_derivative_matrix, SteadyStateDiffusionEquation2D, SteadyStateDiffusionEquation1D\n'), ((15683, 15700), 'numpy.ones', 'np.ones', (['(1)', 'float'], {}), '(1, float)\n', (15690, 15700), True, 'import numpy as np\n'), ((16477, 16515), 'numpy.empty', 'np.empty', (['num_convergence_steps', 'float'], {}), '(num_convergence_steps, float)\n', (16485, 16515), True, 'import numpy as np\n'), ((16544, 16582), 'numpy.empty', 'np.empty', (['num_convergence_steps', 'float'], {}), '(num_convergence_steps, float)\n', (16552, 16582), True, 'import numpy as np\n'), ((16610, 16648), 
'numpy.empty', 'np.empty', (['num_convergence_steps', 'float'], {}), '(num_convergence_steps, float)\n', (16618, 16648), True, 'import numpy as np\n'), ((17653, 17691), 'numpy.allclose', 'np.allclose', (['conv_rate', '(2)'], {'atol': '(0.0001)'}), '(conv_rate, 2, atol=0.0001)\n', (17664, 17691), True, 'import numpy as np\n'), ((18853, 18885), 'pyapprox.benchmarks.spectral_diffusion.SteadyStateDiffusionEquation2D', 'SteadyStateDiffusionEquation2D', ([], {}), '()\n', (18883, 18885), False, 'from pyapprox.benchmarks.spectral_diffusion import kronecker_product_2d, chebyshev_derivative_matrix, SteadyStateDiffusionEquation2D, SteadyStateDiffusionEquation1D\n'), ((19711, 19735), 'numpy.random.RandomState', 'np.random.RandomState', (['(1)'], {}), '(1)\n', (19732, 19735), True, 'import numpy as np\n'), ((20596, 20628), 'pyapprox.benchmarks.spectral_diffusion.SteadyStateDiffusionEquation2D', 'SteadyStateDiffusionEquation2D', ([], {}), '()\n', (20626, 20628), False, 'from pyapprox.benchmarks.spectral_diffusion import kronecker_product_2d, chebyshev_derivative_matrix, SteadyStateDiffusionEquation2D, SteadyStateDiffusionEquation1D\n'), ((20882, 20906), 'numpy.random.RandomState', 'np.random.RandomState', (['(1)'], {}), '(1)\n', (20903, 20906), True, 'import numpy as np\n'), ((21773, 21804), 'numpy.allclose', 'np.allclose', (['qoi', '(0.32071594511)'], {}), '(qoi, 0.32071594511)\n', (21784, 21804), True, 'import numpy as np\n'), ((21873, 21905), 'pyapprox.benchmarks.spectral_diffusion.SteadyStateDiffusionEquation2D', 'SteadyStateDiffusionEquation2D', ([], {}), '()\n', (21903, 21905), False, 'from pyapprox.benchmarks.spectral_diffusion import kronecker_product_2d, chebyshev_derivative_matrix, SteadyStateDiffusionEquation2D, SteadyStateDiffusionEquation1D\n'), ((22207, 22239), 'pyapprox.benchmarks.spectral_diffusion.SteadyStateDiffusionEquation2D', 'SteadyStateDiffusionEquation2D', ([], {}), '()\n', (22237, 22239), False, 'from pyapprox.benchmarks.spectral_diffusion import kronecker_product_2d, chebyshev_derivative_matrix, SteadyStateDiffusionEquation2D, SteadyStateDiffusionEquation1D\n'), ((22917, 22949), 'pyapprox.benchmarks.spectral_diffusion.SteadyStateDiffusionEquation2D', 'SteadyStateDiffusionEquation2D', ([], {}), '()\n', (22947, 22949), False, 'from pyapprox.benchmarks.spectral_diffusion import kronecker_product_2d, chebyshev_derivative_matrix, SteadyStateDiffusionEquation2D, SteadyStateDiffusionEquation1D\n'), ((4955, 4977), 'numpy.ones', 'np.ones', (['(1, 1)', 'float'], {}), '((1, 1), float)\n', (4962, 4977), True, 'import numpy as np\n'), ((8053, 8070), 'numpy.ones', 'np.ones', (['(1)', 'float'], {}), '(1, float)\n', (8060, 8070), True, 'import numpy as np\n'), ((8265, 8282), 'numpy.log', 'np.log', (['(3.0 / 2.0)'], {}), '(3.0 / 2.0)\n', (8271, 8282), True, 'import numpy as np\n'), ((11182, 11203), 'numpy.sin', 'np.sin', (['(3 * np.pi * x)'], {}), '(3 * np.pi * x)\n', (11188, 11203), True, 'import numpy as np\n'), ((16868, 16919), 'numpy.allclose', 'np.allclose', (['model.times[0]', 'final_time'], {'atol': '(1e-15)'}), '(model.times[0], final_time, atol=1e-15)\n', (16879, 16919), True, 'import numpy as np\n'), ((17578, 17626), 'numpy.log10', 'np.log10', (['(num_time_steps[-1] / num_time_steps[0])'], {}), '(num_time_steps[-1] / num_time_steps[0])\n', (17586, 17626), True, 'import numpy as np\n'), ((19778, 19788), 'numpy.sqrt', 'np.sqrt', (['(3)'], {}), '(3)\n', (19785, 19788), True, 'import numpy as np\n'), ((20777, 20796), 'numpy.ones', 'np.ones', (['x.shape[1]'], {}), '(x.shape[1])\n', (20784, 
20796), True, 'import numpy as np\n'), ((24011, 24032), 'unittest.TestLoader', 'unittest.TestLoader', ([], {}), '()\n', (24030, 24032), False, 'import unittest\n'), ((24084, 24120), 'unittest.TextTestRunner', 'unittest.TextTestRunner', ([], {'verbosity': '(2)'}), '(verbosity=2)\n', (24107, 24120), False, 'import unittest\n'), ((437, 455), 'numpy.finfo', 'np.finfo', (['np.float'], {}), '(np.float)\n', (445, 455), True, 'import numpy as np\n'), ((5126, 5143), 'numpy.log', 'np.log', (['(3.0 / 2.0)'], {}), '(3.0 / 2.0)\n', (5132, 5143), True, 'import numpy as np\n'), ((5838, 5862), 'numpy.random.RandomState', 'np.random.RandomState', (['(2)'], {}), '(2)\n', (5859, 5862), True, 'import numpy as np\n'), ((5960, 5986), 'numpy.array', 'np.array', (['[2.0 * x * z[i]]'], {}), '([2.0 * x * z[i]])\n', (5968, 5986), True, 'import numpy as np\n'), ((6054, 6073), 'numpy.array', 'np.array', (['[0.0 * x]'], {}), '([0.0 * x])\n', (6062, 6073), True, 'import numpy as np\n'), ((6446, 6465), 'numpy.isfinite', 'np.isfinite', (['errors'], {}), '(errors)\n', (6457, 6465), True, 'import numpy as np\n'), ((9585, 9608), 'numpy.exp', 'np.exp', (['(-np.pi ** 2 * t)'], {}), '(-np.pi ** 2 * t)\n', (9591, 9608), True, 'import numpy as np\n'), ((9605, 9622), 'numpy.sin', 'np.sin', (['(np.pi * x)'], {}), '(np.pi * x)\n', (9611, 9622), True, 'import numpy as np\n'), ((9927, 10006), 'pyapprox.plt.plot', 'pya.plt.plot', (['test_mesh_pts', 'model_sol_t', '"""k"""'], {'label': '"""collocation"""', 'linewidth': '(2)'}), "(test_mesh_pts, model_sol_t, 'k', label='collocation', linewidth=2)\n", (9939, 10006), True, 'import pyapprox as pya\n'), ((10052, 10127), 'pyapprox.plt.plot', 'pya.plt.plot', (['test_mesh_pts', 'exact_sol_t', '"""r--"""'], {'label': '"""exact"""', 'linewidth': '(2)'}), "(test_mesh_pts, exact_sol_t, 'r--', label='exact', linewidth=2)\n", (10064, 10127), True, 'import pyapprox as pya\n'), ((10173, 10194), 'pyapprox.plt.legend', 'pya.plt.legend', ([], {'loc': '(0)'}), '(loc=0)\n', (10187, 10194), True, 'import pyapprox as pya\n'), ((10211, 10241), 'pyapprox.plt.title', 'pya.plt.title', (["('$t=%1.2f$' % t)"], {}), "('$t=%1.2f$' % t)\n", (10224, 10241), True, 'import pyapprox as pya\n'), ((10258, 10272), 'pyapprox.plt.show', 'pya.plt.show', ([], {}), '()\n', (10270, 10272), True, 'import pyapprox as pya\n'), ((11339, 11373), 'numpy.sin', 'np.sin', (['(2 * np.pi * model.mesh_pts)'], {}), '(2 * np.pi * model.mesh_pts)\n', (11345, 11373), True, 'import numpy as np\n'), ((11388, 11422), 'numpy.sin', 'np.sin', (['(3 * np.pi * model.mesh_pts)'], {}), '(3 * np.pi * model.mesh_pts)\n', (11394, 11422), True, 'import numpy as np\n'), ((13362, 13383), 'numpy.sin', 'np.sin', (['(3 * np.pi * x)'], {}), '(3 * np.pi * x)\n', (13368, 13383), True, 'import numpy as np\n'), ((13380, 13389), 'numpy.sin', 'np.sin', (['t'], {}), '(t)\n', (13386, 13389), True, 'import numpy as np\n'), ((13532, 13566), 'numpy.sin', 'np.sin', (['(2 * np.pi * model.mesh_pts)'], {}), '(2 * np.pi * model.mesh_pts)\n', (13538, 13566), True, 'import numpy as np\n'), ((13581, 13615), 'numpy.sin', 'np.sin', (['(3 * np.pi * model.mesh_pts)'], {}), '(3 * np.pi * model.mesh_pts)\n', (13587, 13615), True, 'import numpy as np\n'), ((14554, 14633), 'pyapprox.plt.plot', 'pya.plt.plot', (['test_mesh_pts', 'model_sol_t', '"""k"""'], {'label': '"""collocation"""', 'linewidth': '(2)'}), "(test_mesh_pts, model_sol_t, 'k', label='collocation', linewidth=2)\n", (14566, 14633), True, 'import pyapprox as pya\n'), ((14679, 14754), 'pyapprox.plt.plot', 'pya.plt.plot', 
(['test_mesh_pts', 'exact_sol_t', '"""r--"""'], {'label': '"""exact"""', 'linewidth': '(2)'}), "(test_mesh_pts, exact_sol_t, 'r--', label='exact', linewidth=2)\n", (14691, 14754), True, 'import pyapprox as pya\n'), ((14800, 14821), 'pyapprox.plt.legend', 'pya.plt.legend', ([], {'loc': '(0)'}), '(loc=0)\n', (14814, 14821), True, 'import pyapprox as pya\n'), ((14838, 14868), 'pyapprox.plt.title', 'pya.plt.title', (["('$t=%1.3f$' % t)"], {}), "('$t=%1.3f$' % t)\n", (14851, 14868), True, 'import pyapprox as pya\n'), ((14885, 14899), 'pyapprox.plt.show', 'pya.plt.show', ([], {}), '()\n', (14897, 14899), True, 'import pyapprox as pya\n'), ((15638, 15659), 'numpy.sin', 'np.sin', (['(3 * np.pi * x)'], {}), '(3 * np.pi * x)\n', (15644, 15659), True, 'import numpy as np\n'), ((15656, 15665), 'numpy.sin', 'np.sin', (['t'], {}), '(t)\n', (15662, 15665), True, 'import numpy as np\n'), ((15768, 15802), 'numpy.sin', 'np.sin', (['(2 * np.pi * model.mesh_pts)'], {}), '(2 * np.pi * model.mesh_pts)\n', (15774, 15802), True, 'import numpy as np\n'), ((15817, 15851), 'numpy.sin', 'np.sin', (['(3 * np.pi * model.mesh_pts)'], {}), '(3 * np.pi * model.mesh_pts)\n', (15823, 15851), True, 'import numpy as np\n'), ((16754, 16796), 'numpy.ceil', 'np.ceil', (['(final_time / model.time_step_size)'], {}), '(final_time / model.time_step_size)\n', (16761, 16796), True, 'import numpy as np\n'), ((17547, 17579), 'numpy.log10', 'np.log10', (['(errors[-1] / errors[0])'], {}), '(errors[-1] / errors[0])\n', (17555, 17579), True, 'import numpy as np\n'), ((19766, 19776), 'numpy.sqrt', 'np.sqrt', (['(3)'], {}), '(3)\n', (19773, 19776), True, 'import numpy as np\n'), ((20835, 20876), 'numpy.sin', 'np.sin', (['(8.0 * x[(0), :] * (x[(1), :] - 1))'], {}), '(8.0 * x[(0), :] * (x[(1), :] - 1))\n', (20841, 20876), True, 'import numpy as np\n'), ((23235, 23259), 'numpy.random.RandomState', 'np.random.RandomState', (['(2)'], {}), '(2)\n', (23256, 23259), True, 'import numpy as np\n'), ((23357, 23395), 'numpy.array', 'np.array', (['[2.0 * (x[0] + x[1]) * z[i]]'], {}), '([2.0 * (x[0] + x[1]) * z[i]])\n', (23365, 23395), True, 'import numpy as np\n'), ((23461, 23483), 'numpy.array', 'np.array', (['[0.0 * x[0]]'], {}), '([0.0 * x[0]])\n', (23469, 23483), True, 'import numpy as np\n'), ((23856, 23875), 'numpy.isfinite', 'np.isfinite', (['errors'], {}), '(errors)\n', (23867, 23875), True, 'import numpy as np\n'), ((3369, 3384), 'numpy.log', 'np.log', (['(x + 1.0)'], {}), '(x + 1.0)\n', (3375, 3384), True, 'import numpy as np\n'), ((3384, 3395), 'numpy.log', 'np.log', (['(2.0)'], {}), '(2.0)\n', (3390, 3395), True, 'import numpy as np\n'), ((4883, 4907), 'numpy.log', 'np.log', (['(qoi_coords + 1.0)'], {}), '(qoi_coords + 1.0)\n', (4889, 4907), True, 'import numpy as np\n'), ((4905, 4916), 'numpy.log', 'np.log', (['(2.0)'], {}), '(2.0)\n', (4911, 4916), True, 'import numpy as np\n'), ((7442, 7457), 'numpy.log', 'np.log', (['(x + 1.0)'], {}), '(x + 1.0)\n', (7448, 7457), True, 'import numpy as np\n'), ((7455, 7466), 'numpy.log', 'np.log', (['(2.0)'], {}), '(2.0)\n', (7461, 7466), True, 'import numpy as np\n'), ((8253, 8264), 'numpy.log', 'np.log', (['(4.0)'], {}), '(4.0)\n', (8259, 8264), True, 'import numpy as np\n'), ((11763, 11786), 'numpy.sin', 'np.sin', (['(2.0 * np.pi * x)'], {}), '(2.0 * np.pi * x)\n', (11769, 11786), True, 'import numpy as np\n'), ((11868, 11891), 'numpy.sin', 'np.sin', (['(3.0 * np.pi * x)'], {}), '(3.0 * np.pi * x)\n', (11874, 11891), True, 'import numpy as np\n'), ((14015, 14038), 'numpy.sin', 'np.sin', (['(2.0 * np.pi 
* x)'], {}), '(2.0 * np.pi * x)\n', (14021, 14038), True, 'import numpy as np\n'), ((14208, 14231), 'numpy.sin', 'np.sin', (['(3.0 * np.pi * x)'], {}), '(3.0 * np.pi * x)\n', (14214, 14231), True, 'import numpy as np\n'), ((16201, 16224), 'numpy.sin', 'np.sin', (['(2.0 * np.pi * x)'], {}), '(2.0 * np.pi * x)\n', (16207, 16224), True, 'import numpy as np\n'), ((16345, 16368), 'numpy.sin', 'np.sin', (['(3.0 * np.pi * x)'], {}), '(3.0 * np.pi * x)\n', (16351, 16368), True, 'import numpy as np\n'), ((19485, 19540), 'numpy.cos', 'np.cos', (['(np.pi / 2.0 * (x[(0), :] ** 2 + x[(1), :] ** 2))'], {}), '(np.pi / 2.0 * (x[(0), :] ** 2 + x[(1), :] ** 2))\n', (19491, 19540), True, 'import numpy as np\n'), ((22126, 22155), 'numpy.sum', 'np.sum', (['(mesh_pts ** 2)'], {'axis': '(0)'}), '(mesh_pts ** 2, axis=0)\n', (22132, 22155), True, 'import numpy as np\n'), ((22462, 22491), 'numpy.sum', 'np.sum', (['(mesh_pts ** 2)'], {'axis': '(0)'}), '(mesh_pts ** 2, axis=0)\n', (22468, 22491), True, 'import numpy as np\n'), ((5114, 5125), 'numpy.log', 'np.log', (['(4.0)'], {}), '(4.0)\n', (5120, 5125), True, 'import numpy as np\n'), ((11740, 11769), 'numpy.exp', 'np.exp', (['(-4.0 * np.pi ** 2 * t)'], {}), '(-4.0 * np.pi ** 2 * t)\n', (11746, 11769), True, 'import numpy as np\n'), ((13975, 14004), 'numpy.exp', 'np.exp', (['(-4.0 * np.pi ** 2 * t)'], {}), '(-4.0 * np.pi ** 2 * t)\n', (13981, 14004), True, 'import numpy as np\n'), ((16161, 16190), 'numpy.exp', 'np.exp', (['(-4.0 * np.pi ** 2 * t)'], {}), '(-4.0 * np.pi ** 2 * t)\n', (16167, 16190), True, 'import numpy as np\n'), ((19154, 19172), 'numpy.exp', 'np.exp', (['(-y[0] ** 2)'], {}), '(-y[0] ** 2)\n', (19160, 19172), True, 'import numpy as np\n'), ((19235, 19290), 'numpy.sin', 'np.sin', (['(np.pi / 2.0 * (x[(0), :] ** 2 + x[(1), :] ** 2))'], {}), '(np.pi / 2.0 * (x[(0), :] ** 2 + x[(1), :] ** 2))\n', (19241, 19290), True, 'import numpy as np\n'), ((19630, 19645), 'numpy.exp', 'np.exp', (['(-y ** 2)'], {}), '(-y ** 2)\n', (19636, 19645), True, 'import numpy as np\n'), ((8152, 8169), 'numpy.log', 'np.log', (['(9.0 / 4.0)'], {}), '(9.0 / 4.0)\n', (8158, 8169), True, 'import numpy as np\n'), ((8198, 8226), 'numpy.log', 'np.log', (['(model.mesh_pts + 2.0)'], {}), '(model.mesh_pts + 2.0)\n', (8204, 8226), True, 'import numpy as np\n'), ((11802, 11831), 'numpy.exp', 'np.exp', (['(-9.0 * np.pi ** 2 * t)'], {}), '(-9.0 * np.pi ** 2 * t)\n', (11808, 11831), True, 'import numpy as np\n'), ((14059, 14088), 'numpy.exp', 'np.exp', (['(-9.0 * np.pi ** 2 * t)'], {}), '(-9.0 * np.pi ** 2 * t)\n', (14065, 14088), True, 'import numpy as np\n'), ((16224, 16253), 'numpy.exp', 'np.exp', (['(-9.0 * np.pi ** 2 * t)'], {}), '(-9.0 * np.pi ** 2 * t)\n', (16230, 16253), True, 'import numpy as np\n'), ((21671, 21681), 'numpy.sqrt', 'np.sqrt', (['(2)'], {}), '(2)\n', (21678, 21681), True, 'import numpy as np\n'), ((21686, 21696), 'numpy.sqrt', 'np.sqrt', (['(2)'], {}), '(2)\n', (21693, 21696), True, 'import numpy as np\n'), ((5059, 5076), 'numpy.log', 'np.log', (['(9.0 / 4.0)'], {}), '(9.0 / 4.0)\n', (5065, 5076), True, 'import numpy as np\n'), ((5076, 5100), 'numpy.log', 'np.log', (['(qoi_coords + 2.0)'], {}), '(qoi_coords + 2.0)\n', (5082, 5100), True, 'import numpy as np\n'), ((11829, 11858), 'numpy.exp', 'np.exp', (['(-9.0 * np.pi ** 2 * t)'], {}), '(-9.0 * np.pi ** 2 * t)\n', (11835, 11858), True, 'import numpy as np\n'), ((14166, 14195), 'numpy.exp', 'np.exp', (['(-9.0 * np.pi ** 2 * t)'], {}), '(-9.0 * np.pi ** 2 * t)\n', (14172, 14195), True, 'import numpy as np\n'), ((16303, 
16332), 'numpy.exp', 'np.exp', (['(-9.0 * np.pi ** 2 * t)'], {}), '(-9.0 * np.pi ** 2 * t)\n', (16309, 16332), True, 'import numpy as np\n'), ((19305, 19323), 'numpy.exp', 'np.exp', (['(-y[0] ** 2)'], {}), '(-y[0] ** 2)\n', (19311, 19323), True, 'import numpy as np\n'), ((19366, 19384), 'numpy.exp', 'np.exp', (['(-y[0] ** 2)'], {}), '(-y[0] ** 2)\n', (19372, 19384), True, 'import numpy as np\n'), ((14130, 14139), 'numpy.cos', 'np.cos', (['t'], {}), '(t)\n', (14136, 14139), True, 'import numpy as np\n'), ((16293, 16302), 'numpy.cos', 'np.cos', (['t'], {}), '(t)\n', (16299, 16302), True, 'import numpy as np\n'), ((14120, 14129), 'numpy.sin', 'np.sin', (['t'], {}), '(t)\n', (14126, 14129), True, 'import numpy as np\n'), ((16283, 16292), 'numpy.sin', 'np.sin', (['t'], {}), '(t)\n', (16289, 16292), True, 'import numpy as np\n'), ((19087, 19142), 'numpy.cos', 'np.cos', (['(np.pi / 2.0 * (x[(0), :] ** 2 + x[(1), :] ** 2))'], {}), '(np.pi / 2.0 * (x[(0), :] ** 2 + x[(1), :] ** 2))\n', (19093, 19142), True, 'import numpy as np\n')] |
wconnell/torchdrug | torchdrug/layers/flow.py | a710097cb4ad4c48e0de0d18fbb77ef0e806cdc8 | import torch
from torch import nn
from torch.nn import functional as F
from torchdrug import layers
class ConditionalFlow(nn.Module):
"""
Conditional flow transformation from `Masked Autoregressive Flow for Density Estimation`_.
.. _Masked Autoregressive Flow for Density Estimation:
https://arxiv.org/pdf/1705.07057.pdf
Parameters:
input_dim (int): input & output dimension
condition_dim (int): condition dimension
hidden_dims (list of int, optional): hidden dimensions
activation (str or function, optional): activation function
"""
def __init__(self, input_dim, condition_dim, hidden_dims=None, activation="relu"):
super(ConditionalFlow, self).__init__()
self.input_dim = input_dim
self.output_dim = input_dim
if hidden_dims is None:
hidden_dims = []
self.mlp = layers.MLP(condition_dim, list(hidden_dims) + [input_dim * 2], activation)
self.rescale = nn.Parameter(torch.zeros(1))
def forward(self, input, condition):
"""
Transform data into latent representations.
Parameters:
input (Tensor): input representations
condition (Tensor): conditional representations
Returns:
(Tensor, Tensor): latent representations, log-likelihood of the transformation
"""
scale, bias = self.mlp(condition).chunk(2, dim=-1)
scale = (F.tanh(scale) * self.rescale)
output = (input + bias) * scale.exp()
log_det = scale
return output, log_det
def reverse(self, latent, condition):
"""
Transform latent representations into data.
Parameters:
latent (Tensor): latent representations
condition (Tensor): conditional representations
Returns:
(Tensor, Tensor): input representations, log-likelihood of the transformation
"""
scale, bias = self.mlp(condition).chunk(2, dim=-1)
scale = (F.tanh(scale) * self.rescale)
output = latent / scale.exp() - bias
log_det = scale
return output, log_det | [((999, 1013), 'torch.zeros', 'torch.zeros', (['(1)'], {}), '(1)\n', (1010, 1013), False, 'import torch\n'), ((1449, 1462), 'torch.nn.functional.tanh', 'F.tanh', (['scale'], {}), '(scale)\n', (1455, 1462), True, 'from torch.nn import functional as F\n'), ((2016, 2029), 'torch.nn.functional.tanh', 'F.tanh', (['scale'], {}), '(scale)\n', (2022, 2029), True, 'from torch.nn import functional as F\n')] |
lowandrew/OLCTools | olctools/accessoryFunctions/metadataprinter.py | c74e9d18e2ebe0159aa824e095091045ed227e95 | #!/usr/bin/env python3
import logging
import json
import os
__author__ = 'adamkoziol'
class MetadataPrinter(object):
def printmetadata(self):
# Iterate through each sample in the analysis
for sample in self.metadata:
# Set the name of the json file
jsonfile = os.path.join(sample.general.outputdirectory, '{}_metadata.json'.format(sample.name))
try:
# Open the metadata file to write
with open(jsonfile, 'w') as metadatafile:
# Write the json dump of the object dump to the metadata file
json.dump(sample.dump(), metadatafile, sort_keys=True, indent=4, separators=(',', ': '))
except IOError:
# Print useful information in case of an error
logging.warning('Error creating .json file for {sample}'.format(sample=sample.name))
raise
except TypeError as e:
logging.debug(f'Encountered TypeError writing metadata to file with the following details: {e}')
def __init__(self, inputobject):
try:
self.metadata = inputobject.runmetadata.samples
except AttributeError:
try:
self.metadata = inputobject.metadata.samples
except AttributeError:
try:
self.metadata = inputobject.metadata
except AttributeError:
self.metadata = inputobject.runmetadata
self.printmetadata()
| [((973, 1079), 'logging.debug', 'logging.debug', (['f"""Encountered TypeError writing metadata to file with the following details: {e}"""'], {}), "(\n f'Encountered TypeError writing metadata to file with the following details: {e}'\n )\n", (986, 1079), False, 'import logging\n')] |
aman-gupta-1995/Machine-Learning-Mindware | mindware/estimators.py | 8b3050720711730520683c89949e3dbdfb168961 | import numpy as np
from sklearn.utils.multiclass import type_of_target
from mindware.base_estimator import BaseEstimator
from mindware.components.utils.constants import type_dict, MULTILABEL_CLS, IMG_CLS, TEXT_CLS, OBJECT_DET
from mindware.components.feature_engineering.transformation_graph import DataNode
class Classifier(BaseEstimator):
"""This class implements the classification task. """
def initialize(self, data: DataNode, **kwargs):
if self.metric is None:
self.metric = 'acc'
# Check the task type: {binary, multiclass}
task_type = type_of_target(data.data[1])
if task_type in type_dict:
task_type = type_dict[task_type]
else:
raise ValueError("Invalid Task Type: %s!" % task_type)
self.task_type = task_type
super().initialize(data=data, **kwargs)
def fit(self, data: DataNode, **kwargs):
"""
Fit the classifier to given training data.
:param data: instance of DataNode
:return: self
"""
if self._ml_engine is None:
self.initialize(data=data, **kwargs)
super().fit(data, **kwargs)
return self
def predict(self, X, batch_size=None, n_jobs=1):
"""
Predict classes for X.
:param X: Datanode
:param batch_size: int
:param n_jobs: int
:return: y : array of shape = [n_samples]
The predicted classes.
"""
if not isinstance(X, DataNode):
raise ValueError("X is supposed to be a Data Node, but get %s" % type(X))
return super().predict(X, batch_size=batch_size, n_jobs=n_jobs)
def refit(self):
return super().refit()
def predict_proba(self, X, batch_size=None, n_jobs=1):
"""
Predict probabilities of classes for all samples X.
:param X: Datanode
:param batch_size: int
:param n_jobs: int
:return: y : array of shape = [n_samples, n_classes]
The predicted class probabilities.
"""
if not isinstance(X, DataNode):
raise ValueError("X is supposed to be a Data Node, but get %s" % type(X))
pred_proba = super().predict_proba(X, batch_size=batch_size, n_jobs=n_jobs)
if self.task_type != MULTILABEL_CLS:
assert (
np.allclose(
np.sum(pred_proba, axis=1),
np.ones_like(pred_proba[:, 0]))
), "Prediction probability does not sum up to 1!"
# Check that all probability values lie between 0 and 1.
assert (
(pred_proba >= 0).all() and (pred_proba <= 1).all()
), "Found prediction probability value outside of [0, 1]!"
return pred_proba
def get_tree_importance(self, data: DataNode):
from lightgbm import LGBMClassifier
import pandas as pd
X, y = self.data_transformer(data).data
lgb = LGBMClassifier(random_state=1)
lgb.fit(X, y)
_importance = lgb.feature_importances_
h = {}
h['feature_id'] = np.array(range(len(_importance)))
h['feature_importance'] = _importance
return pd.DataFrame(h)
def get_linear_importance(self, data: DataNode):
from sklearn.linear_model import LogisticRegression
import pandas as pd
X, y = self.data_transformer(data).data
clf = LogisticRegression(random_state=1)
clf.fit(X, y)
_ef = clf.coef_
std_array = np.std(_ef, ddof=1, axis=0)
abs_array = abs(_ef)
mean_array = np.mean(abs_array, axis=0)
_importance = std_array / mean_array
h = {}
h['feature_id'] = np.array(range(len(_importance)))
h['feature_importance'] = _importance
return pd.DataFrame(h)
def get_linear_impact(self, data: DataNode):
from sklearn.linear_model import LogisticRegression
import pandas as pd
if (len(set(data.data[1]))) > 2:
print('ERROR! Only binary classification is supported!')
return 0
X, y = self.data_transformer(data).data
clf = LogisticRegression(random_state=1)
clf.fit(X, y)
_ef = clf.coef_
_impact = _ef[0]
h = {}
h['feature_id'] = np.array(range(len(_impact)))
h['feature_impact'] = _impact
return pd.DataFrame(h)
class Regressor(BaseEstimator):
"""This class implements the regression task. """
def initialize(self, data: DataNode, **kwargs):
self.metric = 'mse' if self.metric is None else self.metric
# Check the task type: {continuous}
task_type = type_dict['continuous']
self.task_type = task_type
super().initialize(data=data, **kwargs)
def fit(self, data, **kwargs):
"""
Fit the regressor to given training data.
:param data: DataNode
:return: self
"""
if self._ml_engine is None:
self.initialize(data=data, **kwargs)
super().fit(data, **kwargs)
return self
def predict(self, X, batch_size=None, n_jobs=1):
"""
Make predictions for X.
:param X: DataNode
:param batch_size: int
:param n_jobs: int
:return: y : array of shape = [n_samples] or [n_samples, n_labels]
The predicted classes.
"""
if not isinstance(X, DataNode):
raise ValueError("X is supposed to be a Data Node, but get %s" % type(X))
return super().predict(X, batch_size=batch_size, n_jobs=n_jobs)
def get_tree_importance(self, data: DataNode):
from lightgbm import LGBMRegressor
import pandas as pd
X, y = self.data_transformer(data).data
lgb = LGBMRegressor(random_state=1)
lgb.fit(X, y)
_importance = lgb.feature_importances_
h = {}
h['feature_id'] = np.array(range(len(_importance)))
h['feature_importance'] = _importance
return pd.DataFrame(h)
def get_linear_impact(self, data: DataNode):
from sklearn.linear_model import LinearRegression
import pandas as pd
X, y = self.data_transformer(data).data
reg = LinearRegression()
reg.fit(X, y)
_impact = reg.coef_
h = {}
h['feature_id'] = np.array(range(len(_impact)))
h['feature_impact'] = _impact
return pd.DataFrame(h)
| [((591, 619), 'sklearn.utils.multiclass.type_of_target', 'type_of_target', (['data.data[1]'], {}), '(data.data[1])\n', (605, 619), False, 'from sklearn.utils.multiclass import type_of_target\n'), ((2957, 2987), 'lightgbm.LGBMClassifier', 'LGBMClassifier', ([], {'random_state': '(1)'}), '(random_state=1)\n', (2971, 2987), False, 'from lightgbm import LGBMClassifier\n'), ((3193, 3208), 'pandas.DataFrame', 'pd.DataFrame', (['h'], {}), '(h)\n', (3205, 3208), True, 'import pandas as pd\n'), ((3413, 3447), 'sklearn.linear_model.LogisticRegression', 'LogisticRegression', ([], {'random_state': '(1)'}), '(random_state=1)\n', (3431, 3447), False, 'from sklearn.linear_model import LogisticRegression\n'), ((3514, 3541), 'numpy.std', 'np.std', (['_ef'], {'ddof': '(1)', 'axis': '(0)'}), '(_ef, ddof=1, axis=0)\n', (3520, 3541), True, 'import numpy as np\n'), ((3592, 3618), 'numpy.mean', 'np.mean', (['abs_array'], {'axis': '(0)'}), '(abs_array, axis=0)\n', (3599, 3618), True, 'import numpy as np\n'), ((3800, 3815), 'pandas.DataFrame', 'pd.DataFrame', (['h'], {}), '(h)\n', (3812, 3815), True, 'import pandas as pd\n'), ((4147, 4181), 'sklearn.linear_model.LogisticRegression', 'LogisticRegression', ([], {'random_state': '(1)'}), '(random_state=1)\n', (4165, 4181), False, 'from sklearn.linear_model import LogisticRegression\n'), ((4377, 4392), 'pandas.DataFrame', 'pd.DataFrame', (['h'], {}), '(h)\n', (4389, 4392), True, 'import pandas as pd\n'), ((5766, 5795), 'lightgbm.LGBMRegressor', 'LGBMRegressor', ([], {'random_state': '(1)'}), '(random_state=1)\n', (5779, 5795), False, 'from lightgbm import LGBMRegressor\n'), ((6001, 6016), 'pandas.DataFrame', 'pd.DataFrame', (['h'], {}), '(h)\n', (6013, 6016), True, 'import pandas as pd\n'), ((6215, 6233), 'sklearn.linear_model.LinearRegression', 'LinearRegression', ([], {}), '()\n', (6231, 6233), False, 'from sklearn.linear_model import LinearRegression\n'), ((6408, 6423), 'pandas.DataFrame', 'pd.DataFrame', (['h'], {}), '(h)\n', (6420, 6423), True, 'import pandas as pd\n'), ((2384, 2410), 'numpy.sum', 'np.sum', (['pred_proba'], {'axis': '(1)'}), '(pred_proba, axis=1)\n', (2390, 2410), True, 'import numpy as np\n'), ((2432, 2464), 'numpy.ones_like', 'np.ones_like', (['pred_proba[:, (0)]'], {}), '(pred_proba[:, (0)])\n', (2444, 2464), True, 'import numpy as np\n')] |
xiaowenwen1995/AnimeSpider | AnimeSpider/spiders/AinmeLinkList.py | 11c676b772508fd4e14565a7adbfc7336d69b982 | # -*- coding: utf-8 -*-
import scrapy
import json
import os
import codecs
from AnimeSpider.items import AnimespiderItem
class AinmelinklistSpider(scrapy.Spider):
name = 'AinmeLinkList'
allowed_domains = ['bilibili.com']
start_urls = ['http://bilibili.com/']
def start_requests(self):
jsonpath = os.path.dirname(__file__) + '/output'
jsonfile = codecs.open('%s/AinmeList_items.json' % jsonpath, 'r', encoding='utf-8')
for line in jsonfile:
ainme = json.loads(line)
ainmename = ainme["name"]
url = ainme["link"].replace("//", "https://")
yield scrapy.Request(url=url, callback=self.parse, meta={'ainmename': ainmename})
def parse(self, response):
item = AnimespiderItem()
item["info_link"] = response.css(".media-title").xpath('@href').get()
yield item
| [((379, 451), 'codecs.open', 'codecs.open', (["('%s/AinmeList_items.json' % jsonpath)", '"""r"""'], {'encoding': '"""utf-8"""'}), "('%s/AinmeList_items.json' % jsonpath, 'r', encoding='utf-8')\n", (390, 451), False, 'import codecs\n'), ((756, 773), 'AnimeSpider.items.AnimespiderItem', 'AnimespiderItem', ([], {}), '()\n', (771, 773), False, 'from AnimeSpider.items import AnimespiderItem\n'), ((322, 347), 'os.path.dirname', 'os.path.dirname', (['__file__'], {}), '(__file__)\n', (337, 347), False, 'import os\n'), ((502, 518), 'json.loads', 'json.loads', (['line'], {}), '(line)\n', (512, 518), False, 'import json\n'), ((633, 708), 'scrapy.Request', 'scrapy.Request', ([], {'url': 'url', 'callback': 'self.parse', 'meta': "{'ainmename': ainmename}"}), "(url=url, callback=self.parse, meta={'ainmename': ainmename})\n", (647, 708), False, 'import scrapy\n')] |
PacktPublishing/Raspberry-Pi-Making-Amazing-Projects-Right-from-Scratch- | Module 1/Chapter 7/prog1.py | 49fd30ca8e1e30e7d85cf14e9dcb6e1d24d4a445 | import cv2
print cv2.__version__
| [] |
darlenew/pytest-testplan | setup.py | 85ef0c196efced681b6559328b3db3d409b2612d | """Setup for pytest-testplan plugin."""
from setuptools import setup
setup(
name='pytest-testplan',
version='0.1.0',
description='A pytest plugin to generate a CSV test report.',
author='Darlene Wong',
author_email='[email protected]',
license='MIT',
py_modules=['pytest_testplan'],
install_requires=['pytest'],
entry_points={'pytest11': ['testplan = pytest_testplan', ]},
)
| [((71, 393), 'setuptools.setup', 'setup', ([], {'name': '"""pytest-testplan"""', 'version': '"""0.1.0"""', 'description': '"""A pytest plugin to generate a CSV test report."""', 'author': '"""Darlene Wong"""', 'author_email': '"""[email protected]"""', 'license': '"""MIT"""', 'py_modules': "['pytest_testplan']", 'install_requires': "['pytest']", 'entry_points': "{'pytest11': ['testplan = pytest_testplan']}"}), "(name='pytest-testplan', version='0.1.0', description=\n 'A pytest plugin to generate a CSV test report.', author='Darlene Wong',\n author_email='[email protected]', license='MIT', py_modules=[\n 'pytest_testplan'], install_requires=['pytest'], entry_points={\n 'pytest11': ['testplan = pytest_testplan']})\n", (76, 393), False, 'from setuptools import setup\n')] |
petr-kalinin/PaddleX | examples/industrial_quality_inspection/train_yolov3.py | e4f08b50dab01f3720570702a071188d1efd4042 | # 环境变量配置,用于控制是否使用GPU
# 说明文档:https://paddlex.readthedocs.io/zh_CN/develop/appendix/parameters.html#gpu
import os
os.environ['CUDA_VISIBLE_DEVICES'] = '0'
from paddlex.det import transforms
import paddlex as pdx
# Download and extract the aluminum defect inspection dataset
aluminum_dataset = 'https://bj.bcebos.com/paddlex/examples/industrial_quality_inspection/datasets/aluminum_inspection.tar.gz'
pdx.utils.download_and_decompress(aluminum_dataset, path='./')
# Define the transforms used during training and validation
# API documentation: https://paddlex.readthedocs.io/zh_CN/develop/apis/transforms/det_transforms.html
train_transforms = transforms.Compose([
transforms.MixupImage(mixup_epoch=250), transforms.RandomDistort(),
transforms.RandomExpand(), transforms.RandomCrop(), transforms.Resize(
target_size=608, interp='RANDOM'), transforms.RandomHorizontalFlip(),
transforms.Normalize()
])
eval_transforms = transforms.Compose([
transforms.Resize(
target_size=608, interp='CUBIC'), transforms.Normalize()
])
# Define the datasets used for training and validation
# API documentation: https://paddlex.readthedocs.io/zh_CN/develop/apis/datasets.html#paddlex-datasets-vocdetection
train_dataset = pdx.datasets.VOCDetection(
data_dir='aluminum_inspection',
file_list='aluminum_inspection/train_list.txt',
label_list='aluminum_inspection/labels.txt',
transforms=train_transforms,
shuffle=True)
eval_dataset = pdx.datasets.VOCDetection(
data_dir='aluminum_inspection',
file_list='aluminum_inspection/val_list.txt',
label_list='aluminum_inspection/labels.txt',
transforms=eval_transforms)
# Initialize the model and start training
# Training metrics can be viewed with VisualDL; see https://paddlex.readthedocs.io/zh_CN/develop/train/visualdl.html
num_classes = len(train_dataset.labels)
# API documentation: https://paddlex.readthedocs.io/zh_CN/develop/apis/models/detection.html#paddlex-det-yolov3
model = pdx.det.YOLOv3(num_classes=num_classes, backbone='MobileNetV3_large')
# API documentation: https://paddlex.readthedocs.io/zh_CN/develop/apis/models/detection.html#train
# Description of each parameter and tuning guidance: https://paddlex.readthedocs.io/zh_CN/develop/appendix/parameters.html
model.train(
num_epochs=400,
train_dataset=train_dataset,
train_batch_size=8,
eval_dataset=eval_dataset,
warmup_steps=4000,
learning_rate=0.000125,
lr_decay_epochs=[240, 320],
save_dir='output/yolov3_mobilenetv3',
use_vdl=True)
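# Added note (not in the original script): after training finishes, the best
# checkpoint is written under save_dir; in PaddleX it can typically be reloaded
# for inference with something like
#     model = pdx.load_model('output/yolov3_mobilenetv3/best_model')
#     result = model.predict('path/to/image.jpg')  # hypothetical image path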
| [((355, 417), 'paddlex.utils.download_and_decompress', 'pdx.utils.download_and_decompress', (['aluminum_dataset'], {'path': '"""./"""'}), "(aluminum_dataset, path='./')\n", (388, 417), True, 'import paddlex as pdx\n'), ((1091, 1299), 'paddlex.datasets.VOCDetection', 'pdx.datasets.VOCDetection', ([], {'data_dir': '"""aluminum_inspection"""', 'file_list': '"""aluminum_inspection/train_list.txt"""', 'label_list': '"""aluminum_inspection/labels.txt"""', 'transforms': 'train_transforms', 'shuffle': '(True)'}), "(data_dir='aluminum_inspection', file_list=\n 'aluminum_inspection/train_list.txt', label_list=\n 'aluminum_inspection/labels.txt', transforms=train_transforms, shuffle=True\n )\n", (1116, 1299), True, 'import paddlex as pdx\n'), ((1321, 1507), 'paddlex.datasets.VOCDetection', 'pdx.datasets.VOCDetection', ([], {'data_dir': '"""aluminum_inspection"""', 'file_list': '"""aluminum_inspection/val_list.txt"""', 'label_list': '"""aluminum_inspection/labels.txt"""', 'transforms': 'eval_transforms'}), "(data_dir='aluminum_inspection', file_list=\n 'aluminum_inspection/val_list.txt', label_list=\n 'aluminum_inspection/labels.txt', transforms=eval_transforms)\n", (1346, 1507), True, 'import paddlex as pdx\n'), ((1766, 1835), 'paddlex.det.YOLOv3', 'pdx.det.YOLOv3', ([], {'num_classes': 'num_classes', 'backbone': '"""MobileNetV3_large"""'}), "(num_classes=num_classes, backbone='MobileNetV3_large')\n", (1780, 1835), True, 'import paddlex as pdx\n'), ((574, 612), 'paddlex.det.transforms.MixupImage', 'transforms.MixupImage', ([], {'mixup_epoch': '(250)'}), '(mixup_epoch=250)\n', (595, 612), False, 'from paddlex.det import transforms\n'), ((614, 640), 'paddlex.det.transforms.RandomDistort', 'transforms.RandomDistort', ([], {}), '()\n', (638, 640), False, 'from paddlex.det import transforms\n'), ((646, 671), 'paddlex.det.transforms.RandomExpand', 'transforms.RandomExpand', ([], {}), '()\n', (669, 671), False, 'from paddlex.det import transforms\n'), ((673, 696), 'paddlex.det.transforms.RandomCrop', 'transforms.RandomCrop', ([], {}), '()\n', (694, 696), False, 'from paddlex.det import transforms\n'), ((698, 749), 'paddlex.det.transforms.Resize', 'transforms.Resize', ([], {'target_size': '(608)', 'interp': '"""RANDOM"""'}), "(target_size=608, interp='RANDOM')\n", (715, 749), False, 'from paddlex.det import transforms\n'), ((760, 793), 'paddlex.det.transforms.RandomHorizontalFlip', 'transforms.RandomHorizontalFlip', ([], {}), '()\n', (791, 793), False, 'from paddlex.det import transforms\n'), ((799, 821), 'paddlex.det.transforms.Normalize', 'transforms.Normalize', ([], {}), '()\n', (819, 821), False, 'from paddlex.det import transforms\n'), ((869, 919), 'paddlex.det.transforms.Resize', 'transforms.Resize', ([], {'target_size': '(608)', 'interp': '"""CUBIC"""'}), "(target_size=608, interp='CUBIC')\n", (886, 919), False, 'from paddlex.det import transforms\n'), ((930, 952), 'paddlex.det.transforms.Normalize', 'transforms.Normalize', ([], {}), '()\n', (950, 952), False, 'from paddlex.det import transforms\n')] |
bartoszper/Django-REST-API-movierater | api/migrations/0004_auto_20210107_2032.py | a145f087d9c59167ea3503dde5fa74ab7f3e3e72 | # Generated by Django 3.1.4 on 2021-01-07 19:32
from django.db import migrations, models
import django.db.models.deletion
class Migration(migrations.Migration):
dependencies = [
('api', '0003_auto_20210107_2010'),
]
operations = [
migrations.AlterField(
model_name='extrainfo',
name='rodzaj',
field=models.IntegerField(choices=[(2, 'Sci-Fi'), (0, 'Nieznany'), (5, 'Komedia'), (3, 'Dramat'), (1, 'Horror')], default=0),
),
migrations.CreateModel(
name='Recenzja',
fields=[
('id', models.AutoField(auto_created=True, primary_key=True, serialize=False, verbose_name='ID')),
('opis', models.TextField(default='')),
('gwizdki', models.IntegerField(default=5)),
('film', models.ForeignKey(on_delete=django.db.models.deletion.CASCADE, to='api.film')),
],
),
]
| [((368, 490), 'django.db.models.IntegerField', 'models.IntegerField', ([], {'choices': "[(2, 'Sci-Fi'), (0, 'Nieznany'), (5, 'Komedia'), (3, 'Dramat'), (1, 'Horror')]", 'default': '(0)'}), "(choices=[(2, 'Sci-Fi'), (0, 'Nieznany'), (5, 'Komedia'),\n (3, 'Dramat'), (1, 'Horror')], default=0)\n", (387, 490), False, 'from django.db import migrations, models\n'), ((604, 697), 'django.db.models.AutoField', 'models.AutoField', ([], {'auto_created': '(True)', 'primary_key': '(True)', 'serialize': '(False)', 'verbose_name': '"""ID"""'}), "(auto_created=True, primary_key=True, serialize=False,\n verbose_name='ID')\n", (620, 697), False, 'from django.db import migrations, models\n'), ((721, 749), 'django.db.models.TextField', 'models.TextField', ([], {'default': '""""""'}), "(default='')\n", (737, 749), False, 'from django.db import migrations, models\n'), ((780, 810), 'django.db.models.IntegerField', 'models.IntegerField', ([], {'default': '(5)'}), '(default=5)\n', (799, 810), False, 'from django.db import migrations, models\n'), ((838, 915), 'django.db.models.ForeignKey', 'models.ForeignKey', ([], {'on_delete': 'django.db.models.deletion.CASCADE', 'to': '"""api.film"""'}), "(on_delete=django.db.models.deletion.CASCADE, to='api.film')\n", (855, 915), False, 'from django.db import migrations, models\n')] |
macdaliot/Wooey | wooey/migrations/0009_script_versioning.py | 3a0f40e3b3ab4d905f9acc72f5cd5d6453e14834 | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
from django.db import models, migrations
import wooey.models.mixins
class Migration(migrations.Migration):
dependencies = [
('wooey', '0008_short_param_admin'),
]
operations = [
migrations.CreateModel(
name='ScriptVersion',
fields=[
('id', models.AutoField(verbose_name='ID', serialize=False, auto_created=True, primary_key=True)),
('script_version', models.CharField(default='1', help_text='The script version.', max_length=50, blank=True)),
('script_iteration', models.PositiveSmallIntegerField(default=1)),
('script_path', models.FileField(upload_to=b'')),
('default_version', models.BooleanField(default=False)),
('created_date', models.DateTimeField(auto_now_add=True)),
('modified_date', models.DateTimeField(auto_now=True)),
('script', models.ForeignKey(related_name='script_version_new', to='wooey.Script')),
],
bases=(wooey.models.mixins.ModelDiffMixin, wooey.models.mixins.WooeyPy2Mixin, models.Model),
),
migrations.AddField(
model_name='scriptparameter',
name='script_version',
field=models.ForeignKey(null=True, to='wooey.ScriptVersion'),
preserve_default=False,
),
migrations.AddField(
model_name='scriptparametergroup',
name='script_version',
field=models.ForeignKey(null=True, to='wooey.ScriptVersion'),
preserve_default=False,
),
migrations.AddField(
model_name='wooeyjob',
name='script_version',
field=models.ForeignKey(null=True, to='wooey.ScriptVersion'),
preserve_default=False,
),
]
| [((1321, 1375), 'django.db.models.ForeignKey', 'models.ForeignKey', ([], {'null': '(True)', 'to': '"""wooey.ScriptVersion"""'}), "(null=True, to='wooey.ScriptVersion')\n", (1338, 1375), False, 'from django.db import models, migrations\n'), ((1553, 1607), 'django.db.models.ForeignKey', 'models.ForeignKey', ([], {'null': '(True)', 'to': '"""wooey.ScriptVersion"""'}), "(null=True, to='wooey.ScriptVersion')\n", (1570, 1607), False, 'from django.db import models, migrations\n'), ((1773, 1827), 'django.db.models.ForeignKey', 'models.ForeignKey', ([], {'null': '(True)', 'to': '"""wooey.ScriptVersion"""'}), "(null=True, to='wooey.ScriptVersion')\n", (1790, 1827), False, 'from django.db import models, migrations\n'), ((377, 470), 'django.db.models.AutoField', 'models.AutoField', ([], {'verbose_name': '"""ID"""', 'serialize': '(False)', 'auto_created': '(True)', 'primary_key': '(True)'}), "(verbose_name='ID', serialize=False, auto_created=True,\n primary_key=True)\n", (393, 470), False, 'from django.db import models, migrations\n'), ((504, 598), 'django.db.models.CharField', 'models.CharField', ([], {'default': '"""1"""', 'help_text': '"""The script version."""', 'max_length': '(50)', 'blank': '(True)'}), "(default='1', help_text='The script version.', max_length=\n 50, blank=True)\n", (520, 598), False, 'from django.db import models, migrations\n'), ((633, 676), 'django.db.models.PositiveSmallIntegerField', 'models.PositiveSmallIntegerField', ([], {'default': '(1)'}), '(default=1)\n', (665, 676), False, 'from django.db import models, migrations\n'), ((711, 742), 'django.db.models.FileField', 'models.FileField', ([], {'upload_to': "b''"}), "(upload_to=b'')\n", (727, 742), False, 'from django.db import models, migrations\n'), ((781, 815), 'django.db.models.BooleanField', 'models.BooleanField', ([], {'default': '(False)'}), '(default=False)\n', (800, 815), False, 'from django.db import models, migrations\n'), ((851, 890), 'django.db.models.DateTimeField', 'models.DateTimeField', ([], {'auto_now_add': '(True)'}), '(auto_now_add=True)\n', (871, 890), False, 'from django.db import models, migrations\n'), ((927, 962), 'django.db.models.DateTimeField', 'models.DateTimeField', ([], {'auto_now': '(True)'}), '(auto_now=True)\n', (947, 962), False, 'from django.db import models, migrations\n'), ((992, 1063), 'django.db.models.ForeignKey', 'models.ForeignKey', ([], {'related_name': '"""script_version_new"""', 'to': '"""wooey.Script"""'}), "(related_name='script_version_new', to='wooey.Script')\n", (1009, 1063), False, 'from django.db import models, migrations\n')] |
menamegaly/MR | vendor/munkireport/firewall/scripts/firewall.py | 18d042639d9b45ca81a9b58659f45c6e2c3ac87f | #!/usr/bin/python
"""
Firewall for munkireport.
By Tuxudo
Will return all details about how the firewall is configured
"""
import subprocess
import os
import sys
import platform
import re
import plistlib
import json
sys.path.insert(0,'/usr/local/munki')
sys.path.insert(0, '/usr/local/munkireport')
from munkilib import FoundationPlist
def get_firewall_info():
'''Uses system profiler to get firewall info for the machine.'''
cmd = ['/usr/sbin/system_profiler', 'SPFirewallDataType', '-xml']
proc = subprocess.Popen(cmd, shell=False, bufsize=-1,
stdin=subprocess.PIPE,
stdout=subprocess.PIPE, stderr=subprocess.PIPE)
(output, unused_error) = proc.communicate()
try:
plist = plistlib.readPlistFromString(output)
# system_profiler xml is an array
firewall_dict = plist[0]
items = firewall_dict['_items']
return items
except Exception:
return {}
def flatten_firewall_info(array):
    '''Un-nest firewall info and return a dict with the relevant keys'''
firewall = {}
for obj in array:
for item in obj:
if item == '_items':
                # recurse into nested items and merge their keys
                firewall.update(flatten_firewall_info(obj['_items']))
elif item == 'spfirewall_services':
for service in obj[item]:
if obj[item][service] == "spfirewall_allow_all":
obj[item][service] = 1
else:
obj[item][service] = 0
firewall['services'] = json.dumps(obj[item])
elif item == 'spfirewall_applications':
for application in obj[item]:
if obj[item][application] == "spfirewall_allow_all":
obj[item][application] = 1
else:
obj[item][application] = 0
firewall['applications'] = json.dumps(obj[item])
return firewall
def get_alf_preferences():
pl = FoundationPlist.readPlist("/Library/Preferences/com.apple.alf.plist")
firewall = {}
for item in pl:
if item == 'allowdownloadsignedenabled':
firewall['allowdownloadsignedenabled'] = to_bool(pl[item])
elif item == 'allowsignedenabled':
firewall['allowsignedenabled'] = to_bool(pl[item])
elif item == 'firewallunload':
firewall['firewallunload'] = to_bool(pl[item])
elif item == 'globalstate':
firewall['globalstate'] = to_bool(pl[item])
elif item == 'stealthenabled':
firewall['stealthenabled'] = to_bool(pl[item])
elif item == 'loggingenabled':
firewall['loggingenabled'] = to_bool(pl[item])
elif item == 'loggingoption':
firewall['loggingoption'] = pl[item]
elif item == 'version':
firewall['version'] = pl[item]
return firewall
def to_bool(s):
if s == True:
return 1
else:
return 0
def merge_two_dicts(x, y):
z = x.copy()
z.update(y)
return z
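# Note: merge order matters -- values from the second argument win, e.g.
# merge_two_dicts({'a': 1}, {'a': 2}) == {'a': 2}.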
def main():
"""Main"""
# Skip manual check
if len(sys.argv) > 1:
if sys.argv[1] == 'manualcheck':
print 'Manual check: skipping'
exit(0)
# Create cache dir if it does not exist
cachedir = '%s/cache' % os.path.dirname(os.path.realpath(__file__))
if not os.path.exists(cachedir):
os.makedirs(cachedir)
# Set the encoding
# The "ugly hack" :P
reload(sys)
sys.setdefaultencoding('utf8')
# Get results
result = dict()
info = get_firewall_info()
result = merge_two_dicts(flatten_firewall_info(info), get_alf_preferences())
# Write firewall results to cache
output_plist = os.path.join(cachedir, 'firewall.plist')
FoundationPlist.writePlist(result, output_plist)
#print FoundationPlist.writePlistToString(result)
if __name__ == "__main__":
main()
| [] |
dpoulopoulos/cf_step | cf_step/metrics.py | c0ed1d0fbdedb863a630e90a7c7b6f95141a3e30 | # AUTOGENERATED! DO NOT EDIT! File to edit: nbs/metrics.ipynb (unless otherwise specified).
__all__ = ['recall_at_k', 'precision_at_k']
# Cell
from typing import List
# Cell
def recall_at_k(predictions: List[int], targets: List[int], k: int = 10) -> float:
"""Computes `Recall@k` from the given predictions and targets sets."""
predictions_set = set(predictions[:k])
targets_set = set(targets)
result = len(targets_set & predictions_set) / float(len(targets_set))
return result
# Cell
def precision_at_k(predictions: List[int], targets: List[int], k: int = 10) -> float:
"""Computes `Precision@k` from the given predictions and targets sets."""
predictions_set = set(predictions[:k])
targets_set = set(targets)
result = len(targets_set & predictions_set) / float(len(predictions_set))
return result | [] |
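
# A minimal, self-contained sanity check of the two metrics above; the id
# lists are hypothetical examples.
if __name__ == "__main__":
    preds = [1, 2, 3, 4, 5, 6, 7, 8, 9, 10]  # hypothetical ranked predictions
    targets = [2, 5, 30]                     # hypothetical relevant items
    print(recall_at_k(preds, targets, k=5))     # 2 of 3 targets in the top-5 -> 0.666...
    print(precision_at_k(preds, targets, k=5))  # 2 of 5 predictions relevant -> 0.4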
sandertyu/Simple-Geometry-Plot | bicycleparameters/period.py | 6fa4dfb50aebc4215818f75ff56f916fc32f8cfa | #!/usr/bin/env python
import os
from math import pi
import numpy as np
from numpy import ma
from scipy.optimize import leastsq
import matplotlib.pyplot as plt
from uncertainties import ufloat
# local modules
from .io import load_pendulum_mat_file
def average_rectified_sections(data):
'''Returns a slice of an oscillating data vector based on the max and min
    of the mean of the sections created by rectifying the data.
Parameters
----------
data : ndarray, shape(n,)
Returns
-------
data : ndarray, shape(m,)
A slice where m is typically less than n.
Notes
-----
    This function tries to handle the fact that some of the data from the
    torsional pendulum had a beating-like phenomenon and we only want to select
    a section of the data that doesn't seem to exhibit it.
'''
# subtract the mean so that there are zero crossings
meanSubData = data - np.mean(data)
# find the zero crossings
zeroCrossings = np.where(np.diff(np.sign(meanSubData)))[0]
# add a zero to the beginning
crossings = np.concatenate((np.array([0]), zeroCrossings))
# find the mean value of the rectified sections and the local indice
secMean = []
localMeanInd = []
for sec in np.split(np.abs(meanSubData), zeroCrossings):
localMeanInd.append(np.argmax(sec))
secMean.append(np.mean(sec))
meanInd = []
# make the global indices
for i, val in enumerate(crossings):
meanInd.append(val + localMeanInd[i])
    # only take the top part of the data because some of the zero crossings can be
# a lot at one point mainly due to the resolution of the daq box
threshold = np.mean(secMean)
secMeanOverThresh = []
indice = []
for i, val in enumerate(secMean):
if val > threshold:
secMeanOverThresh.append(val)
indice.append(meanInd[i])
# now return the data based on the max value and the min value
maxInd = indice[np.argmax(secMeanOverThresh)]
minInd = indice[np.argmin(secMeanOverThresh)]
return data[maxInd:minInd]
def calc_periods_for_files(directory, filenames, forkIsSplit):
'''Calculates the period for all filenames in directory.
Parameters
----------
directory : string
This is the path to the RawData directory.
filenames : list
List of all the mat file names in the RawData directory.
forkIsSplit : boolean
True if the fork is broken into a handlebar and fork and false if the
fork and handlebar was measured together.
Returns
-------
periods : dictionary
Contains all the periods for the mat files in the RawData directory.
'''
periods = {}
def pathParts(path):
'''Splits a path into a list of its parts.'''
components = []
while True:
(path,tail) = os.path.split(path)
if tail == "":
components.reverse()
return components
components.append(tail)
pathToRawDataParts = pathParts(directory)
pathToRawDataParts.pop()
pathToBicycleDir = os.path.join(pathToRawDataParts[0],
pathToRawDataParts[1],
pathToRawDataParts[2])
pathToPlotDir = os.path.join(pathToBicycleDir, 'Plots', 'PendulumFit')
# make sure there is a place to save the plots
if not os.path.exists(pathToPlotDir):
os.makedirs(pathToPlotDir)
for f in filenames:
print("Calculating the period for:", f)
# load the pendulum data
pathToMatFile = os.path.join(directory, f)
matData = load_pendulum_mat_file(pathToMatFile)
# generate a variable name for this period
periodKey = get_period_key(matData, forkIsSplit)
# calculate the period
sampleRate = get_sample_rate(matData)
pathToPlotFile = os.path.join(pathToPlotDir,
os.path.splitext(f)[0] + '.png')
period = get_period_from_truncated(matData['data'],
sampleRate,
pathToPlotFile)
print("The period is:", period, "\n")
        # either append the period or, if it isn't there yet, then
# make a new list
try:
periods[periodKey].append(period)
except KeyError:
periods[periodKey] = [period]
# now average all the periods
for k, v in periods.items():
if k.startswith('T'):
periods[k] = np.mean(v)
return periods
def check_for_period(mp, forkIsSplit):
    '''Returns whether the period calculations need to happen again, based on
    the periods already stored in the measured parameters.
Parameters
----------
mp : dictionary
Dictionary the measured parameters.
forkIsSplit : boolean
True if the fork is broken into a handlebar and fork and false if the
fork and handlebar was measured together.
Returns
-------
forcePeriodCalc : boolean
True if there wasn't enough period data in mp, false if there was.
'''
forcePeriodCalc = False
    # Check to see if mp contains enough periods to not need
# recalculation
ncTSum = 0
ntTSum = 0
for key in mp.keys():
# check for any periods in the keys
if key[:2] == 'Tc':
ncTSum += 1
elif key[:2] == 'Tt':
ntTSum += 1
    # if there isn't enough data then force the period calcs again
if forkIsSplit:
if ncTSum < 5 or ntTSum < 11:
forcePeriodCalc = True
else:
if ncTSum < 4 or ntTSum < 8:
forcePeriodCalc = True
return forcePeriodCalc
def fit_goodness(ym, yp):
'''
Calculate the goodness of fit.
Parameters
----------
ym : ndarray, shape(n,)
The vector of measured values.
    yp : ndarray, shape(n,)
The vector of predicted values.
Returns
-------
rsq : float
The r squared value of the fit.
SSE : float
The error sum of squares.
SST : float
The total sum of squares.
SSR : float
The regression sum of squares.
'''
SSR = np.sum((yp - np.mean(ym))**2)
SST = np.sum((ym - np.mean(ym))**2)
SSE = SST - SSR
rsq = SSR / SST
return rsq, SSE, SST, SSR
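# Worked example of the quantities above: for ym = [1, 2, 3] and a perfect
# prediction yp = ym, SST = SSR = 2.0, so SSE = 0.0 and rsq = 1.0; any
# mismatch between yp and ym lowers rsq.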
def get_period(data, sampleRate, pathToPlotFile):
'''Returns the period and uncertainty for data resembling a decaying
oscillation.
Parameters
----------
data : ndarray, shape(n,)
A time series that resembles a decaying oscillation.
sampleRate : int
The frequency that data was sampled at.
pathToPlotFile : string
A path to the file to print the plots.
Returns
-------
T : ufloat
The period of oscillation and its uncertainty.
'''
y = data
x = np.linspace(0., (len(y) - 1) / float(sampleRate), num=len(y))
def fitfunc(p, t):
'''Decaying oscillation function.'''
a = p[0]
b = np.exp(-p[3] * p[4] * t)
c = p[1] * np.sin(p[4] * np.sqrt(1 - p[3]**2) * t)
d = p[2] * np.cos(p[4] * np.sqrt(1 - p[3]**2) * t)
return a + b * (c + d)
# initial guesses
#p0 = np.array([1.35, -.5, -.75, 0.01, 3.93]) # guess from delft
#p0 = np.array([2.5, -.75, -.75, 0.001, 4.3]) # guess from ucd
p0 = make_guess(data, sampleRate) # tries to make a good guess
# create the error function
errfunc = lambda p, t, y: fitfunc(p, t) - y
# minimize the error function
p1, success = leastsq(errfunc, p0[:], args=(x, y))
lscurve = fitfunc(p1, x)
# find the uncertainty in the fit parameters
rsq, SSE, SST, SSR = fit_goodness(y, lscurve)
sigma = np.sqrt(SSE / (len(y) - len(p0)))
# calculate the jacobian
L = jac_fitfunc(p1, x)
# the Hessian
H = np.dot(L.T, L)
# the covariance matrix
U = sigma**2. * np.linalg.inv(H)
# the standard deviations
sigp = np.sqrt(U.diagonal())
# natural frequency
wo = ufloat(p1[4], sigp[4])
# damping ratio
zeta = ufloat(p1[3], sigp[3])
# damped natural frequency
wd = (1. - zeta**2.)**(1. / 2.) * wo
# damped natural frequency (hz)
fd = wd / 2. / pi
# period
T = 1. / fd
# plot the data and save it to file
fig = plt.figure()
plot_osfit(x, y, lscurve, p1, rsq, T, m=np.max(x), fig=fig)
plt.savefig(pathToPlotFile)
plt.close()
# return the period
return T
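# Hypothetical usage sketch (values and the plot path are illustrative only):
#
#     rate = 500
#     t = np.linspace(0., 5., 5 * rate)
#     y = 1.2 + np.exp(-0.02 * 4.0 * t) * (0.3 * np.sin(4.0 * t) + 0.8 * np.cos(4.0 * t))
#     T = get_period(y, rate, '/tmp/pendulum_fit.png')
#
# T comes back as a ufloat with a nominal value near 2 * pi / 4 seconds and an
# uncertainty propagated from the least squares fit.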
def get_period_from_truncated(data, sampleRate, pathToPlotFile):
#dataRec = average_rectified_sections(data)
dataRec = data
dataGood = select_good_data(dataRec, 0.1)
return get_period(dataGood, sampleRate, pathToPlotFile)
def get_period_key(matData, forkIsSplit):
'''Returns a dictionary key for the period entries.
Parameters
----------
matData : dictionary
The data imported from a pendulum mat file.
forkIsSplit : boolean
True if the fork is broken into a handlebar and fork and false if the
fork and handlebar was measured together.
Returns
-------
key : string
A key of the form 'T[pendulum][part][orientation]'. For example, if it
is the frame that was hung as a torsional pendulum at the second
orientation angle then the key would be 'TtB2'.
'''
# set up the subscripting for the period key
subscripts = {'Fwheel': 'F',
'Rwheel': 'R',
'Frame': 'B',
'Flywheel': 'D'}
# the Flywheel is for the gyro bike and it actually represents the front
# wheel and the flywheel as one rigid body. It was easier to measure the
    # the inertia this way. So... to get the actual flywheel inertia, one
# must subtract the inertia of the Fwheel, F, from the Flywheel, D.
if forkIsSplit:
subscripts['Fork'] = 'S'
subscripts['Handlebar'] = 'G'
else:
subscripts['Fork'] = 'H'
try:
subscripts[matData['rod']] = 'P'
except KeyError:
subscripts['Rod'] = 'P'
# used to convert word ordinals to numbers
ordinal = {'First' : '1',
'Second' : '2',
'Third' : '3',
'Fourth' : '4',
'Fifth' : '5',
'Sixth' : '6'}
try:
orienWord = matData['angleOrder']
except:
orienWord = matData['angle']
pend = matData['pendulum'][0].lower()
part = subscripts[matData['part']]
orienNum = ordinal[orienWord]
return 'T' + pend + part + orienNum
def get_sample_rate(matData):
'''Returns the sample rate for the data.'''
if 'ActualRate' in matData.keys():
sampleRate = matData['ActualRate']
else:
sampleRate = matData['sampleRate']
return sampleRate
def jac_fitfunc(p, t):
'''
Calculate the Jacobian of a decaying oscillation function.
Uses the analytical formulations of the partial derivatives.
Parameters
----------
p : the five parameters of the equation
t : time vector
Returns
-------
    jac : The Jacobian, the partial derivatives of the vector function with
        respect to the parameters vector. An N x 5 matrix where N is the number
        of time steps.
'''
jac = np.zeros((len(p), len(t)))
e = np.exp(-p[3] * p[4] * t)
dampsq = np.sqrt(1 - p[3]**2)
s = np.sin(dampsq * p[4] * t)
c = np.cos(dampsq * p[4] * t)
jac[0] = np.ones_like(t)
jac[1] = e * s
jac[2] = e * c
jac[3] = (-p[4] * t * e * (p[1] * s + p[2] * c) + e * (-p[1] * p[3] * p[4]
* t / dampsq * c + p[2] * p[3] * p[4] * t / dampsq * s))
jac[4] = (-p[3] * t * e * (p[1] * s + p[2] * c) + e * dampsq * t * (p[1] *
c - p[2] * s))
return jac.T
def make_guess(data, sampleRate):
'''Returns a decent starting point for fitting the decaying oscillation
function.
'''
p = np.zeros(5)
# the first unknown is the shift along the y axis
p[0] = np.mean(data)
# work with the mean subtracted data from now on
data = data - p[0]
# what is the initial slope of the curve
if data[10] > data[0]:
slope = 1
else:
slope = -1
# the second is the amplitude for the sin function
p[1] = slope * np.max(data) / 2
# the third is the amplitude for the cos function
p[2] = slope * np.max(data)
# the fourth is the damping ratio and is typically small, 0.001 < zeta < 0.02
p[3] = 0.001
# the fifth is the undamped natural frequency
# first remove the data around zero
dataMasked = ma.masked_inside(data, -0.1, 0.1)
# find the zero crossings
zeroCrossings = np.where(np.diff(np.sign(dataMasked)))[0]
# remove redundant crossings
zero = []
for i, v in enumerate(zeroCrossings):
if abs(v - zeroCrossings[i - 1]) > 20:
zero.append(v)
# get the samples per period
samplesPerPeriod = 2*np.mean(np.diff(zero))
# now the frequency
p[4] = (samplesPerPeriod / float(sampleRate) /2. / pi)**-1
if np.isnan(p[4]):
p[4] = 4.
return p
def plot_osfit(t, ym, yf, p, rsq, T, m=None, fig=None):
    '''Plot the fitted data over the measured data.
Parameters
----------
t : ndarray (n,)
Measurement time in seconds
ym : ndarray (n,)
The measured voltage
    yf : ndarray (n,)
        The fitted voltage
    p : ndarray (5,)
        The fit parameters for the decaying oscillation function
rsq : float
The r squared value of y (the fit)
T : float
The period
m : float
The maximum value to plot
Returns
-------
fig : the figure
'''
# figure properties
figwidth = 4. # in inches
goldenMean = (np.sqrt(5) - 1.0) / 2.0
figsize = [figwidth, figwidth * goldenMean]
params = {#'backend': 'ps',
'axes.labelsize': 8,
'axes.titlesize': 8,
'text.fontsize': 8,
'legend.fontsize': 8,
'xtick.labelsize': 6,
'ytick.labelsize': 6,
'text.usetex': True,
#'figure.figsize': figsize
}
if fig:
fig = fig
else:
fig = plt.figure(2)
fig.set_size_inches(figsize)
plt.rcParams.update(params)
ax1 = plt.axes([0.125, 0.125, 0.9-0.125, 0.65])
#if m == None:
#end = len(t)
#else:
#end = t[round(m/t[-1]*len(t))]
ax1.plot(t, ym, '.', markersize=2)
plt.plot(t, yf, 'k-')
plt.xlabel('Time [s]')
plt.ylabel('Amplitude [V]')
equation = r'$f(t)={0:1.2f}+e^{{-({3:1.3f})({4:1.1f})t}}\left[{1:1.2f}\sin{{\sqrt{{1-{3:1.3f}^2}}{4:1.1f}t}}+{2:1.2f}\cos{{\sqrt{{1-{3:1.3f}^2}}{4:1.1f}t}}\right]$'.format(p[0], p[1], p[2], p[3], p[4])
rsquare = '$r^2={0:1.3f}$'.format(rsq)
period = '$T={0} s$'.format(T)
plt.title(equation + '\n' + rsquare + ', ' + period)
plt.legend(['Measured', 'Fit'])
if m is not None:
plt.xlim((0, m))
else:
pass
return fig
def select_good_data(data, percent):
'''Returns a slice of the data from the index at maximum value to the index
at a percent of the maximum value.
Parameters
----------
    data : ndarray, shape(n,)
This should be a decaying function.
percent : float
The percent of the maximum to clip.
    This basically snips off the beginning and end of the data so that the super
damped tails are gone and also any weirdness at the beginning.
'''
meanSub = data - np.mean(data)
maxVal = np.max(np.abs(meanSub))
maxInd = np.argmax(np.abs(meanSub))
for i, v in reversed(list(enumerate(meanSub))):
if v > percent * maxVal:
minInd = i
break
return data[maxInd:minInd]
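# Illustrative note: get_period_from_truncated above relies on this, e.g.
#
#     trimmed = select_good_data(raw_record, 0.1)
#
# keeps the stretch from the largest mean-subtracted excursion down to the last
# sample still above 10% of it, trimming the heavily damped tail.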
| [((1695, 1711), 'numpy.mean', 'np.mean', (['secMean'], {}), '(secMean)\n', (1702, 1711), True, 'import numpy as np\n'), ((3127, 3212), 'os.path.join', 'os.path.join', (['pathToRawDataParts[0]', 'pathToRawDataParts[1]', 'pathToRawDataParts[2]'], {}), '(pathToRawDataParts[0], pathToRawDataParts[1],\n pathToRawDataParts[2])\n', (3139, 3212), False, 'import os\n'), ((3301, 3355), 'os.path.join', 'os.path.join', (['pathToBicycleDir', '"""Plots"""', '"""PendulumFit"""'], {}), "(pathToBicycleDir, 'Plots', 'PendulumFit')\n", (3313, 3355), False, 'import os\n'), ((7748, 7784), 'scipy.optimize.leastsq', 'leastsq', (['errfunc', 'p0[:]'], {'args': '(x, y)'}), '(errfunc, p0[:], args=(x, y))\n', (7755, 7784), False, 'from scipy.optimize import leastsq\n'), ((8045, 8059), 'numpy.dot', 'np.dot', (['L.T', 'L'], {}), '(L.T, L)\n', (8051, 8059), True, 'import numpy as np\n'), ((8224, 8246), 'uncertainties.ufloat', 'ufloat', (['p1[4]', 'sigp[4]'], {}), '(p1[4], sigp[4])\n', (8230, 8246), False, 'from uncertainties import ufloat\n'), ((8278, 8300), 'uncertainties.ufloat', 'ufloat', (['p1[3]', 'sigp[3]'], {}), '(p1[3], sigp[3])\n', (8284, 8300), False, 'from uncertainties import ufloat\n'), ((8511, 8523), 'matplotlib.pyplot.figure', 'plt.figure', ([], {}), '()\n', (8521, 8523), True, 'import matplotlib.pyplot as plt\n'), ((8592, 8619), 'matplotlib.pyplot.savefig', 'plt.savefig', (['pathToPlotFile'], {}), '(pathToPlotFile)\n', (8603, 8619), True, 'import matplotlib.pyplot as plt\n'), ((8624, 8635), 'matplotlib.pyplot.close', 'plt.close', ([], {}), '()\n', (8633, 8635), True, 'import matplotlib.pyplot as plt\n'), ((11465, 11489), 'numpy.exp', 'np.exp', (['(-p[3] * p[4] * t)'], {}), '(-p[3] * p[4] * t)\n', (11471, 11489), True, 'import numpy as np\n'), ((11503, 11525), 'numpy.sqrt', 'np.sqrt', (['(1 - p[3] ** 2)'], {}), '(1 - p[3] ** 2)\n', (11510, 11525), True, 'import numpy as np\n'), ((11532, 11557), 'numpy.sin', 'np.sin', (['(dampsq * p[4] * t)'], {}), '(dampsq * p[4] * t)\n', (11538, 11557), True, 'import numpy as np\n'), ((11566, 11591), 'numpy.cos', 'np.cos', (['(dampsq * p[4] * t)'], {}), '(dampsq * p[4] * t)\n', (11572, 11591), True, 'import numpy as np\n'), ((11605, 11620), 'numpy.ones_like', 'np.ones_like', (['t'], {}), '(t)\n', (11617, 11620), True, 'import numpy as np\n'), ((12076, 12087), 'numpy.zeros', 'np.zeros', (['(5)'], {}), '(5)\n', (12084, 12087), True, 'import numpy as np\n'), ((12154, 12167), 'numpy.mean', 'np.mean', (['data'], {}), '(data)\n', (12161, 12167), True, 'import numpy as np\n'), ((12752, 12785), 'numpy.ma.masked_inside', 'ma.masked_inside', (['data', '(-0.1)', '(0.1)'], {}), '(data, -0.1, 0.1)\n', (12768, 12785), False, 'from numpy import ma\n'), ((13216, 13230), 'numpy.isnan', 'np.isnan', (['p[4]'], {}), '(p[4])\n', (13224, 13230), True, 'import numpy as np\n'), ((14337, 14364), 'matplotlib.pyplot.rcParams.update', 'plt.rcParams.update', (['params'], {}), '(params)\n', (14356, 14364), True, 'import matplotlib.pyplot as plt\n'), ((14375, 14418), 'matplotlib.pyplot.axes', 'plt.axes', (['[0.125, 0.125, 0.9 - 0.125, 0.65]'], {}), '([0.125, 0.125, 0.9 - 0.125, 0.65])\n', (14383, 14418), True, 'import matplotlib.pyplot as plt\n'), ((14552, 14573), 'matplotlib.pyplot.plot', 'plt.plot', (['t', 'yf', '"""k-"""'], {}), "(t, yf, 'k-')\n", (14560, 14573), True, 'import matplotlib.pyplot as plt\n'), ((14578, 14600), 'matplotlib.pyplot.xlabel', 'plt.xlabel', (['"""Time [s]"""'], {}), "('Time [s]')\n", (14588, 14600), True, 'import matplotlib.pyplot as plt\n'), ((14605, 14632), 
'matplotlib.pyplot.ylabel', 'plt.ylabel', (['"""Amplitude [V]"""'], {}), "('Amplitude [V]')\n", (14615, 14632), True, 'import matplotlib.pyplot as plt\n'), ((14921, 14973), 'matplotlib.pyplot.title', 'plt.title', (["(equation + '\\n' + rsquare + ', ' + period)"], {}), "(equation + '\\n' + rsquare + ', ' + period)\n", (14930, 14973), True, 'import matplotlib.pyplot as plt\n'), ((14978, 15009), 'matplotlib.pyplot.legend', 'plt.legend', (["['Measured', 'Fit']"], {}), "(['Measured', 'Fit'])\n", (14988, 15009), True, 'import matplotlib.pyplot as plt\n'), ((939, 952), 'numpy.mean', 'np.mean', (['data'], {}), '(data)\n', (946, 952), True, 'import numpy as np\n'), ((1279, 1298), 'numpy.abs', 'np.abs', (['meanSubData'], {}), '(meanSubData)\n', (1285, 1298), True, 'import numpy as np\n'), ((3419, 3448), 'os.path.exists', 'os.path.exists', (['pathToPlotDir'], {}), '(pathToPlotDir)\n', (3433, 3448), False, 'import os\n'), ((3458, 3484), 'os.makedirs', 'os.makedirs', (['pathToPlotDir'], {}), '(pathToPlotDir)\n', (3469, 3484), False, 'import os\n'), ((3615, 3641), 'os.path.join', 'os.path.join', (['directory', 'f'], {}), '(directory, f)\n', (3627, 3641), False, 'import os\n'), ((7214, 7238), 'numpy.exp', 'np.exp', (['(-p[3] * p[4] * t)'], {}), '(-p[3] * p[4] * t)\n', (7220, 7238), True, 'import numpy as np\n'), ((8109, 8125), 'numpy.linalg.inv', 'np.linalg.inv', (['H'], {}), '(H)\n', (8122, 8125), True, 'import numpy as np\n'), ((12531, 12543), 'numpy.max', 'np.max', (['data'], {}), '(data)\n', (12537, 12543), True, 'import numpy as np\n'), ((14286, 14299), 'matplotlib.pyplot.figure', 'plt.figure', (['(2)'], {}), '(2)\n', (14296, 14299), True, 'import matplotlib.pyplot as plt\n'), ((15040, 15056), 'matplotlib.pyplot.xlim', 'plt.xlim', (['(0, m)'], {}), '((0, m))\n', (15048, 15056), True, 'import matplotlib.pyplot as plt\n'), ((15600, 15613), 'numpy.mean', 'np.mean', (['data'], {}), '(data)\n', (15607, 15613), True, 'import numpy as np\n'), ((15634, 15649), 'numpy.abs', 'np.abs', (['meanSub'], {}), '(meanSub)\n', (15640, 15649), True, 'import numpy as np\n'), ((15674, 15689), 'numpy.abs', 'np.abs', (['meanSub'], {}), '(meanSub)\n', (15680, 15689), True, 'import numpy as np\n'), ((1112, 1125), 'numpy.array', 'np.array', (['[0]'], {}), '([0])\n', (1120, 1125), True, 'import numpy as np\n'), ((1344, 1358), 'numpy.argmax', 'np.argmax', (['sec'], {}), '(sec)\n', (1353, 1358), True, 'import numpy as np\n'), ((1383, 1395), 'numpy.mean', 'np.mean', (['sec'], {}), '(sec)\n', (1390, 1395), True, 'import numpy as np\n'), ((1988, 2016), 'numpy.argmax', 'np.argmax', (['secMeanOverThresh'], {}), '(secMeanOverThresh)\n', (1997, 2016), True, 'import numpy as np\n'), ((2038, 2066), 'numpy.argmin', 'np.argmin', (['secMeanOverThresh'], {}), '(secMeanOverThresh)\n', (2047, 2066), True, 'import numpy as np\n'), ((2874, 2893), 'os.path.split', 'os.path.split', (['path'], {}), '(path)\n', (2887, 2893), False, 'import os\n'), ((4572, 4582), 'numpy.mean', 'np.mean', (['v'], {}), '(v)\n', (4579, 4582), True, 'import numpy as np\n'), ((8568, 8577), 'numpy.max', 'np.max', (['x'], {}), '(x)\n', (8574, 8577), True, 'import numpy as np\n'), ((12440, 12452), 'numpy.max', 'np.max', (['data'], {}), '(data)\n', (12446, 12452), True, 'import numpy as np\n'), ((13107, 13120), 'numpy.diff', 'np.diff', (['zero'], {}), '(zero)\n', (13114, 13120), True, 'import numpy as np\n'), ((13878, 13888), 'numpy.sqrt', 'np.sqrt', (['(5)'], {}), '(5)\n', (13885, 13888), True, 'import numpy as np\n'), ((1020, 1040), 'numpy.sign', 'np.sign', 
(['meanSubData'], {}), '(meanSubData)\n', (1027, 1040), True, 'import numpy as np\n'), ((6394, 6405), 'numpy.mean', 'np.mean', (['ym'], {}), '(ym)\n', (6401, 6405), True, 'import numpy as np\n'), ((6434, 6445), 'numpy.mean', 'np.mean', (['ym'], {}), '(ym)\n', (6441, 6445), True, 'import numpy as np\n'), ((12853, 12872), 'numpy.sign', 'np.sign', (['dataMasked'], {}), '(dataMasked)\n', (12860, 12872), True, 'import numpy as np\n'), ((3974, 3993), 'os.path.splitext', 'os.path.splitext', (['f'], {}), '(f)\n', (3990, 3993), False, 'import os\n'), ((7272, 7294), 'numpy.sqrt', 'np.sqrt', (['(1 - p[3] ** 2)'], {}), '(1 - p[3] ** 2)\n', (7279, 7294), True, 'import numpy as np\n'), ((7331, 7353), 'numpy.sqrt', 'np.sqrt', (['(1 - p[3] ** 2)'], {}), '(1 - p[3] ** 2)\n', (7338, 7353), True, 'import numpy as np\n')] |
tbenthompson/BIE_tutorials | tectosaur2/analyze.py | 02cd56ab7e63e36afc4a10db17072076541aab77 | import time
import warnings
import matplotlib.pyplot as plt
import numpy as np
import sympy as sp
from .global_qbx import global_qbx_self
from .mesh import apply_interp_mat, gauss_rule, panelize_symbolic_surface, upsample
def find_dcutoff_refine(kernel, src, tol, plot=False):
# prep step 1: find d_cutoff and d_refine
# The goal is to estimate the error due to the QBX local patch
# The local surface will have singularities at the tips where it is cut off
# These singularities will cause error in the QBX expansion. We want to make
# the local patch large enough that these singularities are irrelevant.
# To isolate the QBX patch cutoff error, we will use a very high upsampling.
# We'll also choose p to be the minimum allowed value since that will result in
# the largest cutoff error. Increasing p will reduce the cutoff error guaranteeing that
# we never need to worry about cutoff error.
density = np.ones_like(src.pts[:, 0]) # np.cos(src.pts[:,0] * src.pts[:,1])
if plot:
plt.figure(figsize=(9, 13))
params = []
d_cutoffs = [1.1, 1.3, 1.6, 2.0]
ps = np.arange(1, 55, 3)
for di, direction in enumerate([-1.0, 1.0]):
baseline = global_qbx_self(kernel, src, p=30, kappa=8, direction=direction)
baseline_v = baseline.dot(density)
# Check that the local qbx method matches the simple global qbx approach when d_cutoff is very large
d_refine_high = 8.0
with warnings.catch_warnings():
warnings.simplefilter("ignore")
local_baseline = kernel.integrate(
src.pts,
src,
d_cutoff=3.0,
tol=1e-20,
max_p=50,
d_refine=d_refine_high,
on_src_direction=direction,
)
local_baseline_v = local_baseline.dot(density)
err = np.max(np.abs(baseline_v - local_baseline_v))
print(err)
assert err < tol / 2
n_qbx_panels = []
drefine_optimal = []
p_for_full_accuracy = []
if plot:
plt.subplot(3, 2, 1 + di)
for i_d, d_cutoff in enumerate(d_cutoffs):
errs = []
for i_p, p in enumerate(ps):
# print(p, d_cutoff)
with warnings.catch_warnings():
warnings.simplefilter("ignore")
test, report = kernel.integrate(
src.pts,
src,
d_cutoff=d_cutoff,
tol=1e-15,
max_p=p,
on_src_direction=direction,
d_refine=d_refine_high,
return_report=True,
)
testv = test.dot(density)
err = np.max(np.abs(baseline_v - testv))
errs.append(err)
# print(p, err)
if err < tol:
for d_refine_decrease in np.arange(1.0, d_refine_high, 0.25):
refine_test, refine_report = kernel.integrate(
src.pts,
src,
d_cutoff=d_cutoff,
tol=1e-15,
max_p=p
+ 10, # Increase p here to have a refinement safety margin
on_src_direction=direction,
d_refine=d_refine_decrease,
return_report=True,
)
refine_testv = refine_test.dot(density)
refine_err = np.max(np.abs(baseline_v - refine_testv))
if refine_err < tol:
drefine_optimal.append(d_refine_decrease)
n_qbx_panels.append(refine_report["n_qbx_panels"])
p_for_full_accuracy.append(p)
break
if len(n_qbx_panels) <= i_d:
print(f"Failed to find parameters for {d_cutoff}")
drefine_optimal.append(1000)
n_qbx_panels.append(1e6)
p_for_full_accuracy.append(1e3)
break
if plot:
print(d_cutoff, errs)
plt.plot(ps[: i_p + 1], np.log10(errs), label=str(d_cutoff))
params.append((direction, n_qbx_panels, drefine_optimal, p_for_full_accuracy))
if plot:
plt.legend()
plt.title("interior" if direction > 0 else "exterior")
plt.xlabel(r"$p_{\textrm{max}}$")
if di == 0:
plt.ylabel(r"$\log_{10}(\textrm{error})$")
plt.yticks(-np.arange(0, 16, 3))
plt.xticks(np.arange(0, 61, 10))
plt.ylim([-15, 0])
plt.subplot(3, 2, 3 + di)
plt.plot(d_cutoffs, np.array(n_qbx_panels) / src.n_pts, "k-*")
plt.xlabel(r"$d_{\textrm{cutoff}}$")
plt.ylim([0, 8])
if di == 0:
plt.ylabel("QBX panels per point")
plt.subplot(3, 2, 5 + di)
plt.plot(d_cutoffs, np.array(drefine_optimal), "k-*")
plt.xlabel(r"$d_{\textrm{cutoff}}$")
plt.ylim([0, 6])
if di == 0:
plt.ylabel(r"$d_{\textrm{refine}}$")
if plot:
plt.tight_layout()
plt.show()
total_cost = 0
for i in [0, 1]:
direction, n_qbx_panels, drefine_optimal, p_for_full_accuracy = params[i]
appx_cost = (
np.array(p_for_full_accuracy)
* np.array(n_qbx_panels)
* np.array(drefine_optimal)
)
if plot:
print(direction, appx_cost)
total_cost += appx_cost
if plot:
plt.plot(d_cutoffs, total_cost, "k-o")
plt.show()
best_idx = np.argmin(total_cost)
d_cutoff = d_cutoffs[best_idx]
d_refine = drefine_optimal[best_idx]
return d_cutoff, d_refine
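# Hypothetical usage sketch (kernel/src are assumed to come from the
# surrounding tectosaur2 setup, and the tolerance is illustrative):
#
#     d_cutoff, d_refine = find_dcutoff_refine(kernel, src, tol=1e-10, plot=True)
#
# The returned pair is what the kernel.integrate(...) calls above take through
# their d_cutoff= and d_refine= keyword arguments.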
# prep step 2: find the minimum distance at which integrals are computed
# to the required tolerance
def _find_d_up_helper(kernel, nq, max_curvature, start_d, tol, kappa):
t = sp.var("t")
n_panels = 2
while True:
panel_edges = np.linspace(-1, 1, n_panels + 1)
panel_bounds = np.stack((panel_edges[:-1], panel_edges[1:]), axis=1)
circle = panelize_symbolic_surface(
t, sp.cos(sp.pi * t), sp.sin(sp.pi * t), panel_bounds, *gauss_rule(nq)
)
n_panels_new = np.max(circle.panel_length / max_curvature * circle.panel_radius)
if n_panels_new <= n_panels:
break
n_panels = np.ceil(n_panels_new).astype(int)
# print(f"\nusing {n_panels} panels with max_curvature={max_curvature}")
circle_kappa, _ = upsample(circle, kappa)
circle_upsample, interp_mat_upsample = upsample(circle_kappa, 2)
# TODO: Write more about the underlying regularity assumptions!!
# Why is it acceptable to use this test_density here? Empirically, any
# well-resolved density has approximately the same error as integrating sin(x).
# For example, integrating: 1, cos(x)^2.
# If we integrate a poorly resolved density, we do see higher errors.
#
# How poorly resolved does the density need to be in order to see higher error?
# It seems like an interpolation Linfinity error of around 1e-5 causes the d_up value to start to drift upwards.
#
# As a simple heuristic that seems to perform very well, we compute the
# error when integrating a constant and then double the required distance
# in order to account for integrands that are not quite so perfectly
# resolved.
# if assume_regularity:
# omega = 1.0
# else:
# omega = 999.0# / max_curvature
# f = lambda x: np.sin(omega * x)
# test_density = interp_mat_upsample.dot(f(circle.pts[:,0]))
# test_density_upsampled = f(circle_upsample.pts[:,0])
# print('l2 err', np.linalg.norm(test_density - test_density_upsampled) / np.linalg.norm(test_density_upsampled))
# print('linf err', np.max(np.abs(test_density - test_density_upsampled)))
# test_density = f(circle.pts[:,0])
# test_density = np.sin(999 * circle.pts[:,0])
test_density = np.ones(circle_kappa.n_pts)
d_up = 0
for direction in [-1.0, 1.0]:
d = start_d
for i in range(50):
# In actuality, we only need to test interior points because the curvature
# of the surface ensures that more source panels are near the observation
# points and, as a result, the error will be higher for any given value of d.
L = np.repeat(circle_kappa.panel_length, circle_kappa.panel_order)
dist = L * d
test_pts = (
circle_kappa.pts + direction * circle_kappa.normals * dist[:, None]
)
# Check to make sure that the closest distance to a source point is
# truly `dist`. This check might fail if the interior test_pts are
# crossing over into the other half of the circle.
min_src_dist = np.min(
np.linalg.norm((test_pts[:, None] - circle_kappa.pts[None, :]), axis=2),
axis=1,
)
if not np.allclose(min_src_dist, dist):
return False, d
upsample_mat = np.transpose(
apply_interp_mat(
kernel._direct(test_pts, circle_upsample), interp_mat_upsample
),
(0, 2, 1),
)
est_mat = np.transpose(kernel._direct(test_pts, circle_kappa), (0, 2, 1))
# err = np.max(np.abs(upsample_mat - est_mat).sum(axis=2))
err = np.max(
np.abs(upsample_mat.dot(test_density) - est_mat.dot(test_density))
)
# print(d, err)
if err < tol:
d_up = max(d, d_up)
break
d *= 1.2
return True, d_up
def find_d_up(kernel, nq, max_curvature, start_d, tol, kappa):
d = start_d
for i in range(10):
d_up = _find_d_up_helper(kernel, nq, max_curvature * (0.8) ** i, d, tol, kappa)
if d_up[0]:
return d_up[1]
d = d_up[1]
def final_check(kernel, src):
density = np.ones_like(src.pts[:, 0]) # np.cos(source.pts[:,0] * src.pts[:,1])
baseline = global_qbx_self(kernel, src, p=50, kappa=10, direction=1.0)
baseline_v = baseline.dot(density)
tols = 10.0 ** np.arange(0, -15, -1)
errs = []
runtimes = []
for tol in tols:
runs = []
for i in range(10):
start = time.time()
local_baseline, report = kernel.integrate(
src.pts,
src,
tol=tol,
on_src_direction=1.0,
return_report=True,
)
runs.append(time.time() - start)
runtimes.append(np.min(runs))
local_baseline_v = local_baseline.dot(density)
errs.append(np.max(np.abs(baseline_v - local_baseline_v)))
# print(tol, errs[-1], runtime)
# assert(np.max(np.abs(baseline_v-local_baseline_v)) < 5e-14)
plt.figure(figsize=(9, 5))
plt.subplot(1, 2, 1)
plt.plot(-np.log10(tols), np.log10(errs))
plt.subplot(1, 2, 2)
plt.plot(-np.log10(tols), runtimes)
plt.tight_layout()
plt.show()
| [((951, 980), 'numpy.ones_like', 'np.ones_like', (['src.pts[:, (0)]'], {}), '(src.pts[:, (0)])\n', (963, 980), True, 'import numpy as np\n'), ((1130, 1149), 'numpy.arange', 'np.arange', (['(1)', '(55)', '(3)'], {}), '(1, 55, 3)\n', (1139, 1149), True, 'import numpy as np\n'), ((6070, 6091), 'numpy.argmin', 'np.argmin', (['total_cost'], {}), '(total_cost)\n', (6079, 6091), True, 'import numpy as np\n'), ((6380, 6391), 'sympy.var', 'sp.var', (['"""t"""'], {}), "('t')\n", (6386, 6391), True, 'import sympy as sp\n'), ((8460, 8487), 'numpy.ones', 'np.ones', (['circle_kappa.n_pts'], {}), '(circle_kappa.n_pts)\n', (8467, 8487), True, 'import numpy as np\n'), ((10506, 10535), 'numpy.ones_like', 'np.ones_like', (['src.pts[:, (0)]'], {}), '(src.pts[:, (0)])\n', (10518, 10535), True, 'import numpy as np\n'), ((11396, 11422), 'matplotlib.pyplot.figure', 'plt.figure', ([], {'figsize': '(9, 5)'}), '(figsize=(9, 5))\n', (11406, 11422), True, 'import matplotlib.pyplot as plt\n'), ((11427, 11447), 'matplotlib.pyplot.subplot', 'plt.subplot', (['(1)', '(2)', '(1)'], {}), '(1, 2, 1)\n', (11438, 11447), True, 'import matplotlib.pyplot as plt\n'), ((11498, 11518), 'matplotlib.pyplot.subplot', 'plt.subplot', (['(1)', '(2)', '(2)'], {}), '(1, 2, 2)\n', (11509, 11518), True, 'import matplotlib.pyplot as plt\n'), ((11563, 11581), 'matplotlib.pyplot.tight_layout', 'plt.tight_layout', ([], {}), '()\n', (11579, 11581), True, 'import matplotlib.pyplot as plt\n'), ((11586, 11596), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (11594, 11596), True, 'import matplotlib.pyplot as plt\n'), ((1039, 1066), 'matplotlib.pyplot.figure', 'plt.figure', ([], {'figsize': '(9, 13)'}), '(figsize=(9, 13))\n', (1049, 1066), True, 'import matplotlib.pyplot as plt\n'), ((5574, 5592), 'matplotlib.pyplot.tight_layout', 'plt.tight_layout', ([], {}), '()\n', (5590, 5592), True, 'import matplotlib.pyplot as plt\n'), ((5601, 5611), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (5609, 5611), True, 'import matplotlib.pyplot as plt\n'), ((5996, 6034), 'matplotlib.pyplot.plot', 'plt.plot', (['d_cutoffs', 'total_cost', '"""k-o"""'], {}), "(d_cutoffs, total_cost, 'k-o')\n", (6004, 6034), True, 'import matplotlib.pyplot as plt\n'), ((6043, 6053), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (6051, 6053), True, 'import matplotlib.pyplot as plt\n'), ((6448, 6480), 'numpy.linspace', 'np.linspace', (['(-1)', '(1)', '(n_panels + 1)'], {}), '(-1, 1, n_panels + 1)\n', (6459, 6480), True, 'import numpy as np\n'), ((6504, 6557), 'numpy.stack', 'np.stack', (['(panel_edges[:-1], panel_edges[1:])'], {'axis': '(1)'}), '((panel_edges[:-1], panel_edges[1:]), axis=1)\n', (6512, 6557), True, 'import numpy as np\n'), ((6718, 6783), 'numpy.max', 'np.max', (['(circle.panel_length / max_curvature * circle.panel_radius)'], {}), '(circle.panel_length / max_curvature * circle.panel_radius)\n', (6724, 6783), True, 'import numpy as np\n'), ((10709, 10730), 'numpy.arange', 'np.arange', (['(0)', '(-15)', '(-1)'], {}), '(0, -15, -1)\n', (10718, 10730), True, 'import numpy as np\n'), ((11478, 11492), 'numpy.log10', 'np.log10', (['errs'], {}), '(errs)\n', (11486, 11492), True, 'import numpy as np\n'), ((1477, 1502), 'warnings.catch_warnings', 'warnings.catch_warnings', ([], {}), '()\n', (1500, 1502), False, 'import warnings\n'), ((1516, 1547), 'warnings.simplefilter', 'warnings.simplefilter', (['"""ignore"""'], {}), "('ignore')\n", (1537, 1547), False, 'import warnings\n'), ((1898, 1935), 'numpy.abs', 'np.abs', (['(baseline_v - local_baseline_v)'], 
{}), '(baseline_v - local_baseline_v)\n', (1904, 1935), True, 'import numpy as np\n'), ((2103, 2128), 'matplotlib.pyplot.subplot', 'plt.subplot', (['(3)', '(2)', '(1 + di)'], {}), '(3, 2, 1 + di)\n', (2114, 2128), True, 'import matplotlib.pyplot as plt\n'), ((4696, 4708), 'matplotlib.pyplot.legend', 'plt.legend', ([], {}), '()\n', (4706, 4708), True, 'import matplotlib.pyplot as plt\n'), ((4721, 4775), 'matplotlib.pyplot.title', 'plt.title', (["('interior' if direction > 0 else 'exterior')"], {}), "('interior' if direction > 0 else 'exterior')\n", (4730, 4775), True, 'import matplotlib.pyplot as plt\n'), ((4788, 4821), 'matplotlib.pyplot.xlabel', 'plt.xlabel', (['"""$p_{\\\\textrm{max}}$"""'], {}), "('$p_{\\\\textrm{max}}$')\n", (4798, 4821), True, 'import matplotlib.pyplot as plt\n'), ((5007, 5025), 'matplotlib.pyplot.ylim', 'plt.ylim', (['[-15, 0]'], {}), '([-15, 0])\n', (5015, 5025), True, 'import matplotlib.pyplot as plt\n'), ((5039, 5064), 'matplotlib.pyplot.subplot', 'plt.subplot', (['(3)', '(2)', '(3 + di)'], {}), '(3, 2, 3 + di)\n', (5050, 5064), True, 'import matplotlib.pyplot as plt\n'), ((5152, 5188), 'matplotlib.pyplot.xlabel', 'plt.xlabel', (['"""$d_{\\\\textrm{cutoff}}$"""'], {}), "('$d_{\\\\textrm{cutoff}}$')\n", (5162, 5188), True, 'import matplotlib.pyplot as plt\n'), ((5201, 5217), 'matplotlib.pyplot.ylim', 'plt.ylim', (['[0, 8]'], {}), '([0, 8])\n', (5209, 5217), True, 'import matplotlib.pyplot as plt\n'), ((5306, 5331), 'matplotlib.pyplot.subplot', 'plt.subplot', (['(3)', '(2)', '(5 + di)'], {}), '(3, 2, 5 + di)\n', (5317, 5331), True, 'import matplotlib.pyplot as plt\n'), ((5410, 5446), 'matplotlib.pyplot.xlabel', 'plt.xlabel', (['"""$d_{\\\\textrm{cutoff}}$"""'], {}), "('$d_{\\\\textrm{cutoff}}$')\n", (5420, 5446), True, 'import matplotlib.pyplot as plt\n'), ((5459, 5475), 'matplotlib.pyplot.ylim', 'plt.ylim', (['[0, 6]'], {}), '([0, 6])\n', (5467, 5475), True, 'import matplotlib.pyplot as plt\n'), ((5850, 5875), 'numpy.array', 'np.array', (['drefine_optimal'], {}), '(drefine_optimal)\n', (5858, 5875), True, 'import numpy as np\n'), ((6617, 6634), 'sympy.cos', 'sp.cos', (['(sp.pi * t)'], {}), '(sp.pi * t)\n', (6623, 6634), True, 'import sympy as sp\n'), ((6636, 6653), 'sympy.sin', 'sp.sin', (['(sp.pi * t)'], {}), '(sp.pi * t)\n', (6642, 6653), True, 'import sympy as sp\n'), ((8863, 8925), 'numpy.repeat', 'np.repeat', (['circle_kappa.panel_length', 'circle_kappa.panel_order'], {}), '(circle_kappa.panel_length, circle_kappa.panel_order)\n', (8872, 8925), True, 'import numpy as np\n'), ((10850, 10861), 'time.time', 'time.time', ([], {}), '()\n', (10859, 10861), False, 'import time\n'), ((11145, 11157), 'numpy.min', 'np.min', (['runs'], {}), '(runs)\n', (11151, 11157), True, 'import numpy as np\n'), ((11462, 11476), 'numpy.log10', 'np.log10', (['tols'], {}), '(tols)\n', (11470, 11476), True, 'import numpy as np\n'), ((11533, 11547), 'numpy.log10', 'np.log10', (['tols'], {}), '(tols)\n', (11541, 11547), True, 'import numpy as np\n'), ((4862, 4905), 'matplotlib.pyplot.ylabel', 'plt.ylabel', (['"""$\\\\log_{10}(\\\\textrm{error})$"""'], {}), "('$\\\\log_{10}(\\\\textrm{error})$')\n", (4872, 4905), True, 'import matplotlib.pyplot as plt\n'), ((4973, 4993), 'numpy.arange', 'np.arange', (['(0)', '(61)', '(10)'], {}), '(0, 61, 10)\n', (4982, 4993), True, 'import numpy as np\n'), ((5258, 5292), 'matplotlib.pyplot.ylabel', 'plt.ylabel', (['"""QBX panels per point"""'], {}), "('QBX panels per point')\n", (5268, 5292), True, 'import matplotlib.pyplot as plt\n'), ((5364, 5389), 
'numpy.array', 'np.array', (['drefine_optimal'], {}), '(drefine_optimal)\n', (5372, 5389), True, 'import numpy as np\n'), ((5516, 5552), 'matplotlib.pyplot.ylabel', 'plt.ylabel', (['"""$d_{\\\\textrm{refine}}$"""'], {}), "('$d_{\\\\textrm{refine}}$')\n", (5526, 5552), True, 'import matplotlib.pyplot as plt\n'), ((5769, 5798), 'numpy.array', 'np.array', (['p_for_full_accuracy'], {}), '(p_for_full_accuracy)\n', (5777, 5798), True, 'import numpy as np\n'), ((5813, 5835), 'numpy.array', 'np.array', (['n_qbx_panels'], {}), '(n_qbx_panels)\n', (5821, 5835), True, 'import numpy as np\n'), ((6858, 6879), 'numpy.ceil', 'np.ceil', (['n_panels_new'], {}), '(n_panels_new)\n', (6865, 6879), True, 'import numpy as np\n'), ((9349, 9422), 'numpy.linalg.norm', 'np.linalg.norm', (['(test_pts[:, (None)] - circle_kappa.pts[(None), :])'], {'axis': '(2)'}), '(test_pts[:, (None)] - circle_kappa.pts[(None), :], axis=2)\n', (9363, 9422), True, 'import numpy as np\n'), ((9479, 9510), 'numpy.allclose', 'np.allclose', (['min_src_dist', 'dist'], {}), '(min_src_dist, dist)\n', (9490, 9510), True, 'import numpy as np\n'), ((11241, 11278), 'numpy.abs', 'np.abs', (['(baseline_v - local_baseline_v)'], {}), '(baseline_v - local_baseline_v)\n', (11247, 11278), True, 'import numpy as np\n'), ((2301, 2326), 'warnings.catch_warnings', 'warnings.catch_warnings', ([], {}), '()\n', (2324, 2326), False, 'import warnings\n'), ((2348, 2379), 'warnings.simplefilter', 'warnings.simplefilter', (['"""ignore"""'], {}), "('ignore')\n", (2369, 2379), False, 'import warnings\n'), ((4541, 4555), 'numpy.log10', 'np.log10', (['errs'], {}), '(errs)\n', (4549, 4555), True, 'import numpy as np\n'), ((4929, 4948), 'numpy.arange', 'np.arange', (['(0)', '(16)', '(3)'], {}), '(0, 16, 3)\n', (4938, 4948), True, 'import numpy as np\n'), ((5097, 5119), 'numpy.array', 'np.array', (['n_qbx_panels'], {}), '(n_qbx_panels)\n', (5105, 5119), True, 'import numpy as np\n'), ((11100, 11111), 'time.time', 'time.time', ([], {}), '()\n', (11109, 11111), False, 'import time\n'), ((2851, 2877), 'numpy.abs', 'np.abs', (['(baseline_v - testv)'], {}), '(baseline_v - testv)\n', (2857, 2877), True, 'import numpy as np\n'), ((3035, 3070), 'numpy.arange', 'np.arange', (['(1.0)', 'd_refine_high', '(0.25)'], {}), '(1.0, d_refine_high, 0.25)\n', (3044, 3070), True, 'import numpy as np\n'), ((3769, 3802), 'numpy.abs', 'np.abs', (['(baseline_v - refine_testv)'], {}), '(baseline_v - refine_testv)\n', (3775, 3802), True, 'import numpy as np\n')] |
hustbeta/python-examples | celery-getting-started/celeryconfig.py | 9052a080cb27b1c8c2bc36222ece409e236ba076 | # -*- coding: utf-8 -*-
BROKER_URL = 'amqp://guest@localhost//'
CELERY_ACCEPT_CONTENT = ['json']
CELERY_RESULT_BACKEND = 'amqp://guest@localhost//'
CELERY_RESULT_SERIALIZER = 'json'
CELERY_TASK_SERIALIZER = 'json'
CELERY_TIMEZONE = 'Asia/Shanghai'
CELERY_ENABLE_UTC = False
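# A minimal sketch of how a module like this is typically consumed (the app
# and module names are illustrative):
#
#     from celery import Celery
#     app = Celery('tasks', broker=BROKER_URL)
#     app.config_from_object('celeryconfig')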
| [] |
msgi/nlp-tour | smartnlp/utils/basic_log.py | ffed8c32da69c2427c92a7043f47bfc91e7feb64 | import logging as log
class Log:
def __init__(self, level):
self.level = level
log.basicConfig(format='%(asctime)s - %(pathname)s[line:%(lineno)d] - %(levelname)s: %(message)s',
level=level)
self.log = log
def info(self, msg):
self.log.info(msg)
def debug(self, msg):
self.log.debug(msg)
def warn(self, msg):
self.log.warn(msg)
def error(self, msg):
self.log.error(msg)
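
# A minimal usage sketch of the wrapper above; the level and messages are
# arbitrary examples.
if __name__ == '__main__':
    logger = Log(log.INFO)
    logger.info('logger initialised')
    logger.warn('demo warning message')
    logger.error('demo error message')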
| [((101, 221), 'logging.basicConfig', 'log.basicConfig', ([], {'format': '"""%(asctime)s - %(pathname)s[line:%(lineno)d] - %(levelname)s: %(message)s"""', 'level': 'level'}), "(format=\n '%(asctime)s - %(pathname)s[line:%(lineno)d] - %(levelname)s: %(message)s',\n level=level)\n", (116, 221), True, 'import logging as log\n')] |
dluvizon/3d-pose-consensus | people/losses-bkp.py | 7a829d5713d2c45c6b265c9886add0b69e0050a8 |
from keras import backend as K  # assumed: Keras backend providing the K.* ops used below


def structural_loss_dst68j3d(p_pred, v_pred):
v_pred = K.stop_gradient(v_pred)
def getlength(v):
return K.sqrt(K.sum(K.square(v), axis=-1))
"""Arms segments"""
joints_arms = p_pred[:, :, 16:37+1, :]
conf_arms = v_pred[:, :, 16:37+1]
diff_arms_r = joints_arms[:, :, 2:-1:2, :] - joints_arms[:, :, 0:-3:2, :]
diff_arms_l = joints_arms[:, :, 3::2, :] - joints_arms[:, :, 1:-2:2, :]
c2_arms_r = conf_arms[:, :, 2:-1:2] * conf_arms[:, :, 0:-3:2]
c2_arms_l = conf_arms[:, :, 3::2] * conf_arms[:, :, 1:-2:2]
"""Legs segments"""
joints_legs = p_pred[:, :, 48:67+1, :]
conf_legs = v_pred[:, :, 48:67+1]
diff_legs_r = joints_legs[:, :, 2:-1:2, :] - joints_legs[:, :, 0:-3:2, :]
diff_legs_l = joints_legs[:, :, 3::2, :] - joints_legs[:, :, 1:-2:2, :]
c2_legs_r = conf_legs[:, :, 2:-1:2] * conf_legs[:, :, 0:-3:2]
c2_legs_l = conf_legs[:, :, 3::2] * conf_legs[:, :, 1:-2:2]
"""Limbs segments"""
segs_limbs_r = getlength(K.concatenate([diff_arms_r, diff_legs_r], axis=-2))
segs_limbs_l = getlength(K.concatenate([diff_arms_l, diff_legs_l], axis=-2))
c2_limbs_r = K.concatenate([c2_arms_r, c2_legs_r], axis=-1)
c2_limbs_l = K.concatenate([c2_arms_l, c2_legs_l], axis=-1)
len_upperarm_r = K.sum(segs_limbs_r[:, :, 2:5], axis=-1, keepdims=True)
len_upperarm_l = K.sum(segs_limbs_l[:, :, 2:5], axis=-1, keepdims=True)
len_forearm_r = K.sum(segs_limbs_r[:, :, 5:8], axis=-1, keepdims=True)
len_forearm_l = K.sum(segs_limbs_l[:, :, 5:8], axis=-1, keepdims=True)
len_hand_r = K.sum(segs_limbs_r[:, :, 8:10], axis=-1, keepdims=True)
len_hand_l = K.sum(segs_limbs_r[:, :, 8:10], axis=-1, keepdims=True)
c2_upperarm_r = K.sum(c2_limbs_r[:, :, 2:5], axis=-1, keepdims=True)
c2_upperarm_l = K.sum(c2_limbs_l[:, :, 2:5], axis=-1, keepdims=True)
c2_forearm_r = K.sum(c2_limbs_r[:, :, 5:8], axis=-1, keepdims=True)
c2_forearm_l = K.sum(c2_limbs_l[:, :, 5:8], axis=-1, keepdims=True)
c2_hand_r = K.sum(c2_limbs_r[:, :, 8:10], axis=-1, keepdims=True)
c2_hand_l = K.sum(c2_limbs_r[:, :, 8:10], axis=-1, keepdims=True)
len_femur_r = K.sum(K.concatenate([
segs_limbs_r[:, :, 10:11],
segs_limbs_r[:, :, 12:14],
], axis=-1), axis=-1, keepdims=True)
len_femur_l = K.sum(K.concatenate([
segs_limbs_l[:, :, 10:11],
segs_limbs_l[:, :, 12:14],
], axis=-1), axis=-1, keepdims=True)
c2_femur_r = K.sum(K.concatenate([
c2_limbs_r[:, :, 10:11],
c2_limbs_r[:, :, 12:14],
], axis=-1), axis=-1, keepdims=True)
c2_femur_l = K.sum(K.concatenate([
c2_limbs_l[:, :, 10:11],
c2_limbs_l[:, :, 12:14],
], axis=-1), axis=-1, keepdims=True)
len_shin_r = K.sum(segs_limbs_r[:, :, 14:17], axis=-1, keepdims=True)
len_shin_l = K.sum(segs_limbs_l[:, :, 14:17], axis=-1, keepdims=True)
len_feet_r = K.sum(segs_limbs_r[:, :, 17:19], axis=-1, keepdims=True)
len_feet_l = K.sum(segs_limbs_l[:, :, 17:19], axis=-1, keepdims=True)
c2_shin_r = K.sum(c2_limbs_r[:, :, 14:17], axis=-1, keepdims=True)
c2_shin_l = K.sum(c2_limbs_l[:, :, 14:17], axis=-1, keepdims=True)
c2_feet_r = K.sum(c2_limbs_r[:, :, 17:19], axis=-1, keepdims=True)
c2_feet_l = K.sum(c2_limbs_l[:, :, 17:19], axis=-1, keepdims=True)
joints_head = K.concatenate([
p_pred[:, :, 11:11+1, :], p_pred[:, :, 11:11+1, :],
p_pred[:, :, 12:15+1, :],
p_pred[:, :, 8:8+1, :], p_pred[:, :, 8:8+1, :],
p_pred[:, :, 14:15+1, :],
], axis=-2)
conf_head = K.concatenate([
v_pred[:, :, 11:11+1], v_pred[:, :, 11:11+1],
v_pred[:, :, 12:15+1],
v_pred[:, :, 8:8+1], v_pred[:, :, 8:8+1],
v_pred[:, :, 14:15+1],
], axis=-1)
diff_head_r = joints_head[:, :, 2:-1:2, :] - joints_head[:, :, 0:-3:2, :]
diff_head_l = joints_head[:, :, 3::2, :] - joints_head[:, :, 1:-2:2, :]
c2_head_r = conf_head[:, :, 2:-1:2] * conf_head[:, :, 0:-3:2]
c2_head_l = conf_head[:, :, 3::2] * conf_head[:, :, 1:-2:2]
diff_cross_r = K.concatenate([
p_pred[:, :, 3:3+1, :] - p_pred[:, :, 20:20+1, :],
p_pred[:, :, 49:49+1, :] - p_pred[:, :, 3:3+1, :],
], axis=-2)
diff_cross_l = K.concatenate([
p_pred[:, :, 3:3+1, :] - p_pred[:, :, 21:21+1, :],
p_pred[:, :, 48:48+1, :] - p_pred[:, :, 3:3+1, :],
], axis=-2)
diff_spine = K.concatenate([
p_pred[:, :, 0:0+1, :] - p_pred[:, :, 7:7+1, :], # euclidean
p_pred[:, :, 1:7+1, :] - p_pred[:, :, 0:6+1, :], # geodesic
], axis=-2)
segs_spine = getlength(diff_spine)
spine_euclidian = K.stop_gradient(segs_spine[:, :, :1])
len_spine = K.sum(segs_spine[:, :, 1:], axis=-1, keepdims=True)
segs_midhead = getlength(p_pred[:, :, 9:11+1, :] - p_pred[:, :, 8:10+1, :])
len_midhead = K.sum(segs_midhead, axis=-1, keepdims=True)
segs_ears = getlength(K.concatenate([
p_pred[:, :, 12:12+1, :] - p_pred[:, :, 14:14+1, :],
p_pred[:, :, 9:9+1, :] - p_pred[:, :, 12:12+1, :],
p_pred[:, :, 13:13+1, :] - p_pred[:, :, 9:9+1, :],
p_pred[:, :, 15:15+1, :] - p_pred[:, :, 13:13+1, :]
], axis=-2))
len_ears = K.sum(segs_ears, axis=-1, keepdims=True)
len_cross_r = K.sum(getlength(diff_cross_r), axis=-1, keepdims=True)
len_cross_l = K.sum(getlength(diff_cross_l), axis=-1, keepdims=True)
ref_length = K.stop_gradient(
K.clip((len_cross_r + len_cross_l) / 2., 0.1, 1.))
"""Reference lengths based on ground truth poses from Human3.6M:
Spine wrt. ref: 0.715 (0.032 std.)
Spine wrt. euclidean: 1.430 (maximum) (0.046 std.)
MidHead wrt. ref: 0.266 (0.019 std.)
Shoulder wrt. ref: 0.150 (?? std.)
Upper arms wrt. ref: 0.364 (0.019 std.)
Fore arms wrt. ref: 0.326 (0.025 std.)
Hands wrt. ref: 0.155 (0.014 std.)
Femur wrt. ref: 0.721 (0.040 std.)
Shin wrt. ref: 0.549 (0.063 std.)
Feet wrt. ref: 0.294 (0.060 std.)
"""
rules_loss = K.concatenate([
c2_limbs_r * c2_limbs_l * (segs_limbs_r - segs_limbs_l),
len_spine - 0.715 * ref_length,
len_midhead - 0.266 * ref_length,
c2_upperarm_r * (len_upperarm_r - 0.364 * ref_length),
c2_upperarm_l * (len_upperarm_l - 0.364 * ref_length),
c2_forearm_r * (len_forearm_r - 0.326 * ref_length),
c2_forearm_l * (len_forearm_l - 0.326 * ref_length),
c2_hand_r * (len_hand_r - 0.155 * ref_length),
c2_hand_l * (len_hand_l - 0.155 * ref_length),
c2_femur_r * (len_femur_r - 0.721 * ref_length),
c2_femur_l * (len_femur_l - 0.721 * ref_length),
c2_shin_r * (len_shin_r - 0.549 * ref_length),
c2_shin_l * (len_shin_l - 0.549 * ref_length),
c2_feet_r * (len_feet_r - 0.294 * ref_length),
c2_feet_l * (len_feet_l - 0.294 * ref_length),
len_ears - 0.213 * ref_length,
], axis=-1)
rules = K.sum(K.square(rules_loss), axis=-1)
spine_bent = K.squeeze(K.maximum(0., len_spine - 1.430 * spine_euclidian),
axis=-1)
return K.mean(spine_bent + rules, axis=-1)
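# Shape sketch (assumed from the indexing above): p_pred is expected to be
# (batch, time, joints, 3) and v_pred (batch, time, joints), with at least 68
# joints, so
#
#     loss = structural_loss_dst68j3d(p_pred, v_pred)
#
# yields one structural-consistency value per batch entry.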
| [] |
Muzammil-khan/Aspose.Email-Python-Dotnet | Examples/IMAP/FilteringMessagesFromIMAPMailbox.py | 04ca3a6f440339f3ddf316218f92d15d66f24e7e | import aspose.email
from aspose.email.clients.imap import ImapClient
from aspose.email.clients import SecurityOptions
from aspose.email.clients.imap import ImapQueryBuilder
import datetime as dt
def run():
dataDir = ""
#ExStart: FetchEmailMessageFromServer
client = ImapClient("imap.gmail.com", 993, "username", "password")
client.select_folder("Inbox")
builder = ImapQueryBuilder()
builder.subject.contains("Newsletter")
builder.internal_date.on(dt.datetime.now())
query = builder.get_query()
msgsColl = client.list_messages(query)
print("Total Messages fulfilling search criterion: " + str(len(msgsColl)))
#ExEnd: FetchEmailMessageFromServer
if __name__ == '__main__':
run()
| [((281, 338), 'aspose.email.clients.imap.ImapClient', 'ImapClient', (['"""imap.gmail.com"""', '(993)', '"""username"""', '"""password"""'], {}), "('imap.gmail.com', 993, 'username', 'password')\n", (291, 338), False, 'from aspose.email.clients.imap import ImapClient\n'), ((387, 405), 'aspose.email.clients.imap.ImapQueryBuilder', 'ImapQueryBuilder', ([], {}), '()\n', (403, 405), False, 'from aspose.email.clients.imap import ImapQueryBuilder\n'), ((478, 495), 'datetime.datetime.now', 'dt.datetime.now', ([], {}), '()\n', (493, 495), True, 'import datetime as dt\n')] |
010001111/Vx-Suites | Python.FancyBear/settings.py | 6b4b90a60512cce48aa7b87aec5e5ac1c4bb9a79 | # Server UID
SERVER_UID = 45158729
# Setup Logging system #########################################
#
import os
from FileConsoleLogger import FileConsoleLogger
ServerLogger = FileConsoleLogger( os.path.join(os.path.dirname(os.path.abspath(__file__)), "_w3server.log") )
W3Logger = FileConsoleLogger( os.path.join(os.path.dirname(os.path.abspath(__file__)), "_w3.log") )
#
# Setup Level 2 Protocol - P2Scheme #########################################
#
from P2Scheme import P2Scheme
P2_URL_TOKEN = '760e25f9eb3124'.decode('hex')
P2_SUBJECT_TOKEN = '\x55\xaa\x63\x68\x69\x6e\x61'
P2_DATA_TOKEN = '\x55\xaa\x63\x68\x69\x6e\x61'
# P2_DATA_TOKEN = 'd85a8c54fbe5e6'.decode('hex')
MARK = 'itwm='
B64_JUNK_LEN = 9
BIN_JUNK_LEN = 4
P2_Scheme = P2Scheme(_url_token=P2_URL_TOKEN, _data_token=P2_DATA_TOKEN, _mark=MARK, _subj_token=P2_SUBJECT_TOKEN,\
_b64junk_len=B64_JUNK_LEN, _binary_junk_len=BIN_JUNK_LEN)
#
# Setup Level 3 Protocol - P3Scheme #########################################
#
from P3Scheme import P3Scheme
#
P3_PRIVATE_TOKEN = 'a20e25f9aa3fe4'.decode('hex')
P3_SERVICE_TOKEN = '015a1354acf1b1'.decode('hex')
#
P3_Scheme = P3Scheme(private_token=P3_PRIVATE_TOKEN, service_token=P3_SERVICE_TOKEN)
#
# Setup HTTP checker
#
#from HTTPHeadersChecker import HTTPHeadersChecker
#
#HTTPChecker = HTTPHeadersChecker()
# Setup LocalStorage
#
from FSLocalStorage import FSLocalStorage
LocalStorage = FSLocalStorage()
############################################################
# Initialize Server instance #
#
#from W3Server import W3Server
#MAIN_HANDLER = W3Server(p2_scheme=P2_Scheme, p3_scheme=P3_Scheme, http_checker=HTTPChecker, local_storage=LocalStorage, logger=ServerLogger)
############################################################
# Mail Parameters
POP3_MAIL_IP = 'pop.gmail.com'
POP3_PORT = 995
POP3_ADDR = '[email protected]'
POP3_PASS = '30Jass11'
SMTP_MAIL_IP = 'smtp.gmail.com'
SMTP_PORT = 587
SMTP_TO_ADDR = '[email protected]'
SMTP_FROM_ADDR = '[email protected]'
SMTP_PASS = '75Gina75'
# C&C Parametrs
#
XAS_IP = '104.152.187.66'
XAS_GATE = '/updates/'
############################################################
# Setup P3 communication
# wsgi2
#
LS_TIMEOUT = 1 # big loop timeout
FILES_PER_ITER = 5 # count of requests per iter
############################################################
| [((741, 909), 'P2Scheme.P2Scheme', 'P2Scheme', ([], {'_url_token': 'P2_URL_TOKEN', '_data_token': 'P2_DATA_TOKEN', '_mark': 'MARK', '_subj_token': 'P2_SUBJECT_TOKEN', '_b64junk_len': 'B64_JUNK_LEN', '_binary_junk_len': 'BIN_JUNK_LEN'}), '(_url_token=P2_URL_TOKEN, _data_token=P2_DATA_TOKEN, _mark=MARK,\n _subj_token=P2_SUBJECT_TOKEN, _b64junk_len=B64_JUNK_LEN,\n _binary_junk_len=BIN_JUNK_LEN)\n', (749, 909), False, 'from P2Scheme import P2Scheme\n'), ((1153, 1225), 'P3Scheme.P3Scheme', 'P3Scheme', ([], {'private_token': 'P3_PRIVATE_TOKEN', 'service_token': 'P3_SERVICE_TOKEN'}), '(private_token=P3_PRIVATE_TOKEN, service_token=P3_SERVICE_TOKEN)\n', (1161, 1225), False, 'from P3Scheme import P3Scheme\n'), ((1422, 1438), 'FSLocalStorage.FSLocalStorage', 'FSLocalStorage', ([], {}), '()\n', (1436, 1438), False, 'from FSLocalStorage import FSLocalStorage\n'), ((225, 250), 'os.path.abspath', 'os.path.abspath', (['__file__'], {}), '(__file__)\n', (240, 250), False, 'import os\n'), ((331, 356), 'os.path.abspath', 'os.path.abspath', (['__file__'], {}), '(__file__)\n', (346, 356), False, 'import os\n')] |
HoundThe/retdec-regression-tests | tools/fileinfo/features/certificates-info/test.py | 760639deb1ee52e88a14523b4a908d3e69d6fcd3 | from regression_tests import *
class Test1(Test):
settings = TestSettings(
tool='fileinfo',
input='8b280f2b7788520de214fa8d6ea32a30ebb2a51038381448939530fd0f7dfc16',
args='--json --verbose'
)
def test_certificates(self):
assert self.fileinfo.succeeded
assert self.fileinfo.output['digitalSignatures']['numberOfSignatures'] == 2
first_sig = self.fileinfo.output['digitalSignatures']['signatures'][0]
assert len(first_sig['allCertificates']) == 5
assert first_sig['signatureVerified'] == True
assert len(first_sig['warnings']) == 0
assert first_sig['digestAlgorithm'] == 'sha1'
assert first_sig['fileDigest'] == 'F6B86E97AEB3E567F58901F799E18FC6F89CC92E'
assert first_sig['signedDigest'] == 'F6B86E97AEB3E567F58901F799E18FC6F89CC92E'
assert first_sig['programName'] == "Broadband Download, Thunder in a Flash!"
assert first_sig['allCertificates'][0]['subject'] == "CN=Symantec Time Stamping Services CA - G2,O=Symantec Corporation,C=US"
assert first_sig['allCertificates'][0]['issuer'] == "CN=Thawte Timestamping CA,OU=Thawte Certification,O=Thawte,L=Durbanville,ST=Western Cape,C=ZA"
assert first_sig['allCertificates'][0]['subjectOneline'] == "/C=US/O=Symantec Corporation/CN=Symantec Time Stamping Services CA - G2"
assert first_sig['allCertificates'][0]['issuerOneline'] == "/C=ZA/ST=Western Cape/L=Durbanville/O=Thawte/OU=Thawte Certification/CN=Thawte Timestamping CA"
assert first_sig['allCertificates'][0]['serialNumber'] == "7e:93:eb:fb:7c:c6:4e:59:ea:4b:9a:77:d4:06:fc:3b"
assert first_sig['allCertificates'][0]['publicKeyAlgorithm'] == "rsaEncryption"
assert first_sig['allCertificates'][0]['signatureAlgorithm'] == "sha1WithRSAEncryption"
assert first_sig['allCertificates'][0]['validSince'] == "Dec 21 00:00:00 2012 GMT"
assert first_sig['allCertificates'][0]['validUntil'] == "Dec 30 23:59:59 2020 GMT"
assert first_sig['allCertificates'][0]['sha1'] == "6C07453FFDDA08B83707C09B82FB3D15F35336B1"
assert first_sig['allCertificates'][0]['sha256'] == "0625FEE1A80D7B897A9712249C2F55FF391D6661DBD8B87F9BE6F252D88CED95"
assert first_sig['allCertificates'][0]['publicKey'] == (
'MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAsayzSVRLl'
'xwSCtgleZEiVypv3LgmxENza8K/LlBa+xTCdo5DASVDtKHiRfTot3vDdMwi17SUAAL3Te2/tLdEJGvNX0U70UTOQxJzF4KLabQry5kerHIbJk'
'1xH7Ex3ftRYQJTpqr1SSwFeEWlL4nO55nn/oziVz89xpLcSvh7M+R5CvvwdYhBnP/FA1GZqtdsn5Nph2Upg4XCYBTEyMk7FNrAgfAfDXTekiK'
'ryvf7dHwn5vdKG3+nw54trorqpuaqJxZ9YfeYcRG84lChS+Vd+uUOpyyfqmUg09iW6Mh8pU5IRP8Z4kQHkgvXaISAXWp4ZEXNYEZ+VMETfMV58cnBcQIDAQAB')
attributes = first_sig['allCertificates'][0]['attributes']
assert attributes['subject']['country'] == "US"
assert attributes['subject']['organization'] == "Symantec Corporation"
assert attributes['subject']['commonName'] == "Symantec Time Stamping Services CA - G2"
assert attributes['issuer']['country'] == "ZA"
assert attributes['issuer']['organization'] == "Thawte"
assert attributes['issuer']['organizationalUnit'] == "Thawte Certification"
assert attributes['issuer']['state'] == "Western Cape"
assert attributes['issuer']['commonName'] == "Thawte Timestamping CA"
assert attributes['issuer']['locality'] == "Durbanville"
assert first_sig['allCertificates'][1]['sha256'] == "0374881C9B74D31F28DC580B0F2B9D2B14A97CE31CBEC2A05AEB377DCDDCC2B0"
assert first_sig['allCertificates'][2]['sha256'] == "8420DFBE376F414BF4C0A81E6936D24CCC03F304835B86C7A39142FCA723A689"
assert first_sig['allCertificates'][3]['sha256'] == "8FB47562286677514075BC38D1CFD2B73481D93CB3F9C23F9AC3E6414EF34A6F"
assert first_sig['allCertificates'][4]['sha256'] == "582DC1D97A790EF04FE2567B1EC88C26B03BF6E99937CAE6A0B50397AD20BBF8"
first_sig_signer = first_sig['signer']
assert first_sig_signer['digest'] == "96D052BD1B13E983FC6FE41911F6B49CEB5961B9"
assert first_sig_signer['digestAlgorithm'] == 'sha1'
assert len(first_sig_signer['chain']) == 3
assert first_sig_signer['chain'][0]['sha256'] == "8FB47562286677514075BC38D1CFD2B73481D93CB3F9C23F9AC3E6414EF34A6F"
assert first_sig_signer['chain'][1]['sha256'] == "582DC1D97A790EF04FE2567B1EC88C26B03BF6E99937CAE6A0B50397AD20BBF8"
assert first_sig_signer['chain'][2]['sha256'] == "8420DFBE376F414BF4C0A81E6936D24CCC03F304835B86C7A39142FCA723A689"
first_sig_countersig = first_sig_signer['counterSigners'][0]
assert len(first_sig_countersig['warnings']) == 0
assert first_sig_countersig['signTime'] == "Jun 25 14:19:05 2016 GMT"
assert first_sig_countersig['digest'] == '8F22E222461E03492E8D67948463100465B1B9D0'
assert first_sig_countersig['digestAlgorithm'] == 'sha1'
assert len(first_sig_countersig['chain']) == 2
assert first_sig_countersig['chain'][0]['sha256'] == "0374881C9B74D31F28DC580B0F2B9D2B14A97CE31CBEC2A05AEB377DCDDCC2B0"
assert first_sig_countersig['chain'][1]['sha256'] == "0625FEE1A80D7B897A9712249C2F55FF391D6661DBD8B87F9BE6F252D88CED95"
second_sig = self.fileinfo.output['digitalSignatures']['signatures'][1]
assert second_sig['signatureVerified'] == True
assert len(second_sig['warnings']) == 0
assert second_sig['digestAlgorithm'] == 'sha256'
assert second_sig['fileDigest'] == '9FC3902927BFEDA2A3F61D650B0D2CBEC6D84597989EA6244D4EF954C67CA0B3'
assert second_sig['signedDigest'] == '9FC3902927BFEDA2A3F61D650B0D2CBEC6D84597989EA6244D4EF954C67CA0B3'
assert second_sig['programName'] == "Broadband Download, Thunder in a Flash!"
assert len(second_sig['allCertificates']) == 6
assert second_sig['allCertificates'][0]['sha256'] == "8420DFBE376F414BF4C0A81E6936D24CCC03F304835B86C7A39142FCA723A689"
assert second_sig['allCertificates'][1]['sha256'] == "8FB47562286677514075BC38D1CFD2B73481D93CB3F9C23F9AC3E6414EF34A6F"
assert second_sig['allCertificates'][2]['sha256'] == "582DC1D97A790EF04FE2567B1EC88C26B03BF6E99937CAE6A0B50397AD20BBF8"
assert second_sig['allCertificates'][3]['sha256'] == "43CE166BC567F9887D650A2E624473BE7A43A6F378ABE03CB32FA63F7ABB1E45"
assert second_sig['allCertificates'][4]['sha256'] == "6B6C1E01F590F5AFC5FCF85CD0B9396884048659FC2C6D1170D68B045216C3FD"
assert second_sig['allCertificates'][5]['sha256'] == "0625FEE1A80D7B897A9712249C2F55FF391D6661DBD8B87F9BE6F252D88CED95"
second_sig_signer = second_sig['signer']
assert second_sig_signer['digest'] == "E421C1A7625B9CD410B64A0EBEA7D991EA1DBAC65A3404A227235E1C0AB781F1"
assert second_sig_signer['digestAlgorithm'] == 'sha256'
assert len(second_sig_signer['chain']) == 3
assert second_sig_signer['chain'][0]['sha256'] == "8FB47562286677514075BC38D1CFD2B73481D93CB3F9C23F9AC3E6414EF34A6F"
assert second_sig_signer['chain'][1]['sha256'] == "582DC1D97A790EF04FE2567B1EC88C26B03BF6E99937CAE6A0B50397AD20BBF8"
assert second_sig_signer['chain'][2]['sha256'] == "8420DFBE376F414BF4C0A81E6936D24CCC03F304835B86C7A39142FCA723A689"
second_sig_countersig = second_sig_signer['counterSigners'][0]
assert len(second_sig_countersig['warnings']) == 0
assert second_sig_countersig['signTime'] == "Jun 25 14:19:29 2016 GMT"
assert second_sig_countersig['digest'] == 'B36785DD22C1E070DB8A198A16C81BD93FB87F4D5B6301ACB2656C23E4EF80F5'
assert second_sig_countersig['digestAlgorithm'] == 'sha256'
assert len(second_sig_countersig['chain']) == 3
assert second_sig_countersig['chain'][0]['sha256'] == "43CE166BC567F9887D650A2E624473BE7A43A6F378ABE03CB32FA63F7ABB1E45"
assert second_sig_countersig['chain'][1]['sha256'] == "0625FEE1A80D7B897A9712249C2F55FF391D6661DBD8B87F9BE6F252D88CED95"
assert second_sig_countersig['chain'][2]['sha256'] == "6B6C1E01F590F5AFC5FCF85CD0B9396884048659FC2C6D1170D68B045216C3FD"
class Test2(Test):
settings = TestSettings(
tool='fileinfo',
input='avgcfgex.ex',
args='--json --verbose'
)
def test_certificates(self):
assert self.fileinfo.succeeded
assert self.fileinfo.output['digitalSignatures']['numberOfSignatures'] == 2
first_sig = self.fileinfo.output['digitalSignatures']['signatures'][0]
assert len(first_sig['allCertificates']) == 5
assert first_sig['signatureVerified'] == True
assert len(first_sig['warnings']) == 0
assert first_sig['digestAlgorithm'] == 'sha1'
assert first_sig['fileDigest'] == '3E7B33AB316770BD369BFADF5FB5354730C89991'
assert first_sig['signedDigest'] == '3E7B33AB316770BD369BFADF5FB5354730C89991'
assert first_sig['allCertificates'][0]['subject'] == "CN=Symantec Time Stamping Services CA - G2,O=Symantec Corporation,C=US"
assert first_sig['allCertificates'][0]['issuer'] == "CN=Thawte Timestamping CA,OU=Thawte Certification,O=Thawte,L=Durbanville,ST=Western Cape,C=ZA"
assert first_sig['allCertificates'][0]['serialNumber'] == "7e:93:eb:fb:7c:c6:4e:59:ea:4b:9a:77:d4:06:fc:3b"
assert first_sig['allCertificates'][0]['publicKeyAlgorithm'] == "rsaEncryption"
assert first_sig['allCertificates'][0]['signatureAlgorithm'] == "sha1WithRSAEncryption"
assert first_sig['allCertificates'][0]['validSince'] == "Dec 21 00:00:00 2012 GMT"
assert first_sig['allCertificates'][0]['validUntil'] == "Dec 30 23:59:59 2020 GMT"
assert first_sig['allCertificates'][0]['sha1'] == "6C07453FFDDA08B83707C09B82FB3D15F35336B1"
assert first_sig['allCertificates'][0]['sha256'] == "0625FEE1A80D7B897A9712249C2F55FF391D6661DBD8B87F9BE6F252D88CED95"
assert first_sig['allCertificates'][0]['publicKey'] == (
'MIIBIjANBgkqhkiG9w0BAQEFAAOCAQ8AMIIBCgKCAQEAsayzSVRLl'
'xwSCtgleZEiVypv3LgmxENza8K/LlBa+xTCdo5DASVDtKHiRfTot3vDdMwi17SUAAL3Te2/tLdEJGvNX0U70UTOQxJzF4KLabQry5kerHIbJk'
'1xH7Ex3ftRYQJTpqr1SSwFeEWlL4nO55nn/oziVz89xpLcSvh7M+R5CvvwdYhBnP/FA1GZqtdsn5Nph2Upg4XCYBTEyMk7FNrAgfAfDXTekiK'
'ryvf7dHwn5vdKG3+nw54trorqpuaqJxZ9YfeYcRG84lChS+Vd+uUOpyyfqmUg09iW6Mh8pU5IRP8Z4kQHkgvXaISAXWp4ZEXNYEZ+VMETfMV58cnBcQIDAQAB')
attributes = first_sig['allCertificates'][0]['attributes']
assert attributes['subject']['country'] == "US"
assert attributes['subject']['organization'] == "Symantec Corporation"
assert attributes['subject']['commonName'] == "Symantec Time Stamping Services CA - G2"
assert attributes['issuer']['country'] == "ZA"
assert attributes['issuer']['organization'] == "Thawte"
assert attributes['issuer']['organizationalUnit'] == "Thawte Certification"
assert attributes['issuer']['state'] == "Western Cape"
assert attributes['issuer']['commonName'] == "Thawte Timestamping CA"
assert attributes['issuer']['locality'] == "Durbanville"
assert first_sig['allCertificates'][1]['sha256'] == "0374881C9B74D31F28DC580B0F2B9D2B14A97CE31CBEC2A05AEB377DCDDCC2B0"
assert first_sig['allCertificates'][2]['sha256'] == "8420DFBE376F414BF4C0A81E6936D24CCC03F304835B86C7A39142FCA723A689"
assert first_sig['allCertificates'][3]['sha256'] == "3B0ABE047D7E84F3BBD12B5E399BED55E4D7E9FCC3F629B8953A8C060EF6D746"
assert first_sig['allCertificates'][4]['sha256'] == "0CFC19DB681B014BFE3F23CB3A78B67208B4E3D8D7B6A7B1807F7CD6ECB2A54E"
first_sig_signer = first_sig['signer']
assert first_sig_signer['digest'] == "229A2D7B4C8F2E8EC5B6943D0F0E53B9F59E33B5"
assert first_sig_signer['digestAlgorithm'] == 'sha1'
assert len(first_sig_signer['chain']) == 3
assert first_sig_signer['chain'][0]['sha256'] == "3B0ABE047D7E84F3BBD12B5E399BED55E4D7E9FCC3F629B8953A8C060EF6D746"
assert first_sig_signer['chain'][1]['sha256'] == "0CFC19DB681B014BFE3F23CB3A78B67208B4E3D8D7B6A7B1807F7CD6ECB2A54E"
assert first_sig_signer['chain'][2]['sha256'] == "8420DFBE376F414BF4C0A81E6936D24CCC03F304835B86C7A39142FCA723A689"
first_sig_countersig = first_sig_signer['counterSigners'][0]
assert len(first_sig_countersig['warnings']) == 0
assert first_sig_countersig['signTime'] == "Feb 1 14:02:52 2016 GMT"
assert first_sig_countersig['digest'] == '0DAAC35A77C75EAEA723AE13E61C927F676080A2'
assert first_sig_countersig['digestAlgorithm'] == 'sha1'
assert len(first_sig_countersig['chain']) == 2
assert first_sig_countersig['chain'][0]['sha256'] == "0374881C9B74D31F28DC580B0F2B9D2B14A97CE31CBEC2A05AEB377DCDDCC2B0"
assert first_sig_countersig['chain'][1]['sha256'] == "0625FEE1A80D7B897A9712249C2F55FF391D6661DBD8B87F9BE6F252D88CED95"
second_sig = self.fileinfo.output['digitalSignatures']['signatures'][1]
assert second_sig['signatureVerified'] == True
assert len(second_sig['warnings']) == 0
assert second_sig['digestAlgorithm'] == 'sha256'
assert second_sig['fileDigest'] == '6BE0FA5AB9336DDCC6ACE35ED2BC9744860E80088F35E5D77AF254F246228CDE'
assert second_sig['signedDigest'] == '6BE0FA5AB9336DDCC6ACE35ED2BC9744860E80088F35E5D77AF254F246228CDE'
assert len(second_sig['allCertificates']) == 6
assert second_sig['allCertificates'][0]['sha256'] == "8420DFBE376F414BF4C0A81E6936D24CCC03F304835B86C7A39142FCA723A689"
assert second_sig['allCertificates'][1]['sha256'] == "3A0682AB7FB478BA82FD11CE4DB9B0ADEA55DA05558A0CF737453D51572163D0"
assert second_sig['allCertificates'][2]['sha256'] == "0CFC19DB681B014BFE3F23CB3A78B67208B4E3D8D7B6A7B1807F7CD6ECB2A54E"
assert second_sig['allCertificates'][3]['sha256'] == "43CE166BC567F9887D650A2E624473BE7A43A6F378ABE03CB32FA63F7ABB1E45"
assert second_sig['allCertificates'][4]['sha256'] == "6B6C1E01F590F5AFC5FCF85CD0B9396884048659FC2C6D1170D68B045216C3FD"
assert second_sig['allCertificates'][5]['sha256'] == "0625FEE1A80D7B897A9712249C2F55FF391D6661DBD8B87F9BE6F252D88CED95"
second_sig_signer = second_sig['signer']
assert second_sig_signer['digest'] == "0183B70327A59E8006B666E908D798CCD309BC4C2FFFD10E551E040B9B1DC449"
assert second_sig_signer['digestAlgorithm'] == 'sha256'
assert len(second_sig_signer['chain']) == 3
assert second_sig_signer['chain'][0]['sha256'] == "3A0682AB7FB478BA82FD11CE4DB9B0ADEA55DA05558A0CF737453D51572163D0"
assert second_sig_signer['chain'][1]['sha256'] == "0CFC19DB681B014BFE3F23CB3A78B67208B4E3D8D7B6A7B1807F7CD6ECB2A54E"
assert second_sig_signer['chain'][2]['sha256'] == "8420DFBE376F414BF4C0A81E6936D24CCC03F304835B86C7A39142FCA723A689"
second_sig_countersig = second_sig_signer['counterSigners'][0]
assert len(second_sig_countersig['warnings']) == 0
assert second_sig_countersig['signTime'] == "Feb 1 14:02:54 2016 GMT"
assert second_sig_countersig['digest'] == '1C5206936E053F3D79A046D0E359FB32926AA9D8C269812A80A188AE04DC3E34'
assert second_sig_countersig['digestAlgorithm'] == 'sha256'
assert len(second_sig_countersig['chain']) == 3
assert second_sig_countersig['chain'][0]['sha256'] == "43CE166BC567F9887D650A2E624473BE7A43A6F378ABE03CB32FA63F7ABB1E45"
assert second_sig_countersig['chain'][1]['sha256'] == "0625FEE1A80D7B897A9712249C2F55FF391D6661DBD8B87F9BE6F252D88CED95"
assert second_sig_countersig['chain'][2]['sha256'] == "6B6C1E01F590F5AFC5FCF85CD0B9396884048659FC2C6D1170D68B045216C3FD"
class Test3(Test):
settings = TestSettings(
tool='fileinfo',
input='c339b87d932b3f86c298b1745db1a28b1214fb7635ba3805851ef8699290f9b8',
args='--json --verbose'
)
def test_certificates(self):
assert self.fileinfo.succeeded
assert self.fileinfo.output['digitalSignatures']['numberOfSignatures'] == 2
first_sig = self.fileinfo.output['digitalSignatures']['signatures'][0]
assert first_sig['signatureVerified'] == True
assert len(first_sig['warnings']) == 0
assert first_sig['digestAlgorithm'] == 'sha1'
assert first_sig['fileDigest'] == '0C13D3C2B3C6F48FA3485B36E08AC822C579C1E0'
assert first_sig['signedDigest'] == '0C13D3C2B3C6F48FA3485B36E08AC822C579C1E0'
        # Two certificates really are stored twice in this sample, as confirmed with LIEF
assert len(first_sig['allCertificates']) == 7
assert first_sig['allCertificates'][0]['sha256'] == "FCB433D6D1AFBEC9E8F5447C2C0FA4AE7553986D5C2703BE82524BE608F35F61"
assert first_sig['allCertificates'][1]['sha256'] == "53793CFC1B2B5096CC4EDBEC527ABC5CBC20470C788162D9E54C370D51625F4A"
assert first_sig['allCertificates'][2]['sha256'] == "C766A9BEF2D4071C863A31AA4920E813B2D198608CB7B7CFE21143B836DF09EA"
assert first_sig['allCertificates'][3]['sha256'] == "53793CFC1B2B5096CC4EDBEC527ABC5CBC20470C788162D9E54C370D51625F4A"
assert first_sig['allCertificates'][4]['sha256'] == "C766A9BEF2D4071C863A31AA4920E813B2D198608CB7B7CFE21143B836DF09EA"
assert first_sig['allCertificates'][5]['sha256'] == "0374881C9B74D31F28DC580B0F2B9D2B14A97CE31CBEC2A05AEB377DCDDCC2B0"
assert first_sig['allCertificates'][6]['sha256'] == "0625FEE1A80D7B897A9712249C2F55FF391D6661DBD8B87F9BE6F252D88CED95"
first_sig_signer = first_sig['signer']
assert first_sig_signer['digest'] == "AC1E29C0611678FA7E5B98A11106A1F9D69B224F"
assert first_sig_signer['digestAlgorithm'] == 'sha1'
assert len(first_sig_signer['chain']) == 3
assert first_sig_signer['chain'][0]['sha256'] == "FCB433D6D1AFBEC9E8F5447C2C0FA4AE7553986D5C2703BE82524BE608F35F61"
assert first_sig_signer['chain'][1]['sha256'] == "53793CFC1B2B5096CC4EDBEC527ABC5CBC20470C788162D9E54C370D51625F4A"
assert first_sig_signer['chain'][2]['sha256'] == "C766A9BEF2D4071C863A31AA4920E813B2D198608CB7B7CFE21143B836DF09EA"
first_sig_countersig = first_sig_signer['counterSigners'][0]
assert len(first_sig_countersig['warnings']) == 0
assert first_sig_countersig['signTime'] == "Feb 1 15:47:14 2016 GMT"
assert first_sig_countersig['digest'] == 'DE8E927CEC0175F4544CAFBBAC55D584DAE15C20'
assert first_sig_countersig['digestAlgorithm'] == 'sha1'
assert len(first_sig_countersig['chain']) == 2
assert first_sig_countersig['chain'][0]['sha256'] == "0374881C9B74D31F28DC580B0F2B9D2B14A97CE31CBEC2A05AEB377DCDDCC2B0"
assert first_sig_countersig['chain'][1]['sha256'] == "0625FEE1A80D7B897A9712249C2F55FF391D6661DBD8B87F9BE6F252D88CED95"
second_sig = self.fileinfo.output['digitalSignatures']['signatures'][1]
assert second_sig['signatureVerified'] == True
assert len(second_sig['warnings']) == 0
assert second_sig['digestAlgorithm'] == 'sha256'
assert second_sig['fileDigest'] == '838379A390118A6562F3E06BE818F5A6407FD7F4FEA9ADF4C36A8B6952B1336B'
assert second_sig['signedDigest'] == '838379A390118A6562F3E06BE818F5A6407FD7F4FEA9ADF4C36A8B6952B1336B'
assert len(second_sig['allCertificates']) == 8
assert second_sig['allCertificates'][0]['sha256'] == "D09EDDF7DA800BCC3AC114852614124706D94EA473A98DB19BC4F4CB6AEE16A4"
assert second_sig['allCertificates'][1]['sha256'] == "5E6D2F88F617DC8B809AEE712445A41B3CDE26AF874A221A9DC98EA1DC68E3D5"
assert second_sig['allCertificates'][2]['sha256'] == "4F32D5DC00F715250ABCC486511E37F501A899DEB3BF7EA8ADBBD3AEF1C412DA"
assert second_sig['allCertificates'][3]['sha256'] == "687FA451382278FFF0C8B11F8D43D576671C6EB2BCEAB413FB83D965D06D2FF2"
assert second_sig['allCertificates'][4]['sha256'] == "52F0E1C4E58EC629291B60317F074671B85D7EA80D5B07273463534B32B40234"
assert second_sig['allCertificates'][5]['sha256'] == "5E6D2F88F617DC8B809AEE712445A41B3CDE26AF874A221A9DC98EA1DC68E3D5"
assert second_sig['allCertificates'][6]['sha256'] == "0374881C9B74D31F28DC580B0F2B9D2B14A97CE31CBEC2A05AEB377DCDDCC2B0"
assert second_sig['allCertificates'][7]['sha256'] == "0625FEE1A80D7B897A9712249C2F55FF391D6661DBD8B87F9BE6F252D88CED95"
second_sig_signer = second_sig['signer']
assert second_sig_signer['digest'] == "FB26A5FA064C2789EEE8560B4F8A82B7FE968B0D776CE02F52AA3BA11D8CB22C"
assert second_sig_signer['digestAlgorithm'] == 'sha256'
assert len(second_sig_signer['chain']) == 3
assert second_sig_signer['chain'][0]['sha256'] == "D09EDDF7DA800BCC3AC114852614124706D94EA473A98DB19BC4F4CB6AEE16A4"
assert second_sig_signer['chain'][1]['sha256'] == "5E6D2F88F617DC8B809AEE712445A41B3CDE26AF874A221A9DC98EA1DC68E3D5"
assert second_sig_signer['chain'][2]['sha256'] == "52F0E1C4E58EC629291B60317F074671B85D7EA80D5B07273463534B32B40234"
second_sig_countersig = second_sig_signer['counterSigners'][0]
assert len(second_sig_countersig['warnings']) == 0
assert second_sig_countersig['signTime'] == "Feb 1 15:47:15 2016 GMT"
assert second_sig_countersig['digest'] == 'C361F36F13601CEAF01F3480C58F98660205981A'
assert second_sig_countersig['digestAlgorithm'] == 'sha1'
assert len(second_sig_countersig['chain']) == 2
assert second_sig_countersig['chain'][0]['sha256'] == "0374881C9B74D31F28DC580B0F2B9D2B14A97CE31CBEC2A05AEB377DCDDCC2B0"
assert second_sig_countersig['chain'][1]['sha256'] == "0625FEE1A80D7B897A9712249C2F55FF391D6661DBD8B87F9BE6F252D88CED95"
class Test4(Test):
settings = TestSettings(
tool='fileinfo',
input='c58e6118bbe12d2c56b2db014c4eb0d3fd32cde7bca1f32a2da8169be1301e23',
args='--json --verbose'
)
def test_certificates(self):
assert self.fileinfo.succeeded
assert self.fileinfo.output['digitalSignatures']['numberOfSignatures'] == 1
first_sig = self.fileinfo.output['digitalSignatures']['signatures'][0]
assert first_sig['signatureVerified'] == True
assert len(first_sig['warnings']) == 0
assert first_sig['digestAlgorithm'] == 'sha1'
assert first_sig['fileDigest'] == 'F9D74771FD4A1A2233D266F1F73B53464328EE1E'
assert first_sig['signedDigest'] == 'F9D74771FD4A1A2233D266F1F73B53464328EE1E'
assert first_sig['programName'] == 'Alveo'
assert len(first_sig['allCertificates']) == 5
assert first_sig['allCertificates'][0]['sha256'] == "0625FEE1A80D7B897A9712249C2F55FF391D6661DBD8B87F9BE6F252D88CED95"
assert first_sig['allCertificates'][1]['sha256'] == "3A2FBE92891E57FE05D57087F48E730F17E5A5F53EF403D618E5B74D7A7E6ECB"
assert first_sig['allCertificates'][2]['sha256'] == "0374881C9B74D31F28DC580B0F2B9D2B14A97CE31CBEC2A05AEB377DCDDCC2B0"
assert first_sig['allCertificates'][3]['sha256'] == "973A41276FFD01E027A2AAD49E34C37846D3E976FF6A620B6712E33832041AA6"
assert first_sig['allCertificates'][4]['sha256'] == "E2DBA399BE32992B74DF8A86CFD9886C2304CCC19DA8A9BE2B87809DA006379E"
first_sig_signer = first_sig['signer']
assert first_sig_signer['digest'] == "C2072238EB76B1C42F366FD72B85304A88AE5037"
assert first_sig_signer['digestAlgorithm'] == 'sha1'
assert len(first_sig_signer['chain']) == 3
assert first_sig_signer['chain'][0]['sha256'] == "E2DBA399BE32992B74DF8A86CFD9886C2304CCC19DA8A9BE2B87809DA006379E"
assert first_sig_signer['chain'][1]['sha256'] == "973A41276FFD01E027A2AAD49E34C37846D3E976FF6A620B6712E33832041AA6"
assert first_sig_signer['chain'][2]['sha256'] == "3A2FBE92891E57FE05D57087F48E730F17E5A5F53EF403D618E5B74D7A7E6ECB"
first_sig_countersig = first_sig_signer['counterSigners'][0]
assert len(first_sig_countersig['warnings']) == 0
assert first_sig_countersig['signTime'] == "Jul 1 20:02:53 2016 GMT"
assert first_sig_countersig['digest'] == 'BFFD2E4E2707EE7BF5EB9B1381F100771CCCCD45'
assert first_sig_countersig['digestAlgorithm'] == 'sha1'
assert len(first_sig_countersig['chain']) == 2
assert first_sig_countersig['chain'][0]['sha256'] == "0374881C9B74D31F28DC580B0F2B9D2B14A97CE31CBEC2A05AEB377DCDDCC2B0"
assert first_sig_countersig['chain'][1]['sha256'] == "0625FEE1A80D7B897A9712249C2F55FF391D6661DBD8B87F9BE6F252D88CED95"
class Test5(Test):
settings = TestSettings(
tool='fileinfo',
input='crashreporter.ex',
args='--json --verbose'
)
def test_certificates(self):
assert self.fileinfo.succeeded
assert self.fileinfo.output['digitalSignatures']['numberOfSignatures'] == 1
first_sig = self.fileinfo.output['digitalSignatures']['signatures'][0]
assert first_sig['signatureVerified'] == True
assert len(first_sig['warnings']) == 0
assert first_sig['digestAlgorithm'] == 'sha1'
assert first_sig['fileDigest'] == '65901089C84EF122BE9397F508580A3EFC674D1D'
assert first_sig['signedDigest'] == '65901089C84EF122BE9397F508580A3EFC674D1D'
assert len(first_sig['allCertificates']) == 5
assert first_sig['allCertificates'][0]['sha1'] == "0563B8630D62D75ABBC8AB1E4BDFB5A899B24D43"
assert first_sig['allCertificates'][1]['sha1'] == "92C1588E85AF2201CE7915E8538B492F605B80C6"
assert first_sig['allCertificates'][2]['sha1'] == "50600FD631998451C8F75EF3F618E31FC74D1585"
assert first_sig['allCertificates'][3]['sha1'] == "65439929B67973EB192D6FF243E6767ADF0834E4"
assert first_sig['allCertificates'][4]['sha1'] == "6C07453FFDDA08B83707C09B82FB3D15F35336B1"
first_sig_signer = first_sig['signer']
assert first_sig_signer['digest'] == "21C4C8CCB2A4B1A878D8347D5F07B8BE4A44693E"
assert first_sig_signer['digestAlgorithm'] == 'sha1'
assert len(first_sig_signer['chain']) == 3
assert first_sig_signer['chain'][0]['sha256'] == "1A73BF16814D061CF5930634FBBD8A55E53DF2A556469C48FDF2623DFEEEE8A8"
assert first_sig_signer['chain'][1]['sha256'] == "51044706BD237B91B89B781337E6D62656C69F0FCFFBE8E43741367948127862"
assert first_sig_signer['chain'][2]['sha256'] == "3E9099B5015E8F486C00BCEA9D111EE721FABA355A89BCF1DF69561E3DC6325C"
first_sig_countersig = first_sig_signer['counterSigners'][0]
assert len(first_sig_countersig['warnings']) == 0
assert first_sig_countersig['signTime'] == "Jan 24 02:14:31 2016 GMT"
assert first_sig_countersig['digest'] == 'F5D8409366948F3B1185F0D7032759C5A1E2FAF5'
assert first_sig_countersig['digestAlgorithm'] == 'sha1'
assert len(first_sig_countersig['chain']) == 2
assert first_sig_countersig['chain'][0]['sha256'] == "0374881C9B74D31F28DC580B0F2B9D2B14A97CE31CBEC2A05AEB377DCDDCC2B0"
assert first_sig_countersig['chain'][1]['sha256'] == "0625FEE1A80D7B897A9712249C2F55FF391D6661DBD8B87F9BE6F252D88CED95"
class Test6(Test):
settings = TestSettings(
tool='fileinfo',
input='f77acb4e1523b882f5307864345e5f7d20a657a7f40863bd7ae41d2521703fec',
args='--json --verbose'
)
def test_certificates(self):
assert self.fileinfo.succeeded
assert self.fileinfo.output['digitalSignatures']['numberOfSignatures'] == 2
first_sig = self.fileinfo.output['digitalSignatures']['signatures'][0]
assert first_sig['digestAlgorithm'] == 'sha1'
assert first_sig['fileDigest'] == 'A6BE6C062A26427A722571FD634838DD2FE3743D'
assert first_sig['signedDigest'] == 'A6BE6C062A26427A722571FD634838DD2FE3743D'
assert first_sig['signatureVerified'] == True
assert len(first_sig['warnings']) == 0
assert len(first_sig['allCertificates']) == 7
first_sig_signer = first_sig['signer']
assert first_sig_signer['digest'] == "5370C469214E0A599238F7FA851BD86E633FB4E2"
assert first_sig_signer['digestAlgorithm'] == 'sha1'
assert len(first_sig_signer['chain']) == 3
first_sig_countersig = first_sig_signer['counterSigners'][0]
assert first_sig_countersig['signTime'] == "Feb 1 14:55:04 2016 GMT"
assert first_sig_countersig['digest'] == 'B49D4C25284D735D3DCD7B3BBCE6FDA6828F774E'
assert first_sig_countersig['digestAlgorithm'] == 'sha1'
assert len(first_sig_countersig['warnings']) == 0
assert len(first_sig_countersig['chain']) == 2
second_sig = self.fileinfo.output['digitalSignatures']['signatures'][1]
assert second_sig['signatureVerified'] == True
assert len(second_sig['warnings']) == 0
assert second_sig['digestAlgorithm'] == 'sha256'
assert second_sig['fileDigest'] == '54227373068BB3F2721F0E9B849142F3B68FDDD43571A9327C3F9CA44420EEA8'
assert second_sig['signedDigest'] == '54227373068BB3F2721F0E9B849142F3B68FDDD43571A9327C3F9CA44420EEA8'
assert len(second_sig['allCertificates']) == 8
assert second_sig['allCertificates'][0]['sha256'] == "D09EDDF7DA800BCC3AC114852614124706D94EA473A98DB19BC4F4CB6AEE16A4"
assert second_sig['allCertificates'][1]['sha256'] == "5E6D2F88F617DC8B809AEE712445A41B3CDE26AF874A221A9DC98EA1DC68E3D5"
assert second_sig['allCertificates'][2]['sha256'] == "4F32D5DC00F715250ABCC486511E37F501A899DEB3BF7EA8ADBBD3AEF1C412DA"
assert second_sig['allCertificates'][3]['sha256'] == "687FA451382278FFF0C8B11F8D43D576671C6EB2BCEAB413FB83D965D06D2FF2"
assert second_sig['allCertificates'][4]['sha256'] == "52F0E1C4E58EC629291B60317F074671B85D7EA80D5B07273463534B32B40234"
assert second_sig['allCertificates'][5]['sha256'] == "5E6D2F88F617DC8B809AEE712445A41B3CDE26AF874A221A9DC98EA1DC68E3D5"
assert second_sig['allCertificates'][6]['sha256'] == "0374881C9B74D31F28DC580B0F2B9D2B14A97CE31CBEC2A05AEB377DCDDCC2B0"
assert second_sig['allCertificates'][7]['sha256'] == "0625FEE1A80D7B897A9712249C2F55FF391D6661DBD8B87F9BE6F252D88CED95"
second_sig_signer = second_sig['signer']
assert second_sig_signer['digest'] == "400EAD9ABBA5A18062E513E78DB4E7535A81F2B250C9E35A50D158DFA82CFD45"
assert second_sig_signer['digestAlgorithm'] == 'sha256'
assert len(second_sig_signer['chain']) == 3
assert second_sig_signer['chain'][0]['sha256'] == "D09EDDF7DA800BCC3AC114852614124706D94EA473A98DB19BC4F4CB6AEE16A4"
assert second_sig_signer['chain'][1]['sha256'] == "5E6D2F88F617DC8B809AEE712445A41B3CDE26AF874A221A9DC98EA1DC68E3D5"
assert second_sig_signer['chain'][2]['sha256'] == "52F0E1C4E58EC629291B60317F074671B85D7EA80D5B07273463534B32B40234"
second_sig_countersig = second_sig_signer['counterSigners'][0]
assert len(second_sig_countersig['warnings']) == 0
assert second_sig_countersig['signTime'] == "Feb 1 14:55:06 2016 GMT"
assert second_sig_countersig['digest'] == '46A76769C69B78945E9B12594F638A943017F26E'
assert second_sig_countersig['digestAlgorithm'] == 'sha1'
assert len(second_sig_countersig['chain']) == 2
assert second_sig_countersig['chain'][0]['sha256'] == "0374881C9B74D31F28DC580B0F2B9D2B14A97CE31CBEC2A05AEB377DCDDCC2B0"
assert second_sig_countersig['chain'][1]['sha256'] == "0625FEE1A80D7B897A9712249C2F55FF391D6661DBD8B87F9BE6F252D88CED95"
class Test7(Test):
settings = TestSettings(
tool='fileinfo',
input='msenvmnu.dll',
args='--json --verbose'
)
def test_certificates(self):
assert self.fileinfo.succeeded
assert self.fileinfo.output['digitalSignatures']['numberOfSignatures'] == 2
first_sig = self.fileinfo.output['digitalSignatures']['signatures'][0]
assert first_sig['signatureVerified'] == True
assert len(first_sig['warnings']) == 0
assert first_sig['digestAlgorithm'] == 'sha1'
assert first_sig['fileDigest'] == '798D33E74F6F28A62A336C61CF81AE0277F47516'
assert first_sig['signedDigest'] == '798D33E74F6F28A62A336C61CF81AE0277F47516'
assert first_sig['programName'] == 'msenvmnu.dll'
assert len(first_sig['allCertificates']) == 4
first_sig_signer = first_sig['signer']
assert first_sig_signer['digest'] == "BC70A3256BE34E5FBB8874E3E6D58664F3F27BE5"
assert first_sig_signer['digestAlgorithm'] == 'sha1'
assert len(first_sig_signer['chain']) == 2
first_sig_countersig = first_sig_signer['counterSigners'][0]
assert first_sig_countersig['signTime'] == "Jul 7 07:30:56 2015 GMT"
assert first_sig_countersig['digest'] == '7F95DBB284EFE07428573201F47342592CA9E007'
assert first_sig_countersig['digestAlgorithm'] == 'sha1'
assert len(first_sig_countersig['warnings']) == 0
assert len(first_sig_countersig['chain']) == 2
second_sig = self.fileinfo.output['digitalSignatures']['signatures'][1]
assert second_sig['signatureVerified'] == True
assert len(second_sig['warnings']) == 0
assert second_sig['digestAlgorithm'] == 'sha256'
assert second_sig['fileDigest'] == '5BFB3AB09F359E11D76D95640BACB3A6CD65F2EF0D1763DC47D0B7F7203D22B7'
assert second_sig['signedDigest'] == '5BFB3AB09F359E11D76D95640BACB3A6CD65F2EF0D1763DC47D0B7F7203D22B7'
assert first_sig['programName'] == 'msenvmnu.dll'
assert len(second_sig['allCertificates']) == 2
assert second_sig['allCertificates'][0]['sha1'] == "76DAF3E30F95B244CA4D6107E0243BB97F7DF965"
assert second_sig['allCertificates'][1]['sha1'] == "F252E794FE438E35ACE6E53762C0A234A2C52135"
second_sig_signer = second_sig['signer']
assert second_sig_signer['digest'] == "2B80E8B619EDC847B62A8A58785C70830B10ACA6863FE30C590F5AE4034258E9"
assert second_sig_signer['digestAlgorithm'] == 'sha256'
assert len(second_sig_signer['chain']) == 2
second_sig_countersig = second_sig_signer['counterSigners'][0]
assert len(second_sig_countersig['warnings']) == 1
assert second_sig_countersig['warnings'][0] == "Couldn't parse signature"
class Test8(Test):
settings = TestSettings(
tool='fileinfo',
input='PdfConv_32.dll',
args='--json --verbose'
)
def test_certificates(self):
assert self.fileinfo.succeeded
assert self.fileinfo.output['digitalSignatures']['numberOfSignatures'] == 1
first_sig = self.fileinfo.output['digitalSignatures']['signatures'][0]
assert len(first_sig['warnings']) == 0
assert first_sig['digestAlgorithm'] == 'sha1'
assert first_sig['fileDigest'] == '714A802FB13B89160538890320E519F7A9260E84'
assert first_sig['signedDigest'] == '714A802FB13B89160538890320E519F7A9260E84'
assert len(first_sig['allCertificates']) == 4
assert first_sig['allCertificates'][0]['sha1'] == "DF946A5E503015777FD22F46B5624ECD27BEE376"
assert first_sig['allCertificates'][1]['sha1'] == "DF540F8FEDBA6454E039DD5E21B3B7C99E327B51"
assert first_sig['allCertificates'][2]['sha1'] == "F5AD0BCC1AD56CD150725B1C866C30AD92EF21B0"
assert first_sig['allCertificates'][3]['sha1'] == "B69E752BBE88B4458200A7C0F4F5B3CCE6F35B47"
first_sig_signer = first_sig['signer']
assert first_sig_signer['digest'] == "807D00A61C50095D308F33F29EDD644A06E5C514"
assert first_sig_signer['digestAlgorithm'] == 'sha1'
assert len(first_sig_signer['chain']) == 3
first_sig_countersig = first_sig_signer['counterSigners'][0]
assert len(first_sig_countersig['warnings']) == 0
assert first_sig_countersig['signTime'] == "Aug 14 07:58:15 2015 GMT"
assert first_sig_countersig['digest'] == 'FDD38655C08F04B887C4992656CD4F35DE6E6A07'
assert first_sig_countersig['digestAlgorithm'] == 'sha1'
assert len(first_sig_countersig['chain']) == 1
assert first_sig_countersig['chain'][0]['sha256'] == "12F0A1DDF83D265B205B4F3BCA43B3FA89A748E9834EC24004774FD2FDE34073"
class Test9(Test):
settings = TestSettings(
tool='fileinfo',
input='thunderbird.ex',
args='--json --verbose'
)
def test_certificates(self):
assert self.fileinfo.succeeded
assert self.fileinfo.output['digitalSignatures']['numberOfSignatures'] == 1
first_sig = self.fileinfo.output['digitalSignatures']['signatures'][0]
assert len(first_sig['warnings']) == 0
assert first_sig['digestAlgorithm'] == 'sha1'
assert first_sig['fileDigest'] == '0813562802948CCB60D288A84147671FBFC10CD4'
assert first_sig['signedDigest'] == '0813562802948CCB60D288A84147671FBFC10CD4'
assert len(first_sig['allCertificates']) == 5
first_sig_signer = first_sig['signer']
assert first_sig_signer['digest'] == "A6549FE9A61275AD574F53D2A299138E534780E6"
assert first_sig_signer['digestAlgorithm'] == 'sha1'
assert len(first_sig_signer['chain']) == 3
first_sig_countersig = first_sig_signer['counterSigners'][0]
assert len(first_sig_countersig['warnings']) == 0
assert first_sig_countersig['signTime'] == "Feb 11 22:09:49 2016 GMT"
assert first_sig_countersig['digest'] == 'BEFD25FA1E19A6D90B1918D4E06E465FE3BC57E3'
assert first_sig_countersig['digestAlgorithm'] == 'sha1'
assert len(first_sig_countersig['chain']) == 2
class Test10(Test):
settings = TestSettings(
tool='fileinfo',
input='VSTST-FileConverter.ex',
args='--json --verbose'
)
def test_certificates(self):
assert self.fileinfo.succeeded
assert self.fileinfo.output['digitalSignatures']['numberOfSignatures'] == 2
first_sig = self.fileinfo.output['digitalSignatures']['signatures'][0]
assert first_sig['digestAlgorithm'] == 'sha1'
assert first_sig['fileDigest'] == '427DC17A763807D2DEAD406DDFD3AAE93F5CE235'
assert first_sig['signedDigest'] == '427DC17A763807D2DEAD406DDFD3AAE93F5CE235'
assert first_sig['programName'] == 'VSTST-FileConverter.exe'
assert first_sig['signatureVerified'] == True
assert len(first_sig['warnings']) == 0
assert len(first_sig['allCertificates']) == 4
assert first_sig['allCertificates'][0]['sha256'] == "E43F82BC40029F17DBB516613D1E1A96EC2940CE76E0A9CD5F53BA50175A8766"
assert first_sig['allCertificates'][1]['sha256'] == "67C529AD57B2AEDD4D248993324270C7064D4F6BDAAF70044D772D05C56001A4"
assert first_sig['allCertificates'][2]['sha256'] == "9CBF22FAE0DD53A7395556CE6154AA14A0D03360AA8C51CFEA05D1FD8819E043"
assert first_sig['allCertificates'][3]['sha256'] == "4F987BBE4E0D1DCF48FCEFC9239AC6E62EE9DF38CAC2D32993B8533CD95C2E49"
first_sig_signer = first_sig['signer']
assert first_sig_signer['digest'] == "C66CA59AF0B63A5758EC97F74FA33C686DBD06D0"
assert first_sig_signer['digestAlgorithm'] == 'sha1'
assert len(first_sig_signer['chain']) == 2
first_sig_countersig = first_sig_signer['counterSigners'][0]
assert first_sig_countersig['signTime'] == "Jul 7 07:34:43 2015 GMT"
assert first_sig_countersig['digest'] == 'C29360ED776638FE506A2641A5F13A9975EA9945'
assert first_sig_countersig['digestAlgorithm'] == 'sha1'
assert len(first_sig_countersig['warnings']) == 0
assert len(first_sig_countersig['chain']) == 2
second_sig = self.fileinfo.output['digitalSignatures']['signatures'][1]
assert second_sig['signatureVerified'] == True
assert len(second_sig['warnings']) == 0
assert second_sig['digestAlgorithm'] == 'sha256'
assert second_sig['fileDigest'] == '7E6B06384FF2B27537F0AC76E311C116434D02DBC735FAF113B6EFD6D629F74C'
assert second_sig['signedDigest'] == '7E6B06384FF2B27537F0AC76E311C116434D02DBC735FAF113B6EFD6D629F74C'
assert first_sig['programName'] == 'VSTST-FileConverter.exe'
assert len(second_sig['allCertificates']) == 2
assert second_sig['allCertificates'][0]['sha256'] == "BD3FCED7A02EA9A18CEBC0628AF487A2925960BE8A88A35609666FA7901987AA"
assert second_sig['allCertificates'][1]['sha256'] == "56DA8722AFD94066FFE1E4595473A4854892B843A0827D53FB7D8F4AEED1E18B"
second_sig_signer = second_sig['signer']
assert second_sig_signer['digest'] == "61A1F261448BCD1CC8AB9F03DF0209951734455840B2B0C2CFB11FC1DB0C1A81"
assert second_sig_signer['digestAlgorithm'] == 'sha256'
assert len(second_sig_signer['chain']) == 2
second_sig_countersig = second_sig_signer['counterSigners'][0]
assert len(second_sig_countersig['warnings']) == 1
assert second_sig_countersig['warnings'][0] == "Couldn't parse signature"
class TestEscaping(Test):
settings = TestSettings(
tool='fileinfo',
input='3708882e564ba289416f65cb4cb2b4de',
args='--json --verbose'
)
def test_certificates(self):
assert self.fileinfo.succeeded
self.assertEqual(
len(self.fileinfo.output["digitalSignatures"]["signatures"][0]['allCertificates']), 4)
self.assertEqual(self.fileinfo.output["digitalSignatures"]['signatures'][0]['signer']['chain'][0]
["sha256"], "9D5DC543A16E3B97AA12ABB6A09C9393C1F6778E475D95C81607335D5D19AF8B")
self.assertEqual(self.fileinfo.output["digitalSignatures"]['signatures'][0]['signer']['chain'][1]
["sha256"], "0D34394100E961CE4318DBA9B8DD38EBC25BB07AEF78FDA3FFF632685549BA0F")
self.assertEqual(self.fileinfo.output["digitalSignatures"]['signatures'][0]['signer']['counterSigners'][0]['chain'][0]
["sha256"], "0374881C9B74D31F28DC580B0F2B9D2B14A97CE31CBEC2A05AEB377DCDDCC2B0")
self.assertEqual(self.fileinfo.output["digitalSignatures"]['signatures'][0]['signer']['counterSigners'][0]['chain'][1]
["sha256"], "0625FEE1A80D7B897A9712249C2F55FF391D6661DBD8B87F9BE6F252D88CED95")
self.assertEqual(self.fileinfo.output["digitalSignatures"]['signatures'][0]['signer']['chain'][0]
["attributes"]["subject"]["locality"], R"M\xfcnchen")
class Test11(Test):
settings = TestSettings(
tool='fileinfo',
args='--json --verbose',
input='x86-pe-ff6717faf307cdc5ba2d07e320cb8e33'
)
def test_certificates(self):
assert self.fileinfo.succeeded
assert self.fileinfo.output['digitalSignatures']['numberOfSignatures'] == 1
first_sig = self.fileinfo.output['digitalSignatures']['signatures'][0]
assert len(first_sig['warnings']) == 0
assert first_sig['digestAlgorithm'] == 'sha1'
assert first_sig['fileDigest'] == 'F48199821F5D51C334E00532FABB05E3F2D3D92C'
assert first_sig['signedDigest'] == 'F48199821F5D51C334E00532FABB05E3F2D3D92C'
assert len(first_sig['allCertificates']) == 3
assert first_sig['allCertificates'][0]['sha1'] == "C5DAAAEAA82AAF90C2963CE7432E934A8DE17D51"
assert first_sig['allCertificates'][1]['sha1'] == "7C4656C3061F7F4C0D67B319A855F60EBC11FC44"
assert first_sig['allCertificates'][2]['sha1'] == "2796BAE63F1801E277261BA0D77770028F20EEE4"
first_sig_signer = first_sig['signer']
assert first_sig_signer['digest'] == "9C6BCEE73B8C669764AEDB8046C064C71C5B6A27"
assert first_sig_signer['digestAlgorithm'] == 'sha1'
assert len(first_sig_signer['chain']) == 3
class Test12(Test):
settings = TestSettings(
tool='fileinfo',
input='002720d5ed0df9fe550d52145a44268d24b6368c61065be070e3319b9a67b082',
args='-j -v'
)
def test(self):
assert self.fileinfo.succeeded
assert self.fileinfo.output['digitalSignatures']['numberOfSignatures'] == 2
assert len(self.fileinfo.output['digitalSignatures']['signatures']) == 2
first_signature = self.fileinfo.output['digitalSignatures']['signatures'][0]
assert first_signature['signatureVerified'] == True
assert len(first_signature['warnings']) == 0
assert len(first_signature['allCertificates']) == 6
assert first_signature['fileDigest'] == 'D643405056A4A16042D47942A8C6A59524BDA64A'
assert first_signature['fileDigest'] == first_signature['signedDigest']
assert first_signature['digestAlgorithm'] == 'sha1'
signer = first_signature['signer']
assert len(signer['warnings']) == 0
assert signer['digest'] == '2C39C585984D98957CA03802F8C255EE4359D8EE'
assert signer['digestAlgorithm'] == 'sha1'
assert len(signer['chain']) == 4
assert len(signer['counterSigners']) == 1
counter_signer = signer['counterSigners'][0]
assert len(counter_signer['warnings']) == 0
assert len(counter_signer['chain']) == 2
assert counter_signer['signTime'] == 'Aug 21 14:53:13 2017 GMT'
assert counter_signer['digest'] == '1530CD732860961182222E7C955AEF70BD0BA570'
assert counter_signer['digestAlgorithm'] == 'sha1'
#######################################################################
second_signature = self.fileinfo.output['digitalSignatures']['signatures'][1]
assert second_signature['signatureVerified'] == True
assert len(second_signature['warnings']) == 0
assert len(first_signature['allCertificates']) == 6
assert second_signature['fileDigest'] == '75CACDF5BE7BAEECB89C70BC01343FB7C9E8FD000CC191F08D2A996359D617FE'
assert second_signature['fileDigest'] == second_signature['signedDigest']
assert second_signature['digestAlgorithm'] == 'sha256'
signer = second_signature['signer']
assert len(signer['warnings']) == 0
assert signer['digest'] == '018A36C7429C0058101D3F087E69E27824CC68FEC8A745B8AF59D5D225BBDB77'
assert signer['digestAlgorithm'] == 'sha256'
assert len(signer['chain']) == 4
assert len(signer['counterSigners']) == 1
counter_signer = signer['counterSigners'][0]
assert len(counter_signer['warnings']) == 0
assert len(counter_signer['chain']) == 2
assert counter_signer['signTime'] == 'Aug 21 14:53:39 2017 GMT'
assert counter_signer['digest'] == '32344850DE23CE4A6312A69CC355AC6D16968964'
assert counter_signer['digestAlgorithm'] == 'sha1'
class TestProgramName(Test):
settings = TestSettings(
tool='fileinfo',
input='0059fb3f225c5784789622eeccb97197d591972851b63d59f5bd107ddfdb7a21',
args='-j -v'
)
def test(self):
assert self.fileinfo.succeeded
assert self.fileinfo.output['digitalSignatures']['numberOfSignatures'] == 1
first_signature = self.fileinfo.output['digitalSignatures']['signatures'][0]
assert first_signature['programName'] == "GoTo Opener"
| [] |
AmitSrourDev/darn | app/darn.py | c04b681881620ffed2e1e0788d9cd80da7f806c4 | import subprocess
def run(cmd):
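    # Naive split on single spaces: quoted arguments that contain spaces get broken apart;
    # shlex.split(cmd) would be the more robust choice here.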
subprocess.run(cmd.split(' '))
def ls():
subprocess.call(["ls", "-l"]) | [((82, 111), 'subprocess.call', 'subprocess.call', (["['ls', '-l']"], {}), "(['ls', '-l'])\n", (97, 111), False, 'import subprocess\n')] |
lakhlaifi/RedHat-Ansible | virt/ansible-latest/lib/python2.7/site-packages/ansible/plugins/become/runas.py | 27c5077cced9d416081fcd5d69ea44bca0317fa4 | # -*- coding: utf-8 -*-
# Copyright: (c) 2018, Ansible Project
# GNU General Public License v3.0+ (see COPYING or https://www.gnu.org/licenses/gpl-3.0.txt)
from __future__ import (absolute_import, division, print_function)
__metaclass__ = type
DOCUMENTATION = """
become: runas
short_description: Run As user
description:
        - This become plugin allows your remote/login user to execute commands as another user via the Windows runas facility.
author: ansible (@core)
version_added: "2.8"
options:
become_user:
description: User you 'become' to execute the task
ini:
- section: privilege_escalation
key: become_user
- section: runas_become_plugin
key: user
vars:
- name: ansible_become_user
- name: ansible_runas_user
env:
- name: ANSIBLE_BECOME_USER
- name: ANSIBLE_RUNAS_USER
required: True
become_flags:
description: Options to pass to runas, a space delimited list of k=v pairs
default: ''
ini:
- section: privilege_escalation
key: become_flags
- section: runas_become_plugin
key: flags
vars:
- name: ansible_become_flags
- name: ansible_runas_flags
env:
- name: ANSIBLE_BECOME_FLAGS
- name: ANSIBLE_RUNAS_FLAGS
become_pass:
description: password
ini:
- section: runas_become_plugin
key: password
vars:
- name: ansible_become_password
- name: ansible_become_pass
- name: ansible_runas_runas
env:
- name: ANSIBLE_BECOME_PASS
- name: ANSIBLE_RUNAS_PASS
notes:
- runas is really implemented in the powershell module handler and as such can only be used with winrm connections.
- This plugin ignores the 'become_exe' setting as it uses an API and not an executable.
"""
from ansible.plugins.become import BecomeBase
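# Illustrative only (an assumption, not part of the plugin itself): the options documented
# above would typically be supplied through inventory/host_vars, for example:
#
#   ansible_become: true
#   ansible_become_method: runas
#   ansible_become_user: Administrator                    # hypothetical account name
#   ansible_become_pass: "{{ vault_admin_password }}"     # hypothetical vaulted variable
#
# The variable names map onto the vars: entries listed for each option in DOCUMENTATION.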
class BecomeModule(BecomeBase):
name = 'runas'
def build_become_command(self, cmd, shell):
# runas is implemented inside the winrm connection plugin
return cmd
| [] |
JustHitTheCore/ctf_workshops | 2017/lab_dh/utils.py | d50e8a5c90e80cdae3e17a92bce83955f0618570 | #!/usr/bin/env python
# -*- coding: utf-8 -*-
'''
~Gros
'''
from hashlib import sha256
import random
def add_padding(data, block_size=16):
"""add PKCS#7 padding"""
size = block_size - (len(data)%block_size)
return data+chr(size)*size
def strip_padding(data, block_size=16):
"""strip PKCS#7 padding"""
padding = ord(data[-1])
if padding == 0 or padding > block_size or data[-padding:] != chr(padding)*padding:
raise Exception("Invalid padding")
return data[:-padding]
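# Worked example with the default block_size of 16 (illustrative, not used by the code):
#   add_padding('A' * 13)  -> 'AAAAAAAAAAAAA' + '\x03' * 3   (3 padding bytes of value 3)
#   strip_padding of that  -> 'AAAAAAAAAAAAA'
# A full 16-byte block gains a whole extra block of chr(16), so the pair always round-trips.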
def random_bytes(amount=1):
return ''.join([chr(random.randint(0,255)) for x in range(amount)])
def derive_key(key_int, block_size=16):
return sha256(str(key_int)).digest()[:16] | [((563, 585), 'random.randint', 'random.randint', (['(0)', '(255)'], {}), '(0, 255)\n', (577, 585), False, 'import random\n')] |
nparkstar/nauta | applications/cli/commands/model/tests/test_export.py | 1bda575a01f782d1dc2cd5221122651f184f7167 | #
# Copyright (c) 2019 Intel Corporation
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
#
from click.testing import CliRunner
from cli_text_consts import ModelExportCmdTexts as Texts
from commands.model.common import workflow_description
from commands.model.export import export
from platform_resources.workflow import ArgoWorkflow, QUEUED_PHASE
FEM_NAME = "EXPORT_1"
SEM_NAME = "EXPORT_2"
FEM_PARAMETERS = "PARAMS_1"
SEM_PARAMETERS = "PARAMS_2"
FEM_START_DATE = '2000-01-01'
FEM_NAMESPACE = 'test-namespace'
TEST_AGROWORKFLOW = ArgoWorkflow(name=FEM_NAME, started_at=FEM_START_DATE, finished_at=None,
namespace=FEM_NAMESPACE, phase=None)
TWO_MODEL_OUTPUT = [workflow_description(name=FEM_NAME, parameters=FEM_PARAMETERS),
workflow_description(name=SEM_NAME, parameters=SEM_PARAMETERS)]
def setup_mocks(mocker):
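    # Stub out everything the export command touches (kubectl namespace, Argo workflow
    # template loading and lookup, the registry config map, filesystem checks) so the
    # CLI runner can exercise the command without a cluster.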
mocker.patch('commands.model.export.get_kubectl_current_context_namespace',
return_value='fake-namespace')
mocker.patch('platform_resources.workflow.ArgoWorkflow.from_yaml',
return_value=mocker.MagicMock())
mocker.patch('platform_resources.workflow.ArgoWorkflow.get',
return_value=TEST_AGROWORKFLOW)
mocker.patch('os.listdir', return_value=['openvino.yaml', 'tensorflow.yaml', 'some_other_file'])
mocker.patch('commands.model.export.NAUTAConfigMap', return_value=mocker.MagicMock(registry='fake-addr'))
mocker.patch('commands.model.export.Config')
mocker.patch('os.path.isdir', return_value=True)
def test_export(mocker):
setup_mocks(mocker)
result = CliRunner().invoke(export, ["/fake/path", "openvino"])
assert result.exit_code == 0
assert "Successfully created export workflow" in result.output
assert QUEUED_PHASE in result.output
assert FEM_NAME in result.output
assert FEM_START_DATE in result.output
assert FEM_NAMESPACE in result.output
def test_export_inexistent_format(mocker):
setup_mocks(mocker)
result = CliRunner().invoke(export, ["/fake/path", "bad"])
assert result.exit_code == 2
assert "Format: bad does not exist. Choose from:" in result.output
def test_export_failure(mocker):
setup_mocks(mocker)
mocker.patch('platform_resources.workflow.ArgoWorkflow.from_yaml',
return_value=mocker.MagicMock(create=lambda: RuntimeError))
result = CliRunner().invoke(export, ["/fake/path", "openvino"])
assert result.exit_code == 1
assert "Failed to create export workflow" in result.output
def test_export_list(mocker):
mocker.patch("commands.model.export.get_list_of_workflows", return_value=TWO_MODEL_OUTPUT)
result = CliRunner().invoke(export, ["formats"])
assert FEM_NAME in result.output
assert SEM_NAME in result.output
assert FEM_PARAMETERS in result.output
assert SEM_PARAMETERS in result.output
def test_export_list_error(mocker):
mocker.patch("commands.model.export.get_list_of_workflows", side_effect=RuntimeError)
result = CliRunner().invoke(export, ["formats"])
assert Texts.EXPORT_LIST_ERROR_MSG in result.output
def test_export_missing_format(mocker):
setup_mocks(mocker)
result = CliRunner().invoke(export, ["wrong-option"])
assert Texts.MISSING_EXPORT_FORMAT.format(formats=["openvino", "tensorflow"]) in result.output
| [((1034, 1147), 'platform_resources.workflow.ArgoWorkflow', 'ArgoWorkflow', ([], {'name': 'FEM_NAME', 'started_at': 'FEM_START_DATE', 'finished_at': 'None', 'namespace': 'FEM_NAMESPACE', 'phase': 'None'}), '(name=FEM_NAME, started_at=FEM_START_DATE, finished_at=None,\n namespace=FEM_NAMESPACE, phase=None)\n', (1046, 1147), False, 'from platform_resources.workflow import ArgoWorkflow, QUEUED_PHASE\n'), ((1198, 1260), 'commands.model.common.workflow_description', 'workflow_description', ([], {'name': 'FEM_NAME', 'parameters': 'FEM_PARAMETERS'}), '(name=FEM_NAME, parameters=FEM_PARAMETERS)\n', (1218, 1260), False, 'from commands.model.common import workflow_description\n'), ((1282, 1344), 'commands.model.common.workflow_description', 'workflow_description', ([], {'name': 'SEM_NAME', 'parameters': 'SEM_PARAMETERS'}), '(name=SEM_NAME, parameters=SEM_PARAMETERS)\n', (1302, 1344), False, 'from commands.model.common import workflow_description\n'), ((3760, 3830), 'cli_text_consts.ModelExportCmdTexts.MISSING_EXPORT_FORMAT.format', 'Texts.MISSING_EXPORT_FORMAT.format', ([], {'formats': "['openvino', 'tensorflow']"}), "(formats=['openvino', 'tensorflow'])\n", (3794, 3830), True, 'from cli_text_consts import ModelExportCmdTexts as Texts\n'), ((2113, 2124), 'click.testing.CliRunner', 'CliRunner', ([], {}), '()\n', (2122, 2124), False, 'from click.testing import CliRunner\n'), ((2515, 2526), 'click.testing.CliRunner', 'CliRunner', ([], {}), '()\n', (2524, 2526), False, 'from click.testing import CliRunner\n'), ((2891, 2902), 'click.testing.CliRunner', 'CliRunner', ([], {}), '()\n', (2900, 2902), False, 'from click.testing import CliRunner\n'), ((3184, 3195), 'click.testing.CliRunner', 'CliRunner', ([], {}), '()\n', (3193, 3195), False, 'from click.testing import CliRunner\n'), ((3527, 3538), 'click.testing.CliRunner', 'CliRunner', ([], {}), '()\n', (3536, 3538), False, 'from click.testing import CliRunner\n'), ((3703, 3714), 'click.testing.CliRunner', 'CliRunner', ([], {}), '()\n', (3712, 3714), False, 'from click.testing import CliRunner\n')] |
LiamBindle/spack | var/spack/repos/builtin/packages/py-mdanalysis/package.py | e90d5ad6cfff2ba3de7b537d6511adccd9d5fcf1 | # Copyright 2013-2021 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
from spack import *
class PyMdanalysis(PythonPackage):
"""MDAnalysis is a Python toolkit to analyze molecular dynamics
trajectories generated by a wide range of popular simulation
packages including DL_Poly, CHARMM, Amber, NAMD, LAMMPS, and
Gromacs. (See the lists of supported trajectory formats and
topology formats.)"""
homepage = "https://www.mdanalysis.org"
pypi = "MDAnalysis/MDAnalysis-0.19.2.tar.gz"
version('1.0.0', sha256='f45a024aca45e390ff1c45ca90beb2180b78881be377e2a1aa9cd6c109bcfa81')
version('0.20.1', sha256='d04b71b193b9716d2597ffb9938b93f43487fa535da1bb5c1f2baccf356d7df9')
version('0.19.2', sha256='c5395bbafa5efca2e1aee4715d26129844140c47cb8301da0293106cb969de7d')
version('0.19.1', sha256='ff1d694f8598c0833ec340de6a6adb3b5e62b92d0fa94ee6401718ba972db3cc')
version('0.19.0', sha256='248e3b37fc6150e31c609cc18a3927c32aee37b76d29cbfedf635e7e1aa982cf')
version('0.18.0', sha256='a08acea1755112411e7db55e3f282e164b47a59e15794b38744cce6c596f252a')
version('0.17.0', sha256='9bd61760334698cc7b8a57ad26456451e926e9c9e66722594ad8816561348cde')
version('0.16.2', sha256='407d9a9ff1ab8a5e47973714d06fabff220f8d08a28792dee93e88e70e995b0a')
version('0.16.1', sha256='3dc8f5d639ab3a0d152cbd7259ae9372ec8a9bac0f8cb7d3b80ce5adc1e3ee57')
version('0.16.0', sha256='c4824fa1fddd336daa39371436187ebb023366885fb250c2827ed7fce2546bd4')
version('0.15.0', sha256='9088786048b47339cba1f8a586977bbb3bb04ae1bcd0462b59e45bda37e25533')
variant('analysis', default=True,
description='Enable analysis packages: matplotlib, scipy, seaborn')
variant('amber', default=False,
description='Support AMBER netcdf format.')
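    # Illustrative spec syntax (not part of the original recipe):
    #   spack install py-mdanalysis            # default: +analysis, no AMBER support
    #   spack install py-mdanalysis+amber      # adds py-netcdf4/hdf5 for AMBER netcdf files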
depends_on('[email protected]:', type=('build', 'run'))
depends_on('py-setuptools', type='build')
depends_on('[email protected]:', type='build')
depends_on('[email protected]:', type=('build', 'run'))
depends_on('[email protected]:', type=('build', 'run'))
depends_on('[email protected]:', when='@0.17.0:', type=('build', 'run'))
depends_on('[email protected]:', when='@0.16.0:', type=('build', 'run'))
depends_on('py-mock', when='@0.18.0:', type=('build', 'run'))
depends_on('[email protected]:', when='@1.0.0:', type=('build', 'run'))
depends_on('py-joblib', when='@0.16.0:0.20.1', type=('build', 'run'))
depends_on('[email protected]:', when='@1.0.0:', type=('build', 'run'))
depends_on('[email protected]:', when='@:0.15.0', type=('build', 'run'))
depends_on('[email protected]:', when='@0.16.0:0.19.2', type=('build', 'run'))
depends_on('[email protected]:', when='@0.20.1:', type=('build', 'run'))
depends_on('[email protected]:', when='@:0.17.0', type=('build', 'run'))
depends_on('[email protected]:', when='@0.18.0:', type=('build', 'run'))
depends_on('[email protected]:', when='@:0.16.2', type=('build', 'run'))
depends_on('[email protected]:', when='@0.17.0:', type=('build', 'run'))
depends_on('py-matplotlib', when='@:0.15.0+analysis', type=('build', 'run'))
depends_on('[email protected]:', when='@0.16.0:0.16.1+analysis', type=('build', 'run'))
depends_on('[email protected]:', when='@0.16.2:', type=('build', 'run'))
depends_on('py-scipy', when='@:0.16.1+analysis', type=('build', 'run'))
depends_on('py-scipy', when='@0.16.2:0.17.0', type=('build', 'run'))
depends_on('[email protected]:', when='@0.18.0:', type=('build', 'run'))
depends_on('py-scikit-learn', when='@0.16.0:+analysis', type=('build', 'run'))
depends_on('py-seaborn', when='+analysis', type=('build', 'run'))
depends_on('[email protected]:', when='+amber', type=('build', 'run'))
depends_on('hdf5', when='+amber', type=('run'))
| [] |
lesley-byte/enviroplus-python | lesley-byte/graphpressure.py | df08c238c8b550c9041ff06a0b6bef6b330af611 | from requests import get
import matplotlib.pyplot as plt
import matplotlib.animation as animation
import datetime as dt
from bme280 import BME280
try:
from smbus2 import SMBus
except ImportError:
from smbus import SMBus
fig = plt.figure()
ax = fig.add_subplot(1, 1, 1)
xs = []
ys = []
bus = SMBus(1)
bme280 = BME280(i2c_dev=bus)
def animate(i, xs, ys):
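    # Called by FuncAnimation every `interval` milliseconds: sample the sensor, append a
    # timestamped reading and redraw the most recent 20 points.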
pressure = bme280.get_pressure()
xs.append(dt.datetime.now().strftime('%H:%M:%S'))
ys.append(pressure)
    # Trim in place so the shared lists passed via fargs don't grow without bound
    del xs[:-20]
    del ys[:-20]
ax.clear()
ax.plot(xs, ys)
plt.xticks(rotation=45, ha='right')
plt.subplots_adjust(bottom=0.30)
plt.title('Pressure over time')
plt.ylabel("pressure")
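# FuncAnimation's interval is given in milliseconds, so 60000 redraws the chart once a minute.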
ani = animation.FuncAnimation(fig, animate, fargs=(xs, ys), interval=60000)
plt.show()
| [((237, 249), 'matplotlib.pyplot.figure', 'plt.figure', ([], {}), '()\n', (247, 249), True, 'import matplotlib.pyplot as plt\n'), ((302, 310), 'smbus.SMBus', 'SMBus', (['(1)'], {}), '(1)\n', (307, 310), False, 'from smbus import SMBus\n'), ((320, 339), 'bme280.BME280', 'BME280', ([], {'i2c_dev': 'bus'}), '(i2c_dev=bus)\n', (326, 339), False, 'from bme280 import BME280\n'), ((702, 771), 'matplotlib.animation.FuncAnimation', 'animation.FuncAnimation', (['fig', 'animate'], {'fargs': '(xs, ys)', 'interval': '(60000)'}), '(fig, animate, fargs=(xs, ys), interval=60000)\n', (725, 771), True, 'import matplotlib.animation as animation\n'), ((772, 782), 'matplotlib.pyplot.show', 'plt.show', ([], {}), '()\n', (780, 782), True, 'import matplotlib.pyplot as plt\n'), ((555, 590), 'matplotlib.pyplot.xticks', 'plt.xticks', ([], {'rotation': '(45)', 'ha': '"""right"""'}), "(rotation=45, ha='right')\n", (565, 590), True, 'import matplotlib.pyplot as plt\n'), ((595, 626), 'matplotlib.pyplot.subplots_adjust', 'plt.subplots_adjust', ([], {'bottom': '(0.3)'}), '(bottom=0.3)\n', (614, 626), True, 'import matplotlib.pyplot as plt\n'), ((632, 663), 'matplotlib.pyplot.title', 'plt.title', (['"""Pressure over time"""'], {}), "('Pressure over time')\n", (641, 663), True, 'import matplotlib.pyplot as plt\n'), ((668, 690), 'matplotlib.pyplot.ylabel', 'plt.ylabel', (['"""pressure"""'], {}), "('pressure')\n", (678, 690), True, 'import matplotlib.pyplot as plt\n'), ((416, 433), 'datetime.datetime.now', 'dt.datetime.now', ([], {}), '()\n', (431, 433), True, 'import datetime as dt\n')] |
brett-smith/bootstrap-vz | bootstrapvz/plugins/ova/tasks.py | 2eaa98db684b85186f3ecd6e5d1304aaceca6b73 | from bootstrapvz.base import Task
from bootstrapvz.common import phases
from bootstrapvz.common.tasks import workspace
import os
import shutil
assets = os.path.normpath(os.path.join(os.path.dirname(__file__), 'assets'))
class CheckOVAPath(Task):
description = 'Checking if the OVA file already exists'
phase = phases.preparation
@classmethod
def run(cls, info):
ova_basename = info.manifest.name.format(**info.manifest_vars)
ova_name = ova_basename + '.ova'
ova_path = os.path.join(info.manifest.bootstrapper['workspace'], ova_name)
if os.path.exists(ova_path):
from bootstrapvz.common.exceptions import TaskError
msg = 'The OVA `{name}\' already exists at `{path}\''.format(name=ova_name, path=ova_path)
raise TaskError(msg)
info._ova['ova_basename'] = ova_basename
info._ova['ova_name'] = ova_name
info._ova['ova_path'] = ova_path
class CreateOVADir(Task):
description = 'Creating directory for the OVA'
phase = phases.preparation
predecessors = [workspace.CreateWorkspace, CheckOVAPath]
@classmethod
def run(cls, info):
info._ova['folder'] = os.path.join(info.workspace, 'ova')
os.mkdir(info._ova['folder'])
class PackageOVA(Task):
description = 'Packaging the volume as an OVA'
phase = phases.image_registration
@classmethod
def run(cls, info):
import random
mac_address = '080027{mac:06X}'.format(mac=random.randrange(16 ** 6))
from bootstrapvz.common.tools import log_check_call
disk_name = info._ova['ova_basename'] + '.' + info.volume.extension
disk_link = os.path.join(info._ova['folder'], disk_name)
log_check_call(['ln', '-s', info.volume.image_path, disk_link])
ovf_path = os.path.join(info._ova['folder'], info._ova['ova_basename'] + '.ovf')
cls.write_ovf(info, ovf_path, mac_address, disk_name)
ova_files = os.listdir(info._ova['folder'])
log_check_call(['ovftool', ovf_path, info._ova['ova_path']]
)
import logging
logging.getLogger(__name__).info('The OVA has been placed at ' + info._ova['ova_path'])
@classmethod
def write_ovf(cls, info, destination, mac_address, disk_name):
namespaces = {'ovf': 'http://schemas.dmtf.org/ovf/envelope/1',
'rasd': 'http://schemas.dmtf.org/wbem/wscim/1/cim-schema/2/CIM_ResourceAllocationSettingData',
'vssd': 'http://schemas.dmtf.org/wbem/wscim/1/cim-schema/2/CIM_VirtualSystemSettingData',
'xsi': 'http://www.w3.org/2001/XMLSchema-instance',
'vbox': 'http://www.virtualbox.org/ovf/machine',
}
def attr(element, name, value=None):
for prefix, ns in namespaces.iteritems():
name = name.replace(prefix + ':', '{' + ns + '}')
if value is None:
return element.attrib[name]
else:
element.attrib[name] = str(value)
template_path = os.path.join(assets, 'default.ovf')
if 'ovf' in info.manifest.plugins['ova']:
template_path = info.manifest.plugins['ova']['ovf']
import xml.etree.ElementTree as ET
template = ET.parse(template_path)
root = template.getroot()
[disk_ref] = root.findall('./ovf:References/ovf:File', namespaces)
attr(disk_ref, 'ovf:href', disk_name)
# List of OVF disk format URIs
# Snatched from VBox source (src/VBox/Main/src-server/ApplianceImpl.cpp:47)
# ISOURI = "http://www.ecma-international.org/publications/standards/Ecma-119.htm"
# VMDKStreamURI = "http://www.vmware.com/interfaces/specifications/vmdk.html#streamOptimized"
# VMDKSparseURI = "http://www.vmware.com/specifications/vmdk.html#sparse"
# VMDKCompressedURI = "http://www.vmware.com/specifications/vmdk.html#compressed"
# VMDKCompressedURI2 = "http://www.vmware.com/interfaces/specifications/vmdk.html#compressed"
# VHDURI = "http://go.microsoft.com/fwlink/?LinkId=137171"
volume_uuid = info.volume.get_uuid()
[disk] = root.findall('./ovf:DiskSection/ovf:Disk', namespaces)
attr(disk, 'ovf:capacity', info.volume.size.bytes.get_qty_in('B'))
attr(disk, 'ovf:format', info.volume.ovf_uri)
attr(disk, 'vbox:uuid', volume_uuid)
[system] = root.findall('./ovf:VirtualSystem', namespaces)
attr(system, 'ovf:id', info._ova['ova_basename'])
# Set the operating system
[os_section] = system.findall('./ovf:OperatingSystemSection', namespaces)
os_info = {'i386': {'id': 96, 'name': 'Debian'},
'amd64': {'id': 96, 'name': 'Debian_64'}
}.get(info.manifest.system['architecture'])
attr(os_section, 'ovf:id', os_info['id'])
[os_desc] = os_section.findall('./ovf:Description', namespaces)
os_desc.text = os_info['name']
[os_type] = os_section.findall('./vbox:OSType', namespaces)
os_type.text = os_info['name']
[sysid] = system.findall('./ovf:VirtualHardwareSection/ovf:System/'
'vssd:VirtualSystemIdentifier', namespaces)
sysid.text = info._ova['ova_basename']
[machine] = system.findall('./vbox:Machine', namespaces)
import uuid
del machine.attrib['uuid']
attr(machine, 'uuid', uuid.uuid4())
del machine.attrib['name']
attr(machine, 'name', info._ova['ova_basename'])
from datetime import datetime
del machine.attrib['lastStateChange']
attr(machine, 'lastStateChange', datetime.now().strftime('%Y-%m-%dT%H:%M:%SZ'))
[nic] = machine.findall('./ovf:Hardware/ovf:Network/ovf:Adapter', namespaces)
attr(machine, 'MACAddress', mac_address)
[device_img] = machine.findall('./ovf:StorageControllers'
'/ovf:StorageController[1]'
'/ovf:AttachedDevice/ovf:Image', namespaces)
attr(device_img, 'uuid', '{' + str(volume_uuid) + '}')
template.write(destination, xml_declaration=True) # , default_namespace=namespaces['ovf']
class RemoveOVADir(Task):
description = 'Removing the OVA directory'
phase = phases.cleaning
successors = [workspace.DeleteWorkspace]
@classmethod
def run(cls, info):
shutil.rmtree(info._ova['folder'])
del info._ova['folder']
| [((183, 208), 'os.path.dirname', 'os.path.dirname', (['__file__'], {}), '(__file__)\n', (198, 208), False, 'import os\n'), ((483, 546), 'os.path.join', 'os.path.join', (["info.manifest.bootstrapper['workspace']", 'ova_name'], {}), "(info.manifest.bootstrapper['workspace'], ova_name)\n", (495, 546), False, 'import os\n'), ((552, 576), 'os.path.exists', 'os.path.exists', (['ova_path'], {}), '(ova_path)\n', (566, 576), False, 'import os\n'), ((1086, 1121), 'os.path.join', 'os.path.join', (['info.workspace', '"""ova"""'], {}), "(info.workspace, 'ova')\n", (1098, 1121), False, 'import os\n'), ((1124, 1153), 'os.mkdir', 'os.mkdir', (["info._ova['folder']"], {}), "(info._ova['folder'])\n", (1132, 1153), False, 'import os\n'), ((1528, 1572), 'os.path.join', 'os.path.join', (["info._ova['folder']", 'disk_name'], {}), "(info._ova['folder'], disk_name)\n", (1540, 1572), False, 'import os\n'), ((1575, 1638), 'bootstrapvz.common.tools.log_check_call', 'log_check_call', (["['ln', '-s', info.volume.image_path, disk_link]"], {}), "(['ln', '-s', info.volume.image_path, disk_link])\n", (1589, 1638), False, 'from bootstrapvz.common.tools import log_check_call\n'), ((1653, 1722), 'os.path.join', 'os.path.join', (["info._ova['folder']", "(info._ova['ova_basename'] + '.ovf')"], {}), "(info._ova['folder'], info._ova['ova_basename'] + '.ovf')\n", (1665, 1722), False, 'import os\n'), ((1794, 1825), 'os.listdir', 'os.listdir', (["info._ova['folder']"], {}), "(info._ova['folder'])\n", (1804, 1825), False, 'import os\n'), ((1828, 1888), 'bootstrapvz.common.tools.log_check_call', 'log_check_call', (["['ovftool', ovf_path, info._ova['ova_path']]"], {}), "(['ovftool', ovf_path, info._ova['ova_path']])\n", (1842, 1888), False, 'from bootstrapvz.common.tools import log_check_call\n'), ((2801, 2836), 'os.path.join', 'os.path.join', (['assets', '"""default.ovf"""'], {}), "(assets, 'default.ovf')\n", (2813, 2836), False, 'import os\n'), ((2989, 3012), 'xml.etree.ElementTree.parse', 'ET.parse', (['template_path'], {}), '(template_path)\n', (2997, 3012), True, 'import xml.etree.ElementTree as ET\n'), ((5851, 5885), 'shutil.rmtree', 'shutil.rmtree', (["info._ova['folder']"], {}), "(info._ova['folder'])\n", (5864, 5885), False, 'import shutil\n'), ((736, 750), 'bootstrapvz.common.exceptions.TaskError', 'TaskError', (['msg'], {}), '(msg)\n', (745, 750), False, 'from bootstrapvz.common.exceptions import TaskError\n'), ((4952, 4964), 'uuid.uuid4', 'uuid.uuid4', ([], {}), '()\n', (4962, 4964), False, 'import uuid\n'), ((1360, 1385), 'random.randrange', 'random.randrange', (['(16 ** 6)'], {}), '(16 ** 6)\n', (1376, 1385), False, 'import random\n'), ((1926, 1953), 'logging.getLogger', 'logging.getLogger', (['__name__'], {}), '(__name__)\n', (1943, 1953), False, 'import logging\n'), ((5153, 5167), 'datetime.datetime.now', 'datetime.now', ([], {}), '()\n', (5165, 5167), False, 'from datetime import datetime\n')] |
PhilippJunk/homelette | docs/conf.py | d6e585a215d7eef75ef6c837d1faf2d0ad8025c1 | # Configuration file for the Sphinx documentation builder.
#
# This file only contains a selection of the most common options. For a full
# list see the documentation:
# https://www.sphinx-doc.org/en/master/usage/configuration.html
# -- Path setup --------------------------------------------------------------
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
#
import os
import shutil
import sys
sys.path.insert(0, os.path.abspath('..'))
# -- Project information -----------------------------------------------------
project = 'homelette'
copyright = '2021, Philipp Junk, Christina Kiel'
author = 'Philipp Junk, Christina Kiel'
# -- General configuration ---------------------------------------------------
# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
# ones.
extensions = [
'sphinx.ext.autodoc',
'sphinx.ext.napoleon',
'nbsphinx',
'sphinx_rtd_theme',
]
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
# This pattern also affects html_static_path and html_extra_path.
exclude_patterns = ['_build', 'Thumbs.db', '.DS_Store']
# -- Options for HTML output -------------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
#
html_theme = 'sphinx_rtd_theme'
html_logo = 'logo.png'
html_theme_options = {
'logo_only': False,
'style_nav_header_background': '#000000',
}
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
# html_static_path = ['_static']
# -- Options for LaTex output ------------------------------------------------
latex_elements = {
'preamble': r'''
\setcounter{tocdepth}{1}
\renewcommand{\hyperref}[2][]{#2}
'''
}
# -- Extension configuration: autodoc ----------------------------------------
autodoc_default_options = {
'member-order': 'bysource',
}
autoclass_content = 'class'
autodoc_mock_imports = ['altmod', 'modeller', 'ost', 'promod3', 'qmean',
'pandas']
# -- Extension configuration: napoleon ---------------------------------------
napoleon_use_ivar = True
# -- Copy notebooks to include in the documentation --------------------------
notebooks = [
'../examples/Tutorial1_Basics.ipynb',
'../examples/Tutorial2_Modelling.ipynb',
'../examples/Tutorial3_Evaluation.ipynb',
'../examples/Tutorial4_ExtendingHomelette.ipynb',
'../examples/Tutorial5_Parallelization.ipynb',
'../examples/Tutorial6_ComplexModelling.ipynb',
'../examples/Tutorial7_AssemblingPipelines.ipynb',
'../examples/Tutorial8_AlignmentGeneration.ipynb',
]
for notebook in notebooks:
if os.path.exists(notebook):
shutil.copy(notebook, '.')
# -- Copy logo ---------------------------------------------------------------
if os.path.exists('../logo/logo.png'):
shutil.copy('../logo/logo.png', '.')
| [((3421, 3455), 'os.path.exists', 'os.path.exists', (['"""../logo/logo.png"""'], {}), "('../logo/logo.png')\n", (3435, 3455), False, 'import os\n'), ((607, 628), 'os.path.abspath', 'os.path.abspath', (['""".."""'], {}), "('..')\n", (622, 628), False, 'import os\n'), ((3276, 3300), 'os.path.exists', 'os.path.exists', (['notebook'], {}), '(notebook)\n', (3290, 3300), False, 'import os\n'), ((3461, 3497), 'shutil.copy', 'shutil.copy', (['"""../logo/logo.png"""', '"""."""'], {}), "('../logo/logo.png', '.')\n", (3472, 3497), False, 'import shutil\n'), ((3310, 3336), 'shutil.copy', 'shutil.copy', (['notebook', '"""."""'], {}), "(notebook, '.')\n", (3321, 3336), False, 'import shutil\n')] |
Cologler/bytecode2ast-python | bytecode2ast/parsers/bases.py | 407b261a493e018bc86388040ddfb6fb0e4b96d9 | # -*- coding: utf-8 -*-
#
# Copyright (c) 2019~2999 - Cologler <[email protected]>
# ----------
# some object for parser
# ----------
from typing import List
import enum
import dis
from collections import defaultdict
class ID:
def __init__(self, name):
        self._name = name  # a name used for debugging
def __repr__(self):
return f'ID({self._name})'
def __str__(self):
return repr(self)
class Scope(enum.IntEnum):
NONE = enum.auto()
LOOP = enum.auto()
WITH = enum.auto()
EXCEPT = enum.auto()
FINALLY = enum.auto()
class CodeState:
def __init__(self, *, scope=Scope.NONE):
self._ast_stack = []
self._load_stack = []
self._scope = scope
self._state: dict = None if scope is Scope.NONE else {}
        self._blocks = [[]]  # ensure there is always a last block
self._instrs = [] # all handled instrs in this state
def __repr__(self):
return f'b({self._blocks!r}), l({self._load_stack!r})'
@property
def scope(self):
return self._scope
# state
@property
def state(self):
return self._state
def add_state(self, id, value):
        ''' add a state, also ensure it does not already exist. '''
assert id not in self._state
self._state[id] = value
# instrs
def add_instr(self, instr: dis.Instruction):
''' add a handled instruction in this state '''
self._instrs.append(instr)
def get_instrs(self, key=None) -> List[dis.Instruction]:
''' get all instructions by key from this state '''
if key is None:
return self._instrs.copy()
else:
return [i for i in self._instrs if i.opcode == key or i.opname == key]
def copy(self):
''' copy a `CodeState` '''
state = CodeState()
state._load_stack = self._load_stack.copy()
state._ast_stack = self._ast_stack.copy()
return state
def copy_with_load(self, load_count):
''' copy a `CodeState` with empty ast stack. '''
state = CodeState()
state._load_stack = self._load_stack[-load_count:]
return state
def push(self, node):
''' push a node into load stack. '''
self._load_stack.append(node)
def pop(self):
''' pop the top node from load stack. '''
return self._load_stack.pop()
def pop_seq(self, count: int) -> list:
''' pop a list of top nodes from load stack. '''
assert count >= 0
if count > 0:
items = self._load_stack[-count:]
self._load_stack = self._load_stack[0:-count]
return items
else:
return []
def dup_top(self):
        ''' duplicate the top item of the load stack once. '''
self._load_stack.append(self._load_stack[-1])
def store(self, node):
''' store a node '''
self.add_node(node)
def add_node(self, node):
''' add a final node into ast stmt tree '''
self._blocks[-1].append(node)
def get_value(self) -> list:
''' get stmts from single block. '''
# ensure all status was handled
assert not self._state, self._state
assert not self._load_stack, self._load_stack
# get value
assert len(self._blocks) == 1, self._blocks
return self._blocks[-1]
def new_block(self):
''' make a new stmts block '''
self._blocks.append([])
def get_blocks(self) -> list:
''' get all stmts blocks. '''
# ensure all status was handled
assert not self._state, self._state
assert not self._load_stack, self._load_stack
# get value
return self._blocks
def get_block_count(self) -> int:
''' get count of stmts blocks. '''
return len(self._blocks)
class CodeReaderIter:
__slots__ = ('_reader', '_condition')
def __init__(self, reader, condition):
self._reader: CodeReader = reader
self._condition = condition
def __iter__(self):
while self._condition():
yield self._reader.pop()
def fill_state(self, state: CodeState):
''' iter self into the `CodeState` and return it. '''
for instr in self:
handler = get_instr_handler(instr)
handler(self._reader, state, instr)
state.add_instr(instr)
return state
def get_state(self, *, scope=Scope.NONE):
''' iter self into a new `CodeState`, return the `CodeState` '''
state = CodeState(scope=scope)
return self.fill_state(state)
def get_value(self, *, scope=Scope.NONE):
''' iter self into a new `CodeState`, return value from `CodeState`. '''
return self.get_state(scope=scope).get_value()
def get_blocks(self, *, scope=Scope.NONE):
''' iter self into a new `CodeState`, return blocks from `CodeState`. '''
return self.get_state(scope=scope).get_blocks()
class CodeReader:
def __init__(self, instructions):
        # store the instructions reversed so that pop() from the end is fast
self._instructions = list(reversed(instructions))
self._lineno = None
def __bool__(self):
return bool(self._instructions)
def __repr__(self):
return repr(list(reversed(self._instructions)))
@property
def co_consts(self):
return self._co_consts
def get_instrs_count(self) -> int:
return len(self._instructions)
def get_lineno(self) -> int:
return self._lineno
def peek(self) -> dis.Instruction:
''' peek one instr '''
if not self._instructions:
return None
return self._instructions[-1]
def pop(self) -> dis.Instruction:
''' pop one instr '''
instr = self._instructions.pop()
if instr.starts_line is not None:
self._lineno = instr.starts_line
return instr
def pop_assert(self, opcode: int) -> dis.Instruction:
instr = self.pop()
assert instr.opcode == opcode
return instr
def pop_if(self, opcode: int) -> dis.Instruction:
if self._instructions and self._instructions[-1].opcode == opcode:
return self.pop()
# read methods
def read_until_end(self):
''' read until reader end. '''
return CodeReaderIter(self, lambda: self)
def read_until_offset(self, offset: int):
        ''' read until reaching the given offset. '''
return CodeReaderIter(self, lambda: self.peek().offset != offset)
def read_until_opcodes(self, *opcodes):
        ''' read until one of the given opcodes is encountered. '''
return CodeReaderIter(self, lambda: self.peek().opcode not in opcodes)
def read_until_count(self, count: int):
        ''' read until `count` instructions have been handled. '''
end_count = self.get_instrs_count() - count
return CodeReaderIter(self, lambda: self.get_instrs_count() > end_count)
def read_until_scoped_count(self, count: int):
        ''' read until `count` instructions have been handled in the current scope. '''
if count <= 0:
raise ValueError(count)
def cond():
nonlocal count
count -= 1
return count >= 0
return CodeReaderIter(self, cond)
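# Illustrative sketch, not code from this module: the classes above suggest that a
# caller wraps the output of `dis` in a CodeReader and drains it into a CodeState,
# roughly like the following (`some_code_object` is a placeholder name, not something
# defined in this package):
#
#   reader = CodeReader(list(dis.get_instructions(some_code_object)))
#   stmts = reader.read_until_end().get_value()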
_OPCODE_MAP = {}
def op(opname, opcode, **kwargs):
def wrapper(func):
def func_wrapper(reader, state, instr: dis.Instruction):
func(reader, state, instr, **kwargs)
        assert (opname, opcode) not in _OPCODE_MAP
_OPCODE_MAP[(opname, opcode)] = func_wrapper
return func
return wrapper
def get_instr_handler(instr):
'''
    return the handler for `instr`; the returned function has signature `(reader, state, instr) -> None`
'''
k = (instr.opname, instr.opcode)
try:
return _OPCODE_MAP[k]
except KeyError:
raise NotImplementedError(k, instr)
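# Illustrative sketch, not part of this module: handler modules elsewhere in the
# package presumably register per-opcode handlers through the `op` decorator above.
# The opcode pair and the handler body below are assumptions for illustration only.
#
#   @op('LOAD_CONST', 100)
#   def on_load_const(reader: CodeReader, state: CodeState, instr: dis.Instruction):
#       # push the constant onto the load stack for a later consumer to pop
#       state.push(instr.argval)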
| [((458, 469), 'enum.auto', 'enum.auto', ([], {}), '()\n', (467, 469), False, 'import enum\n'), ((481, 492), 'enum.auto', 'enum.auto', ([], {}), '()\n', (490, 492), False, 'import enum\n'), ((504, 515), 'enum.auto', 'enum.auto', ([], {}), '()\n', (513, 515), False, 'import enum\n'), ((529, 540), 'enum.auto', 'enum.auto', ([], {}), '()\n', (538, 540), False, 'import enum\n'), ((555, 566), 'enum.auto', 'enum.auto', ([], {}), '()\n', (564, 566), False, 'import enum\n')] |
orphanedgamboa/netbox | netbox/extras/forms.py | 5cdc38ec3adb5278480b267a6c8e674e9d3fca39 | from django import forms
from django.contrib.auth.models import User
from django.contrib.contenttypes.models import ContentType
from django.utils.safestring import mark_safe
from django.utils.translation import gettext as _
from dcim.models import DeviceRole, DeviceType, Platform, Region, Site, SiteGroup
from tenancy.models import Tenant, TenantGroup
from utilities.forms import (
add_blank_choice, APISelectMultiple, BootstrapMixin, BulkEditForm, BulkEditNullBooleanSelect, ColorSelect,
CommentField, ContentTypeMultipleChoiceField, CSVModelForm, DateTimePicker, DynamicModelMultipleChoiceField,
JSONField, SlugField, StaticSelect2, BOOLEAN_WITH_BLANK_CHOICES,
)
from virtualization.models import Cluster, ClusterGroup
from .choices import *
from .models import ConfigContext, CustomField, ImageAttachment, JournalEntry, ObjectChange, Tag
from .utils import FeatureQuery
#
# Custom fields
#
class CustomFieldForm(forms.Form):
"""
Extend Form to include custom field support.
"""
model = None
def __init__(self, *args, **kwargs):
if self.model is None:
raise NotImplementedError("CustomFieldForm must specify a model class.")
self.custom_fields = []
super().__init__(*args, **kwargs)
# Append relevant custom fields to the form instance
obj_type = ContentType.objects.get_for_model(self.model)
for cf in CustomField.objects.filter(content_types=obj_type):
field_name = 'cf_{}'.format(cf.name)
self.fields[field_name] = cf.to_form_field()
# Annotate the field in the list of CustomField form fields
self.custom_fields.append(field_name)
class CustomFieldModelForm(forms.ModelForm):
"""
Extend ModelForm to include custom field support.
"""
def __init__(self, *args, **kwargs):
self.obj_type = ContentType.objects.get_for_model(self._meta.model)
self.custom_fields = []
super().__init__(*args, **kwargs)
self._append_customfield_fields()
def _append_customfield_fields(self):
"""
Append form fields for all CustomFields assigned to this model.
"""
        # Append form fields; assign initial values if modifying an existing object
for cf in CustomField.objects.filter(content_types=self.obj_type):
field_name = 'cf_{}'.format(cf.name)
if self.instance.pk:
self.fields[field_name] = cf.to_form_field(set_initial=False)
self.fields[field_name].initial = self.instance.custom_field_data.get(cf.name)
else:
self.fields[field_name] = cf.to_form_field()
# Annotate the field in the list of CustomField form fields
self.custom_fields.append(field_name)
def clean(self):
# Save custom field data on instance
for cf_name in self.custom_fields:
self.instance.custom_field_data[cf_name[3:]] = self.cleaned_data.get(cf_name)
return super().clean()
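# Illustrative sketch (an assumption, not NetBox's actual code): a concrete object
# form is expected to subclass CustomFieldModelForm and point Meta.model at the
# model whose ContentType carries the custom fields, e.g. roughly:
#
#   class SiteForm(CustomFieldModelForm):
#       class Meta:
#           model = Site
#           fields = ('name', 'slug', 'status')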
class CustomFieldModelCSVForm(CSVModelForm, CustomFieldModelForm):
def _append_customfield_fields(self):
# Append form fields
for cf in CustomField.objects.filter(content_types=self.obj_type):
field_name = 'cf_{}'.format(cf.name)
self.fields[field_name] = cf.to_form_field(for_csv_import=True)
# Annotate the field in the list of CustomField form fields
self.custom_fields.append(field_name)
class CustomFieldBulkEditForm(BulkEditForm):
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
self.custom_fields = []
self.obj_type = ContentType.objects.get_for_model(self.model)
# Add all applicable CustomFields to the form
custom_fields = CustomField.objects.filter(content_types=self.obj_type)
for cf in custom_fields:
# Annotate non-required custom fields as nullable
if not cf.required:
self.nullable_fields.append(cf.name)
self.fields[cf.name] = cf.to_form_field(set_initial=False, enforce_required=False)
# Annotate this as a custom field
self.custom_fields.append(cf.name)
class CustomFieldFilterForm(forms.Form):
def __init__(self, *args, **kwargs):
self.obj_type = ContentType.objects.get_for_model(self.model)
super().__init__(*args, **kwargs)
# Add all applicable CustomFields to the form
custom_fields = CustomField.objects.filter(content_types=self.obj_type).exclude(
filter_logic=CustomFieldFilterLogicChoices.FILTER_DISABLED
)
for cf in custom_fields:
field_name = 'cf_{}'.format(cf.name)
self.fields[field_name] = cf.to_form_field(set_initial=True, enforce_required=False)
#
# Tags
#
class TagForm(BootstrapMixin, forms.ModelForm):
slug = SlugField()
class Meta:
model = Tag
fields = [
'name', 'slug', 'color', 'description'
]
fieldsets = (
('Tag', ('name', 'slug', 'color', 'description')),
)
class TagCSVForm(CSVModelForm):
slug = SlugField()
class Meta:
model = Tag
fields = Tag.csv_headers
help_texts = {
'color': mark_safe('RGB color in hexadecimal (e.g. <code>00ff00</code>)'),
}
class AddRemoveTagsForm(forms.Form):
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
# Add add/remove tags fields
self.fields['add_tags'] = DynamicModelMultipleChoiceField(
queryset=Tag.objects.all(),
required=False
)
self.fields['remove_tags'] = DynamicModelMultipleChoiceField(
queryset=Tag.objects.all(),
required=False
)
class TagFilterForm(BootstrapMixin, forms.Form):
model = Tag
q = forms.CharField(
required=False,
label=_('Search')
)
content_type_id = ContentTypeMultipleChoiceField(
queryset=ContentType.objects.filter(FeatureQuery('tags').get_query()),
required=False,
label=_('Tagged object type')
)
class TagBulkEditForm(BootstrapMixin, BulkEditForm):
pk = forms.ModelMultipleChoiceField(
queryset=Tag.objects.all(),
widget=forms.MultipleHiddenInput
)
color = forms.CharField(
max_length=6,
required=False,
widget=ColorSelect()
)
description = forms.CharField(
max_length=200,
required=False
)
class Meta:
nullable_fields = ['description']
#
# Config contexts
#
class ConfigContextForm(BootstrapMixin, forms.ModelForm):
regions = DynamicModelMultipleChoiceField(
queryset=Region.objects.all(),
required=False
)
site_groups = DynamicModelMultipleChoiceField(
queryset=SiteGroup.objects.all(),
required=False
)
sites = DynamicModelMultipleChoiceField(
queryset=Site.objects.all(),
required=False
)
device_types = DynamicModelMultipleChoiceField(
queryset=DeviceType.objects.all(),
required=False
)
roles = DynamicModelMultipleChoiceField(
queryset=DeviceRole.objects.all(),
required=False
)
platforms = DynamicModelMultipleChoiceField(
queryset=Platform.objects.all(),
required=False
)
cluster_groups = DynamicModelMultipleChoiceField(
queryset=ClusterGroup.objects.all(),
required=False
)
clusters = DynamicModelMultipleChoiceField(
queryset=Cluster.objects.all(),
required=False
)
tenant_groups = DynamicModelMultipleChoiceField(
queryset=TenantGroup.objects.all(),
required=False
)
tenants = DynamicModelMultipleChoiceField(
queryset=Tenant.objects.all(),
required=False
)
tags = DynamicModelMultipleChoiceField(
queryset=Tag.objects.all(),
required=False
)
data = JSONField(
label=''
)
class Meta:
model = ConfigContext
fields = (
'name', 'weight', 'description', 'is_active', 'regions', 'site_groups', 'sites', 'roles', 'device_types',
'platforms', 'cluster_groups', 'clusters', 'tenant_groups', 'tenants', 'tags', 'data',
)
class ConfigContextBulkEditForm(BootstrapMixin, BulkEditForm):
pk = forms.ModelMultipleChoiceField(
queryset=ConfigContext.objects.all(),
widget=forms.MultipleHiddenInput
)
weight = forms.IntegerField(
required=False,
min_value=0
)
is_active = forms.NullBooleanField(
required=False,
widget=BulkEditNullBooleanSelect()
)
description = forms.CharField(
required=False,
max_length=100
)
class Meta:
nullable_fields = [
'description',
]
class ConfigContextFilterForm(BootstrapMixin, forms.Form):
field_order = [
'q', 'region_id', 'site_group_id', 'site_id', 'role_id', 'platform_id', 'cluster_group_id', 'cluster_id',
'tenant_group_id', 'tenant_id',
]
q = forms.CharField(
required=False,
label=_('Search')
)
region_id = DynamicModelMultipleChoiceField(
queryset=Region.objects.all(),
required=False,
label=_('Regions')
)
site_group_id = DynamicModelMultipleChoiceField(
queryset=SiteGroup.objects.all(),
required=False,
label=_('Site groups')
)
site_id = DynamicModelMultipleChoiceField(
queryset=Site.objects.all(),
required=False,
label=_('Sites')
)
device_type_id = DynamicModelMultipleChoiceField(
queryset=DeviceType.objects.all(),
required=False,
label=_('Device types')
)
role_id = DynamicModelMultipleChoiceField(
queryset=DeviceRole.objects.all(),
required=False,
label=_('Roles')
)
platform_id = DynamicModelMultipleChoiceField(
queryset=Platform.objects.all(),
required=False,
label=_('Platforms')
)
cluster_group_id = DynamicModelMultipleChoiceField(
queryset=ClusterGroup.objects.all(),
required=False,
label=_('Cluster groups')
)
cluster_id = DynamicModelMultipleChoiceField(
queryset=Cluster.objects.all(),
required=False,
label=_('Clusters')
)
tenant_group_id = DynamicModelMultipleChoiceField(
queryset=TenantGroup.objects.all(),
required=False,
label=_('Tenant groups')
)
tenant_id = DynamicModelMultipleChoiceField(
queryset=Tenant.objects.all(),
required=False,
label=_('Tenant')
)
tag = DynamicModelMultipleChoiceField(
queryset=Tag.objects.all(),
to_field_name='slug',
required=False,
label=_('Tags')
)
#
# Filter form for local config context data
#
class LocalConfigContextFilterForm(forms.Form):
local_context_data = forms.NullBooleanField(
required=False,
label=_('Has local config context data'),
widget=StaticSelect2(
choices=BOOLEAN_WITH_BLANK_CHOICES
)
)
#
# Image attachments
#
class ImageAttachmentForm(BootstrapMixin, forms.ModelForm):
class Meta:
model = ImageAttachment
fields = [
'name', 'image',
]
#
# Journal entries
#
class JournalEntryForm(BootstrapMixin, forms.ModelForm):
comments = CommentField()
class Meta:
model = JournalEntry
fields = ['assigned_object_type', 'assigned_object_id', 'kind', 'comments']
widgets = {
'assigned_object_type': forms.HiddenInput,
'assigned_object_id': forms.HiddenInput,
}
class JournalEntryBulkEditForm(BootstrapMixin, BulkEditForm):
pk = forms.ModelMultipleChoiceField(
queryset=JournalEntry.objects.all(),
widget=forms.MultipleHiddenInput
)
kind = forms.ChoiceField(
choices=JournalEntryKindChoices,
required=False
)
comments = forms.CharField(
required=False,
widget=forms.Textarea()
)
class Meta:
nullable_fields = []
class JournalEntryFilterForm(BootstrapMixin, forms.Form):
model = JournalEntry
q = forms.CharField(
required=False,
label=_('Search')
)
created_after = forms.DateTimeField(
required=False,
label=_('After'),
widget=DateTimePicker()
)
created_before = forms.DateTimeField(
required=False,
label=_('Before'),
widget=DateTimePicker()
)
created_by_id = DynamicModelMultipleChoiceField(
queryset=User.objects.all(),
required=False,
label=_('User'),
widget=APISelectMultiple(
api_url='/api/users/users/',
)
)
assigned_object_type_id = DynamicModelMultipleChoiceField(
queryset=ContentType.objects.all(),
required=False,
label=_('Object Type'),
widget=APISelectMultiple(
api_url='/api/extras/content-types/',
)
)
kind = forms.ChoiceField(
choices=add_blank_choice(JournalEntryKindChoices),
required=False,
widget=StaticSelect2()
)
#
# Change logging
#
class ObjectChangeFilterForm(BootstrapMixin, forms.Form):
model = ObjectChange
q = forms.CharField(
required=False,
label=_('Search')
)
time_after = forms.DateTimeField(
required=False,
label=_('After'),
widget=DateTimePicker()
)
time_before = forms.DateTimeField(
required=False,
label=_('Before'),
widget=DateTimePicker()
)
action = forms.ChoiceField(
choices=add_blank_choice(ObjectChangeActionChoices),
required=False,
widget=StaticSelect2()
)
user_id = DynamicModelMultipleChoiceField(
queryset=User.objects.all(),
required=False,
label=_('User'),
widget=APISelectMultiple(
api_url='/api/users/users/',
)
)
changed_object_type_id = DynamicModelMultipleChoiceField(
queryset=ContentType.objects.all(),
required=False,
label=_('Object Type'),
widget=APISelectMultiple(
api_url='/api/extras/content-types/',
)
)
#
# Scripts
#
class ScriptForm(BootstrapMixin, forms.Form):
_commit = forms.BooleanField(
required=False,
initial=True,
label="Commit changes",
help_text="Commit changes to the database (uncheck for a dry-run)"
)
def __init__(self, *args, **kwargs):
super().__init__(*args, **kwargs)
# Move _commit to the end of the form
commit = self.fields.pop('_commit')
self.fields['_commit'] = commit
@property
def requires_input(self):
"""
A boolean indicating whether the form requires user input (ignore the _commit field).
"""
return bool(len(self.fields) > 1)
| [((4906, 4917), 'utilities.forms.SlugField', 'SlugField', ([], {}), '()\n', (4915, 4917), False, 'from utilities.forms import add_blank_choice, APISelectMultiple, BootstrapMixin, BulkEditForm, BulkEditNullBooleanSelect, ColorSelect, CommentField, ContentTypeMultipleChoiceField, CSVModelForm, DateTimePicker, DynamicModelMultipleChoiceField, JSONField, SlugField, StaticSelect2, BOOLEAN_WITH_BLANK_CHOICES\n'), ((5175, 5186), 'utilities.forms.SlugField', 'SlugField', ([], {}), '()\n', (5184, 5186), False, 'from utilities.forms import add_blank_choice, APISelectMultiple, BootstrapMixin, BulkEditForm, BulkEditNullBooleanSelect, ColorSelect, CommentField, ContentTypeMultipleChoiceField, CSVModelForm, DateTimePicker, DynamicModelMultipleChoiceField, JSONField, SlugField, StaticSelect2, BOOLEAN_WITH_BLANK_CHOICES\n'), ((6485, 6532), 'django.forms.CharField', 'forms.CharField', ([], {'max_length': '(200)', 'required': '(False)'}), '(max_length=200, required=False)\n', (6500, 6532), False, 'from django import forms\n'), ((8011, 8030), 'utilities.forms.JSONField', 'JSONField', ([], {'label': '""""""'}), "(label='')\n", (8020, 8030), False, 'from utilities.forms import add_blank_choice, APISelectMultiple, BootstrapMixin, BulkEditForm, BulkEditNullBooleanSelect, ColorSelect, CommentField, ContentTypeMultipleChoiceField, CSVModelForm, DateTimePicker, DynamicModelMultipleChoiceField, JSONField, SlugField, StaticSelect2, BOOLEAN_WITH_BLANK_CHOICES\n'), ((8550, 8597), 'django.forms.IntegerField', 'forms.IntegerField', ([], {'required': '(False)', 'min_value': '(0)'}), '(required=False, min_value=0)\n', (8568, 8597), False, 'from django import forms\n'), ((8751, 8798), 'django.forms.CharField', 'forms.CharField', ([], {'required': '(False)', 'max_length': '(100)'}), '(required=False, max_length=100)\n', (8766, 8798), False, 'from django import forms\n'), ((11508, 11522), 'utilities.forms.CommentField', 'CommentField', ([], {}), '()\n', (11520, 11522), False, 'from utilities.forms import add_blank_choice, APISelectMultiple, BootstrapMixin, BulkEditForm, BulkEditNullBooleanSelect, ColorSelect, CommentField, ContentTypeMultipleChoiceField, CSVModelForm, DateTimePicker, DynamicModelMultipleChoiceField, JSONField, SlugField, StaticSelect2, BOOLEAN_WITH_BLANK_CHOICES\n'), ((11999, 12065), 'django.forms.ChoiceField', 'forms.ChoiceField', ([], {'choices': 'JournalEntryKindChoices', 'required': '(False)'}), '(choices=JournalEntryKindChoices, required=False)\n', (12016, 12065), False, 'from django import forms\n'), ((14456, 14600), 'django.forms.BooleanField', 'forms.BooleanField', ([], {'required': '(False)', 'initial': '(True)', 'label': '"""Commit changes"""', 'help_text': '"""Commit changes to the database (uncheck for a dry-run)"""'}), "(required=False, initial=True, label='Commit changes',\n help_text='Commit changes to the database (uncheck for a dry-run)')\n", (14474, 14600), False, 'from django import forms\n'), ((1341, 1386), 'django.contrib.contenttypes.models.ContentType.objects.get_for_model', 'ContentType.objects.get_for_model', (['self.model'], {}), '(self.model)\n', (1374, 1386), False, 'from django.contrib.contenttypes.models import ContentType\n'), ((1869, 1920), 'django.contrib.contenttypes.models.ContentType.objects.get_for_model', 'ContentType.objects.get_for_model', (['self._meta.model'], {}), '(self._meta.model)\n', (1902, 1920), False, 'from django.contrib.contenttypes.models import ContentType\n'), ((3681, 3726), 'django.contrib.contenttypes.models.ContentType.objects.get_for_model', 
'ContentType.objects.get_for_model', (['self.model'], {}), '(self.model)\n', (3714, 3726), False, 'from django.contrib.contenttypes.models import ContentType\n'), ((4340, 4385), 'django.contrib.contenttypes.models.ContentType.objects.get_for_model', 'ContentType.objects.get_for_model', (['self.model'], {}), '(self.model)\n', (4373, 4385), False, 'from django.contrib.contenttypes.models import ContentType\n'), ((5301, 5365), 'django.utils.safestring.mark_safe', 'mark_safe', (['"""RGB color in hexadecimal (e.g. <code>00ff00</code>)"""'], {}), "('RGB color in hexadecimal (e.g. <code>00ff00</code>)')\n", (5310, 5365), False, 'from django.utils.safestring import mark_safe\n'), ((5959, 5970), 'django.utils.translation.gettext', '_', (['"""Search"""'], {}), "('Search')\n", (5960, 5970), True, 'from django.utils.translation import gettext as _\n'), ((6148, 6171), 'django.utils.translation.gettext', '_', (['"""Tagged object type"""'], {}), "('Tagged object type')\n", (6149, 6171), True, 'from django.utils.translation import gettext as _\n'), ((6447, 6460), 'utilities.forms.ColorSelect', 'ColorSelect', ([], {}), '()\n', (6458, 6460), False, 'from utilities.forms import add_blank_choice, APISelectMultiple, BootstrapMixin, BulkEditForm, BulkEditNullBooleanSelect, ColorSelect, CommentField, ContentTypeMultipleChoiceField, CSVModelForm, DateTimePicker, DynamicModelMultipleChoiceField, JSONField, SlugField, StaticSelect2, BOOLEAN_WITH_BLANK_CHOICES\n'), ((6761, 6781), 'dcim.models.Region.objects.all', 'Region.objects.all', ([], {}), '()\n', (6779, 6781), False, 'from dcim.models import DeviceRole, DeviceType, Platform, Region, Site, SiteGroup\n'), ((6880, 6903), 'dcim.models.SiteGroup.objects.all', 'SiteGroup.objects.all', ([], {}), '()\n', (6901, 6903), False, 'from dcim.models import DeviceRole, DeviceType, Platform, Region, Site, SiteGroup\n'), ((6996, 7014), 'dcim.models.Site.objects.all', 'Site.objects.all', ([], {}), '()\n', (7012, 7014), False, 'from dcim.models import DeviceRole, DeviceType, Platform, Region, Site, SiteGroup\n'), ((7114, 7138), 'dcim.models.DeviceType.objects.all', 'DeviceType.objects.all', ([], {}), '()\n', (7136, 7138), False, 'from dcim.models import DeviceRole, DeviceType, Platform, Region, Site, SiteGroup\n'), ((7231, 7255), 'dcim.models.DeviceRole.objects.all', 'DeviceRole.objects.all', ([], {}), '()\n', (7253, 7255), False, 'from dcim.models import DeviceRole, DeviceType, Platform, Region, Site, SiteGroup\n'), ((7352, 7374), 'dcim.models.Platform.objects.all', 'Platform.objects.all', ([], {}), '()\n', (7372, 7374), False, 'from dcim.models import DeviceRole, DeviceType, Platform, Region, Site, SiteGroup\n'), ((7476, 7502), 'virtualization.models.ClusterGroup.objects.all', 'ClusterGroup.objects.all', ([], {}), '()\n', (7500, 7502), False, 'from virtualization.models import Cluster, ClusterGroup\n'), ((7598, 7619), 'virtualization.models.Cluster.objects.all', 'Cluster.objects.all', ([], {}), '()\n', (7617, 7619), False, 'from virtualization.models import Cluster, ClusterGroup\n'), ((7720, 7745), 'tenancy.models.TenantGroup.objects.all', 'TenantGroup.objects.all', ([], {}), '()\n', (7743, 7745), False, 'from tenancy.models import Tenant, TenantGroup\n'), ((7840, 7860), 'tenancy.models.Tenant.objects.all', 'Tenant.objects.all', ([], {}), '()\n', (7858, 7860), False, 'from tenancy.models import Tenant, TenantGroup\n'), ((8699, 8726), 'utilities.forms.BulkEditNullBooleanSelect', 'BulkEditNullBooleanSelect', ([], {}), '()\n', (8724, 8726), False, 'from utilities.forms import 
add_blank_choice, APISelectMultiple, BootstrapMixin, BulkEditForm, BulkEditNullBooleanSelect, ColorSelect, CommentField, ContentTypeMultipleChoiceField, CSVModelForm, DateTimePicker, DynamicModelMultipleChoiceField, JSONField, SlugField, StaticSelect2, BOOLEAN_WITH_BLANK_CHOICES\n'), ((9207, 9218), 'django.utils.translation.gettext', '_', (['"""Search"""'], {}), "('Search')\n", (9208, 9218), True, 'from django.utils.translation import gettext as _\n'), ((9291, 9311), 'dcim.models.Region.objects.all', 'Region.objects.all', ([], {}), '()\n', (9309, 9311), False, 'from dcim.models import DeviceRole, DeviceType, Platform, Region, Site, SiteGroup\n'), ((9351, 9363), 'django.utils.translation.gettext', '_', (['"""Regions"""'], {}), "('Regions')\n", (9352, 9363), True, 'from django.utils.translation import gettext as _\n'), ((9440, 9463), 'dcim.models.SiteGroup.objects.all', 'SiteGroup.objects.all', ([], {}), '()\n', (9461, 9463), False, 'from dcim.models import DeviceRole, DeviceType, Platform, Region, Site, SiteGroup\n'), ((9503, 9519), 'django.utils.translation.gettext', '_', (['"""Site groups"""'], {}), "('Site groups')\n", (9504, 9519), True, 'from django.utils.translation import gettext as _\n'), ((9590, 9608), 'dcim.models.Site.objects.all', 'Site.objects.all', ([], {}), '()\n', (9606, 9608), False, 'from dcim.models import DeviceRole, DeviceType, Platform, Region, Site, SiteGroup\n'), ((9648, 9658), 'django.utils.translation.gettext', '_', (['"""Sites"""'], {}), "('Sites')\n", (9649, 9658), True, 'from django.utils.translation import gettext as _\n'), ((9736, 9760), 'dcim.models.DeviceType.objects.all', 'DeviceType.objects.all', ([], {}), '()\n', (9758, 9760), False, 'from dcim.models import DeviceRole, DeviceType, Platform, Region, Site, SiteGroup\n'), ((9800, 9817), 'django.utils.translation.gettext', '_', (['"""Device types"""'], {}), "('Device types')\n", (9801, 9817), True, 'from django.utils.translation import gettext as _\n'), ((9888, 9912), 'dcim.models.DeviceRole.objects.all', 'DeviceRole.objects.all', ([], {}), '()\n', (9910, 9912), False, 'from dcim.models import DeviceRole, DeviceType, Platform, Region, Site, SiteGroup\n'), ((9952, 9962), 'django.utils.translation.gettext', '_', (['"""Roles"""'], {}), "('Roles')\n", (9953, 9962), True, 'from django.utils.translation import gettext as _\n'), ((10037, 10059), 'dcim.models.Platform.objects.all', 'Platform.objects.all', ([], {}), '()\n', (10057, 10059), False, 'from dcim.models import DeviceRole, DeviceType, Platform, Region, Site, SiteGroup\n'), ((10099, 10113), 'django.utils.translation.gettext', '_', (['"""Platforms"""'], {}), "('Platforms')\n", (10100, 10113), True, 'from django.utils.translation import gettext as _\n'), ((10193, 10219), 'virtualization.models.ClusterGroup.objects.all', 'ClusterGroup.objects.all', ([], {}), '()\n', (10217, 10219), False, 'from virtualization.models import Cluster, ClusterGroup\n'), ((10259, 10278), 'django.utils.translation.gettext', '_', (['"""Cluster groups"""'], {}), "('Cluster groups')\n", (10260, 10278), True, 'from django.utils.translation import gettext as _\n'), ((10352, 10373), 'virtualization.models.Cluster.objects.all', 'Cluster.objects.all', ([], {}), '()\n', (10371, 10373), False, 'from virtualization.models import Cluster, ClusterGroup\n'), ((10413, 10426), 'django.utils.translation.gettext', '_', (['"""Clusters"""'], {}), "('Clusters')\n", (10414, 10426), True, 'from django.utils.translation import gettext as _\n'), ((10505, 10530), 'tenancy.models.TenantGroup.objects.all', 
'TenantGroup.objects.all', ([], {}), '()\n', (10528, 10530), False, 'from tenancy.models import Tenant, TenantGroup\n'), ((10570, 10588), 'django.utils.translation.gettext', '_', (['"""Tenant groups"""'], {}), "('Tenant groups')\n", (10571, 10588), True, 'from django.utils.translation import gettext as _\n'), ((10661, 10681), 'tenancy.models.Tenant.objects.all', 'Tenant.objects.all', ([], {}), '()\n', (10679, 10681), False, 'from tenancy.models import Tenant, TenantGroup\n'), ((10721, 10732), 'django.utils.translation.gettext', '_', (['"""Tenant"""'], {}), "('Tenant')\n", (10722, 10732), True, 'from django.utils.translation import gettext as _\n'), ((10886, 10895), 'django.utils.translation.gettext', '_', (['"""Tags"""'], {}), "('Tags')\n", (10887, 10895), True, 'from django.utils.translation import gettext as _\n'), ((11088, 11122), 'django.utils.translation.gettext', '_', (['"""Has local config context data"""'], {}), "('Has local config context data')\n", (11089, 11122), True, 'from django.utils.translation import gettext as _\n'), ((11139, 11188), 'utilities.forms.StaticSelect2', 'StaticSelect2', ([], {'choices': 'BOOLEAN_WITH_BLANK_CHOICES'}), '(choices=BOOLEAN_WITH_BLANK_CHOICES)\n', (11152, 11188), False, 'from utilities.forms import add_blank_choice, APISelectMultiple, BootstrapMixin, BulkEditForm, BulkEditNullBooleanSelect, ColorSelect, CommentField, ContentTypeMultipleChoiceField, CSVModelForm, DateTimePicker, DynamicModelMultipleChoiceField, JSONField, SlugField, StaticSelect2, BOOLEAN_WITH_BLANK_CHOICES\n'), ((12159, 12175), 'django.forms.Textarea', 'forms.Textarea', ([], {}), '()\n', (12173, 12175), False, 'from django import forms\n'), ((12376, 12387), 'django.utils.translation.gettext', '_', (['"""Search"""'], {}), "('Search')\n", (12377, 12387), True, 'from django.utils.translation import gettext as _\n'), ((12473, 12483), 'django.utils.translation.gettext', '_', (['"""After"""'], {}), "('After')\n", (12474, 12483), True, 'from django.utils.translation import gettext as _\n'), ((12500, 12516), 'utilities.forms.DateTimePicker', 'DateTimePicker', ([], {}), '()\n', (12514, 12516), False, 'from utilities.forms import add_blank_choice, APISelectMultiple, BootstrapMixin, BulkEditForm, BulkEditNullBooleanSelect, ColorSelect, CommentField, ContentTypeMultipleChoiceField, CSVModelForm, DateTimePicker, DynamicModelMultipleChoiceField, JSONField, SlugField, StaticSelect2, BOOLEAN_WITH_BLANK_CHOICES\n'), ((12603, 12614), 'django.utils.translation.gettext', '_', (['"""Before"""'], {}), "('Before')\n", (12604, 12614), True, 'from django.utils.translation import gettext as _\n'), ((12631, 12647), 'utilities.forms.DateTimePicker', 'DateTimePicker', ([], {}), '()\n', (12645, 12647), False, 'from utilities.forms import add_blank_choice, APISelectMultiple, BootstrapMixin, BulkEditForm, BulkEditNullBooleanSelect, ColorSelect, CommentField, ContentTypeMultipleChoiceField, CSVModelForm, DateTimePicker, DynamicModelMultipleChoiceField, JSONField, SlugField, StaticSelect2, BOOLEAN_WITH_BLANK_CHOICES\n'), ((12724, 12742), 'django.contrib.auth.models.User.objects.all', 'User.objects.all', ([], {}), '()\n', (12740, 12742), False, 'from django.contrib.auth.models import User\n'), ((12782, 12791), 'django.utils.translation.gettext', '_', (['"""User"""'], {}), "('User')\n", (12783, 12791), True, 'from django.utils.translation import gettext as _\n'), ((12808, 12854), 'utilities.forms.APISelectMultiple', 'APISelectMultiple', ([], {'api_url': '"""/api/users/users/"""'}), "(api_url='/api/users/users/')\n", 
(12825, 12854), False, 'from utilities.forms import add_blank_choice, APISelectMultiple, BootstrapMixin, BulkEditForm, BulkEditNullBooleanSelect, ColorSelect, CommentField, ContentTypeMultipleChoiceField, CSVModelForm, DateTimePicker, DynamicModelMultipleChoiceField, JSONField, SlugField, StaticSelect2, BOOLEAN_WITH_BLANK_CHOICES\n'), ((12964, 12989), 'django.contrib.contenttypes.models.ContentType.objects.all', 'ContentType.objects.all', ([], {}), '()\n', (12987, 12989), False, 'from django.contrib.contenttypes.models import ContentType\n'), ((13029, 13045), 'django.utils.translation.gettext', '_', (['"""Object Type"""'], {}), "('Object Type')\n", (13030, 13045), True, 'from django.utils.translation import gettext as _\n'), ((13062, 13117), 'utilities.forms.APISelectMultiple', 'APISelectMultiple', ([], {'api_url': '"""/api/extras/content-types/"""'}), "(api_url='/api/extras/content-types/')\n", (13079, 13117), False, 'from utilities.forms import add_blank_choice, APISelectMultiple, BootstrapMixin, BulkEditForm, BulkEditNullBooleanSelect, ColorSelect, CommentField, ContentTypeMultipleChoiceField, CSVModelForm, DateTimePicker, DynamicModelMultipleChoiceField, JSONField, SlugField, StaticSelect2, BOOLEAN_WITH_BLANK_CHOICES\n'), ((13193, 13234), 'utilities.forms.add_blank_choice', 'add_blank_choice', (['JournalEntryKindChoices'], {}), '(JournalEntryKindChoices)\n', (13209, 13234), False, 'from utilities.forms import add_blank_choice, APISelectMultiple, BootstrapMixin, BulkEditForm, BulkEditNullBooleanSelect, ColorSelect, CommentField, ContentTypeMultipleChoiceField, CSVModelForm, DateTimePicker, DynamicModelMultipleChoiceField, JSONField, SlugField, StaticSelect2, BOOLEAN_WITH_BLANK_CHOICES\n'), ((13275, 13290), 'utilities.forms.StaticSelect2', 'StaticSelect2', ([], {}), '()\n', (13288, 13290), False, 'from utilities.forms import add_blank_choice, APISelectMultiple, BootstrapMixin, BulkEditForm, BulkEditNullBooleanSelect, ColorSelect, CommentField, ContentTypeMultipleChoiceField, CSVModelForm, DateTimePicker, DynamicModelMultipleChoiceField, JSONField, SlugField, StaticSelect2, BOOLEAN_WITH_BLANK_CHOICES\n'), ((13467, 13478), 'django.utils.translation.gettext', '_', (['"""Search"""'], {}), "('Search')\n", (13468, 13478), True, 'from django.utils.translation import gettext as _\n'), ((13561, 13571), 'django.utils.translation.gettext', '_', (['"""After"""'], {}), "('After')\n", (13562, 13571), True, 'from django.utils.translation import gettext as _\n'), ((13588, 13604), 'utilities.forms.DateTimePicker', 'DateTimePicker', ([], {}), '()\n', (13602, 13604), False, 'from utilities.forms import add_blank_choice, APISelectMultiple, BootstrapMixin, BulkEditForm, BulkEditNullBooleanSelect, ColorSelect, CommentField, ContentTypeMultipleChoiceField, CSVModelForm, DateTimePicker, DynamicModelMultipleChoiceField, JSONField, SlugField, StaticSelect2, BOOLEAN_WITH_BLANK_CHOICES\n'), ((13688, 13699), 'django.utils.translation.gettext', '_', (['"""Before"""'], {}), "('Before')\n", (13689, 13699), True, 'from django.utils.translation import gettext as _\n'), ((13716, 13732), 'utilities.forms.DateTimePicker', 'DateTimePicker', ([], {}), '()\n', (13730, 13732), False, 'from utilities.forms import add_blank_choice, APISelectMultiple, BootstrapMixin, BulkEditForm, BulkEditNullBooleanSelect, ColorSelect, CommentField, ContentTypeMultipleChoiceField, CSVModelForm, DateTimePicker, DynamicModelMultipleChoiceField, JSONField, SlugField, StaticSelect2, BOOLEAN_WITH_BLANK_CHOICES\n'), ((13787, 13830), 
'utilities.forms.add_blank_choice', 'add_blank_choice', (['ObjectChangeActionChoices'], {}), '(ObjectChangeActionChoices)\n', (13803, 13830), False, 'from utilities.forms import add_blank_choice, APISelectMultiple, BootstrapMixin, BulkEditForm, BulkEditNullBooleanSelect, ColorSelect, CommentField, ContentTypeMultipleChoiceField, CSVModelForm, DateTimePicker, DynamicModelMultipleChoiceField, JSONField, SlugField, StaticSelect2, BOOLEAN_WITH_BLANK_CHOICES\n'), ((13871, 13886), 'utilities.forms.StaticSelect2', 'StaticSelect2', ([], {}), '()\n', (13884, 13886), False, 'from utilities.forms import add_blank_choice, APISelectMultiple, BootstrapMixin, BulkEditForm, BulkEditNullBooleanSelect, ColorSelect, CommentField, ContentTypeMultipleChoiceField, CSVModelForm, DateTimePicker, DynamicModelMultipleChoiceField, JSONField, SlugField, StaticSelect2, BOOLEAN_WITH_BLANK_CHOICES\n'), ((13957, 13975), 'django.contrib.auth.models.User.objects.all', 'User.objects.all', ([], {}), '()\n', (13973, 13975), False, 'from django.contrib.auth.models import User\n'), ((14015, 14024), 'django.utils.translation.gettext', '_', (['"""User"""'], {}), "('User')\n", (14016, 14024), True, 'from django.utils.translation import gettext as _\n'), ((14041, 14087), 'utilities.forms.APISelectMultiple', 'APISelectMultiple', ([], {'api_url': '"""/api/users/users/"""'}), "(api_url='/api/users/users/')\n", (14058, 14087), False, 'from utilities.forms import add_blank_choice, APISelectMultiple, BootstrapMixin, BulkEditForm, BulkEditNullBooleanSelect, ColorSelect, CommentField, ContentTypeMultipleChoiceField, CSVModelForm, DateTimePicker, DynamicModelMultipleChoiceField, JSONField, SlugField, StaticSelect2, BOOLEAN_WITH_BLANK_CHOICES\n'), ((14196, 14221), 'django.contrib.contenttypes.models.ContentType.objects.all', 'ContentType.objects.all', ([], {}), '()\n', (14219, 14221), False, 'from django.contrib.contenttypes.models import ContentType\n'), ((14261, 14277), 'django.utils.translation.gettext', '_', (['"""Object Type"""'], {}), "('Object Type')\n", (14262, 14277), True, 'from django.utils.translation import gettext as _\n'), ((14294, 14349), 'utilities.forms.APISelectMultiple', 'APISelectMultiple', ([], {'api_url': '"""/api/extras/content-types/"""'}), "(api_url='/api/extras/content-types/')\n", (14311, 14349), False, 'from utilities.forms import add_blank_choice, APISelectMultiple, BootstrapMixin, BulkEditForm, BulkEditNullBooleanSelect, ColorSelect, CommentField, ContentTypeMultipleChoiceField, CSVModelForm, DateTimePicker, DynamicModelMultipleChoiceField, JSONField, SlugField, StaticSelect2, BOOLEAN_WITH_BLANK_CHOICES\n')] |
zgjslc/Film-Recovery-master1 | unwarp_models.py | 4497a9930398c9e826ac364056a79e5bcbf6c953 | import torch
import torch.nn as nn
import torch.nn.functional as F
from models.misc import modules
constrain_path = {
('threeD', 'normal'): (True, True, ''),
('threeD', 'depth'): (True, True, ''),
('normal', 'depth'): (True, True, ''),
('depth', 'normal'): (True, True, ''),
}
class UnwarpNet(nn.Module):
def __init__(self, use_simple=False, combine_num=3, use_constrain=True, constrain_configure=None):
super(UnwarpNet, self).__init__()
self.combine_num = combine_num
self.use_simple = use_simple
self.use_constrain = use_constrain
self.constrain_configure = constrain_configure
self.geo_encoder = modules.Encoder(downsample=6, in_channels=3)
self.threeD_decoder = modules.Decoder(downsample=6, out_channels=3, combine_num=self.combine_num)
self.normal_decoder = modules.Decoder(downsample=6, out_channels=3, combine_num=self.combine_num)
self.depth_decoder = modules.Decoder(downsample=6, out_channels=1, combine_num=self.combine_num)
self.mask_decoder = modules.Decoder(downsample=6, out_channels=1, combine_num=0)
bottle_neck = sum([2 ** (i + 4) for i in range(self.combine_num)])
self.second_encoder = modules.Encoder(downsample=6, in_channels=bottle_neck * 3 + 3)
self.uv_decoder = modules.Decoder(downsample=6, out_channels=2, combine_num=0)
# self.albedo_decoder = modules.AlbedoDecoder(downsample=6, out_channels=1)
self.albedo_decoder = modules.Decoder(downsample=6, out_channels=1, combine_num=0)
self.deform_decoder = modules.Decoder(downsample=6, out_channels=2, combine_num=0)
self.dep2nor = None
self.threeD_to_nor2dep = None
self.nor2dep = None
def forward(self, x):
gxvals, gx_encode = self.geo_encoder(x)
threeD_map, threeD_feature = self.threeD_decoder(gxvals, gx_encode)
threeD_map = nn.functional.tanh(threeD_map)
dep_map, dep_feature = self.depth_decoder(gxvals, gx_encode)
dep_map = nn.functional.tanh(dep_map)
nor_map, nor_feature = self.normal_decoder(gxvals, gx_encode)
nor_map = nn.functional.tanh(nor_map)
mask_map, mask_feature = self.mask_decoder(gxvals, gx_encode)
mask_map = torch.nn.functional.sigmoid(mask_map)
# geo_feature = torch.cat([threeD_feature, nor_feature, dep_feature], dim=1)
geo_feature = torch.cat([threeD_feature, nor_feature, dep_feature, x], dim=1)
b, c, h, w = geo_feature.size()
geo_feature_mask = geo_feature.mul(mask_map.expand(b, c, h, w))
secvals, sec_encode = self.second_encoder(geo_feature_mask)
uv_map, _ = self.uv_decoder(secvals, sec_encode)
uv_map = nn.functional.tanh(uv_map)
alb_map, _ = self.albedo_decoder(secvals, sec_encode)
alb_map = nn.functional.tanh(alb_map)
deform_map, _ = self.deform_decoder(secvals, sec_encode)
deform_map = nn.functional.tanh(deform_map)
return uv_map, threeD_map, nor_map, alb_map, dep_map, mask_map, \
None, None, None, None, None, deform_map
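# Illustrative usage sketch, not part of the original file (the batch size and the
# 256x256 input resolution are assumptions): forward() above consumes a batch of
# 3-channel images and returns twelve outputs.
#
#   net = UnwarpNet(combine_num=3)
#   uv, threeD, nor, alb, dep, mask, *rest = net(torch.randn(2, 3, 256, 256))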
| [((680, 724), 'models.misc.modules.Encoder', 'modules.Encoder', ([], {'downsample': '(6)', 'in_channels': '(3)'}), '(downsample=6, in_channels=3)\n', (695, 724), False, 'from models.misc import modules\n'), ((755, 830), 'models.misc.modules.Decoder', 'modules.Decoder', ([], {'downsample': '(6)', 'out_channels': '(3)', 'combine_num': 'self.combine_num'}), '(downsample=6, out_channels=3, combine_num=self.combine_num)\n', (770, 830), False, 'from models.misc import modules\n'), ((861, 936), 'models.misc.modules.Decoder', 'modules.Decoder', ([], {'downsample': '(6)', 'out_channels': '(3)', 'combine_num': 'self.combine_num'}), '(downsample=6, out_channels=3, combine_num=self.combine_num)\n', (876, 936), False, 'from models.misc import modules\n'), ((966, 1041), 'models.misc.modules.Decoder', 'modules.Decoder', ([], {'downsample': '(6)', 'out_channels': '(1)', 'combine_num': 'self.combine_num'}), '(downsample=6, out_channels=1, combine_num=self.combine_num)\n', (981, 1041), False, 'from models.misc import modules\n'), ((1070, 1130), 'models.misc.modules.Decoder', 'modules.Decoder', ([], {'downsample': '(6)', 'out_channels': '(1)', 'combine_num': '(0)'}), '(downsample=6, out_channels=1, combine_num=0)\n', (1085, 1130), False, 'from models.misc import modules\n'), ((1236, 1298), 'models.misc.modules.Encoder', 'modules.Encoder', ([], {'downsample': '(6)', 'in_channels': '(bottle_neck * 3 + 3)'}), '(downsample=6, in_channels=bottle_neck * 3 + 3)\n', (1251, 1298), False, 'from models.misc import modules\n'), ((1325, 1385), 'models.misc.modules.Decoder', 'modules.Decoder', ([], {'downsample': '(6)', 'out_channels': '(2)', 'combine_num': '(0)'}), '(downsample=6, out_channels=2, combine_num=0)\n', (1340, 1385), False, 'from models.misc import modules\n'), ((1500, 1560), 'models.misc.modules.Decoder', 'modules.Decoder', ([], {'downsample': '(6)', 'out_channels': '(1)', 'combine_num': '(0)'}), '(downsample=6, out_channels=1, combine_num=0)\n', (1515, 1560), False, 'from models.misc import modules\n'), ((1591, 1651), 'models.misc.modules.Decoder', 'modules.Decoder', ([], {'downsample': '(6)', 'out_channels': '(2)', 'combine_num': '(0)'}), '(downsample=6, out_channels=2, combine_num=0)\n', (1606, 1651), False, 'from models.misc import modules\n'), ((1926, 1956), 'torch.nn.functional.tanh', 'nn.functional.tanh', (['threeD_map'], {}), '(threeD_map)\n', (1944, 1956), True, 'import torch.nn as nn\n'), ((2044, 2071), 'torch.nn.functional.tanh', 'nn.functional.tanh', (['dep_map'], {}), '(dep_map)\n', (2062, 2071), True, 'import torch.nn as nn\n'), ((2160, 2187), 'torch.nn.functional.tanh', 'nn.functional.tanh', (['nor_map'], {}), '(nor_map)\n', (2178, 2187), True, 'import torch.nn as nn\n'), ((2277, 2314), 'torch.nn.functional.sigmoid', 'torch.nn.functional.sigmoid', (['mask_map'], {}), '(mask_map)\n', (2304, 2314), False, 'import torch\n'), ((2422, 2485), 'torch.cat', 'torch.cat', (['[threeD_feature, nor_feature, dep_feature, x]'], {'dim': '(1)'}), '([threeD_feature, nor_feature, dep_feature, x], dim=1)\n', (2431, 2485), False, 'import torch\n'), ((2740, 2766), 'torch.nn.functional.tanh', 'nn.functional.tanh', (['uv_map'], {}), '(uv_map)\n', (2758, 2766), True, 'import torch.nn as nn\n'), ((2847, 2874), 'torch.nn.functional.tanh', 'nn.functional.tanh', (['alb_map'], {}), '(alb_map)\n', (2865, 2874), True, 'import torch.nn as nn\n'), ((2961, 2991), 'torch.nn.functional.tanh', 'nn.functional.tanh', (['deform_map'], {}), '(deform_map)\n', (2979, 2991), True, 'import torch.nn as nn\n')] |
pansila/Auto-Test-System | endpoint/test_endpoint/update.py | bfe51a277466939a32daa08f27a89cf3c1900def | import configparser
import os
import hashlib
import json
import shutil
import sys
import tempfile
import subprocess
import tarfile
import re
import stat
from functools import cmp_to_key
from contextlib import closing
from gzip import GzipFile
from pathlib import Path
from urllib.error import HTTPError
from urllib.request import Request
from urllib.request import urlopen
WINDOWS = sys.platform == "win32"
BOOTSTRAP = """\
import os, sys
import re
import subprocess
def _which_python():
allowed_executables = ["python3", "python"]
if sys.platform == 'win32':
# in favor of 32 bit python to be compatible with the 32bit dlls of test libraries
allowed_executables[:0] = ["py.exe -3-32", "py.exe -2-32", "py.exe -3-64", "py.exe -2-64"]
# \d in regex ensures we can convert to int later
version_matcher = re.compile(r"^Python (?P<major>\d+)\.(?P<minor>\d+)\..+$")
fallback = None
for executable in allowed_executables:
try:
raw_version = subprocess.check_output(
executable + " --version", stderr=subprocess.STDOUT, shell=True
).decode("utf-8")
except subprocess.CalledProcessError:
continue
match = version_matcher.match(raw_version.strip())
if match and tuple(map(int, match.groups())) >= (3, 0):
# favor the first py3 executable we can find.
return executable
if fallback is None:
# keep this one as the fallback; it was the first valid executable we found.
fallback = executable
if fallback is None:
# Avoid breaking existing scripts
fallback = "python"
return fallback
if __name__ == '__main__':
py_executable = _which_python()
subprocess.run(py_executable + r' {collie_bin} ' + ' '.join(sys.argv[1:]), shell=True)
"""
BIN = """#!/usr/bin/env python
# -*- coding: utf-8 -*-
import sys
import os
import argparse
lib = os.path.normpath(os.path.join(os.path.realpath(__file__), "..", "..", "lib", "collie"))
sys.path.insert(0, lib)
from test_endpoint.app import main
if __name__ == "__main__":
sys.exit(main())
"""
BAT = '@echo off\r\n{python_executable} "{collie_bootstrap}" %*\r\n'
SH = '#!/bin/sh\npython3 "{collie_bootstrap}" $*\n'
def expanduser(path):
"""
Expand ~ and ~user constructions.
Includes a workaround for http://bugs.python.org/issue14768
"""
expanded = os.path.expanduser(path)
if path.startswith("~/") and expanded.startswith("//"):
expanded = expanded[1:]
return expanded
class SelfUpdate:
VERSION_REGEX = re.compile(
r"v?(\d+)(?:\.(\d+))?(?:\.(\d+))?(?:\.(\d+))?"
"("
"[._-]?"
r"(?:(stable|beta|b|RC|alpha|a|patch|pl|p)((?:[.-]?\d+)*)?)?"
"([.-]?dev)?"
")?"
r"(?:\+[^\s]+)?"
)
def __init__(self, version=None, force=False):
config = configparser.ConfigParser()
config.read(self.config)
self.server_host = config['tool.collie.settings']['server_host']
self.server_port = config['tool.collie.settings']['server_port']
self.join_id = config['tool.collie.settings']['join_id']
self.uuid = config['tool.collie.settings']['uuid']
server_host = self.server_host.strip('"')
server_port = self.server_port.strip('"')
self.SERVER_URL = f'http://{server_host}:{server_port}/api_v1'
self.METADATA_URL = self.SERVER_URL + "/setting/get-endpoint/json"
self.BASE_URL = self.SERVER_URL + "/setting/download"
self._version = None if isinstance(version, bool) else version
self._force = force
@property
def home(self):
if os.environ.get("COLLIE_HOME"):
return Path(expanduser(os.environ["COLLIE_HOME"]))
home = Path(expanduser("~"))
return home / ".collie"
@property
def bin(self):
return self.home / "bin"
@property
def lib(self):
return self.home / "lib"
@property
def lib_backup(self):
return self.home / "lib-backup"
@property
def config(self):
return self.home / "lib" / 'collie' / 'pyproject.toml'
def get_version(self):
from .__version__ import __version__
metadata = json.loads(self._get(self.METADATA_URL).decode())
def _compare_versions(x, y):
mx = self.VERSION_REGEX.match(x)
my = self.VERSION_REGEX.match(y)
vx = tuple(int(p) for p in mx.groups()[:3]) + (mx.group(5),)
vy = tuple(int(p) for p in my.groups()[:3]) + (my.group(5),)
if vx < vy:
return -1
elif vx > vy:
return 1
return 0
releases = sorted(
metadata["releases"], key=cmp_to_key(_compare_versions)
)
if self._version and self._version not in releases:
print("Version {} does not exist.".format(self._version))
return None, None
version = self._version
if not version:
for release in reversed(releases):
m = self.VERSION_REGEX.match(release)
if m.group(5) and not self.allows_prereleases():
continue
version = release
break
current_version = __version__
if current_version == version and not self._force:
print("Latest version already installed.")
return None, current_version
return version, current_version
def run(self):
version, current_version = self.get_version()
if not version:
return
self.update(version)
self.restore_config()
print(f'Succeeded to update collie to version {version}')
def update(self, version):
if self.lib_backup.exists():
shutil.rmtree(str(self.lib_backup))
# Backup the current installation
if self.lib.exists():
shutil.copytree(str(self.lib), str(self.lib_backup))
shutil.rmtree(str(self.lib))
try:
self._update(version)
except Exception:
if not self.lib_backup.exists():
raise
shutil.copytree(str(self.lib_backup), str(self.lib))
shutil.rmtree(str(self.lib_backup))
raise
finally:
if self.lib_backup.exists():
shutil.rmtree(str(self.lib_backup))
self.make_bin()
def _update(self, version):
release_name = self._get_release_name(version)
base_url = self.BASE_URL + '?'
name = f"{release_name}.tar.gz"
checksum = f"{release_name}.sha256sum"
try:
r = urlopen(base_url + "file={}".format(checksum))
except HTTPError as e:
if e.code == 404:
raise RuntimeError("Could not find {} file".format(checksum))
raise
checksum = r.read().decode().strip()
try:
r = urlopen(base_url + "file={}".format(name))
except HTTPError as e:
if e.code == 404:
raise RuntimeError("Could not find {} file".format(name))
raise
meta = r.info()
size = int(meta["Content-Length"])
current = 0
block_size = 8192
sha = hashlib.sha256()
with tempfile.TemporaryDirectory(prefix="collie-updater-") as dir_:
tar = os.path.join(dir_, name)
with open(tar, "wb") as f:
while True:
buffer = r.read(block_size)
if not buffer:
break
current += len(buffer)
f.write(buffer)
sha.update(buffer)
# Checking hashes
if checksum != sha.hexdigest():
raise RuntimeError(
"Hashes for {} do not match: {} != {}".format(
name, checksum, sha.hexdigest()
)
)
gz = GzipFile(tar, mode="rb")
try:
with tarfile.TarFile(tar, fileobj=gz, format=tarfile.PAX_FORMAT) as f:
f.extractall(str(self.lib))
finally:
gz.close()
def restore_config(self):
config = configparser.ConfigParser()
config.read(self.config)
config['tool.collie.settings']['server_host'] = self.server_host
config['tool.collie.settings']['server_port'] = self.server_port
config['tool.collie.settings']['join_id'] = self.join_id
config['tool.collie.settings']['uuid'] = self.uuid
with open(self.config, 'w') as config_file:
config.write(config_file)
def process(self, *args):
return subprocess.check_output(list(args), stderr=subprocess.STDOUT)
def _check_recommended_installation(self):
current = Path(__file__)
try:
current.relative_to(self.home)
except ValueError:
raise RuntimeError(
"Collie was not installed with the recommended installer. "
"Cannot update automatically."
)
def _get_release_name(self, version):
platform = sys.platform
if platform == "linux2":
platform = "linux"
return "collie-{}-{}".format(version, platform)
def _bin_path(self, base_path, bin):
if WINDOWS:
return (base_path / "Scripts" / bin).with_suffix(".exe")
return base_path / "bin" / bin
def make_bin(self):
self.bin.mkdir(0o755, parents=True, exist_ok=True)
python_executable = self._which_python()
with self.bin.joinpath("bootstrap.py").open("w", newline="") as f:
f.write(BOOTSTRAP.format(collie_bin=str(self.bin / "collie.py")))
if WINDOWS:
with self.bin.joinpath("collie.bat").open("w", newline="") as f:
f.write(
BAT.format(
python_executable=python_executable,
collie_bootstrap=str(self.bin / "bootstrap.py").replace(
os.environ["USERPROFILE"], "%USERPROFILE%"
),
)
)
else:
with self.bin.joinpath("collie").open("w", newline="") as f:
f.write(
SH.format(
collie_bootstrap=str(self.bin / "bootstrap.py").replace(
os.getenv("HOME", ""), "$HOME"
),
)
)
bin_content = BIN
if not WINDOWS:
bin_content = "#!/usr/bin/env {}\n".format(python_executable) + bin_content
self.bin.joinpath("collie.py").write_text(bin_content, encoding="utf-8")
if not WINDOWS:
# Making the file executable
st = os.stat(str(self.bin.joinpath("collie")))
os.chmod(str(self.bin.joinpath("collie")), st.st_mode | stat.S_IEXEC)
def _which_python(self):
"""
Decides which python executable we'll embed in the launcher script.
"""
allowed_executables = ["python", "python3"]
if WINDOWS:
allowed_executables += ["py.exe -3", "py.exe -2"]
# \d in regex ensures we can convert to int later
version_matcher = re.compile(r"^Python (?P<major>\d+)\.(?P<minor>\d+)\..+$")
fallback = None
for executable in allowed_executables:
try:
raw_version = subprocess.check_output(
executable + " --version", stderr=subprocess.STDOUT, shell=True
).decode("utf-8")
except subprocess.CalledProcessError:
continue
match = version_matcher.match(raw_version.strip())
if match and tuple(map(int, match.groups())) >= (3, 0):
# favor the first py3 executable we can find.
return executable
if fallback is None:
# keep this one as the fallback; it was the first valid executable we found.
fallback = executable
if fallback is None:
# Avoid breaking existing scripts
fallback = "python"
return fallback
def _get(self, url):
request = Request(url, headers={"User-Agent": "Python Robotest"})
with closing(urlopen(request)) as r:
return r.read()
def update_join_id(self, join_id):
config = configparser.ConfigParser()
config.read(self.config)
config['tool.collie.settings']['join_id'] = f'"{join_id}"'
with open(self.config, 'w') as config_file:
config.write(config_file)
| [((2422, 2446), 'os.path.expanduser', 'os.path.expanduser', (['path'], {}), '(path)\n', (2440, 2446), False, 'import os\n'), ((2598, 2766), 're.compile', 're.compile', (['"""v?(\\\\d+)(?:\\\\.(\\\\d+))?(?:\\\\.(\\\\d+))?(?:\\\\.(\\\\d+))?([._-]?(?:(stable|beta|b|RC|alpha|a|patch|pl|p)((?:[.-]?\\\\d+)*)?)?([.-]?dev)?)?(?:\\\\+[^\\\\s]+)?"""'], {}), "(\n 'v?(\\\\d+)(?:\\\\.(\\\\d+))?(?:\\\\.(\\\\d+))?(?:\\\\.(\\\\d+))?([._-]?(?:(stable|beta|b|RC|alpha|a|patch|pl|p)((?:[.-]?\\\\d+)*)?)?([.-]?dev)?)?(?:\\\\+[^\\\\s]+)?'\n )\n", (2608, 2766), False, 'import re\n'), ((2899, 2926), 'configparser.ConfigParser', 'configparser.ConfigParser', ([], {}), '()\n', (2924, 2926), False, 'import configparser\n'), ((3683, 3712), 'os.environ.get', 'os.environ.get', (['"""COLLIE_HOME"""'], {}), "('COLLIE_HOME')\n", (3697, 3712), False, 'import os\n'), ((7319, 7335), 'hashlib.sha256', 'hashlib.sha256', ([], {}), '()\n', (7333, 7335), False, 'import hashlib\n'), ((8319, 8346), 'configparser.ConfigParser', 'configparser.ConfigParser', ([], {}), '()\n', (8344, 8346), False, 'import configparser\n'), ((8914, 8928), 'pathlib.Path', 'Path', (['__file__'], {}), '(__file__)\n', (8918, 8928), False, 'from pathlib import Path\n'), ((11398, 11459), 're.compile', 're.compile', (['"""^Python (?P<major>\\\\d+)\\\\.(?P<minor>\\\\d+)\\\\..+$"""'], {}), "('^Python (?P<major>\\\\d+)\\\\.(?P<minor>\\\\d+)\\\\..+$')\n", (11408, 11459), False, 'import re\n'), ((12363, 12418), 'urllib.request.Request', 'Request', (['url'], {'headers': "{'User-Agent': 'Python Robotest'}"}), "(url, headers={'User-Agent': 'Python Robotest'})\n", (12370, 12418), False, 'from urllib.request import Request\n'), ((12550, 12577), 'configparser.ConfigParser', 'configparser.ConfigParser', ([], {}), '()\n', (12575, 12577), False, 'import configparser\n'), ((7349, 7402), 'tempfile.TemporaryDirectory', 'tempfile.TemporaryDirectory', ([], {'prefix': '"""collie-updater-"""'}), "(prefix='collie-updater-')\n", (7376, 7402), False, 'import tempfile\n'), ((7430, 7454), 'os.path.join', 'os.path.join', (['dir_', 'name'], {}), '(dir_, name)\n', (7442, 7454), False, 'import os\n'), ((8046, 8070), 'gzip.GzipFile', 'GzipFile', (['tar'], {'mode': '"""rb"""'}), "(tar, mode='rb')\n", (8054, 8070), False, 'from gzip import GzipFile\n'), ((4770, 4799), 'functools.cmp_to_key', 'cmp_to_key', (['_compare_versions'], {}), '(_compare_versions)\n', (4780, 4799), False, 'from functools import cmp_to_key\n'), ((12441, 12457), 'urllib.request.urlopen', 'urlopen', (['request'], {}), '(request)\n', (12448, 12457), False, 'from urllib.request import urlopen\n'), ((8109, 8168), 'tarfile.TarFile', 'tarfile.TarFile', (['tar'], {'fileobj': 'gz', 'format': 'tarfile.PAX_FORMAT'}), '(tar, fileobj=gz, format=tarfile.PAX_FORMAT)\n', (8124, 8168), False, 'import tarfile\n'), ((11575, 11667), 'subprocess.check_output', 'subprocess.check_output', (["(executable + ' --version')"], {'stderr': 'subprocess.STDOUT', 'shell': '(True)'}), "(executable + ' --version', stderr=subprocess.STDOUT,\n shell=True)\n", (11598, 11667), False, 'import subprocess\n'), ((10523, 10544), 'os.getenv', 'os.getenv', (['"""HOME"""', '""""""'], {}), "('HOME', '')\n", (10532, 10544), False, 'import os\n')] |
routedo/junos-pyez-example | lib/jbgp/jbgpneighbor.py | b89df2d40ca0a233529e4a26b42dd605c00aae46 | """
Query BGP neighbor table on a Juniper network device.
"""
import sys
from jnpr.junos import Device
from jnpr.junos.factory import loadyaml
def juniper_bgp_state(dev, bgp_neighbor):
"""
This function queries the BGP neighbor table on a Juniper network device.
dev = Juniper device connection
bgp_neighbor = IP address of BGP neighbor
return = Returns state of BGP neighbor
"""
try:
globals().update(loadyaml('yaml/bgp_neighbor.yml'))
bgp_ni = bgp_neighbor_info(dev).get(neighbor_address=bgp_neighbor)
return bgp_ni
except Exception as err:
print(err)
dev.close()
sys.exit(1)
return
return
| [((441, 474), 'jnpr.junos.factory.loadyaml', 'loadyaml', (['"""yaml/bgp_neighbor.yml"""'], {}), "('yaml/bgp_neighbor.yml')\n", (449, 474), False, 'from jnpr.junos.factory import loadyaml\n'), ((651, 662), 'sys.exit', 'sys.exit', (['(1)'], {}), '(1)\n', (659, 662), False, 'import sys\n')] |
MiCHiLU/google_appengine_sdk | lib/cherrypy/cherrypy/test/test_sessionauthenticate.py | 3da9f20d7e65e26c4938d2c4054bc4f39cbc5522 | import cherrypy
from cherrypy.test import helper
class SessionAuthenticateTest(helper.CPWebCase):
def setup_server():
def check(username, password):
# Dummy check_username_and_password function
if username != 'test' or password != 'password':
return 'Wrong login/password'
def augment_params():
# A simple tool to add some things to request.params
# This is to check to make sure that session_auth can handle request
# params (ticket #780)
cherrypy.request.params["test"] = "test"
cherrypy.tools.augment_params = cherrypy.Tool('before_handler',
augment_params, None, priority=30)
class Test:
_cp_config = {'tools.sessions.on': True,
'tools.session_auth.on': True,
'tools.session_auth.check_username_and_password': check,
'tools.augment_params.on': True,
}
def index(self, **kwargs):
return "Hi %s, you are logged in" % cherrypy.request.login
index.exposed = True
cherrypy.tree.mount(Test())
setup_server = staticmethod(setup_server)
def testSessionAuthenticate(self):
# request a page and check for login form
self.getPage('/')
self.assertInBody('<form method="post" action="do_login">')
# setup credentials
login_body = 'username=test&password=password&from_page=/'
# attempt a login
self.getPage('/do_login', method='POST', body=login_body)
self.assertStatus((302, 303))
# get the page now that we are logged in
self.getPage('/', self.cookies)
self.assertBody('Hi test, you are logged in')
# do a logout
self.getPage('/do_logout', self.cookies, method='POST')
self.assertStatus((302, 303))
# verify we are logged out
self.getPage('/', self.cookies)
self.assertInBody('<form method="post" action="do_login">')
| [((651, 717), 'cherrypy.Tool', 'cherrypy.Tool', (['"""before_handler"""', 'augment_params', 'None'], {'priority': '(30)'}), "('before_handler', augment_params, None, priority=30)\n", (664, 717), False, 'import cherrypy\n')] |
amochtar/adventofcode | 2021/day-12/solve.py | 292e7f00a1e19d2149d00246b0a77fedfcd3bd08 | #!/usr/bin/env python
from typing import List
import aoc
from collections import defaultdict
@aoc.timing
def solve(inp: str, part2=False):
def find_path(current: str, path: List[str] = []):
if current == 'end':
yield path
return
for nxt in caves[current]:
if nxt == 'start':
continue
if nxt.islower() and nxt in path:
if not part2:
continue
elif any(path.count(c) > 1 for c in path if c.islower()):
continue
yield from find_path(nxt, path + [nxt])
caves = defaultdict(list)
for line in inp.splitlines():
parts = line.split('-')
caves[parts[0]].append(parts[1])
caves[parts[1]].append(parts[0])
return len(list(find_path('start')))
@aoc.timing
def part2(inp: str):
return inp
with open('test2.txt', 'r') as f:
inp = f.read()
print("Part 1:", solve(inp))
print("Part 2:", solve(inp, True))
with open('input.txt', 'r') as f:
inp = f.read()
print("Part 1:", solve(inp))
print("Part 2:", solve(inp, True))
| [((634, 651), 'collections.defaultdict', 'defaultdict', (['list'], {}), '(list)\n', (645, 651), False, 'from collections import defaultdict\n')] |
suytingwan/models | PaddleCV/tracking/ltr/data/processing.py | ccdbfe77d071cc19b55fb9f4b738912e35d982ef | import numpy as np
from ltr.data import transforms
import ltr.data.processing_utils as prutils
from pytracking.libs import TensorDict
class BaseProcessing:
""" Base class for Processing. Processing class is used to process the data returned by a dataset, before passing it
through the network. For example, it can be used to crop a search region around the object, apply various data
augmentations, etc."""
def __init__(self,
transform=transforms.ToArray(),
train_transform=None,
test_transform=None,
joint_transform=None):
"""
args:
transform - The set of transformations to be applied on the images. Used only if train_transform or
test_transform is None.
train_transform - The set of transformations to be applied on the train images. If None, the 'transform'
argument is used instead.
test_transform - The set of transformations to be applied on the test images. If None, the 'transform'
argument is used instead.
joint_transform - The set of transformations to be applied 'jointly' on the train and test images. For
example, it can be used to convert both test and train images to grayscale.
"""
self.transform = {
'train': transform if train_transform is None else train_transform,
'test': transform if test_transform is None else test_transform,
'joint': joint_transform
}
def __call__(self, data: TensorDict):
raise NotImplementedError
class SiamFCProcessing(BaseProcessing):
def __init__(self,
search_area_factor,
output_sz,
center_jitter_factor,
scale_jitter_factor,
mode='pair',
scale_type='context',
border_type='meanpad',
*args,
**kwargs):
super().__init__(*args, **kwargs)
self.search_area_factor = search_area_factor
self.output_sz = output_sz
self.center_jitter_factor = center_jitter_factor
self.scale_jitter_factor = scale_jitter_factor
self.mode = mode
self.scale_type = scale_type
self.border_type = border_type
def _get_jittered_box(self, box, mode, rng):
jittered_size = box[2:4] * np.exp(
rng.randn(2) * self.scale_jitter_factor[mode])
max_offset = (np.sqrt(jittered_size.prod()) *
self.center_jitter_factor[mode])
jittered_center = box[0:2] + 0.5 * box[2:4] + max_offset * (rng.rand(2)
- 0.5)
return np.concatenate(
(jittered_center - 0.5 * jittered_size, jittered_size), axis=0)
def __call__(self, data: TensorDict, rng=None):
# Apply joint transforms
if self.transform['joint'] is not None:
num_train_images = len(data['train_images'])
all_images = data['train_images'] + data['test_images']
all_images_trans = self.transform['joint'](*all_images)
data['train_images'] = all_images_trans[:num_train_images]
data['test_images'] = all_images_trans[num_train_images:]
for s in ['train', 'test']:
assert self.mode == 'sequence' or len(data[s + '_images']) == 1, \
"In pair mode, num train/test frames must be 1"
# Add a uniform noise to the center pos
jittered_anno = [
self._get_jittered_box(a, s, rng) for a in data[s + '_anno']
]
# Crop image region centered at jittered_anno box
try:
crops, boxes = prutils.jittered_center_crop(
data[s + '_images'],
jittered_anno,
data[s + '_anno'],
self.search_area_factor[s],
self.output_sz[s],
scale_type=self.scale_type,
border_type=self.border_type)
except Exception as e:
print('{}, anno: {}'.format(data['dataset'], data[s + '_anno']))
raise e
# Apply transforms
data[s + '_images'] = [self.transform[s](x) for x in crops]
data[s + '_anno'] = boxes
# Prepare output
if self.mode == 'sequence':
data = data.apply(prutils.stack_tensors)
else:
data = data.apply(lambda x: x[0] if isinstance(x, list) else x)
return data
class ATOMProcessing(BaseProcessing):
""" The processing class used for training ATOM. The images are processed in the following way.
First, the target bounding box is jittered by adding some noise. Next, a square region (called search region )
centered at the jittered target center, and of area search_area_factor^2 times the area of the jittered box is
cropped from the image. The reason for jittering the target box is to avoid learning the bias that the target is
always at the center of the search region. The search region is then resized to a fixed size given by the
argument output_sz. A set of proposals are then generated for the test images by jittering the ground truth box.
"""
def __init__(self,
search_area_factor,
output_sz,
center_jitter_factor,
scale_jitter_factor,
proposal_params,
mode='pair',
*args,
**kwargs):
"""
args:
search_area_factor - The size of the search region relative to the target size.
output_sz - An integer, denoting the size to which the search region is resized. The search region is always
square.
center_jitter_factor - A dict containing the amount of jittering to be applied to the target center before
extracting the search region. See _get_jittered_box for how the jittering is done.
scale_jitter_factor - A dict containing the amount of jittering to be applied to the target size before
extracting the search region. See _get_jittered_box for how the jittering is done.
proposal_params - Arguments for the proposal generation process. See _generate_proposals for details.
mode - Either 'pair' or 'sequence'. If mode='sequence', then output has an extra dimension for frames
"""
super().__init__(*args, **kwargs)
self.search_area_factor = search_area_factor
self.output_sz = output_sz
self.center_jitter_factor = center_jitter_factor
self.scale_jitter_factor = scale_jitter_factor
self.proposal_params = proposal_params
self.mode = mode
def _get_jittered_box(self, box, mode, rng):
""" Jitter the input box
args:
box - input bounding box
mode - string 'train' or 'test' indicating train or test data
returns:
Variable - jittered box
"""
jittered_size = box[2:4] * np.exp(
rng.randn(2) * self.scale_jitter_factor[mode])
max_offset = (np.sqrt(jittered_size.prod()) *
self.center_jitter_factor[mode])
jittered_center = box[0:2] + 0.5 * box[2:4] + max_offset * (rng.rand(2)
- 0.5)
return np.concatenate(
(jittered_center - 0.5 * jittered_size, jittered_size), axis=0)
def _generate_proposals(self, box, rng):
""" Generates proposals by adding noise to the input box
args:
box - input box
returns:
array - Array of shape (num_proposals, 4) containing proposals
array - Array of shape (num_proposals,) containing IoU overlap of each proposal with the input box. The
IoU is mapped to [-1, 1]
"""
# Generate proposals
num_proposals = self.proposal_params['boxes_per_frame']
proposals = np.zeros((num_proposals, 4))
gt_iou = np.zeros(num_proposals)
for i in range(num_proposals):
proposals[i, :], gt_iou[i] = prutils.perturb_box(
box,
min_iou=self.proposal_params['min_iou'],
sigma_factor=self.proposal_params['sigma_factor'],
rng=rng)
# Map to [-1, 1]
gt_iou = gt_iou * 2 - 1
return proposals, gt_iou
def __call__(self, data: TensorDict, rng=None):
"""
args:
data - The input data, should contain the following fields:
'train_images' -
'test_images' -
'train_anno' -
'test_anno' -
returns:
TensorDict - output data block with following fields:
'train_images' -
'test_images' -
'train_anno' -
'test_anno' -
'test_proposals'-
'proposal_iou' -
"""
# Apply joint transforms
if self.transform['joint'] is not None:
num_train_images = len(data['train_images'])
all_images = data['train_images'] + data['test_images']
all_images_trans = self.transform['joint'](*all_images)
data['train_images'] = all_images_trans[:num_train_images]
data['test_images'] = all_images_trans[num_train_images:]
for s in ['train', 'test']:
assert self.mode == 'sequence' or len(data[s + '_images']) == 1, \
"In pair mode, num train/test frames must be 1"
# Add a uniform noise to the center pos
jittered_anno = [
self._get_jittered_box(a, s, rng) for a in data[s + '_anno']
]
# Crop image region centered at jittered_anno box
try:
crops, boxes = prutils.jittered_center_crop(
data[s + '_images'], jittered_anno, data[s + '_anno'],
self.search_area_factor, self.output_sz)
except Exception as e:
print('{}, anno: {}'.format(data['dataset'], data[s + '_anno']))
raise e
# Apply transforms
data[s + '_images'] = [self.transform[s](x) for x in crops]
data[s + '_anno'] = boxes
# Generate proposals
frame2_proposals, gt_iou = zip(
* [self._generate_proposals(a, rng) for a in data['test_anno']])
data['test_proposals'] = list(frame2_proposals)
data['proposal_iou'] = list(gt_iou)
# Prepare output
if self.mode == 'sequence':
data = data.apply(prutils.stack_tensors)
else:
data = data.apply(lambda x: x[0] if isinstance(x, list) else x)
return data
| [((488, 508), 'ltr.data.transforms.ToArray', 'transforms.ToArray', ([], {}), '()\n', (506, 508), False, 'from ltr.data import transforms\n'), ((2916, 2994), 'numpy.concatenate', 'np.concatenate', (['(jittered_center - 0.5 * jittered_size, jittered_size)'], {'axis': '(0)'}), '((jittered_center - 0.5 * jittered_size, jittered_size), axis=0)\n', (2930, 2994), True, 'import numpy as np\n'), ((7846, 7924), 'numpy.concatenate', 'np.concatenate', (['(jittered_center - 0.5 * jittered_size, jittered_size)'], {'axis': '(0)'}), '((jittered_center - 0.5 * jittered_size, jittered_size), axis=0)\n', (7860, 7924), True, 'import numpy as np\n'), ((8489, 8517), 'numpy.zeros', 'np.zeros', (['(num_proposals, 4)'], {}), '((num_proposals, 4))\n', (8497, 8517), True, 'import numpy as np\n'), ((8536, 8559), 'numpy.zeros', 'np.zeros', (['num_proposals'], {}), '(num_proposals)\n', (8544, 8559), True, 'import numpy as np\n'), ((8644, 8773), 'ltr.data.processing_utils.perturb_box', 'prutils.perturb_box', (['box'], {'min_iou': "self.proposal_params['min_iou']", 'sigma_factor': "self.proposal_params['sigma_factor']", 'rng': 'rng'}), "(box, min_iou=self.proposal_params['min_iou'],\n sigma_factor=self.proposal_params['sigma_factor'], rng=rng)\n", (8663, 8773), True, 'import ltr.data.processing_utils as prutils\n'), ((3966, 4163), 'ltr.data.processing_utils.jittered_center_crop', 'prutils.jittered_center_crop', (["data[s + '_images']", 'jittered_anno', "data[s + '_anno']", 'self.search_area_factor[s]', 'self.output_sz[s]'], {'scale_type': 'self.scale_type', 'border_type': 'self.border_type'}), "(data[s + '_images'], jittered_anno, data[s +\n '_anno'], self.search_area_factor[s], self.output_sz[s], scale_type=\n self.scale_type, border_type=self.border_type)\n", (3994, 4163), True, 'import ltr.data.processing_utils as prutils\n'), ((10442, 10570), 'ltr.data.processing_utils.jittered_center_crop', 'prutils.jittered_center_crop', (["data[s + '_images']", 'jittered_anno', "data[s + '_anno']", 'self.search_area_factor', 'self.output_sz'], {}), "(data[s + '_images'], jittered_anno, data[s +\n '_anno'], self.search_area_factor, self.output_sz)\n", (10470, 10570), True, 'import ltr.data.processing_utils as prutils\n')] |