file_name (large_string, lengths 4–140) | prefix (large_string, lengths 0–12.1k) | suffix (large_string, lengths 0–12k) | middle (large_string, lengths 0–7.51k) | fim_type (large_string, 4 classes)
---|---|---|---|---|
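Each row below is one fill-in-the-middle (FIM) training example: concatenating `prefix` + `middle` + `suffix` reconstructs a contiguous span of the source file named in `file_name`, and `fim_type` records the strategy used to carve out the middle (`conditional_block`, `identifier_body`, `identifier_name`, or `random_line_split`). A minimal sketch of consuming rows with this schema, assuming the dump corresponds to a Hugging Face dataset — the `load_dataset` call is the library's standard entry point, but the dataset id below is a placeholder, not the real one:

```python
from datasets import load_dataset

# Assumption: "user/fim-code" stands in for this dataset's actual id.
ds = load_dataset("user/fim-code", split="train")

for row in ds:
    # prefix + middle + suffix re-assembles the original code span.
    reconstructed = row["prefix"] + row["middle"] + row["suffix"]
    print(row["file_name"], row["fim_type"], len(reconstructed))
```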
TAToolsHandler.py | TASeries.get(self.request.get('series'))
self.doc = ToolsDocument('Aangepaste tijden voor serie %s' % series.name)
processedObjects = []
processedPoints = {}
table = self.doc.add_table('changestable', ['Station', 'A', 'V', 'A', 'V', ''])
for index in range(len(series.points)):
point = series.points[index]
oldTimes = point.scheduled_times
upArrival = self.request.get('arr_%d_%d' % (Direction.up, index))
upDeparture = self.request.get('dep_%d_%d' % (Direction.up, index))
downArrival = self.request.get('arr_%d_%d' % (Direction.down, index))
downDeparture = self.request.get('dep_%d_%d' % (Direction.down, index))
newTimes = (minutes_from_string(upArrival),
minutes_from_string(upDeparture),
minutes_from_string(downArrival),
minutes_from_string(downDeparture))
row = table.add_row()
row.add_to_cell(0, point.stationName)
row.add_to_cell(1, upArrival)
row.add_to_cell(2, upDeparture)
row.add_to_cell(3, downArrival)
row.add_to_cell(4, downDeparture)
if oldTimes != newTimes:
point.scheduled_times = newTimes
processedPoints[point.id] = point
processedObjects.append(point)
row.add_to_cell(5, 'aangepast')
series.cache_set()
memcache.set_multi(processedPoints, namespace='TAScheduledPoint')
db.put(processedObjects)
self.response.out.write(self.doc.write())
def patternTimeTable(self, series, direction):
table = markup.HTMLTable('timetable_%d' % direction, ['Station', 'A', 'V', 'meting', '#', 'delta', 'A', 'V'])
indexes = range(len(series.points))
if direction == Direction.down: indexes.reverse()
for index in indexes:
point = series.points[index]
station = point.station
planArrival, planDeparture = point.times_in_direction(direction)
row = table.add_row()
row.add_to_cell(0, station.name)
row.add_to_cell(1, string_from_minutes(planArrival))
row.add_to_cell(2, string_from_minutes(planDeparture))
stationDict = self.results[direction].get(station.id, None)
if stationDict == None:
departure, count = ('-', '-')
delta = 0
else:
departure, count = mostCommonItem(stationDict['v'])
delta = departure - planDeparture
departure = string_from_minutes(departure)
row.add_to_cell(3, departure)
row.add_to_cell(4, count)
row.add_to_cell(5, delta)
row.add_to_cell(6, markup.input('text', 'arr_%d_%d' % (direction, index), string_from_minutes(planArrival + delta), size=4))
row.add_to_cell(7, markup.input('text', 'dep_%d_%d' % (direction, index), string_from_minutes(planDeparture + delta), size=4))
return table
def analyzeStops(self):
series_id = self.request.get('series')
query = db.Query(TAArchivedMission).filter('series_id =', series_id)
for mission in query.fetch(50):
|
def stopDictionary(self, direction, stopKey):
dictionary = self.results[direction].get(stopKey, None)
if dictionary == None:
dictionary = dict()
self.results[direction][stopKey] = dictionary
return dictionary
def histogram(self, direction, stopKey, dataKey):
stopDictionary = self.stopDictionary(direction, stopKey)
dictionary = stopDictionary.get(dataKey, None)
if dictionary == None:
dictionary = dict()
stopDictionary[dataKey] = dictionary
return dictionary
def addDataToHistogram(self, histogram, key):
histogram[key] = histogram.get(key, 0) + 1
class ReoffsetHandler(webapp2.RequestHandler):
tableTitles = ('tijd', 'aantal', 'perc.')
tableFormat = (':%02d', '%d', '%.1f%%')
def get(self):
series =TASeries.get(self.request.get('series'))
self.doc = ToolsDocument('Herschik offsets serie %s' % series.name)
self.writeReport(series)
self.response.out.write(self.doc.write())
def post(self):
series = TASeries.get(self.request.get('series'))
self.deltaOffset = [int(self.request.get('offset_up')), int(self.request.get('offset_down'))]
self.round = [int(self.request.get('round_up')), int(self.request.get('round_down'))]
self.processedObjects = []
self.processedMissions = {}
self.processedPoints = {}
self.doc = ToolsDocument('Aangepaste offsets serie %s' % series.name)
self.doc.main.add(markup.heading(2, 'Aangepaste patroontijden'))
self.processPoints(series)
self.doc.main.add(markup.heading(2, 'Aangepaste offsettijden'))
table = self.doc.add_table('adapted_missions', ['Missie', 'Offset'])
self.processMissions(series.all_mission_ids(Direction.up), Direction.up, table)
self.processMissions(series.all_mission_ids(Direction.down), Direction.down, table)
series.cache_set()
self.saveChanges()
# self.writeReport(series)
self.response.out.write(self.doc.write())
def writeReport(self, series):
self.departure = [series.first_point.upDeparture, series.last_point.downDeparture]
self.startStation = [series.first_point.stationName, series.last_point.stationName]
self.foundOffset = [None, None]
self.doc.main.add(markup.heading(2, 'Heenrichting'))
self.analyzeOffset(series.all_mission_ids(Direction.up))
self.reportOffset(FIRST_HALF, Direction.up)
self.reportOffset(SECND_HALF, Direction.up)
self.doc.main.add(markup.heading(2, 'Terugrichting'))
self.analyzeOffset(series.all_mission_ids(Direction.down))
self.reportOffset(FIRST_HALF, Direction.down)
self.reportOffset(SECND_HALF, Direction.down)
if self.foundOffset[Direction.up] or self.foundOffset[Direction.down]:
self.doc.main.add(markup.heading(2, 'Aanpassen'))
self.proposeChanges()
def analyzeOffset(self, missionIDs):
self.offset = [None, None]
self.data=[[], []]
firstHalfHist = dict()
firstHalfItems = 0
secondHalfHist = dict()
secondHalfItems = 0
for missionID in missionIDs:
mission = TAMission.get(missionID)
num = mission.number
if bool(num % 2): num -= 1
key = mission.offset.minute
if bool(num % 4):
firstHalfHist[key] = firstHalfHist.get(key, 0) + 1
firstHalfItems += 1
else:
secondHalfHist[key] = secondHalfHist.get(key, 0) + 1
secondHalfItems += 1
self.generateData(FIRST_HALF, firstHalfHist, firstHalfItems)
self.generateData(SECND_HALF, secondHalfHist, secondHalfItems)
def generateData(self, halfHour, histogram, count):
maxFrequency = 0
for key, value in histogram.iteritems():
self.data[halfHour].append((int(key), value, 100.0 * value/count))
if value > maxFrequency:
maxFrequency = value
self.offset[halfHour] = int(key)
def reportOffset(self, halfHour, direction):
if self.offset[halfHour] != None:
self.doc.main.add(markup.heading(3, '%s halfuur :%02d' % (ORD_LABEL[halfHour], self.offset[halfHour])))
table = self.doc.add_table('table_%d' % (2 * direction + halfHour), self.tableTitles, self.tableFormat)
table.fill_data(self.data[halfHour])
departure = self.offset[halfHour] + self.departure[direction]
if departure >= 60:
departure -= 60
self.offset[halfHour] -= 60
self.doc.add_paragraph('Vertrek uit %s: %d + %d = :%02d' %
(self.startStation[direction], self.offset[halfHour], self.departure[ | if mission.up: direction = Direction.up
else: direction = Direction.down
for stop in mission.stopsList:
stopKey = stop.station_id
if stop.status == StopStatuses.planned:
departureHist = self.histogram(direction, stopKey, 'v')
difference = utc_from_cet(stop.departure) - correctedOffsetUTC(mission)
self.addDataToHistogram(departureHist, difference.seconds // 60)
delayHist = self.histogram(direction, stopKey, 'dv')
self.addDataToHistogram(delayHist, int(stop.delay_dep))
platformHist = self.histogram(direction, stopKey, 'p')
self.addDataToHistogram(platformHist, stop.platform) | conditional_block |
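The row above is a `conditional_block` example: its hidden middle begins at the `if mission.up:` conditional inside `analyzeStops`. A minimal sketch of how such a split could be produced for Python sources with the standard `ast` module — the dataset's actual extraction tooling is not documented in this dump, so this is an illustration under that assumption:

```python
import ast

def split_conditional_block(source: str):
    """Carve the first if-statement out of `source` as a FIM middle span."""
    tree = ast.parse(source)
    for node in ast.walk(tree):
        if isinstance(node, ast.If):
            lines = source.splitlines(keepends=True)
            start = node.lineno - 1   # ast line numbers are 1-based
            end = node.end_lineno     # inclusive end line (Python 3.8+)
            prefix = "".join(lines[:start])
            middle = "".join(lines[start:end])
            suffix = "".join(lines[end:])
            return prefix, middle, suffix
    return source, "", ""             # no conditional found
```

The other three classes follow the same pattern: `identifier_body` hides a function's body, `identifier_name` hides only the defined name (see the `def | ():` rows further down), and `random_line_split` hides a run of consecutive lines.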
TAToolsHandler.py | TASeries.get(self.request.get('series'))
self.doc = ToolsDocument('Aangepaste tijden voor serie %s' % series.name)
processedObjects = []
processedPoints = {}
table = self.doc.add_table('changestable', ['Station', 'A', 'V', 'A', 'V', ''])
for index in range(len(series.points)):
point = series.points[index]
oldTimes = point.scheduled_times
upArrival = self.request.get('arr_%d_%d' % (Direction.up, index))
upDeparture = self.request.get('dep_%d_%d' % (Direction.up, index))
downArrival = self.request.get('arr_%d_%d' % (Direction.down, index))
downDeparture = self.request.get('dep_%d_%d' % (Direction.down, index))
newTimes = (minutes_from_string(upArrival),
minutes_from_string(upDeparture),
minutes_from_string(downArrival),
minutes_from_string(downDeparture))
row = table.add_row()
row.add_to_cell(0, point.stationName)
row.add_to_cell(1, upArrival)
row.add_to_cell(2, upDeparture)
row.add_to_cell(3, downArrival)
row.add_to_cell(4, downDeparture)
if oldTimes != newTimes:
point.scheduled_times = newTimes
processedPoints[point.id] = point
processedObjects.append(point)
row.add_to_cell(5, 'aangepast')
series.cache_set()
memcache.set_multi(processedPoints, namespace='TAScheduledPoint')
db.put(processedObjects)
self.response.out.write(self.doc.write())
def patternTimeTable(self, series, direction):
table = markup.HTMLTable('timetable_%d' % direction, ['Station', 'A', 'V', 'meting', '#', 'delta', 'A', 'V'])
indexes = range(len(series.points))
if direction == Direction.down: indexes.reverse()
for index in indexes:
point = series.points[index]
station = point.station
planArrival, planDeparture = point.times_in_direction(direction)
row = table.add_row()
row.add_to_cell(0, station.name)
row.add_to_cell(1, string_from_minutes(planArrival))
row.add_to_cell(2, string_from_minutes(planDeparture))
stationDict = self.results[direction].get(station.id, None)
if stationDict == None:
departure, count = ('-', '-')
delta = 0
else:
departure, count = mostCommonItem(stationDict['v'])
delta = departure - planDeparture
departure = string_from_minutes(departure)
row.add_to_cell(3, departure)
row.add_to_cell(4, count)
row.add_to_cell(5, delta)
row.add_to_cell(6, markup.input('text', 'arr_%d_%d' % (direction, index), string_from_minutes(planArrival + delta), size=4))
row.add_to_cell(7, markup.input('text', 'dep_%d_%d' % (direction, index), string_from_minutes(planDeparture + delta), size=4))
return table
def analyzeStops(self):
series_id = self.request.get('series')
query = db.Query(TAArchivedMission).filter('series_id =', series_id)
for mission in query.fetch(50):
if mission.up: direction = Direction.up
else: direction = Direction.down
for stop in mission.stopsList:
stopKey = stop.station_id
if stop.status == StopStatuses.planned:
departureHist = self.histogram(direction, stopKey, 'v')
difference = utc_from_cet(stop.departure) - correctedOffsetUTC(mission)
self.addDataToHistogram(departureHist, difference.seconds // 60)
delayHist = self.histogram(direction, stopKey, 'dv')
self.addDataToHistogram(delayHist, int(stop.delay_dep))
platformHist = self.histogram(direction, stopKey, 'p')
self.addDataToHistogram(platformHist, stop.platform)
def stopDictionary(self, direction, stopKey):
dictionary = self.results[direction].get(stopKey, None)
if dictionary == None:
dictionary = dict()
self.results[direction][stopKey] = dictionary
return dictionary
def histogram(self, direction, stopKey, dataKey):
stopDictionary = self.stopDictionary(direction, stopKey)
dictionary = stopDictionary.get(dataKey, None)
if dictionary == None:
dictionary = dict()
stopDictionary[dataKey] = dictionary
return dictionary
def addDataToHistogram(self, histogram, key):
histogram[key] = histogram.get(key, 0) + 1
class ReoffsetHandler(webapp2.RequestHandler):
tableTitles = ('tijd', 'aantal', 'perc.')
tableFormat = (':%02d', '%d', '%.1f%%')
def get(self):
series =TASeries.get(self.request.get('series'))
self.doc = ToolsDocument('Herschik offsets serie %s' % series.name)
self.writeReport(series)
self.response.out.write(self.doc.write())
def post(self):
series = TASeries.get(self.request.get('series'))
self.deltaOffset = [int(self.request.get('offset_up')), int(self.request.get('offset_down'))]
self.round = [int(self.request.get('round_up')), int(self.request.get('round_down'))]
self.processedObjects = []
self.processedMissions = {}
self.processedPoints = {}
self.doc = ToolsDocument('Aangepaste offsets serie %s' % series.name)
self.doc.main.add(markup.heading(2, 'Aangepaste patroontijden'))
self.processPoints(series)
self.doc.main.add(markup.heading(2, 'Aangepaste offsettijden'))
table = self.doc.add_table('adapted_missions', ['Missie', 'Offset'])
self.processMissions(series.all_mission_ids(Direction.up), Direction.up, table)
self.processMissions(series.all_mission_ids(Direction.down), Direction.down, table)
series.cache_set()
self.saveChanges()
# self.writeReport(series)
self.response.out.write(self.doc.write())
def writeReport(self, series):
|
def analyzeOffset(self, missionIDs):
self.offset = [None, None]
self.data=[[], []]
firstHalfHist = dict()
firstHalfItems = 0
secondHalfHist = dict()
secondHalfItems = 0
for missionID in missionIDs:
mission = TAMission.get(missionID)
num = mission.number
if bool(num % 2): num -= 1
key = mission.offset.minute
if bool(num % 4):
firstHalfHist[key] = firstHalfHist.get(key, 0) + 1
firstHalfItems += 1
else:
secondHalfHist[key] = secondHalfHist.get(key, 0) + 1
secondHalfItems += 1
self.generateData(FIRST_HALF, firstHalfHist, firstHalfItems)
self.generateData(SECND_HALF, secondHalfHist, secondHalfItems)
def generateData(self, halfHour, histogram, count):
maxFrequency = 0
for key, value in histogram.iteritems():
self.data[halfHour].append((int(key), value, 100.0 * value/count))
if value > maxFrequency:
maxFrequency = value
self.offset[halfHour] = int(key)
def reportOffset(self, halfHour, direction):
if self.offset[halfHour] != None:
self.doc.main.add(markup.heading(3, '%s halfuur :%02d' % (ORD_LABEL[halfHour], self.offset[halfHour])))
table = self.doc.add_table('table_%d' % (2 * direction + halfHour), self.tableTitles, self.tableFormat)
table.fill_data(self.data[halfHour])
departure = self.offset[halfHour] + self.departure[direction]
if departure >= 60:
departure -= 60
self.offset[halfHour] -= 60
self.doc.add_paragraph('Vertrek uit %s: %d + %d = :%02d' %
(self.startStation[direction], self.offset[halfHour], self.departure[ | self.departure = [series.first_point.upDeparture, series.last_point.downDeparture]
self.startStation = [series.first_point.stationName, series.last_point.stationName]
self.foundOffset = [None, None]
self.doc.main.add(markup.heading(2, 'Heenrichting'))
self.analyzeOffset(series.all_mission_ids(Direction.up))
self.reportOffset(FIRST_HALF, Direction.up)
self.reportOffset(SECND_HALF, Direction.up)
self.doc.main.add(markup.heading(2, 'Terugrichting'))
self.analyzeOffset(series.all_mission_ids(Direction.down))
self.reportOffset(FIRST_HALF, Direction.down)
self.reportOffset(SECND_HALF, Direction.down)
if self.foundOffset[Direction.up] or self.foundOffset[Direction.down]:
self.doc.main.add(markup.heading(2, 'Aanpassen'))
self.proposeChanges() | identifier_body |
TAToolsHandler.py | == None:
dictionary = dict()
self.results[direction][stopKey] = dictionary
return dictionary
def histogram(self, direction, stopKey, dataKey):
stopDictionary = self.stopDictionary(direction, stopKey)
dictionary = stopDictionary.get(dataKey, None)
if dictionary == None:
dictionary = dict()
stopDictionary[dataKey] = dictionary
return dictionary
def addDataToHistogram(self, histogram, key):
histogram[key] = histogram.get(key, 0) + 1
class ReoffsetHandler(webapp2.RequestHandler):
tableTitles = ('tijd', 'aantal', 'perc.')
tableFormat = (':%02d', '%d', '%.1f%%')
def get(self):
series =TASeries.get(self.request.get('series'))
self.doc = ToolsDocument('Herschik offsets serie %s' % series.name)
self.writeReport(series)
self.response.out.write(self.doc.write())
def post(self):
series = TASeries.get(self.request.get('series'))
self.deltaOffset = [int(self.request.get('offset_up')), int(self.request.get('offset_down'))]
self.round = [int(self.request.get('round_up')), int(self.request.get('round_down'))]
self.processedObjects = []
self.processedMissions = {}
self.processedPoints = {}
self.doc = ToolsDocument('Aangepaste offsets serie %s' % series.name)
self.doc.main.add(markup.heading(2, 'Aangepaste patroontijden'))
self.processPoints(series)
self.doc.main.add(markup.heading(2, 'Aangepaste offsettijden'))
table = self.doc.add_table('adapted_missions', ['Missie', 'Offset'])
self.processMissions(series.all_mission_ids(Direction.up), Direction.up, table)
self.processMissions(series.all_mission_ids(Direction.down), Direction.down, table)
series.cache_set()
self.saveChanges()
# self.writeReport(series)
self.response.out.write(self.doc.write())
def writeReport(self, series):
self.departure = [series.first_point.upDeparture, series.last_point.downDeparture]
self.startStation = [series.first_point.stationName, series.last_point.stationName]
self.foundOffset = [None, None]
self.doc.main.add(markup.heading(2, 'Heenrichting'))
self.analyzeOffset(series.all_mission_ids(Direction.up))
self.reportOffset(FIRST_HALF, Direction.up)
self.reportOffset(SECND_HALF, Direction.up)
self.doc.main.add(markup.heading(2, 'Terugrichting'))
self.analyzeOffset(series.all_mission_ids(Direction.down))
self.reportOffset(FIRST_HALF, Direction.down)
self.reportOffset(SECND_HALF, Direction.down)
if self.foundOffset[Direction.up] or self.foundOffset[Direction.down]:
self.doc.main.add(markup.heading(2, 'Aanpassen'))
self.proposeChanges()
def analyzeOffset(self, missionIDs):
self.offset = [None, None]
self.data=[[], []]
firstHalfHist = dict()
firstHalfItems = 0
secondHalfHist = dict()
secondHalfItems = 0
for missionID in missionIDs:
mission = TAMission.get(missionID)
num = mission.number
if bool(num % 2): num -= 1
key = mission.offset.minute
if bool(num % 4):
firstHalfHist[key] = firstHalfHist.get(key, 0) + 1
firstHalfItems += 1
else:
secondHalfHist[key] = secondHalfHist.get(key, 0) + 1
secondHalfItems += 1
self.generateData(FIRST_HALF, firstHalfHist, firstHalfItems)
self.generateData(SECND_HALF, secondHalfHist, secondHalfItems)
def generateData(self, halfHour, histogram, count):
maxFrequency = 0
for key, value in histogram.iteritems():
self.data[halfHour].append((int(key), value, 100.0 * value/count))
if value > maxFrequency:
maxFrequency = value
self.offset[halfHour] = int(key)
def reportOffset(self, halfHour, direction):
if self.offset[halfHour] != None:
self.doc.main.add(markup.heading(3, '%s halfuur :%02d' % (ORD_LABEL[halfHour], self.offset[halfHour])))
table = self.doc.add_table('table_%d' % (2 * direction + halfHour), self.tableTitles, self.tableFormat)
table.fill_data(self.data[halfHour])
departure = self.offset[halfHour] + self.departure[direction]
if departure >= 60:
departure -= 60
self.offset[halfHour] -= 60
self.doc.add_paragraph('Vertrek uit %s: %d + %d = :%02d' %
(self.startStation[direction], self.offset[halfHour], self.departure[direction], departure))
if self.foundOffset[direction] == None or self.offset[halfHour] < self.foundOffset[direction]:
self.foundOffset[direction] = self.offset[halfHour]
def proposeChanges(self):
table = markup.HTMLTable('submit_table', ['', 'Offset', 'Afronden'])
form = markup.form('/tools/reoffset', 'post')
form.add(markup.input('hidden', 'series', self.request.get('series')))
form.add(table)
self.doc.main.add(form)
row = table.add_row()
row.add_to_cell(0,'heen')
row.add_to_cell(1, markup.input('text', 'offset_up', str(self.foundOffset[Direction.up]), size=6))
row.add_to_cell(2, markup.input('text', 'round_up', '3', size=6))
row = table.add_row()
row.add_to_cell(0,'terug')
row.add_to_cell(1, markup.input('text', 'offset_down', str(self.foundOffset[Direction.down]), size=6))
row.add_to_cell(2, markup.input('text', 'round_down', '3', size=6))
row = table.add_row()
row.add_to_cell(0, markup.input('submit', value='pas aan'))
def processPoints(self,series):
table = self.doc.add_table('adapted_schedule', ['Station', 'Heen', 'Terug'])
for point in series.points:
# Change arrival and departure times:
oldUp, oldDown = point.times_strings
point.upArrival += self.deltaOffset[Direction.up]
point.upDeparture += self.deltaOffset[Direction.up]
point.downArrival += self.deltaOffset[Direction.down]
point.downDeparture += self.deltaOffset[Direction.down]
newUp, newDown = point.times_strings
# Add point to queue for saveChanges
self.processedPoints[point.id] = point
self.processedObjects.append(point)
# Report the changes:
row = table.add_row()
row.add_to_cell(0, point.stationName)
row.add_to_cell(1, '[%s] %s [%s]' % (oldUp, change_string(self.deltaOffset[Direction.up]), newUp))
row.add_to_cell(2, '[%s] %s [%s]' % (oldDown, change_string(self.deltaOffset[Direction.down]), newDown))
def processMissions(self, missionIDs, direction, table):
if self.deltaOffset[direction]:
for missionID in missionIDs:
# Change mission offset time:
mission = TAMission.get(missionID)
oldOffset = datetime(2002, 2, 2).replace(hour=mission.offset.hour, minute=mission.offset.minute)
newOffset = round_mission_offset(oldOffset - timedelta(minutes=self.deltaOffset[direction]), self.round[direction])
mission.offset = newOffset.time()
# Add mission to queue for saveChanges
self.processedMissions[missionID] = mission
self.processedObjects.append(mission)
# Report the changes:
row = table.add_row()
row.add_to_cell(0, missionID)
row.add_to_cell(1, '%s %s %s' % (oldOffset.strftime('%H:%M'),
change_string(-self.deltaOffset[direction]),
newOffset.strftime('%H:%M')))
def saveChanges(self):
memcache.set_multi(self.processedPoints, namespace='TAScheduledPoint')
memcache.set_multi(self.processedMissions, namespace='TAMission')
db.put(self.processedObjects)
# HTML Document
class ToolsDocument(markup.HTMLDocument):
def __init__(self, title, language='en'):
markup.HTMLDocument.__init__(self, title, language)
#Stylesheet
style_element = markup.link('stylesheet', '/web/style.css')
style_element.set_attribute('type', 'css')
style_element.set_attribute('media', 'screen')
self.head.add(style_element)
#Header
self.header = markup.XMLElement('header')
self.header.add(markup.user_id())
self.header.add(markup.heading(1, title))
self.body.add(self.header)
#Paper with two columns: sidebar and main | random_line_split |
||
TAToolsHandler.py | TASeries.get(self.request.get('series'))
self.doc = ToolsDocument('Aangepaste tijden voor serie %s' % series.name)
processedObjects = []
processedPoints = {}
table = self.doc.add_table('changestable', ['Station', 'A', 'V', 'A', 'V', ''])
for index in range(len(series.points)):
point = series.points[index]
oldTimes = point.scheduled_times
upArrival = self.request.get('arr_%d_%d' % (Direction.up, index))
upDeparture = self.request.get('dep_%d_%d' % (Direction.up, index))
downArrival = self.request.get('arr_%d_%d' % (Direction.down, index))
downDeparture = self.request.get('dep_%d_%d' % (Direction.down, index))
newTimes = (minutes_from_string(upArrival),
minutes_from_string(upDeparture),
minutes_from_string(downArrival),
minutes_from_string(downDeparture))
row = table.add_row()
row.add_to_cell(0, point.stationName)
row.add_to_cell(1, upArrival)
row.add_to_cell(2, upDeparture)
row.add_to_cell(3, downArrival)
row.add_to_cell(4, downDeparture)
if oldTimes != newTimes:
point.scheduled_times = newTimes
processedPoints[point.id] = point
processedObjects.append(point)
row.add_to_cell(5, 'aangepast')
series.cache_set()
memcache.set_multi(processedPoints, namespace='TAScheduledPoint')
db.put(processedObjects)
self.response.out.write(self.doc.write())
def patternTimeTable(self, series, direction):
table = markup.HTMLTable('timetable_%d' % direction, ['Station', 'A', 'V', 'meting', '#', 'delta', 'A', 'V'])
indexes = range(len(series.points))
if direction == Direction.down: indexes.reverse()
for index in indexes:
point = series.points[index]
station = point.station
planArrival, planDeparture = point.times_in_direction(direction)
row = table.add_row()
row.add_to_cell(0, station.name)
row.add_to_cell(1, string_from_minutes(planArrival))
row.add_to_cell(2, string_from_minutes(planDeparture))
stationDict = self.results[direction].get(station.id, None)
if stationDict == None:
departure, count = ('-', '-')
delta = 0
else:
departure, count = mostCommonItem(stationDict['v'])
delta = departure - planDeparture
departure = string_from_minutes(departure)
row.add_to_cell(3, departure)
row.add_to_cell(4, count)
row.add_to_cell(5, delta)
row.add_to_cell(6, markup.input('text', 'arr_%d_%d' % (direction, index), string_from_minutes(planArrival + delta), size=4))
row.add_to_cell(7, markup.input('text', 'dep_%d_%d' % (direction, index), string_from_minutes(planDeparture + delta), size=4))
return table
def analyzeStops(self):
series_id = self.request.get('series')
query = db.Query(TAArchivedMission).filter('series_id =', series_id)
for mission in query.fetch(50):
if mission.up: direction = Direction.up
else: direction = Direction.down
for stop in mission.stopsList:
stopKey = stop.station_id
if stop.status == StopStatuses.planned:
departureHist = self.histogram(direction, stopKey, 'v')
difference = utc_from_cet(stop.departure) - correctedOffsetUTC(mission)
self.addDataToHistogram(departureHist, difference.seconds // 60)
delayHist = self.histogram(direction, stopKey, 'dv')
self.addDataToHistogram(delayHist, int(stop.delay_dep))
platformHist = self.histogram(direction, stopKey, 'p')
self.addDataToHistogram(platformHist, stop.platform)
def stopDictionary(self, direction, stopKey):
dictionary = self.results[direction].get(stopKey, None)
if dictionary == None:
dictionary = dict()
self.results[direction][stopKey] = dictionary
return dictionary
def histogram(self, direction, stopKey, dataKey):
stopDictionary = self.stopDictionary(direction, stopKey)
dictionary = stopDictionary.get(dataKey, None)
if dictionary == None:
dictionary = dict()
stopDictionary[dataKey] = dictionary
return dictionary
def addDataToHistogram(self, histogram, key):
histogram[key] = histogram.get(key, 0) + 1
class ReoffsetHandler(webapp2.RequestHandler):
tableTitles = ('tijd', 'aantal', 'perc.')
tableFormat = (':%02d', '%d', '%.1f%%')
def get(self):
series =TASeries.get(self.request.get('series'))
self.doc = ToolsDocument('Herschik offsets serie %s' % series.name)
self.writeReport(series)
self.response.out.write(self.doc.write())
def post(self):
series = TASeries.get(self.request.get('series'))
self.deltaOffset = [int(self.request.get('offset_up')), int(self.request.get('offset_down'))]
self.round = [int(self.request.get('round_up')), int(self.request.get('round_down'))]
self.processedObjects = []
self.processedMissions = {}
self.processedPoints = {}
self.doc = ToolsDocument('Aangepaste offsets serie %s' % series.name)
self.doc.main.add(markup.heading(2, 'Aangepaste patroontijden'))
self.processPoints(series)
self.doc.main.add(markup.heading(2, 'Aangepaste offsettijden'))
table = self.doc.add_table('adapted_missions', ['Missie', 'Offset'])
self.processMissions(series.all_mission_ids(Direction.up), Direction.up, table)
self.processMissions(series.all_mission_ids(Direction.down), Direction.down, table)
series.cache_set()
self.saveChanges()
# self.writeReport(series)
self.response.out.write(self.doc.write())
def writeReport(self, series):
self.departure = [series.first_point.upDeparture, series.last_point.downDeparture]
self.startStation = [series.first_point.stationName, series.last_point.stationName]
self.foundOffset = [None, None]
self.doc.main.add(markup.heading(2, 'Heenrichting'))
self.analyzeOffset(series.all_mission_ids(Direction.up))
self.reportOffset(FIRST_HALF, Direction.up)
self.reportOffset(SECND_HALF, Direction.up)
self.doc.main.add(markup.heading(2, 'Terugrichting'))
self.analyzeOffset(series.all_mission_ids(Direction.down))
self.reportOffset(FIRST_HALF, Direction.down)
self.reportOffset(SECND_HALF, Direction.down)
if self.foundOffset[Direction.up] or self.foundOffset[Direction.down]:
self.doc.main.add(markup.heading(2, 'Aanpassen'))
self.proposeChanges()
def analyzeOffset(self, missionIDs):
self.offset = [None, None]
self.data=[[], []]
firstHalfHist = dict()
firstHalfItems = 0
secondHalfHist = dict()
secondHalfItems = 0
for missionID in missionIDs:
mission = TAMission.get(missionID)
num = mission.number
if bool(num % 2): num -= 1
key = mission.offset.minute
if bool(num % 4):
firstHalfHist[key] = firstHalfHist.get(key, 0) + 1
firstHalfItems += 1
else:
secondHalfHist[key] = secondHalfHist.get(key, 0) + 1
secondHalfItems += 1
self.generateData(FIRST_HALF, firstHalfHist, firstHalfItems)
self.generateData(SECND_HALF, secondHalfHist, secondHalfItems)
def generateData(self, halfHour, histogram, count):
maxFrequency = 0
for key, value in histogram.iteritems():
self.data[halfHour].append((int(key), value, 100.0 * value/count))
if value > maxFrequency:
maxFrequency = value
self.offset[halfHour] = int(key)
def | (self, halfHour, direction):
if self.offset[halfHour] != None:
self.doc.main.add(markup.heading(3, '%s halfuur :%02d' % (ORD_LABEL[halfHour], self.offset[halfHour])))
table = self.doc.add_table('table_%d' % (2 * direction + halfHour), self.tableTitles, self.tableFormat)
table.fill_data(self.data[halfHour])
departure = self.offset[halfHour] + self.departure[direction]
if departure >= 60:
departure -= 60
self.offset[halfHour] -= 60
self.doc.add_paragraph('Vertrek uit %s: %d + %d = :%02d' %
(self.startStation[direction], self.offset[halfHour], self.departure[ | reportOffset | identifier_name |
resultados.component.ts | import { Component, OnInit } from '@angular/core';
import { Router, ActivatedRoute, ROUTER_DIRECTIVES } from '@angular/router';
import { LocalStorage, SessionStorage } from "angular2-localstorage/WebStorage";
import { Logger } from '../logger';
import { Title, SafeResourceUrl, DomSanitizationService } from '@angular/platform-browser';
import { Session } from '../classes/session';
import { Survey } from '../classes/survey';
| import 'app/js/results.js';
@Component({
selector: 'q-results',
templateUrl: 'app/templates/results.component.html',
directives: [ROUTER_DIRECTIVES]
})
export class ResultsComponent implements OnInit {
surveyObj: Survey = new Survey();
sessionObj: Session = new Session();
firebase: AngularFire;
sanitizer: DomSanitizationService;
surveyID: any;
isEmpty: boolean = false;
isLoaded: boolean = false;
proyectedUrl: SafeResourceUrl;
constructor(
private router : Router,
private route : ActivatedRoute,
private logger : Logger,
public angFire : AngularFire,
private titleService : Title,
private sanit: DomSanitizationService) {
this.firebase = angFire;
this.sanitizer = sanit;
}
public setTitle(newTitle: string) {
this.titleService.setTitle( newTitle );
}
ngOnInit() {
this.setTitle("Resultados - México Cumbre de Negocios");
this.route.params.subscribe(params => {
this.surveyID = params['id'];
this.proyectedUrl = this.sanitizer.bypassSecurityTrustResourceUrl("#/resultadosProyectados/"+this.surveyID);
this.firebase.database.object('/surveys/'+this.surveyID).subscribe(srvObj => {
this.surveyObj = srvObj;
this.getOptions();
this.firebase.database.object('/sessions/'+srvObj.sessionId).subscribe(sessObj => {
this.sessionObj = sessObj;
});
});
});
}
getOptions(){
ResultsVar.reset();
let votesObj: any[] = [];
let votemp: any[] = [];
votemp.push("Opcion");
votemp.push("Numero de votos");
ResultsVar.setVote(votemp);
var optionsArr = this.getArrayOf(this.surveyObj.options);
var counter = 0;
var load = 0;
var dataSize = optionsArr.length;
optionsArr.forEach((opt: any) => {
this.firebase.database.object('/votes/'+opt.voteId).subscribe(vote => {
let votemp: any[] = [];
votemp.push(opt.name);
var voteNum = (vote.users != false) ? vote.users.length : 0;
votemp.push(voteNum);
ResultsVar.setVote(votemp);
load++;
if(voteNum == 0) counter++;
if(counter == dataSize) this.isEmpty = true;
if(load == dataSize){
ResultsVar.init();
this.isLoaded = true;
}
});
});
}
getArrayOf(object: any) {
let newArr: any[] = [];
for (var key in object) {
object[key]["$key"] = key;
newArr.push(object[key]);
}
return newArr;
}
} | import { AngularFire, FirebaseListObservable, FirebaseObjectObservable } from 'angularfire2';
declare var ResultsVar: any; | random_line_split |
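In the `random_line_split` row above, the hidden middle is simply two consecutive source lines (the `angularfire2` import and the `ResultsVar` declaration) lifted out of the file's import block. A sketch of such a splitter, again as an assumed illustration rather than the dataset's real tooling:

```python
import random

def split_random_lines(source: str, max_middle: int = 4):
    """Pick a random contiguous run of lines as the FIM middle."""
    lines = source.splitlines(keepends=True)
    if not lines:
        return "", "", ""
    start = random.randrange(len(lines))
    end = min(len(lines), start + random.randint(1, max_middle))
    return "".join(lines[:start]), "".join(lines[start:end]), "".join(lines[end:])
```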
resultados.component.ts | import { Component, OnInit } from '@angular/core';
import { Router, ActivatedRoute, ROUTER_DIRECTIVES } from '@angular/router';
import { LocalStorage, SessionStorage } from "angular2-localstorage/WebStorage";
import { Logger } from '../logger';
import { Title, SafeResourceUrl, DomSanitizationService } from '@angular/platform-browser';
import { Session } from '../classes/session';
import { Survey } from '../classes/survey';
import { AngularFire, FirebaseListObservable, FirebaseObjectObservable } from 'angularfire2';
declare var ResultsVar: any;
import 'app/js/results.js';
@Component({
selector: 'q-results',
templateUrl: 'app/templates/results.component.html',
directives: [ROUTER_DIRECTIVES]
})
export class | implements OnInit {
surveyObj: Survey = new Survey();
sessionObj: Session = new Session();
firebase: AngularFire;
sanitizer: DomSanitizationService;
surveyID: any;
isEmpty: boolean = false;
isLoaded: boolean = false;
proyectedUrl: SafeResourceUrl;
constructor(
private router : Router,
private route : ActivatedRoute,
private logger : Logger,
public angFire : AngularFire,
private titleService : Title,
private sanit: DomSanitizationService) {
this.firebase = angFire;
this.sanitizer = sanit;
}
public setTitle(newTitle: string) {
this.titleService.setTitle( newTitle );
}
ngOnInit() {
this.setTitle("Resultados - México Cumbre de Negocios");
this.route.params.subscribe(params => {
this.surveyID = params['id'];
this.proyectedUrl = this.sanitizer.bypassSecurityTrustResourceUrl("#/resultadosProyectados/"+this.surveyID);
this.firebase.database.object('/surveys/'+this.surveyID).subscribe(srvObj => {
this.surveyObj = srvObj;
this.getOptions();
this.firebase.database.object('/sessions/'+srvObj.sessionId).subscribe(sessObj => {
this.sessionObj = sessObj;
});
});
});
}
getOptions(){
ResultsVar.reset();
let votesObj: any[] = [];
let votemp: any[] = [];
votemp.push("Opcion");
votemp.push("Numero de votos");
ResultsVar.setVote(votemp);
var optionsArr = this.getArrayOf(this.surveyObj.options);
var counter = 0;
var load = 0;
var dataSize = optionsArr.length;
optionsArr.forEach((opt: any) => {
this.firebase.database.object('/votes/'+opt.voteId).subscribe(vote => {
let votemp: any[] = [];
votemp.push(opt.name);
var voteNum = (vote.users != false) ? vote.users.length : 0;
votemp.push(voteNum);
ResultsVar.setVote(votemp);
load++;
if(voteNum == 0) counter++;
if(counter == dataSize) this.isEmpty = true;
if(load == dataSize){
ResultsVar.init();
this.isLoaded = true;
}
});
});
}
getArrayOf(object: any) {
let newArr: any[] = [];
for (var key in object) {
object[key]["$key"] = key;
newArr.push(object[key]);
}
return newArr;
}
} | ResultsComponent | identifier_name |
test_auto_FSL2Scheme.py | # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT
from nipype.testing import assert_equal
from nipype.interfaces.camino.convert import FSL2Scheme
def test_FSL2Scheme_inputs():
| ),
flipy=dict(argstr='-flipy',
),
flipz=dict(argstr='-flipz',
),
ignore_exception=dict(nohash=True,
usedefault=True,
),
interleave=dict(argstr='-interleave',
),
numscans=dict(argstr='-numscans %d',
units='NA',
),
out_file=dict(argstr='> %s',
genfile=True,
position=-1,
),
terminal_output=dict(nohash=True,
),
usegradmod=dict(argstr='-usegradmod',
),
)
inputs = FSL2Scheme.input_spec()
for key, metadata in input_map.items():
for metakey, value in metadata.items():
yield assert_equal, getattr(inputs.traits()[key], metakey), value
def test_FSL2Scheme_outputs():
output_map = dict(scheme=dict(),
)
outputs = FSL2Scheme.output_spec()
for key, metadata in output_map.items():
for metakey, value in metadata.items():
yield assert_equal, getattr(outputs.traits()[key], metakey), value
| input_map = dict(args=dict(argstr='%s',
),
bscale=dict(argstr='-bscale %d',
units='NA',
),
bval_file=dict(argstr='-bvalfile %s',
mandatory=True,
position=2,
),
bvec_file=dict(argstr='-bvecfile %s',
mandatory=True,
position=1,
),
diffusiontime=dict(argstr='-diffusiontime %f',
units='NA',
),
environ=dict(nohash=True,
usedefault=True,
),
flipx=dict(argstr='-flipx', | identifier_body |
test_auto_FSL2Scheme.py | # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT
from nipype.testing import assert_equal
from nipype.interfaces.camino.convert import FSL2Scheme
def | ():
input_map = dict(args=dict(argstr='%s',
),
bscale=dict(argstr='-bscale %d',
units='NA',
),
bval_file=dict(argstr='-bvalfile %s',
mandatory=True,
position=2,
),
bvec_file=dict(argstr='-bvecfile %s',
mandatory=True,
position=1,
),
diffusiontime=dict(argstr='-diffusiontime %f',
units='NA',
),
environ=dict(nohash=True,
usedefault=True,
),
flipx=dict(argstr='-flipx',
),
flipy=dict(argstr='-flipy',
),
flipz=dict(argstr='-flipz',
),
ignore_exception=dict(nohash=True,
usedefault=True,
),
interleave=dict(argstr='-interleave',
),
numscans=dict(argstr='-numscans %d',
units='NA',
),
out_file=dict(argstr='> %s',
genfile=True,
position=-1,
),
terminal_output=dict(nohash=True,
),
usegradmod=dict(argstr='-usegradmod',
),
)
inputs = FSL2Scheme.input_spec()
for key, metadata in input_map.items():
for metakey, value in metadata.items():
yield assert_equal, getattr(inputs.traits()[key], metakey), value
def test_FSL2Scheme_outputs():
output_map = dict(scheme=dict(),
)
outputs = FSL2Scheme.output_spec()
for key, metadata in output_map.items():
for metakey, value in metadata.items():
yield assert_equal, getattr(outputs.traits()[key], metakey), value
| test_FSL2Scheme_inputs | identifier_name |
test_auto_FSL2Scheme.py | # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT
from nipype.testing import assert_equal
from nipype.interfaces.camino.convert import FSL2Scheme
def test_FSL2Scheme_inputs():
input_map = dict(args=dict(argstr='%s',
),
bscale=dict(argstr='-bscale %d',
units='NA',
),
bval_file=dict(argstr='-bvalfile %s',
mandatory=True,
position=2,
),
bvec_file=dict(argstr='-bvecfile %s',
mandatory=True,
position=1,
),
diffusiontime=dict(argstr='-diffusiontime %f',
units='NA',
),
environ=dict(nohash=True,
usedefault=True,
),
flipx=dict(argstr='-flipx',
),
flipy=dict(argstr='-flipy',
),
flipz=dict(argstr='-flipz',
),
ignore_exception=dict(nohash=True,
usedefault=True,
),
interleave=dict(argstr='-interleave',
),
numscans=dict(argstr='-numscans %d',
units='NA',
),
out_file=dict(argstr='> %s',
genfile=True,
position=-1,
),
terminal_output=dict(nohash=True,
),
usegradmod=dict(argstr='-usegradmod',
),
)
inputs = FSL2Scheme.input_spec()
for key, metadata in input_map.items():
for metakey, value in metadata.items():
|
def test_FSL2Scheme_outputs():
output_map = dict(scheme=dict(),
)
outputs = FSL2Scheme.output_spec()
for key, metadata in output_map.items():
for metakey, value in metadata.items():
yield assert_equal, getattr(outputs.traits()[key], metakey), value
| yield assert_equal, getattr(inputs.traits()[key], metakey), value | conditional_block |
test_auto_FSL2Scheme.py | # AUTO-GENERATED by tools/checkspecs.py - DO NOT EDIT
from nipype.testing import assert_equal
from nipype.interfaces.camino.convert import FSL2Scheme
def test_FSL2Scheme_inputs():
input_map = dict(args=dict(argstr='%s',
),
bscale=dict(argstr='-bscale %d',
units='NA',
),
bval_file=dict(argstr='-bvalfile %s',
mandatory=True,
position=2,
),
bvec_file=dict(argstr='-bvecfile %s',
mandatory=True,
position=1,
),
diffusiontime=dict(argstr='-diffusiontime %f',
units='NA',
),
environ=dict(nohash=True,
usedefault=True,
),
flipx=dict(argstr='-flipx',
),
flipy=dict(argstr='-flipy',
),
flipz=dict(argstr='-flipz',
),
ignore_exception=dict(nohash=True,
usedefault=True,
),
interleave=dict(argstr='-interleave',
),
numscans=dict(argstr='-numscans %d',
units='NA',
),
out_file=dict(argstr='> %s',
genfile=True,
position=-1,
),
terminal_output=dict(nohash=True,
),
usegradmod=dict(argstr='-usegradmod',
),
)
inputs = FSL2Scheme.input_spec()
for key, metadata in input_map.items():
for metakey, value in metadata.items():
yield assert_equal, getattr(inputs.traits()[key], metakey), value
| )
outputs = FSL2Scheme.output_spec()
for key, metadata in output_map.items():
for metakey, value in metadata.items():
yield assert_equal, getattr(outputs.traits()[key], metakey), value | def test_FSL2Scheme_outputs():
output_map = dict(scheme=dict(), | random_line_split |
connection.js | /**
* Module dependencies.
*/
var Connection = require('../../connection')
, mongo = require('mongodb')
, Server = mongo.Server
, ReplSetServers = mongo.ReplSetServers;
/**
* Connection for mongodb-native driver
*
* @api private
*/
function | () {
Connection.apply(this, arguments);
};
/**
* Inherits from Connection.
*/
NativeConnection.prototype.__proto__ = Connection.prototype;
/**
* Opens the connection.
*
* Example server options:
* auto_reconnect (default: false)
* poolSize (default: 1)
*
* Example db options:
* pk - custom primary key factory to generate `_id` values
*
* Some of these may break Mongoose. Use at your own risk. You have been warned.
*
* @param {Function} callback
* @api private
*/
NativeConnection.prototype.doOpen = function (fn) {
var server;
if (!this.db) {
server = new mongo.Server(this.host, Number(this.port), this.options.server);
this.db = new mongo.Db(this.name, server, this.options.db);
}
this.db.open(fn);
return this;
};
/**
* Opens a set connection
*
* See description of doOpen for server options. In this case options.replset
* is also passed to ReplSetServers. Some additional options there are
*
* reconnectWait (default: 1000)
* retries (default: 30)
* rs_name (default: false)
* read_secondary (default: false) Are reads allowed from secondaries?
*
* @param {Function} fn
* @api private
*/
NativeConnection.prototype.doOpenSet = function (fn) {
if (!this.db) {
var servers = []
, ports = this.port
, self = this
this.host.forEach(function (host, i) {
servers.push(new mongo.Server(host, Number(ports[i]), self.options.server));
});
var server = new ReplSetServers(servers, this.options.replset);
this.db = new mongo.Db(this.name, server, this.options.db);
}
this.db.open(fn);
return this;
};
/**
* Closes the connection
*
* @param {Function} callback
* @api private
*/
NativeConnection.prototype.doClose = function (fn) {
this.db.close();
if (fn) fn();
return this;
}
/**
* Module exports.
*/
module.exports = NativeConnection;
| NativeConnection | identifier_name |
connection.js | /**
* Module dependencies.
*/
var Connection = require('../../connection')
, mongo = require('mongodb')
, Server = mongo.Server
, ReplSetServers = mongo.ReplSetServers;
/**
* Connection for mongodb-native driver
*
* @api private
*/
function NativeConnection() | ;
/**
* Inherits from Connection.
*/
NativeConnection.prototype.__proto__ = Connection.prototype;
/**
* Opens the connection.
*
* Example server options:
* auto_reconnect (default: false)
* poolSize (default: 1)
*
* Example db options:
* pk - custom primary key factory to generate `_id` values
*
* Some of these may break Mongoose. Use at your own risk. You have been warned.
*
* @param {Function} callback
* @api private
*/
NativeConnection.prototype.doOpen = function (fn) {
var server;
if (!this.db) {
server = new mongo.Server(this.host, Number(this.port), this.options.server);
this.db = new mongo.Db(this.name, server, this.options.db);
}
this.db.open(fn);
return this;
};
/**
* Opens a set connection
*
* See description of doOpen for server options. In this case options.replset
* is also passed to ReplSetServers. Some additional options there are
*
* reconnectWait (default: 1000)
* retries (default: 30)
* rs_name (default: false)
* read_secondary (default: false) Are reads allowed from secondaries?
*
* @param {Function} fn
* @api private
*/
NativeConnection.prototype.doOpenSet = function (fn) {
if (!this.db) {
var servers = []
, ports = this.port
, self = this
this.host.forEach(function (host, i) {
servers.push(new mongo.Server(host, Number(ports[i]), self.options.server));
});
var server = new ReplSetServers(servers, this.options.replset);
this.db = new mongo.Db(this.name, server, this.options.db);
}
this.db.open(fn);
return this;
};
/**
* Closes the connection
*
* @param {Function} callback
* @api private
*/
NativeConnection.prototype.doClose = function (fn) {
this.db.close();
if (fn) fn();
return this;
}
/**
* Module exports.
*/
module.exports = NativeConnection;
| {
Connection.apply(this, arguments);
} | identifier_body |
connection.js | /**
* Module dependencies.
*/
var Connection = require('../../connection')
, mongo = require('mongodb')
, Server = mongo.Server
, ReplSetServers = mongo.ReplSetServers;
/**
* Connection for mongodb-native driver
*
* @api private
*/
function NativeConnection() {
Connection.apply(this, arguments);
};
/**
* Inherits from Connection.
*/
NativeConnection.prototype.__proto__ = Connection.prototype;
/**
* Opens the connection.
*
* Example server options:
* auto_reconnect (default: false)
* poolSize (default: 1)
*
* Example db options:
* pk - custom primary key factory to generate `_id` values
*
* Some of these may break Mongoose. Use at your own risk. You have been warned.
*
* @param {Function} callback
* @api private | if (!this.db) {
server = new mongo.Server(this.host, Number(this.port), this.options.server);
this.db = new mongo.Db(this.name, server, this.options.db);
}
this.db.open(fn);
return this;
};
/**
* Opens a set connection
*
* See description of doOpen for server options. In this case options.replset
* is also passed to ReplSetServers. Some additional options there are
*
* reconnectWait (default: 1000)
* retries (default: 30)
* rs_name (default: false)
* read_secondary (default: false) Are reads allowed from secondaries?
*
* @param {Function} fn
* @api private
*/
NativeConnection.prototype.doOpenSet = function (fn) {
if (!this.db) {
var servers = []
, ports = this.port
, self = this
this.host.forEach(function (host, i) {
servers.push(new mongo.Server(host, Number(ports[i]), self.options.server));
});
var server = new ReplSetServers(servers, this.options.replset);
this.db = new mongo.Db(this.name, server, this.options.db);
}
this.db.open(fn);
return this;
};
/**
* Closes the connection
*
* @param {Function} callback
* @api private
*/
NativeConnection.prototype.doClose = function (fn) {
this.db.close();
if (fn) fn();
return this;
}
/**
* Module exports.
*/
module.exports = NativeConnection; | */
NativeConnection.prototype.doOpen = function (fn) {
var server;
| random_line_split |
defaults-nb_NO.min.js | /*!
* Bootstrap-select v1.12.2 (http://silviomoreto.github.io/bootstrap-select) | *
* Copyright 2013-2017 bootstrap-select
* Licensed under MIT (https://github.com/silviomoreto/bootstrap-select/blob/master/LICENSE)
*/
!function(a,b){"function"==typeof define&&define.amd?define(["jquery"],function(a){return b(a)}):"object"==typeof module&&module.exports?module.exports=b(require("jquery")):b(a.jQuery)}(this,function(a){!function(a){a.fn.selectpicker.defaults={noneSelectedText:"Ingen valgt",noneResultsText:"Søket gir ingen treff {0}",countSelectedText:function(a,b){return 1==a?"{0} alternativ valgt":"{0} alternativer valgt"},maxOptionsText:function(a,b){return["Grense nådd (maks {n} valg)","Grense for grupper nådd (maks {n} grupper)"]},selectAllText:"Merk alle",deselectAllText:"Fjern alle",multipleSeparator:", "}}(a)}); | random_line_split |
|
aquifer.py | - 1
self.Haq = np.atleast_1d(Haq).astype('d')
self.Hll = np.atleast_1d(Hll).astype('d')
self.T = self.kaq * self.Haq
self.Tcol = self.T.reshape(self.naq, 1)
self.c = np.atleast_1d(c).astype('d')
self.c[self.c > 1e100] = 1e100
self.Saq = np.atleast_1d(Saq).astype('d')
self.Sll = np.atleast_1d(Sll).astype('d')
self.Sll[self.Sll < 1e-20] = 1e-20 # Cannot be zero
self.poraq = np.atleast_1d(poraq).astype('d')
self.porll = np.atleast_1d(porll).astype('d')
self.ltype = np.atleast_1d(ltype)
self.zaqtop = self.z[:-1][self.ltype == 'a']
self.zaqbot = self.z[1:][self.ltype == 'a']
self.layernumber = np.zeros(self.nlayers, dtype='int')
self.layernumber[self.ltype == 'a'] = np.arange(self.naq)
self.layernumber[self.ltype == 'l'] = np.arange(self.nlayers - self.naq)
if self.ltype[0] == 'a':
self.layernumber[self.ltype == 'l'] += 1 # first leaky layer below first aquifer layer
self.topboundary = topboundary[:3]
self.phreatictop = phreatictop
self.kzoverkh = kzoverkh
if self.kzoverkh is not None:
self.kzoverkh = np.atleast_1d(self.kzoverkh).astype('d')
if len(self.kzoverkh) == 1:
self.kzoverkh = self.kzoverkh * np.ones(self.naq)
self.model3d = model3d
if self.model3d:
assert self.kzoverkh is not None, \
"model3d specified without kzoverkh"
#self.D = self.T / self.Saq
self.area = 1e200 # Smaller than default of ml.aq so that inhom is found
def __repr__(self):
return 'Inhom T: ' + str(self.T)
def initialize(self):
'''
eigval[naq, npval]: Array with eigenvalues
lab[naq, npval]: Array with lambda values
lab2[naq, nint, npint]: Array with lambda values reorganized per
interval
eigvec[naq, naq, npval]: Array with eigenvector matrices
coef[naq ,naq, npval]: Array with coefficients;
coef[ilayers, :, np] are the coefficients if the element is in
ilayers belonging to Laplace parameter number np
'''
# Recompute T for when kaq is changed
self.T = self.kaq * self.Haq
self.Tcol = self.T.reshape(self.naq, 1)
# Compute Saq and Sll
self.Scoefaq = self.Saq * self.Haq
self.Scoefll = self.Sll * self.Hll
if (self.topboundary == 'con') and self.phreatictop:
self.Scoefaq[0] = self.Scoefaq[0] / self.Haq[0]
elif (self.topboundary == 'lea') and self.phreatictop:
self.Scoefll[0] = self.Scoefll[0] / self.Hll[0]
self.D = self.T / self.Scoefaq
# Compute c if model3d for when kaq is changed
if self.model3d:
self.c[1:] = \
0.5 * self.Haq[:-1] / (self.kzoverkh[:-1] * self.kaq[:-1]) + \
0.5 * self.Haq[1:] / (self.kzoverkh[1:] * self.kaq[1:])
#
self.eigval = np.zeros((self.naq, self.model.npval), 'D')
self.lab = np.zeros((self.naq, self.model.npval), 'D')
self.eigvec = np.zeros((self.naq, self.naq, self.model.npval), 'D')
self.coef = np.zeros((self.naq, self.naq, self.model.npval), 'D')
b = np.diag(np.ones(self.naq))
for i in range(self.model.npval):
w, v = self.compute_lab_eigvec(self.model.p[i])
# Eigenvectors are columns of v
self.eigval[:, i] = w; self.eigvec[:, :, i] = v
self.coef[:, :, i] = np.linalg.solve(v, b).T
self.lab = 1.0 / np.sqrt(self.eigval)
self.lab2 = self.lab.copy()
self.lab2.shape = (self.naq, self.model.nint, self.model.npint)
self.lababs = np.abs(self.lab2[:, :, 0]) # used to check distances
self.eigvec2 = self.eigvec.copy()
self.eigvec2.shape = (self.naq, self.naq,
self.model.nint, self.model.npint)
def compute_lab_eigvec(self, p, returnA = False, B = None):
sqrtpSc = np.sqrt( p * self.Scoefll * self.c )
a, b = np.zeros_like(sqrtpSc), np.zeros_like(sqrtpSc)
small = np.abs(sqrtpSc) < 200
a[small] = sqrtpSc[small] / np.tanh(sqrtpSc[small])
b[small] = sqrtpSc[small] / np.sinh(sqrtpSc[small])
a[~small] = sqrtpSc[~small] / ((1.0 - np.exp(-2.0*sqrtpSc[~small])) /
(1.0 + np.exp(-2.0*sqrtpSc[~small])))
b[~small] = sqrtpSc[~small] * 2.0 * np.exp(-sqrtpSc[~small]) / \
(1.0 - np.exp(-2.0*sqrtpSc[~small]))
if (self.topboundary[:3] == 'sem') or (self.topboundary[:3] == 'lea'):
dzero = sqrtpSc[0] * np.tanh(sqrtpSc[0])
d0 = p / self.D
if B is not None:
d0 = d0 * B # B is vector of load efficiency paramters
d0[:-1] += a[1:] / (self.c[1:] * self.T[:-1])
d0[1:] += a[1:] / (self.c[1:] * self.T[1:])
if self.topboundary[:3] == 'lea':
d0[0] += dzero / ( self.c[0] * self.T[0] )
elif self.topboundary[:3] == 'sem':
d0[0] += a[0] / ( self.c[0] * self.T[0] )
dm1 = -b[1:] / (self.c[1:] * self.T[:-1])
dp1 = -b[1:] / (self.c[1:] * self.T[1:])
A = np.diag(dm1,-1) + np.diag(d0,0) + np.diag(dp1,1)
if returnA: return A
w, v = np.linalg.eig(A)
# sorting moved here
index = np.argsort(abs(w))[::-1]
w = w[index]
v = v[:, index]
return w, v
def head_to_potential(self, h, layers):
return h * self.Tcol[layers]
def potential_to_head(self, pot, layers):
return pot / self.Tcol[layers]
def isInside(self,x,y):
print('Must overload AquiferData.isInside method')
return True
def inWhichLayer(self, z):
|
def findlayer(self, z):
'''
Returns layer-number, layer-type and model-layer-number'''
if z > | '''Returns -9999 if above top of system,
+9999 if below bottom of system,
negative for in leaky layer.
leaky layer -n is on top of aquifer n'''
if z > self.zt[0]:
return -9999
for i in range(self.naq-1):
if z >= self.zb[i]:
return i
if z > self.zt[i+1]:
return -i-1
if z >= self.zb[self.naq-1]:
return self.naq - 1
return +9999 | identifier_body |
aquifer.py | - 1
self.Haq = np.atleast_1d(Haq).astype('d')
self.Hll = np.atleast_1d(Hll).astype('d')
self.T = self.kaq * self.Haq
self.Tcol = self.T.reshape(self.naq, 1)
self.c = np.atleast_1d(c).astype('d')
self.c[self.c > 1e100] = 1e100
self.Saq = np.atleast_1d(Saq).astype('d')
self.Sll = np.atleast_1d(Sll).astype('d')
self.Sll[self.Sll < 1e-20] = 1e-20 # Cannot be zero
self.poraq = np.atleast_1d(poraq).astype('d')
self.porll = np.atleast_1d(porll).astype('d')
self.ltype = np.atleast_1d(ltype)
self.zaqtop = self.z[:-1][self.ltype == 'a']
self.zaqbot = self.z[1:][self.ltype == 'a']
self.layernumber = np.zeros(self.nlayers, dtype='int')
self.layernumber[self.ltype == 'a'] = np.arange(self.naq)
self.layernumber[self.ltype == 'l'] = np.arange(self.nlayers - self.naq)
if self.ltype[0] == 'a':
self.layernumber[self.ltype == 'l'] += 1 # first leaky layer below first aquifer layer
self.topboundary = topboundary[:3]
self.phreatictop = phreatictop
self.kzoverkh = kzoverkh
if self.kzoverkh is not None:
self.kzoverkh = np.atleast_1d(self.kzoverkh).astype('d')
if len(self.kzoverkh) == 1:
self.kzoverkh = self.kzoverkh * np.ones(self.naq)
self.model3d = model3d
if self.model3d:
assert self.kzoverkh is not None, \
"model3d specified without kzoverkh"
#self.D = self.T / self.Saq
self.area = 1e200 # Smaller than default of ml.aq so that inhom is found
| '''
eigval[naq, npval]: Array with eigenvalues
lab[naq, npval]: Array with lambda values
lab2[naq, nint, npint]: Array with lambda values reorganized per
interval
eigvec[naq, naq, npval]: Array with eigenvector matrices
coef[naq ,naq, npval]: Array with coefficients;
coef[ilayers, :, np] are the coefficients if the element is in
ilayers belonging to Laplace parameter number np
'''
# Recompute T for when kaq is changed
self.T = self.kaq * self.Haq
self.Tcol = self.T.reshape(self.naq, 1)
# Compute Saq and Sll
self.Scoefaq = self.Saq * self.Haq
self.Scoefll = self.Sll * self.Hll
if (self.topboundary == 'con') and self.phreatictop:
self.Scoefaq[0] = self.Scoefaq[0] / self.Haq[0]
elif (self.topboundary == 'lea') and self.phreatictop:
self.Scoefll[0] = self.Scoefll[0] / self.Hll[0]
self.D = self.T / self.Scoefaq
# Compute c if model3d for when kaq is changed
if self.model3d:
self.c[1:] = \
0.5 * self.Haq[:-1] / (self.kzoverkh[:-1] * self.kaq[:-1]) + \
0.5 * self.Haq[1:] / (self.kzoverkh[1:] * self.kaq[1:])
#
self.eigval = np.zeros((self.naq, self.model.npval), 'D')
self.lab = np.zeros((self.naq, self.model.npval), 'D')
self.eigvec = np.zeros((self.naq, self.naq, self.model.npval), 'D')
self.coef = np.zeros((self.naq, self.naq, self.model.npval), 'D')
b = np.diag(np.ones(self.naq))
for i in range(self.model.npval):
w, v = self.compute_lab_eigvec(self.model.p[i])
# Eigenvectors are columns of v
self.eigval[:, i] = w; self.eigvec[:, :, i] = v
self.coef[:, :, i] = np.linalg.solve(v, b).T
self.lab = 1.0 / np.sqrt(self.eigval)
self.lab2 = self.lab.copy()
self.lab2.shape = (self.naq, self.model.nint, self.model.npint)
self.lababs = np.abs(self.lab2[:, :, 0]) # used to check distances
self.eigvec2 = self.eigvec.copy()
self.eigvec2.shape = (self.naq, self.naq,
self.model.nint, self.model.npint)
def compute_lab_eigvec(self, p, returnA = False, B = None):
sqrtpSc = np.sqrt( p * self.Scoefll * self.c )
a, b = np.zeros_like(sqrtpSc), np.zeros_like(sqrtpSc)
small = np.abs(sqrtpSc) < 200
a[small] = sqrtpSc[small] / np.tanh(sqrtpSc[small])
b[small] = sqrtpSc[small] / np.sinh(sqrtpSc[small])
a[~small] = sqrtpSc[~small] / ((1.0 - np.exp(-2.0*sqrtpSc[~small])) /
(1.0 + np.exp(-2.0*sqrtpSc[~small])))
b[~small] = sqrtpSc[~small] * 2.0 * np.exp(-sqrtpSc[~small]) / \
(1.0 - np.exp(-2.0*sqrtpSc[~small]))
if (self.topboundary[:3] == 'sem') or (self.topboundary[:3] == 'lea'):
dzero = sqrtpSc[0] * np.tanh(sqrtpSc[0])
d0 = p / self.D
if B is not None:
d0 = d0 * B # B is vector of load efficiency paramters
d0[:-1] += a[1:] / (self.c[1:] * self.T[:-1])
d0[1:] += a[1:] / (self.c[1:] * self.T[1:])
if self.topboundary[:3] == 'lea':
d0[0] += dzero / ( self.c[0] * self.T[0] )
elif self.topboundary[:3] == 'sem':
d0[0] += a[0] / ( self.c[0] * self.T[0] )
dm1 = -b[1:] / (self.c[1:] * self.T[:-1])
dp1 = -b[1:] / (self.c[1:] * self.T[1:])
A = np.diag(dm1,-1) + np.diag(d0,0) + np.diag(dp1,1)
if returnA: return A
w, v = np.linalg.eig(A)
# sorting moved here
index = np.argsort(abs(w))[::-1]
w = w[index]
v = v[:, index]
return w, v
def head_to_potential(self, h, layers):
return h * self.Tcol[layers]
def potential_to_head(self, pot, layers):
return pot / self.Tcol[layers]
def isInside(self,x,y):
print('Must overload AquiferData.isInside method')
return True
def inWhichLayer(self, z):
'''Returns -9999 if above top of system,
+9999 if below bottom of system,
negative for in leaky layer.
leaky layer -n is on top of aquifer n'''
if z > self.zt[0]:
return -9999
for i in range(self.naq-1):
if z >= self.zb[i]:
return i
if z > self.zt[i+1]:
return -i-1
if z >= self.zb[self.naq-1]:
return self.naq - 1
return +9999
def findlayer(self, z):
'''
Returns layer-number, layer-type and model-layer-number'''
if z | def __repr__(self):
return 'Inhom T: ' + str(self.T)
def initialize(self): | random_line_split |
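# --- Editor's note: a minimal standalone sketch (not part of the dataset row above)
# of the overflow guard used in compute_lab_eigvec. For |x| >= 200, tanh(x) and
# sinh(x) overflow in floating point, so x/tanh(x) is rewritten with decaying
# exponentials. The cutoff of 200 mirrors the source; it assumes Re(x) >= 0, which
# holds for the principal square roots the source feeds in. numpy is assumed.
import numpy as np

def coth_scaled(x, cutoff=200.0):
    """Return x / tanh(x) without overflow for large |x|."""
    x = np.atleast_1d(np.asarray(x, dtype=complex))
    out = np.empty_like(x)
    small = np.abs(x) < cutoff
    out[small] = x[small] / np.tanh(x[small])
    e = np.exp(-2.0 * x[~small])  # decays to 0, so it is always representable
    out[~small] = x[~small] * (1.0 + e) / (1.0 - e)
    return out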
aquifer.py | - 1
self.Haq = np.atleast_1d(Haq).astype('d')
self.Hll = np.atleast_1d(Hll).astype('d')
self.T = self.kaq * self.Haq
self.Tcol = self.T.reshape(self.naq, 1)
self.c = np.atleast_1d(c).astype('d')
self.c[self.c > 1e100] = 1e100
self.Saq = np.atleast_1d(Saq).astype('d')
self.Sll = np.atleast_1d(Sll).astype('d')
self.Sll[self.Sll < 1e-20] = 1e-20 # Cannot be zero
self.poraq = np.atleast_1d(poraq).astype('d')
self.porll = np.atleast_1d(porll).astype('d')
self.ltype = np.atleast_1d(ltype)
self.zaqtop = self.z[:-1][self.ltype == 'a']
self.zaqbot = self.z[1:][self.ltype == 'a']
self.layernumber = np.zeros(self.nlayers, dtype='int')
self.layernumber[self.ltype == 'a'] = np.arange(self.naq)
self.layernumber[self.ltype == 'l'] = np.arange(self.nlayers - self.naq)
if self.ltype[0] == 'a':
self.layernumber[self.ltype == 'l'] += 1 # first leaky layer below first aquifer layer
self.topboundary = topboundary[:3]
self.phreatictop = phreatictop
self.kzoverkh = kzoverkh
if self.kzoverkh is not None:
self.kzoverkh = np.atleast_1d(self.kzoverkh).astype('d')
if len(self.kzoverkh) == 1:
self.kzoverkh = self.kzoverkh * np.ones(self.naq)
self.model3d = model3d
if self.model3d:
assert self.kzoverkh is not None, \
"model3d specified without kzoverkh"
#self.D = self.T / self.Saq
self.area = 1e200 # Smaller than default of ml.aq so that inhom is found
def __repr__(self):
return 'Inhom T: ' + str(self.T)
def initialize(self):
'''
eigval[naq, npval]: Array with eigenvalues
lab[naq, npval]: Array with lambda values
lab2[naq, nint, npint]: Array with lambda values reorganized per
interval
eigvec[naq, naq, npval]: Array with eigenvector matrices
coef[naq, naq, npval]: Array with coefficients;
coef[ilayers, :, np] are the coefficients if the element is in
ilayers belonging to Laplace parameter number np
'''
# Recompute T for when kaq is changed
self.T = self.kaq * self.Haq
self.Tcol = self.T.reshape(self.naq, 1)
# Compute Saq and Sll
self.Scoefaq = self.Saq * self.Haq
self.Scoefll = self.Sll * self.Hll
if (self.topboundary == 'con') and self.phreatictop:
self.Scoefaq[0] = self.Scoefaq[0] / self.Haq[0]
elif (self.topboundary == 'lea') and self.phreatictop:
self.Scoefll[0] = self.Scoefll[0] / self.Hll[0]
self.D = self.T / self.Scoefaq
# Compute c if model3d for when kaq is changed
if self.model3d:
|
#
self.eigval = np.zeros((self.naq, self.model.npval), 'D')
self.lab = np.zeros((self.naq, self.model.npval), 'D')
self.eigvec = np.zeros((self.naq, self.naq, self.model.npval), 'D')
self.coef = np.zeros((self.naq, self.naq, self.model.npval), 'D')
b = np.diag(np.ones(self.naq))
for i in range(self.model.npval):
w, v = self.compute_lab_eigvec(self.model.p[i])
# Eigenvectors are columns of v
self.eigval[:, i] = w; self.eigvec[:, :, i] = v
self.coef[:, :, i] = np.linalg.solve(v, b).T
self.lab = 1.0 / np.sqrt(self.eigval)
self.lab2 = self.lab.copy()
self.lab2.shape = (self.naq, self.model.nint, self.model.npint)
self.lababs = np.abs(self.lab2[:, :, 0]) # used to check distances
self.eigvec2 = self.eigvec.copy()
self.eigvec2.shape = (self.naq, self.naq,
self.model.nint, self.model.npint)
def compute_lab_eigvec(self, p, returnA = False, B = None):
sqrtpSc = np.sqrt( p * self.Scoefll * self.c )
a, b = np.zeros_like(sqrtpSc), np.zeros_like(sqrtpSc)
small = np.abs(sqrtpSc) < 200
a[small] = sqrtpSc[small] / np.tanh(sqrtpSc[small])
b[small] = sqrtpSc[small] / np.sinh(sqrtpSc[small])
a[~small] = sqrtpSc[~small] / ((1.0 - np.exp(-2.0*sqrtpSc[~small])) /
(1.0 + np.exp(-2.0*sqrtpSc[~small])))
b[~small] = sqrtpSc[~small] * 2.0 * np.exp(-sqrtpSc[~small]) / \
(1.0 - np.exp(-2.0*sqrtpSc[~small]))
if (self.topboundary[:3] == 'sem') or (self.topboundary[:3] == 'lea'):
dzero = sqrtpSc[0] * np.tanh(sqrtpSc[0])
d0 = p / self.D
if B is not None:
d0 = d0 * B # B is vector of load efficiency parameters
d0[:-1] += a[1:] / (self.c[1:] * self.T[:-1])
d0[1:] += a[1:] / (self.c[1:] * self.T[1:])
if self.topboundary[:3] == 'lea':
d0[0] += dzero / ( self.c[0] * self.T[0] )
elif self.topboundary[:3] == 'sem':
d0[0] += a[0] / ( self.c[0] * self.T[0] )
dm1 = -b[1:] / (self.c[1:] * self.T[:-1])
dp1 = -b[1:] / (self.c[1:] * self.T[1:])
A = np.diag(dm1,-1) + np.diag(d0,0) + np.diag(dp1,1)
if returnA: return A
w, v = np.linalg.eig(A)
# sorting moved here
index = np.argsort(abs(w))[::-1]
w = w[index]
v = v[:, index]
return w, v
def head_to_potential(self, h, layers):
return h * self.Tcol[layers]
def potential_to_head(self, pot, layers):
return pot / self.Tcol[layers]
def isInside(self,x,y):
print('Must overload AquiferData.isInside method')
return True
def inWhichLayer(self, z):
'''Returns -9999 if above top of system,
+9999 if below bottom of system,
negative for in leaky layer.
leaky layer -n is on top of aquifer n'''
if z > self.zt[0]:
return -9999
for i in range(self.naq-1):
if z >= self.zb[i]:
return i
if z > self.zt[i+1]:
return -i-1
if z >= self.zb[self.naq-1]:
return self.naq - 1
return +9999
def findlayer(self, z):
'''
Returns layer-number, layer-type and model-layer-number'''
if z | self.c[1:] = \
0.5 * self.Haq[:-1] / (self.kzoverkh[:-1] * self.kaq[:-1]) + \
0.5 * self.Haq[1:] / (self.kzoverkh[1:] * self.kaq[1:]) | conditional_block |
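# --- Editor's note: a small illustrative example (not from the source) of the
# model3d resistance update shown in the row above. Each interface resistance sums
# the half-cell vertical resistances of the two neighboring layers, with
# kz = kzoverkh * kaq. The array values below are made up for illustration.
import numpy as np

Haq = np.array([10.0, 5.0, 20.0])     # layer thicknesses [m]
kaq = np.array([20.0, 10.0, 30.0])    # horizontal hydraulic conductivity [m/d]
kzoverkh = np.array([0.1, 0.1, 0.1])  # anisotropy ratio kz/kh
kz = kzoverkh * kaq
c = 0.5 * Haq[:-1] / kz[:-1] + 0.5 * Haq[1:] / kz[1:]  # interface resistances [d]
print(c)  # -> approximately [5.  5.83]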
aquifer.py | - 1
self.Haq = np.atleast_1d(Haq).astype('d')
self.Hll = np.atleast_1d(Hll).astype('d')
self.T = self.kaq * self.Haq
self.Tcol = self.T.reshape(self.naq, 1)
self.c = np.atleast_1d(c).astype('d')
self.c[self.c > 1e100] = 1e100
self.Saq = np.atleast_1d(Saq).astype('d')
self.Sll = np.atleast_1d(Sll).astype('d')
self.Sll[self.Sll < 1e-20] = 1e-20 # Cannot be zero
self.poraq = np.atleast_1d(poraq).astype('d')
self.porll = np.atleast_1d(porll).astype('d')
self.ltype = np.atleast_1d(ltype)
self.zaqtop = self.z[:-1][self.ltype == 'a']
self.zaqbot = self.z[1:][self.ltype == 'a']
self.layernumber = np.zeros(self.nlayers, dtype='int')
self.layernumber[self.ltype == 'a'] = np.arange(self.naq)
self.layernumber[self.ltype == 'l'] = np.arange(self.nlayers - self.naq)
if self.ltype[0] == 'a':
self.layernumber[self.ltype == 'l'] += 1 # first leaky layer below first aquifer layer
self.topboundary = topboundary[:3]
self.phreatictop = phreatictop
self.kzoverkh = kzoverkh
if self.kzoverkh is not None:
self.kzoverkh = np.atleast_1d(self.kzoverkh).astype('d')
if len(self.kzoverkh) == 1:
self.kzoverkh = self.kzoverkh * np.ones(self.naq)
self.model3d = model3d
if self.model3d:
assert self.kzoverkh is not None, \
"model3d specified without kzoverkh"
#self.D = self.T / self.Saq
self.area = 1e200 # Smaller than default of ml.aq so that inhom is found
def | (self):
return 'Inhom T: ' + str(self.T)
def initialize(self):
'''
eigval[naq, npval]: Array with eigenvalues
lab[naq, npval]: Array with lambda values
lab2[naq, nint, npint]: Array with lambda values reorganized per
interval
eigvec[naq, naq, npval]: Array with eigenvector matrices
coef[naq, naq, npval]: Array with coefficients;
coef[ilayers, :, np] are the coefficients if the element is in
ilayers belonging to Laplace parameter number np
'''
# Recompute T for when kaq is changed
self.T = self.kaq * self.Haq
self.Tcol = self.T.reshape(self.naq, 1)
# Compute Saq and Sll
self.Scoefaq = self.Saq * self.Haq
self.Scoefll = self.Sll * self.Hll
if (self.topboundary == 'con') and self.phreatictop:
self.Scoefaq[0] = self.Scoefaq[0] / self.Haq[0]
elif (self.topboundary == 'lea') and self.phreatictop:
self.Scoefll[0] = self.Scoefll[0] / self.Hll[0]
self.D = self.T / self.Scoefaq
# Compute c if model3d for when kaq is changed
if self.model3d:
self.c[1:] = \
0.5 * self.Haq[:-1] / (self.kzoverkh[:-1] * self.kaq[:-1]) + \
0.5 * self.Haq[1:] / (self.kzoverkh[1:] * self.kaq[1:])
#
self.eigval = np.zeros((self.naq, self.model.npval), 'D')
self.lab = np.zeros((self.naq, self.model.npval), 'D')
self.eigvec = np.zeros((self.naq, self.naq, self.model.npval), 'D')
self.coef = np.zeros((self.naq, self.naq, self.model.npval), 'D')
b = np.diag(np.ones(self.naq))
for i in range(self.model.npval):
w, v = self.compute_lab_eigvec(self.model.p[i])
# Eigenvectors are columns of v
self.eigval[:, i] = w; self.eigvec[:, :, i] = v
self.coef[:, :, i] = np.linalg.solve(v, b).T
self.lab = 1.0 / np.sqrt(self.eigval)
self.lab2 = self.lab.copy()
self.lab2.shape = (self.naq, self.model.nint, self.model.npint)
self.lababs = np.abs(self.lab2[:, :, 0]) # used to check distances
self.eigvec2 = self.eigvec.copy()
self.eigvec2.shape = (self.naq, self.naq,
self.model.nint, self.model.npint)
def compute_lab_eigvec(self, p, returnA = False, B = None):
sqrtpSc = np.sqrt( p * self.Scoefll * self.c )
a, b = np.zeros_like(sqrtpSc), np.zeros_like(sqrtpSc)
small = np.abs(sqrtpSc) < 200
a[small] = sqrtpSc[small] / np.tanh(sqrtpSc[small])
b[small] = sqrtpSc[small] / np.sinh(sqrtpSc[small])
a[~small] = sqrtpSc[~small] / ((1.0 - np.exp(-2.0*sqrtpSc[~small])) /
(1.0 + np.exp(-2.0*sqrtpSc[~small])))
b[~small] = sqrtpSc[~small] * 2.0 * np.exp(-sqrtpSc[~small]) / \
(1.0 - np.exp(-2.0*sqrtpSc[~small]))
if (self.topboundary[:3] == 'sem') or (self.topboundary[:3] == 'lea'):
dzero = sqrtpSc[0] * np.tanh(sqrtpSc[0])
d0 = p / self.D
if B is not None:
d0 = d0 * B # B is vector of load efficiency parameters
d0[:-1] += a[1:] / (self.c[1:] * self.T[:-1])
d0[1:] += a[1:] / (self.c[1:] * self.T[1:])
if self.topboundary[:3] == 'lea':
d0[0] += dzero / ( self.c[0] * self.T[0] )
elif self.topboundary[:3] == 'sem':
d0[0] += a[0] / ( self.c[0] * self.T[0] )
dm1 = -b[1:] / (self.c[1:] * self.T[:-1])
dp1 = -b[1:] / (self.c[1:] * self.T[1:])
A = np.diag(dm1,-1) + np.diag(d0,0) + np.diag(dp1,1)
if returnA: return A
w, v = np.linalg.eig(A)
# sorting moved here
index = np.argsort(abs(w))[::-1]
w = w[index]
v = v[:, index]
return w, v
def head_to_potential(self, h, layers):
return h * self.Tcol[layers]
def potential_to_head(self, pot, layers):
return pot / self.Tcol[layers]
def isInside(self,x,y):
print('Must overload AquiferData.isInside method')
return True
def inWhichLayer(self, z):
'''Returns -9999 if above top of system,
+9999 if below bottom of system,
negative for in leaky layer.
leaky layer -n is on top of aquifer n'''
if z > self.zt[0]:
return -9999
for i in range(self.naq-1):
if z >= self.zb[i]:
return i
if z > self.zt[i+1]:
return -i-1
if z >= self.zb[self.naq-1]:
return self.naq - 1
return +9999
def findlayer(self, z):
'''
Returns layer-number, layer-type and model-layer-number'''
if z | __repr__ | identifier_name |
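# --- Editor's note: hypothetical usage of the inWhichLayer convention documented
# in the row above (-9999 above the system, +9999 below it, -n for the leaky layer
# on top of aquifer n). `aq` is a placeholder for an initialized AquiferData-like
# object; the z values are illustrative.
for z in (1000.0, 5.0, -1000.0):
    layer = aq.inWhichLayer(z)
    if layer == -9999:
        print(z, 'lies above the top of the system')
    elif layer == 9999:
        print(z, 'lies below the bottom of the system')
    elif layer < 0:
        print(z, 'lies in the leaky layer on top of aquifer', -layer)
    else:
        print(z, 'lies in aquifer', layer)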
tokens.ts | export type AllTokens = Bracket | Literal | Operator | WhiteSpace | ColorValue | NumberValue | StringValue | Field;
export const enum OperatorType {
Sibling = '+', | ValueDelimiter = '-',
PropertyDelimiter = ':'
}
export interface Token {
type: string;
/** Location of token start in source */
start?: number;
/** Location of token end in source */
end?: number;
}
export interface Operator extends Token {
type: 'Operator';
operator: OperatorType;
}
export interface Bracket extends Token {
type: 'Bracket';
open: boolean;
}
export interface Literal extends Token {
type: 'Literal';
value: string;
}
export interface NumberValue extends Token {
type: 'NumberValue';
value: number;
unit: string;
rawValue: string;
}
export interface ColorValue extends Token {
type: 'ColorValue';
r: number;
g: number;
b: number;
a: number;
raw: string;
}
export interface StringValue extends Token {
type: 'StringValue';
value: string;
quote: 'single' | 'double';
}
export interface WhiteSpace extends Token {
type: 'WhiteSpace';
}
export interface Field extends Token {
type: 'Field';
index?: number;
name: string;
} | Important = '!',
ArgumentDelimiter = ',', | random_line_split |
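// --- Editor's note: a short sketch (not part of the row above) of how the literal
// `type` tag lets TypeScript narrow the AllTokens union without casts. `describe`
// is an illustrative helper and assumes the AllTokens union from the row above is
// in scope.
function describe(token: AllTokens): string {
    switch (token.type) {
        case 'NumberValue':
            return `${token.value}${token.unit}`; // narrowed to NumberValue here
        case 'ColorValue':
            return token.raw;                     // narrowed to ColorValue here
        case 'StringValue':
            return token.value;
        default:
            return token.type;
    }
}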
file-manager.service.ts | import {Injectable} from '@angular/core';
import {HttpClient, HttpHeaders, HttpParams} from '@angular/common/http';
import {Observable, of} from 'rxjs';
import {catchError, map, tap} from 'rxjs/operators';
import {FileModel} from '../models/file.model';
@Injectable()
export class FileManagerService {
headers = new HttpHeaders({
'Content-Type': 'application/json',
'X-Requested-With': 'XMLHttpRequest'
});
requestUrl = '';
constructor(
public http: HttpClient
) {
this.requestUrl = '/admin/file_manager';
}
getRequestUrl() {
return this.requestUrl;
}
getList(options ?: any): Observable<FileModel[]> {
let params = new HttpParams();
for (const name in options) {
if (!options.hasOwnProperty(name)
|| typeof options[name] === 'undefined') {
continue;
}
params = params.append(name, options[name]);
}
return this.http.get<FileModel[]>(this.getRequestUrl(), {params: params, headers: this.headers})
.pipe(
catchError(this.handleError<any>())
);
}
createFolder(path: string, folderName: string): Observable<any> {
const url = `${this.getRequestUrl()}/folder`;
return this.http.post<any>(url, {path: path, folderName: folderName}, {headers: this.headers}).pipe(
catchError(this.handleError<any>())
);
}
deleteFolder(path: string): Observable<any> {
const url = `${this.getRequestUrl()}/folder_delete`;
return this.http.post<any>(url, {path: path}, {headers: this.headers}).pipe(
catchError(this.handleError<any>())
);
}
deleteFile(path: string, file: FileModel): Observable<any> {
const url = `${this.getRequestUrl()}/file_delete`;
return this.http.post<any>(url, {path: path, name: file.fileName}, {headers: this.headers}).pipe(
catchError(this.handleError<any>())
);
}
rename(path: string, name: string, target = 'folder'): Observable<any> {
const url = `${this.getRequestUrl()}/${target}`;
return this.http.put<any>(url, {path: path, name: name}, {headers: this.headers}).pipe(
catchError(this.handleError<any>())
);
}
getFormData(item: any): FormData {
const formData: FormData = new FormData();
Object.keys(item).forEach((key) => {
if (item[key] instanceof File) {
formData.append(key, item[key], item[key].name);
} else if (typeof item[key] !== 'undefined') {
if (typeof item[key] === 'boolean') {
formData.append(key, item[key] ? '1' : '0');
} else |
}
});
return formData;
}
postFormData(formData: FormData, path: string): Observable<any> {
const url = `${this.getRequestUrl()}/upload`;
const headers = new HttpHeaders({
'enctype': 'multipart/form-data',
'Accept': 'application/json',
'X-Requested-With': 'XMLHttpRequest'
});
formData.append('path', path);
return this.http
.post(url, formData, {headers: headers})
.pipe(
catchError(this.handleError<any>())
);
}
handleError<T> (operation = 'operation', result?: T) {
return (err: any): Observable<T> => {
if (err.error) {
throw err.error;
}
return of(result as T);
};
}
}
| {
formData.append(key, item[key] || '');
} | conditional_block |
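// --- Editor's note: a standalone sketch mirroring the getFormData coercion rules
// in the row above: File values keep their filename, booleans become '1'/'0', and
// undefined values are dropped. `toFormData` is an illustrative name, not from the
// source.
function toFormData(item: Record<string, any>): FormData {
    const formData = new FormData();
    for (const key of Object.keys(item)) {
        const value = item[key];
        if (value instanceof File) {
            formData.append(key, value, value.name);
        } else if (typeof value === 'boolean') {
            formData.append(key, value ? '1' : '0');
        } else if (typeof value !== 'undefined') {
            formData.append(key, value || ''); // null falls back to empty string
        }
    }
    return formData;
}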
file-manager.service.ts | import {Injectable} from '@angular/core';
import {HttpClient, HttpHeaders, HttpParams} from '@angular/common/http';
import {Observable, of} from 'rxjs';
import {catchError, map, tap} from 'rxjs/operators';
import {FileModel} from '../models/file.model';
| 'X-Requested-With': 'XMLHttpRequest'
});
requestUrl = '';
constructor(
public http: HttpClient
) {
this.requestUrl = '/admin/file_manager';
}
getRequestUrl() {
return this.requestUrl;
}
getList(options ?: any): Observable<FileModel[]> {
let params = new HttpParams();
for (const name in options) {
if (!options.hasOwnProperty(name)
|| typeof options[name] === 'undefined') {
continue;
}
params = params.append(name, options[name]);
}
return this.http.get<FileModel[]>(this.getRequestUrl(), {params: params, headers: this.headers})
.pipe(
catchError(this.handleError<any>())
);
}
createFolder(path: string, folderName: string): Observable<any> {
const url = `${this.getRequestUrl()}/folder`;
return this.http.post<any>(url, {path: path, folderName: folderName}, {headers: this.headers}).pipe(
catchError(this.handleError<any>())
);
}
deleteFolder(path: string): Observable<any> {
const url = `${this.getRequestUrl()}/folder_delete`;
return this.http.post<any>(url, {path: path}, {headers: this.headers}).pipe(
catchError(this.handleError<any>())
);
}
deleteFile(path: string, file: FileModel): Observable<any> {
const url = `${this.getRequestUrl()}/file_delete`;
return this.http.post<any>(url, {path: path, name: file.fileName}, {headers: this.headers}).pipe(
catchError(this.handleError<any>())
);
}
rename(path: string, name: string, target = 'folder'): Observable<any> {
const url = `${this.getRequestUrl()}/${target}`;
return this.http.put<any>(url, {path: path, name: name}, {headers: this.headers}).pipe(
catchError(this.handleError<any>())
);
}
getFormData(item: any): FormData {
const formData: FormData = new FormData();
Object.keys(item).forEach((key) => {
if (item[key] instanceof File) {
formData.append(key, item[key], item[key].name);
} else if (typeof item[key] !== 'undefined') {
if (typeof item[key] === 'boolean') {
formData.append(key, item[key] ? '1' : '0');
} else {
formData.append(key, item[key] || '');
}
}
});
return formData;
}
postFormData(formData: FormData, path: string): Observable<any> {
const url = `${this.getRequestUrl()}/upload`;
const headers = new HttpHeaders({
'enctype': 'multipart/form-data',
'Accept': 'application/json',
'X-Requested-With': 'XMLHttpRequest'
});
formData.append('path', path);
return this.http
.post(url, formData, {headers: headers})
.pipe(
catchError(this.handleError<any>())
);
}
handleError<T> (operation = 'operation', result?: T) {
return (err: any): Observable<T> => {
if (err.error) {
throw err.error;
}
return of(result as T);
};
}
} | @Injectable()
export class FileManagerService {
headers = new HttpHeaders({
'Content-Type': 'application/json', | random_line_split |
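// --- Editor's note: a standalone sketch of the query-string technique used by
// getList above. HttpParams is immutable, so each append() returns a new instance
// that must be reassigned, and undefined options are skipped instead of being
// serialized as the string "undefined". `buildParams` is an illustrative name.
import {HttpParams} from '@angular/common/http';

function buildParams(options: {[name: string]: any}): HttpParams {
    let params = new HttpParams();
    for (const name of Object.keys(options)) {
        if (typeof options[name] === 'undefined') {
            continue; // drop the key entirely
        }
        params = params.append(name, options[name]); // reassign: append is non-mutating
    }
    return params;
}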
file-manager.service.ts | import {Injectable} from '@angular/core';
import {HttpClient, HttpHeaders, HttpParams} from '@angular/common/http';
import {Observable, of} from 'rxjs';
import {catchError, map, tap} from 'rxjs/operators';
import {FileModel} from '../models/file.model';
@Injectable()
export class FileManagerService {
headers = new HttpHeaders({
'Content-Type': 'application/json',
'X-Requested-With': 'XMLHttpRequest'
});
requestUrl = '';
constructor(
public http: HttpClient
) {
this.requestUrl = '/admin/file_manager';
}
getRequestUrl() {
return this.requestUrl;
}
getList(options ?: any): Observable<FileModel[]> {
let params = new HttpParams();
for (const name in options) {
if (!options.hasOwnProperty(name)
|| typeof options[name] === 'undefined') {
continue;
}
params = params.append(name, options[name]);
}
return this.http.get<FileModel[]>(this.getRequestUrl(), {params: params, headers: this.headers})
.pipe(
catchError(this.handleError<any>())
);
}
createFolder(path: string, folderName: string): Observable<any> {
const url = `${this.getRequestUrl()}/folder`;
return this.http.post<any>(url, {path: path, folderName: folderName}, {headers: this.headers}).pipe(
catchError(this.handleError<any>())
);
}
deleteFolder(path: string): Observable<any> |
deleteFile(path: string, file: FileModel): Observable<any> {
const url = `${this.getRequestUrl()}/file_delete`;
return this.http.post<any>(url, {path: path, name: file.fileName}, {headers: this.headers}).pipe(
catchError(this.handleError<any>())
);
}
rename(path: string, name: string, target = 'folder'): Observable<any> {
const url = `${this.getRequestUrl()}/${target}`;
return this.http.put<any>(url, {path: path, name: name}, {headers: this.headers}).pipe(
catchError(this.handleError<any>())
);
}
getFormData(item: any): FormData {
const formData: FormData = new FormData();
Object.keys(item).forEach((key) => {
if (item[key] instanceof File) {
formData.append(key, item[key], item[key].name);
} else if (typeof item[key] !== 'undefined') {
if (typeof item[key] === 'boolean') {
formData.append(key, item[key] ? '1' : '0');
} else {
formData.append(key, item[key] || '');
}
}
});
return formData;
}
postFormData(formData: FormData, path: string): Observable<any> {
const url = `${this.getRequestUrl()}/upload`;
const headers = new HttpHeaders({
'enctype': 'multipart/form-data',
'Accept': 'application/json',
'X-Requested-With': 'XMLHttpRequest'
});
formData.append('path', path);
return this.http
.post(url, formData, {headers: headers})
.pipe(
catchError(this.handleError<any>())
);
}
handleError<T> (operation = 'operation', result?: T) {
return (err: any): Observable<T> => {
if (err.error) {
throw err.error;
}
return of(result as T);
};
}
}
| {
const url = `${this.getRequestUrl()}/folder_delete`;
return this.http.post<any>(url, {path: path}, {headers: this.headers}).pipe(
catchError(this.handleError<any>())
);
} | identifier_body |
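// --- Editor's note: a reduced sketch of the error-handling convention used
// throughout the service above: rethrow the server-provided error body when
// present, otherwise recover with an optional fallback value so the stream
// completes. This is the editor's reading, not a drop-in replacement.
import {Observable, of} from 'rxjs';

function handleError<T>(result?: T) {
    return (err: any): Observable<T> => {
        if (err && err.error) {
            throw err.error; // surface the API's own error payload
        }
        return of(result as T); // swallow transport errors with a fallback
    };
}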
file-manager.service.ts | import {Injectable} from '@angular/core';
import {HttpClient, HttpHeaders, HttpParams} from '@angular/common/http';
import {Observable, of} from 'rxjs';
import {catchError, map, tap} from 'rxjs/operators';
import {FileModel} from '../models/file.model';
@Injectable()
export class FileManagerService {
headers = new HttpHeaders({
'Content-Type': 'application/json',
'X-Requested-With': 'XMLHttpRequest'
});
requestUrl = '';
constructor(
public http: HttpClient
) {
this.requestUrl = '/admin/file_manager';
}
getRequestUrl() {
return this.requestUrl;
}
| (options ?: any): Observable<FileModel[]> {
let params = new HttpParams();
for (const name in options) {
if (!options.hasOwnProperty(name)
|| typeof options[name] === 'undefined') {
continue;
}
params = params.append(name, options[name]);
}
return this.http.get<FileModel[]>(this.getRequestUrl(), {params: params, headers: this.headers})
.pipe(
catchError(this.handleError<any>())
);
}
createFolder(path: string, folderName: string): Observable<any> {
const url = `${this.getRequestUrl()}/folder`;
return this.http.post<any>(url, {path: path, folderName: folderName}, {headers: this.headers}).pipe(
catchError(this.handleError<any>())
);
}
deleteFolder(path: string): Observable<any> {
const url = `${this.getRequestUrl()}/folder_delete`;
return this.http.post<any>(url, {path: path}, {headers: this.headers}).pipe(
catchError(this.handleError<any>())
);
}
deleteFile(path: string, file: FileModel): Observable<any> {
const url = `${this.getRequestUrl()}/file_delete`;
return this.http.post<any>(url, {path: path, name: file.fileName}, {headers: this.headers}).pipe(
catchError(this.handleError<any>())
);
}
rename(path: string, name: string, target = 'folder'): Observable<any> {
const url = `${this.getRequestUrl()}/${target}`;
return this.http.put<any>(url, {path: path, name: name}, {headers: this.headers}).pipe(
catchError(this.handleError<any>())
);
}
getFormData(item: any): FormData {
const formData: FormData = new FormData();
Object.keys(item).forEach((key) => {
if (item[key] instanceof File) {
formData.append(key, item[key], item[key].name);
} else if (typeof item[key] !== 'undefined') {
if (typeof item[key] === 'boolean') {
formData.append(key, item[key] ? '1' : '0');
} else {
formData.append(key, item[key] || '');
}
}
});
return formData;
}
postFormData(formData: FormData, path: string): Observable<any> {
const url = `${this.getRequestUrl()}/upload`;
const headers = new HttpHeaders({
'enctype': 'multipart/form-data',
'Accept': 'application/json',
'X-Requested-With': 'XMLHttpRequest'
});
formData.append('path', path);
return this.http
.post(url, formData, {headers: headers})
.pipe(
catchError(this.handleError<any>())
);
}
handleError<T> (operation = 'operation', result?: T) {
return (err: any): Observable<T> => {
if (err.error) {
throw err.error;
}
return of(result as T);
};
}
}
| getList | identifier_name |
rekall_offset_finder.py | #!/usr/bin/env python3
"""
Rekall offset finder.
Usage:
rekall_offset_finder.py [options] <domain> [<url>]
Options:
-d --debug Enable debug output
-u URI, --uri=URI Specify Libvirt URI [Default: qemu:///system]
-o --old Use the old config format
-h --help Show this screen.
--version Show version.
"""
import sys
import os
import logging
import json
import stat
from io import StringIO
from pathlib import Path
from tempfile import NamedTemporaryFile, TemporaryDirectory
import libvirt
from docopt import docopt
from rekall import plugins, session
NT_KRNL_PDB = ['ntkrnlmp.pdb', 'ntkrpamp.pdb']
SCRIPT_DIR = str(Path(__file__).resolve().parent)
def find_ntoskrnl(version_modules):
for entry in version_modules:
e_type = entry[0]
if e_type == 'r':
e_data = entry[1]
if e_data['pdb'] in NT_KRNL_PDB:
return (e_data['pdb'], e_data['guid'])
raise RuntimeError('Cannot find {} with version_modules '
'plugin'.format(NT_KRNL_PDB))
def extract_offsets(domain, url):
| ntos_pdb, ntos_guid = find_ntoskrnl(version_modules)
ntos_module = Path(ntos_pdb).stem
rekall_profile_path = os.path.join(SCRIPT_DIR,
"{}-profile.json".format(domain))
# create a new session with a text format
# allowing us to write files
s = session.Session(
filename=url,
autodetect=["rsds"],
logger=logging.getLogger(),
autodetect_build_local='none',
format='text',
profile_path=[
"http://profiles.rekall-forensic.com"
])
# build the Rekall JSON profile from PDB
s.RunPlugin("build_local_profile", module_name=ntos_module,
guid=ntos_guid, dumpfile=rekall_profile_path)
config = {
"ostype": "Windows",
"win_pdbase": pdbase,
"win_pid": pid,
"win_tasks": tasks,
"win_pname": name,
"rekall_profile": rekall_profile_path
}
return config
def format_config(domain, config, old_format=False):
if not old_format:
formatted_config = """
%s {
ostype = "Windows";
rekall_profile = "%s";
}
""" % (domain, config['rekall_profile'])
else:
formatted_config = """
%s {
ostype = "Windows";
win_pdbase = %s;
win_pid = %s;
win_tasks = %s;
win_pname = %s;
}
""" % (domain,
hex(config['win_pdbase']),
hex(config['win_pid']),
hex(config['win_tasks']),
hex(config['win_pname'])
)
return formatted_config
def main(args):
# delete rekall's BasicConfig
# we want to configure the root logger
for handler in logging.root.handlers[:]:
logging.root.removeHandler(handler)
debug = args['--debug']
# configure root logger
log_level = logging.INFO
if debug:
log_level = logging.DEBUG
logging.basicConfig(level=log_level)
logging.debug(args)
domain_name = args['<domain>']
uri = args['--uri']
old_format = args['--old']
url = args['<url>']
config = None
if not url:
# take temporary memory dump
# we need to create our own tmp_dir
# otherwise the dumpfile will be owned by libvirt
# and we don't have the permission to remove it in /tmp
with TemporaryDirectory() as tmp_dir:
with NamedTemporaryFile(dir=tmp_dir) as ram_dump:
# chmod to be r/w by everyone
# before libvirt takes ownership
os.chmod(ram_dump.name,
stat.S_IRUSR | stat.S_IWUSR |
stat.S_IRGRP | stat.S_IWGRP |
stat.S_IROTH | stat.S_IWOTH)
con = libvirt.open(uri)
domain = con.lookupByName(domain_name)
# take dump
logging.info('Dumping %s physical memory to %s', domain.name(),
ram_dump.name)
flags = libvirt.VIR_DUMP_MEMORY_ONLY
dumpformat = libvirt.VIR_DOMAIN_CORE_DUMP_FORMAT_RAW
domain.coreDumpWithFormat(ram_dump.name, dumpformat, flags)
ram_dump.flush()
# extract offsets
config = extract_offsets(domain.name(), ram_dump.name)
else:
config = extract_offsets(domain_name, url)
formatted_config = format_config(domain_name, config, old_format)
logging.info(formatted_config)
if __name__ == '__main__':
args = docopt(__doc__)
exit_code = main(args)
sys.exit(exit_code)
| s = session.Session(
filename=url,
autodetect=["rsds"],
logger=logging.getLogger(),
autodetect_build_local='none',
format='data',
profile_path=[
"http://profiles.rekall-forensic.com"
])
strio = StringIO()
s.RunPlugin("version_modules", output=strio)
version_modules = json.loads(strio.getvalue())
pdbase = s.profile.get_obj_offset('_KPROCESS', 'DirectoryTableBase')
tasks = s.profile.get_obj_offset('_EPROCESS', 'ActiveProcessLinks')
name = s.profile.get_obj_offset('_EPROCESS', 'ImageFileName')
pid = s.profile.get_obj_offset('_EPROCESS', 'UniqueProcessId')
# find ntoskrnl guid | identifier_body |
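# --- Editor's note: a minimal sketch of the offset-extraction step shown above.
# With a loaded Rekall profile, struct member offsets come straight from
# get_obj_offset; the session object `s` is assumed to exist as in the row above.
offsets = {
    'win_pdbase': s.profile.get_obj_offset('_KPROCESS', 'DirectoryTableBase'),
    'win_tasks': s.profile.get_obj_offset('_EPROCESS', 'ActiveProcessLinks'),
    'win_pname': s.profile.get_obj_offset('_EPROCESS', 'ImageFileName'),
    'win_pid': s.profile.get_obj_offset('_EPROCESS', 'UniqueProcessId'),
}
print({k: hex(v) for k, v in offsets.items()})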
rekall_offset_finder.py | #!/usr/bin/env python3
"""
Rekall offset finder.
Usage:
rekall_offset_finder.py [options] <domain> [<url>]
Options:
-d --debug Enable debug output
-u URI, --uri=URI Specify Libvirt URI [Default: qemu:///system]
-o --old Use the old config format
-h --help Show this screen.
--version Show version.
"""
import sys
import os
import logging
import json
import stat
from io import StringIO
from pathlib import Path
from tempfile import NamedTemporaryFile, TemporaryDirectory
import libvirt
from docopt import docopt
from rekall import plugins, session
NT_KRNL_PDB = ['ntkrnlmp.pdb', 'ntkrpamp.pdb']
SCRIPT_DIR = str(Path(__file__).resolve().parent)
def find_ntoskrnl(version_modules):
for entry in version_modules:
e_type = entry[0]
if e_type == 'r':
e_data = entry[1]
if e_data['pdb'] in NT_KRNL_PDB:
return (e_data['pdb'], e_data['guid'])
raise RuntimeError('Cannot find {} with version_modules '
'plugin'.format(NT_KRNL_PDB))
def extract_offsets(domain, url):
s = session.Session(
filename=url,
autodetect=["rsds"],
logger=logging.getLogger(),
autodetect_build_local='none',
format='data',
profile_path=[
"http://profiles.rekall-forensic.com"
])
strio = StringIO()
s.RunPlugin("version_modules", output=strio)
version_modules = json.loads(strio.getvalue())
pdbase = s.profile.get_obj_offset('_KPROCESS', 'DirectoryTableBase')
tasks = s.profile.get_obj_offset('_EPROCESS', 'ActiveProcessLinks')
name = s.profile.get_obj_offset('_EPROCESS', 'ImageFileName')
pid = s.profile.get_obj_offset('_EPROCESS', 'UniqueProcessId')
# find ntoskrnl guid
ntos_pdb, ntos_guid = find_ntoskrnl(version_modules)
ntos_module = Path(ntos_pdb).stem
rekall_profile_path = os.path.join(SCRIPT_DIR,
"{}-profile.json".format(domain))
# create a new session with a text format
# allowing us to write files
s = session.Session(
filename=url,
autodetect=["rsds"],
logger=logging.getLogger(),
autodetect_build_local='none',
format='text',
profile_path=[
"http://profiles.rekall-forensic.com"
])
# build the Rekall JSON profile from PDB
s.RunPlugin("build_local_profile", module_name=ntos_module,
guid=ntos_guid, dumpfile=rekall_profile_path)
config = {
"ostype": "Windows",
"win_pdbase": pdbase,
"win_pid": pid,
"win_tasks": tasks,
"win_pname": name,
"rekall_profile": rekall_profile_path
}
return config
def format_config(domain, config, old_format=False):
if not old_format:
formatted_config = """
%s {
ostype = "Windows";
rekall_profile = "%s";
}
""" % (domain, config['rekall_profile'])
else:
formatted_config = """
%s {
ostype = "Windows";
win_pdbase = %s;
win_pid = %s;
win_tasks = %s;
win_pname = %s;
}
""" % (domain,
hex(config['win_pdbase']),
hex(config['win_pid']),
hex(config['win_tasks']),
hex(config['win_pname'])
)
return formatted_config
def main(args):
# delete rekall's BasicConfig
# we want to configure the root logger
for handler in logging.root.handlers[:]:
logging.root.removeHandler(handler)
debug = args['--debug']
# configure root logger
log_level = logging.INFO
if debug:
log_level = logging.DEBUG
logging.basicConfig(level=log_level)
logging.debug(args)
domain_name = args['<domain>']
uri = args['--uri']
old_format = args['--old']
url = args['<url>']
config = None
if not url:
# take temporary memory dump
# we need to create our own tmp_dir
# otherwise the dumpfile will be owned by libvirt
# and we don't have the permission to remove it in /tmp
with TemporaryDirectory() as tmp_dir:
with NamedTemporaryFile(dir=tmp_dir) as ram_dump:
# chmod to be r/w by everyone
# before libvirt takes ownership | con = libvirt.open(uri)
domain = con.lookupByName(domain_name)
# take dump
logging.info('Dumping %s physical memory to %s', domain.name(),
ram_dump.name)
flags = libvirt.VIR_DUMP_MEMORY_ONLY
dumpformat = libvirt.VIR_DOMAIN_CORE_DUMP_FORMAT_RAW
domain.coreDumpWithFormat(ram_dump.name, dumpformat, flags)
ram_dump.flush()
# extract offsets
config = extract_offsets(domain.name(), ram_dump.name)
else:
config = extract_offsets(domain_name, url)
formatted_config = format_config(domain_name, config, old_format)
logging.info(formatted_config)
if __name__ == '__main__':
args = docopt(__doc__)
exit_code = main(args)
sys.exit(exit_code) | os.chmod(ram_dump.name,
stat.S_IRUSR | stat.S_IWUSR |
stat.S_IRGRP | stat.S_IWGRP |
stat.S_IROTH | stat.S_IWOTH) | random_line_split |
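# --- Editor's note: a reduced sketch of the memory-only dump performed in main()
# above. The URI, domain name, and output path are placeholders; the guest must
# exist and the caller needs permission to write the dump file.
import libvirt

con = libvirt.open('qemu:///system')
domain = con.lookupByName('windows-guest')  # illustrative domain name
flags = libvirt.VIR_DUMP_MEMORY_ONLY
dumpformat = libvirt.VIR_DOMAIN_CORE_DUMP_FORMAT_RAW
domain.coreDumpWithFormat('/tmp/guest.raw', dumpformat, flags)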
rekall_offset_finder.py | #!/usr/bin/env python3
"""
Rekall offset finder.
Usage:
rekall_offset_finder.py [options] <domain> [<url>]
Options:
-d --debug Enable debug output
-u URI, --uri=URI Specify Libvirt URI [Default: qemu:///system]
-o --old Use the old config format
-h --help Show this screen.
--version Show version.
"""
import sys
import os
import logging
import json
import stat
from io import StringIO
from pathlib import Path
from tempfile import NamedTemporaryFile, TemporaryDirectory
import libvirt
from docopt import docopt
from rekall import plugins, session
NT_KRNL_PDB = ['ntkrnlmp.pdb', 'ntkrpamp.pdb']
SCRIPT_DIR = str(Path(__file__).resolve().parent)
def find_ntoskrnl(version_modules):
for entry in version_modules:
e_type = entry[0]
if e_type == 'r':
e_data = entry[1]
if e_data['pdb'] in NT_KRNL_PDB:
return (e_data['pdb'], e_data['guid'])
raise RuntimeError('Cannot find {} with version_modules '
'plugin'.format(NT_KRNL_PDB))
def | (domain, url):
s = session.Session(
filename=url,
autodetect=["rsds"],
logger=logging.getLogger(),
autodetect_build_local='none',
format='data',
profile_path=[
"http://profiles.rekall-forensic.com"
])
strio = StringIO()
s.RunPlugin("version_modules", output=strio)
version_modules = json.loads(strio.getvalue())
pdbase = s.profile.get_obj_offset('_KPROCESS', 'DirectoryTableBase')
tasks = s.profile.get_obj_offset('_EPROCESS', 'ActiveProcessLinks')
name = s.profile.get_obj_offset('_EPROCESS', 'ImageFileName')
pid = s.profile.get_obj_offset('_EPROCESS', 'UniqueProcessId')
# find ntoskrnl guid
ntos_pdb, ntos_guid = find_ntoskrnl(version_modules)
ntos_module = Path(ntos_pdb).stem
rekall_profile_path = os.path.join(SCRIPT_DIR,
"{}-profile.json".format(domain))
# create a new session with a text format
# allowing us to write files
s = session.Session(
filename=url,
autodetect=["rsds"],
logger=logging.getLogger(),
autodetect_build_local='none',
format='text',
profile_path=[
"http://profiles.rekall-forensic.com"
])
# build the Rekall JSON profile from PDB
s.RunPlugin("build_local_profile", module_name=ntos_module,
guid=ntos_guid, dumpfile=rekall_profile_path)
config = {
"ostype": "Windows",
"win_pdbase": pdbase,
"win_pid": pid,
"win_tasks": tasks,
"win_pname": name,
"rekall_profile": rekall_profile_path
}
return config
def format_config(domain, config, old_format=False):
if not old_format:
formatted_config = """
%s {
ostype = "Windows";
rekall_profile = "%s";
}
""" % (domain, config['rekall_profile'])
else:
formatted_config = """
%s {
ostype = "Windows";
win_pdbase = %s;
win_pid = %s;
win_tasks = %s;
win_pname = %s;
}
""" % (domain,
hex(config['win_pdbase']),
hex(config['win_pid']),
hex(config['win_tasks']),
hex(config['win_pname'])
)
return formatted_config
def main(args):
# delete rekall's BasicConfig
# we want to configure the root logger
for handler in logging.root.handlers[:]:
logging.root.removeHandler(handler)
debug = args['--debug']
# configure root logger
log_level = logging.INFO
if debug:
log_level = logging.DEBUG
logging.basicConfig(level=log_level)
logging.debug(args)
domain_name = args['<domain>']
uri = args['--uri']
old_format = args['--old']
url = args['<url>']
config = None
if not url:
# take temporary memory dump
# we need to create our own tmp_dir
# otherwise the dumpfile will be owned by libvirt
# and we don't have the permission to remove it in /tmp
with TemporaryDirectory() as tmp_dir:
with NamedTemporaryFile(dir=tmp_dir) as ram_dump:
# chmod to be r/w by everyone
# before libvirt takes ownership
os.chmod(ram_dump.name,
stat.S_IRUSR | stat.S_IWUSR |
stat.S_IRGRP | stat.S_IWGRP |
stat.S_IROTH | stat.S_IWOTH)
con = libvirt.open(uri)
domain = con.lookupByName(domain_name)
# take dump
logging.info('Dumping %s physical memory to %s', domain.name(),
ram_dump.name)
flags = libvirt.VIR_DUMP_MEMORY_ONLY
dumpformat = libvirt.VIR_DOMAIN_CORE_DUMP_FORMAT_RAW
domain.coreDumpWithFormat(ram_dump.name, dumpformat, flags)
ram_dump.flush()
# extract offsets
config = extract_offsets(domain.name(), ram_dump.name)
else:
config = extract_offsets(domain_name, url)
formatted_config = format_config(domain_name, config, old_format)
logging.info(formatted_config)
if __name__ == '__main__':
args = docopt(__doc__)
exit_code = main(args)
sys.exit(exit_code)
| extract_offsets | identifier_name |
rekall_offset_finder.py | #!/usr/bin/env python3
"""
Rekall offset finder.
Usage:
rekall_offset_finder.py [options] <domain> [<url>]
Options:
-d --debug Enable debug output
-u URI, --uri=URI Specify Libvirt URI [Default: qemu:///system]
-o --old Use the old config format
-h --help Show this screen.
--version Show version.
"""
import sys
import os
import logging
import json
import stat
from io import StringIO
from pathlib import Path
from tempfile import NamedTemporaryFile, TemporaryDirectory
import libvirt
from docopt import docopt
from rekall import plugins, session
NT_KRNL_PDB = ['ntkrnlmp.pdb', 'ntkrpamp.pdb']
SCRIPT_DIR = str(Path(__file__).resolve().parent)
def find_ntoskrnl(version_modules):
for entry in version_modules:
e_type = entry[0]
if e_type == 'r':
|
raise RuntimeError('Cannot find {} with version_modules '
'plugin'.format(NT_KRNL_PDB))
def extract_offsets(domain, url):
s = session.Session(
filename=url,
autodetect=["rsds"],
logger=logging.getLogger(),
autodetect_build_local='none',
format='data',
profile_path=[
"http://profiles.rekall-forensic.com"
])
strio = StringIO()
s.RunPlugin("version_modules", output=strio)
version_modules = json.loads(strio.getvalue())
pdbase = s.profile.get_obj_offset('_KPROCESS', 'DirectoryTableBase')
tasks = s.profile.get_obj_offset('_EPROCESS', 'ActiveProcessLinks')
name = s.profile.get_obj_offset('_EPROCESS', 'ImageFileName')
pid = s.profile.get_obj_offset('_EPROCESS', 'UniqueProcessId')
# find ntoskrnl guid
ntos_pdb, ntos_guid = find_ntoskrnl(version_modules)
ntos_module = Path(ntos_pdb).stem
rekall_profile_path = os.path.join(SCRIPT_DIR,
"{}-profile.json".format(domain))
# create a new session with a text format
# allowing us to write files
s = session.Session(
filename=url,
autodetect=["rsds"],
logger=logging.getLogger(),
autodetect_build_local='none',
format='text',
profile_path=[
"http://profiles.rekall-forensic.com"
])
# build the Rekall JSON profile from PDB
s.RunPlugin("build_local_profile", module_name=ntos_module,
guid=ntos_guid, dumpfile=rekall_profile_path)
config = {
"ostype": "Windows",
"win_pdbase": pdbase,
"win_pid": pid,
"win_tasks": tasks,
"win_pname": name,
"rekall_profile": rekall_profile_path
}
return config
def format_config(domain, config, old_format=False):
if not old_format:
formatted_config = """
%s {
ostype = "Windows";
rekall_profile = "%s";
}
""" % (domain, config['rekall_profile'])
else:
formatted_config = """
%s {
ostype = "Windows";
win_pdbase = %s;
win_pid = %s;
win_tasks = %s;
win_pname = %s;
}
""" % (domain,
hex(config['win_pdbase']),
hex(config['win_pid']),
hex(config['win_tasks']),
hex(config['win_pname'])
)
return formatted_config
def main(args):
# delete rekall's BasicConfig
# we want to configure the root logger
for handler in logging.root.handlers[:]:
logging.root.removeHandler(handler)
debug = args['--debug']
# configure root logger
log_level = logging.INFO
if debug:
log_level = logging.DEBUG
logging.basicConfig(level=log_level)
logging.debug(args)
domain_name = args['<domain>']
uri = args['--uri']
old_format = args['--old']
url = args['<url>']
config = None
if not url:
# take temporary memory dump
# we need to create our own tmp_dir
# otherwise the dumpfile will be owned by libvirt
# and we don't have the permission to remove it in /tmp
with TemporaryDirectory() as tmp_dir:
with NamedTemporaryFile(dir=tmp_dir) as ram_dump:
# chmod to be r/w by everyone
# before libvirt takes ownership
os.chmod(ram_dump.name,
stat.S_IRUSR | stat.S_IWUSR |
stat.S_IRGRP | stat.S_IWGRP |
stat.S_IROTH | stat.S_IWOTH)
con = libvirt.open(uri)
domain = con.lookupByName(domain_name)
# take dump
logging.info('Dumping %s physical memory to %s', domain.name(),
ram_dump.name)
flags = libvirt.VIR_DUMP_MEMORY_ONLY
dumpformat = libvirt.VIR_DOMAIN_CORE_DUMP_FORMAT_RAW
domain.coreDumpWithFormat(ram_dump.name, dumpformat, flags)
ram_dump.flush()
# extract offsets
config = extract_offsets(domain.name(), ram_dump.name)
else:
config = extract_offsets(domain_name, url)
formatted_config = format_config(domain_name, config, old_format)
logging.info(formatted_config)
if __name__ == '__main__':
args = docopt(__doc__)
exit_code = main(args)
sys.exit(exit_code)
| e_data = entry[1]
if e_data['pdb'] in NT_KRNL_PDB:
return (e_data['pdb'], e_data['guid']) | conditional_block |
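# --- Editor's note: a compact sketch of the find_ntoskrnl scan in the row above.
# version_modules yields (type, data) entries and the kernel is identified by its
# PDB name; the sample list below is fabricated for illustration only.
NT_KRNL_PDB = ['ntkrnlmp.pdb', 'ntkrpamp.pdb']
version_modules = [
    ('r', {'pdb': 'hal.pdb', 'guid': '0' * 33}),
    ('r', {'pdb': 'ntkrnlmp.pdb', 'guid': 'ABCDEF0123456789ABCDEF0123456789-1'}),
]
kernel = next((e[1] for e in version_modules
               if e[0] == 'r' and e[1]['pdb'] in NT_KRNL_PDB), None)
print(kernel)  # -> {'pdb': 'ntkrnlmp.pdb', 'guid': '...'}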
has-create.pipe.ts | /*
* Lumeer: Modern Data Definition and Processing Platform
*
* Copyright (C) since 2017 Lumeer.io, s.r.o. and/or its affiliates.
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
import {Pipe, PipeTransform} from '@angular/core';
import {RuleTiming} from '../../../../../core/model/rule';
@Pipe({
name: 'hasCreate',
})
export class HasCreatePipe implements PipeTransform {
private readonly createTimings = [
RuleTiming.Create,
RuleTiming.CreateUpdate,
RuleTiming.CreateDelete,
RuleTiming.All,
];
public | (value: RuleTiming): boolean {
return this.createTimings.indexOf(value) >= 0;
}
}
| transform | identifier_name |
has-create.pipe.ts | /*
* Lumeer: Modern Data Definition and Processing Platform
*
* Copyright (C) since 2017 Lumeer.io, s.r.o. and/or its affiliates.
*
* This program is free software: you can redistribute it and/or modify
* it under the terms of the GNU General Public License as published by
* the Free Software Foundation, either version 3 of the License, or
* (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
* GNU General Public License for more details.
*
* You should have received a copy of the GNU General Public License
* along with this program. If not, see <http://www.gnu.org/licenses/>.
*/
import {Pipe, PipeTransform} from '@angular/core';
import {RuleTiming} from '../../../../../core/model/rule';
@Pipe({
name: 'hasCreate',
})
export class HasCreatePipe implements PipeTransform {
private readonly createTimings = [
RuleTiming.Create,
RuleTiming.CreateUpdate,
RuleTiming.CreateDelete,
RuleTiming.All,
];
public transform(value: RuleTiming): boolean {
return this.createTimings.indexOf(value) >= 0;
} | } | random_line_split |
|
edit-shift.ts | import { translate } from "./Translate"
import * as bootbox from "bootbox"
var updatePublishEvent = function () {
let $eventPublished = $('[name=evntIsPublished]');
var evtPublished = $eventPublished.is(':checked');
var publishIndicator = $('#eventPublishedIndicator');
publishIndicator.removeClass('fa-check-square-o').removeClass('fa-square-o').removeClass('text-danger').removeClass('text-success'); | publishIndicator.addClass('fa-check-square-o').addClass('text-success');
} else {
publishIndicator.addClass('fa-square-o').addClass('text-danger');
}
var isPrivateInput = $('[name=isPrivate]');
if (isPrivateInput.is(':checked')) {
publishBtn.removeClass('hidden');
} else {
publishBtn.addClass("hidden");
$('[name=evntIsPublished]').prop("checked", false);
}
};
$(window).on('load', updatePublishEvent);
$('#publishBtn').click(function () {
if ($('[name=evntIsPublished]').is(':checked')) {
$('[name=evntIsPublished]').prop('checked', !$('[name=evntIsPublished]').is(':checked'));
updatePublishEvent();
} else {
bootbox.confirm({
title: '<div class="alert alert-warning text-center"><span class="glyphicon glyphicon-warning-sign"></span> WARNING <span class="glyphicon glyphicon-warning-sign"></span></div>',
size: 'small',
message: '<p>' + translate('publishEventWarning') + '</p>',
callback: function (result) {
if (!result) {
return;
}
$('[name=evntIsPublished]').prop('checked', !$('[name=evntIsPublished]').is(':checked'));
updatePublishEvent();
}
});
}
});
$('input[name=isPrivate]').change(updatePublishEvent); | var publishBtn = $('#publishBtn');
if (evtPublished) { | random_line_split |
edit-shift.ts |
import { translate } from "./Translate"
import * as bootbox from "bootbox"
var updatePublishEvent = function () {
let $eventPublished = $('[name=evntIsPublished]');
var evtPublished = $eventPublished.is(':checked');
var publishIndicator = $('#eventPublishedIndicator');
publishIndicator.removeClass('fa-check-square-o').removeClass('fa-square-o').removeClass('text-danger').removeClass('text-success');
var publishBtn = $('#publishBtn');
if (evtPublished) {
publishIndicator.addClass('fa-check-square-o').addClass('text-success');
} else {
publishIndicator.addClass('fa-square-o').addClass('text-danger');
}
var isPrivateInput = $('[name=isPrivate]');
if (isPrivateInput.is(':checked')) {
publishBtn.removeClass('hidden');
} else |
};
$(window).on('load', updatePublishEvent);
$('#publishBtn').click(function () {
if ($('[name=evntIsPublished]').is(':checked')) {
$('[name=evntIsPublished]').prop('checked', !$('[name=evntIsPublished]').is(':checked'));
updatePublishEvent();
} else {
bootbox.confirm({
title: '<div class="alert alert-warning text-center"><span class="glyphicon glyphicon-warning-sign"></span> WARNING <span class="glyphicon glyphicon-warning-sign"></span></div>',
size: 'small',
message: '<p>' + translate('publishEventWarning') + '</p>',
callback: function (result) {
if (!result) {
return;
}
$('[name=evntIsPublished]').prop('checked', !$('[name=evntIsPublished]').is(':checked'));
updatePublishEvent();
}
});
}
});
$('input[name=isPrivate]').change(updatePublishEvent);
| {
publishBtn.addClass("hidden");
$('[name=evntIsPublished]').prop("checked", false);
} | conditional_block |
ServerPeriod.ts | /**
* The Reincarnation
* No description provided (generated by Swagger Codegen https://github.com/swagger-api/swagger-codegen)
*
* OpenAPI spec version: beta
*
*
* NOTE: This class is auto generated by the swagger code generator program.
* https://github.com/swagger-api/swagger-codegen.git
* Do not edit the class manually.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
'use strict';
import * as models from './models';
| armageddon?: number;
end?: number;
start?: number;
} | export interface ServerPeriod {
| random_line_split |
main.rs |
extern crate regex;
#[macro_use]
extern crate lazy_static;
use std::io;
use std::io::prelude::*;
use std::str::FromStr;
use regex::Regex;
lazy_static! {
static ref PARTICLE_RE: Regex = Regex::new(r"^p=<(?P<pos_x>-?[0-9]+),(?P<pos_y>-?[0-9]+),(?P<pos_z>-?[0-9]+)>, v=<(?P<vel_x>-?[0-9]+),(?P<vel_y>-?[0-9]+),(?P<vel_z>-?[0-9]+)>, a=<(?P<acc_x>-?[0-9]+),(?P<acc_y>-?[0-9]+),(?P<acc_z>-?[0-9]+)>$").unwrap();
}
#[derive(Debug, Copy, Clone)]
struct Particle {
pos: (f64, f64, f64),
vel: (f64, f64, f64),
acc: (f64, f64, f64),
}
impl FromStr for Particle {
type Err = ();
fn from_str(s: &str) -> Result<Self, Self::Err> {
let caps = PARTICLE_RE.captures(s).ok_or(())?;
let pos_x = caps.name("pos_x").ok_or(())?.as_str().parse().map_err(
|_| (),
)?;
let pos_y = caps.name("pos_y").ok_or(())?.as_str().parse().map_err(
|_| (),
)?;
let pos_z = caps.name("pos_z").ok_or(())?.as_str().parse().map_err(
|_| (),
)?;
let vel_x = caps.name("vel_x").ok_or(())?.as_str().parse().map_err(
|_| (),
)?;
let vel_y = caps.name("vel_y").ok_or(())?.as_str().parse().map_err(
|_| (),
)?;
let vel_z = caps.name("vel_z").ok_or(())?.as_str().parse().map_err(
|_| (),
)?;
let acc_x = caps.name("acc_x").ok_or(())?.as_str().parse().map_err(
|_| (),
)?;
let acc_y = caps.name("acc_y").ok_or(())?.as_str().parse().map_err(
|_| (),
)?;
let acc_z = caps.name("acc_z").ok_or(())?.as_str().parse().map_err(
|_| (),
)?;
Ok(Particle {
pos: (pos_x, pos_y, pos_z),
vel: (vel_x, vel_y, vel_z),
acc: (acc_x, acc_y, acc_z),
})
}
}
#[derive(Debug)]
struct Simulation(Vec<Particle>);
impl FromStr for Simulation {
type Err = ();
fn from_str(s: &str) -> Result<Self, Self::Err> {
Ok(Simulation(s.lines()
.map(|line| line.parse())
.collect::<Result<Vec<Particle>, ()>>()?))
}
}
impl Simulation {
fn do_step(&mut self) {
for particle in self.0.iter_mut() {
particle.vel.0 += particle.acc.0;
particle.vel.1 += particle.acc.1;
particle.vel.2 += particle.acc.2;
particle.pos.0 += particle.vel.0;
particle.pos.1 += particle.vel.1;
particle.pos.2 += particle.vel.2;
}
self.0 = self.0
.iter()
.filter(|particle_1| {
let occurrence_count = self.0.iter().fold(
0,
|acc, particle_2| if particle_1.pos ==
particle_2.pos
{
acc + 1
} else {
acc
},
);
occurrence_count <= 1
})
.cloned()
.collect();
}
fn get_longterm_closest_particle(&self) -> (usize, &Particle) |
fn get_num_particles(&self) -> usize {
self.0.len()
}
}
fn dot_product(left: &(f64, f64, f64), right: &(f64, f64, f64)) -> f64 {
left.0 * right.0 + left.1 * right.1 + left.2 * right.2
}
fn get_norm(vec: &(f64, f64, f64)) -> f64 {
dot_product(&vec, &vec).sqrt()
}
fn main() {
let mut input_str = String::new();
io::stdin().read_to_string(&mut input_str).expect(
"input error",
);
let mut sim: Simulation = input_str.parse().expect("parse error");
println!(
"The longterm closest particle is: {:?}",
sim.get_longterm_closest_particle()
);
println!("Running 100000 steps in the simulation");
for i in 0..100_000 {
if i % 10_000 == 0 {
println!("*** {}", i);
}
sim.do_step();
}
println!(
"There are {} particles left after 100000 steps.",
sim.get_num_particles()
);
}
#[cfg(test)]
mod tests {
use super::Simulation;
#[test]
fn simulation_test() {
let test_str = "p=<3,0,0>, v=<2,0,0>, a=<-1,0,0>\n\
p=<4,0,0>, v=<0,0,0>, a=<-2,0,0>";
let sim: Simulation = test_str.parse().unwrap();
assert_eq!(0, sim.get_longterm_closest_particle().0);
}
#[test]
fn crash_test() {
let test_str = "p=<-6,0,0>, v=<3,0,0>, a=<0,0,0>\n\
p=<-4,0,0>, v=<2,0,0>, a=<0,0,0>\n\
p=<-2,0,0>, v=<1,0,0>, a=<0,0,0>\n\
p=<3,0,0>, v=<-1,0,0>, a=<0,0,0>";
let mut sim: Simulation = test_str.parse().unwrap();
assert_eq!(4, sim.get_num_particles());
sim.do_step();
assert_eq!(4, sim.get_num_particles());
sim.do_step();
assert_eq!(1, sim.get_num_particles());
sim.do_step();
assert_eq!(1, sim.get_num_particles());
sim.do_step();
assert_eq!(1, sim.get_num_particles());
}
}
| {
let (idx, part, _) = self.0
.iter()
.enumerate()
.map(|(idx, part)| {
let norm = get_norm(&part.acc);
(idx, part, norm)
})
.min_by(|&(_, _, l_norm), &(_, _, r_norm)| {
l_norm.partial_cmp(&r_norm).unwrap()
})
.unwrap();
(idx, part)
} | identifier_body |
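// --- Editor's note: a self-contained sketch (not from the source) of the
// named-capture parsing pattern used by Particle::from_str above, reduced to a
// single <x,y,z> vector. The source caches its regex with lazy_static; here the
// pattern is compiled on each call for brevity.
extern crate regex;
use regex::Regex;

fn parse_vec3(s: &str) -> Option<(f64, f64, f64)> {
    let re = Regex::new(r"^<(?P<x>-?\d+),(?P<y>-?\d+),(?P<z>-?\d+)>$").ok()?;
    let caps = re.captures(s)?;
    let get = |name: &str| -> Option<f64> { caps.name(name)?.as_str().parse().ok() };
    Some((get("x")?, get("y")?, get("z")?))
}

fn main() {
    assert_eq!(parse_vec3("<3,0,-1>"), Some((3.0, 0.0, -1.0)));
}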
main.rs |
extern crate regex;
#[macro_use]
extern crate lazy_static;
use std::io;
use std::io::prelude::*;
use std::str::FromStr;
use regex::Regex;
lazy_static! {
static ref PARTICLE_RE: Regex = Regex::new(r"^p=<(?P<pos_x>-?[0-9]+),(?P<pos_y>-?[0-9]+),(?P<pos_z>-?[0-9]+)>, v=<(?P<vel_x>-?[0-9]+),(?P<vel_y>-?[0-9]+),(?P<vel_z>-?[0-9]+)>, a=<(?P<acc_x>-?[0-9]+),(?P<acc_y>-?[0-9]+),(?P<acc_z>-?[0-9]+)>$").unwrap();
}
#[derive(Debug, Copy, Clone)]
struct Particle {
pos: (f64, f64, f64),
vel: (f64, f64, f64),
acc: (f64, f64, f64),
}
impl FromStr for Particle {
type Err = ();
fn from_str(s: &str) -> Result<Self, Self::Err> {
let caps = PARTICLE_RE.captures(s).ok_or(())?;
let pos_x = caps.name("pos_x").ok_or(())?.as_str().parse().map_err(
|_| (),
)?;
let pos_y = caps.name("pos_y").ok_or(())?.as_str().parse().map_err(
|_| (),
)?;
let pos_z = caps.name("pos_z").ok_or(())?.as_str().parse().map_err(
|_| (),
)?;
let vel_x = caps.name("vel_x").ok_or(())?.as_str().parse().map_err(
|_| (),
)?;
let vel_y = caps.name("vel_y").ok_or(())?.as_str().parse().map_err(
|_| (),
)?;
let vel_z = caps.name("vel_z").ok_or(())?.as_str().parse().map_err(
|_| (),
)?;
let acc_x = caps.name("acc_x").ok_or(())?.as_str().parse().map_err(
|_| (),
)?;
let acc_y = caps.name("acc_y").ok_or(())?.as_str().parse().map_err(
|_| (),
)?;
let acc_z = caps.name("acc_z").ok_or(())?.as_str().parse().map_err(
|_| (),
)?;
Ok(Particle {
pos: (pos_x, pos_y, pos_z),
vel: (vel_x, vel_y, vel_z),
acc: (acc_x, acc_y, acc_z),
})
}
}
#[derive(Debug)]
struct Simulation(Vec<Particle>);
impl FromStr for Simulation {
type Err = ();
fn from_str(s: &str) -> Result<Self, Self::Err> {
Ok(Simulation(s.lines()
.map(|line| line.parse())
.collect::<Result<Vec<Particle>, ()>>()?))
}
}
impl Simulation {
fn do_step(&mut self) {
for particle in self.0.iter_mut() {
particle.vel.0 += particle.acc.0;
particle.vel.1 += particle.acc.1;
particle.vel.2 += particle.acc.2;
particle.pos.0 += particle.vel.0;
particle.pos.1 += particle.vel.1;
particle.pos.2 += particle.vel.2;
}
self.0 = self.0
.iter()
.filter(|particle_1| {
let occurrence_count = self.0.iter().fold(
0,
|acc, particle_2| if particle_1.pos ==
particle_2.pos
| else {
acc
},
);
occurrence_count <= 1
})
.cloned()
.collect();
}
fn get_longterm_closest_particle(&self) -> (usize, &Particle) {
let (idx, part, _) = self.0
.iter()
.enumerate()
.map(|(idx, part)| {
let norm = get_norm(&part.acc);
(idx, part, norm)
})
.min_by(|&(_, _, l_norm), &(_, _, r_norm)| {
l_norm.partial_cmp(&r_norm).unwrap()
})
.unwrap();
(idx, part)
}
fn get_num_particles(&self) -> usize {
self.0.len()
}
}
fn dot_product(left: &(f64, f64, f64), right: &(f64, f64, f64)) -> f64 {
left.0 * right.0 + left.1 * right.1 + left.2 * right.2
}
fn get_norm(vec: &(f64, f64, f64)) -> f64 {
dot_product(&vec, &vec).sqrt()
}
fn main() {
let mut input_str = String::new();
io::stdin().read_to_string(&mut input_str).expect(
"input error",
);
let mut sim: Simulation = input_str.parse().expect("parse error");
println!(
"The longterm closest particle is: {:?}",
sim.get_longterm_closest_particle()
);
println!("Running 100000 steps in the simulation");
for i in 0..100_000 {
if i % 10_000 == 0 {
println!("*** {}", i);
}
sim.do_step();
}
println!(
"There are {} particles left after 100000 steps.",
sim.get_num_particles()
);
}
#[cfg(test)]
mod tests {
use super::Simulation;
#[test]
fn simulation_test() {
let test_str = "p=<3,0,0>, v=<2,0,0>, a=<-1,0,0>\n\
p=<4,0,0>, v=<0,0,0>, a=<-2,0,0>";
let sim: Simulation = test_str.parse().unwrap();
assert_eq!(0, sim.get_longterm_closest_particle().0);
}
#[test]
fn crash_test() {
let test_str = "p=<-6,0,0>, v=<3,0,0>, a=<0,0,0>\n\
p=<-4,0,0>, v=<2,0,0>, a=<0,0,0>\n\
p=<-2,0,0>, v=<1,0,0>, a=<0,0,0>\n\
p=<3,0,0>, v=<-1,0,0>, a=<0,0,0>";
let mut sim: Simulation = test_str.parse().unwrap();
assert_eq!(4, sim.get_num_particles());
sim.do_step();
assert_eq!(4, sim.get_num_particles());
sim.do_step();
assert_eq!(1, sim.get_num_particles());
sim.do_step();
assert_eq!(1, sim.get_num_particles());
sim.do_step();
assert_eq!(1, sim.get_num_particles());
}
}
| {
acc + 1
} | conditional_block |
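// --- Editor's note: a sketch of the collision-removal step from the row above
// using a HashMap count instead of the quadratic fold. Integer positions are an
// assumption made so the tuples are hashable; it holds here because the puzzle
// input and the update rule keep all coordinates integral.
use std::collections::HashMap;

fn remove_collisions(particles: &mut Vec<(i64, i64, i64)>) {
    let mut counts: HashMap<(i64, i64, i64), usize> = HashMap::new();
    for p in particles.iter() {
        *counts.entry(*p).or_insert(0) += 1;
    }
    particles.retain(|p| counts[p] == 1); // keep positions seen exactly once
}

fn main() {
    let mut ps = vec![(0, 0, 0), (1, 0, 0), (0, 0, 0)];
    remove_collisions(&mut ps);
    assert_eq!(ps, vec![(1, 0, 0)]);
}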
main.rs | extern crate regex;
#[macro_use]
extern crate lazy_static;
use std::io;
use std::io::prelude::*;
use std::str::FromStr;
use regex::Regex;
lazy_static! {
static ref PARTICLE_RE: Regex = Regex::new(r"^p=<(?P<pos_x>-?[0-9]+),(?P<pos_y>-?[0-9]+),(?P<pos_z>-?[0-9]+)>, v=<(?P<vel_x>-?[0-9]+),(?P<vel_y>-?[0-9]+),(?P<vel_z>-?[0-9]+)>, a=<(?P<acc_x>-?[0-9]+),(?P<acc_y>-?[0-9]+),(?P<acc_z>-?[0-9]+)>$").unwrap();
}
#[derive(Debug, Copy, Clone)]
struct Particle {
pos: (f64, f64, f64),
vel: (f64, f64, f64),
acc: (f64, f64, f64),
}
impl FromStr for Particle {
type Err = ();
fn from_str(s: &str) -> Result<Self, Self::Err> {
let caps = PARTICLE_RE.captures(s).ok_or(())?;
let pos_x = caps.name("pos_x").ok_or(())?.as_str().parse().map_err(
|_| (),
)?;
let pos_y = caps.name("pos_y").ok_or(())?.as_str().parse().map_err(
|_| (),
)?;
let pos_z = caps.name("pos_z").ok_or(())?.as_str().parse().map_err(
|_| (),
)?;
let vel_x = caps.name("vel_x").ok_or(())?.as_str().parse().map_err(
|_| (),
)?;
let vel_y = caps.name("vel_y").ok_or(())?.as_str().parse().map_err(
|_| (),
)?;
let vel_z = caps.name("vel_z").ok_or(())?.as_str().parse().map_err(
|_| (),
)?;
let acc_x = caps.name("acc_x").ok_or(())?.as_str().parse().map_err(
|_| (),
)?;
let acc_y = caps.name("acc_y").ok_or(())?.as_str().parse().map_err(
|_| (),
)?;
let acc_z = caps.name("acc_z").ok_or(())?.as_str().parse().map_err(
|_| (),
)?;
Ok(Particle {
pos: (pos_x, pos_y, pos_z),
vel: (vel_x, vel_y, vel_z),
acc: (acc_x, acc_y, acc_z),
})
}
}
#[derive(Debug)]
struct Simulation(Vec<Particle>);
impl FromStr for Simulation {
type Err = ();
fn from_str(s: &str) -> Result<Self, Self::Err> {
Ok(Simulation(s.lines()
.map(|line| line.parse())
.collect::<Result<Vec<Particle>, ()>>()?))
}
}
impl Simulation {
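    // Advance every particle one tick (velocity += acceleration, then
    // position += velocity), then remove all particles whose position is
    // shared with at least one other particle (a collision).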
fn do_step(&mut self) {
for particle in self.0.iter_mut() {
particle.vel.0 += particle.acc.0;
particle.vel.1 += particle.acc.1;
particle.vel.2 += particle.acc.2;
particle.pos.0 += particle.vel.0;
particle.pos.1 += particle.vel.1;
particle.pos.2 += particle.vel.2;
}
self.0 = self.0
.iter()
.filter(|particle_1| {
                let occurrence_count = self.0.iter().fold(
                    0,
                    |acc, particle_2| if particle_1.pos ==
                        particle_2.pos
                    {
                        acc + 1
                    } else {
                        acc
                    },
                );
                occurrence_count <= 1
})
.cloned()
.collect();
}
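    // In the long run the particle with the smallest acceleration magnitude
    // stays closest to the origin, so compare particles by |acc|.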
fn get_longterm_closest_particle(&self) -> (usize, &Particle) {
let (idx, part, _) = self.0
.iter()
.enumerate()
.map(|(idx, part)| {
let norm = get_norm(&part.acc);
(idx, part, norm)
})
.min_by(|&(_, _, l_norm), &(_, _, r_norm)| {
l_norm.partial_cmp(&r_norm).unwrap()
})
.unwrap();
(idx, part)
}
fn get_num_particles(&self) -> usize {
self.0.len()
}
}
fn dot_product(left: &(f64, f64, f64), right: &(f64, f64, f64)) -> f64 {
left.0 * right.0 + left.1 * right.1 + left.2 * right.2
}
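// Euclidean length of a 3-component vector.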
fn get_norm(vec: &(f64, f64, f64)) -> f64 {
dot_product(&vec, &vec).sqrt()
}
fn main() {
let mut input_str = String::new();
io::stdin().read_to_string(&mut input_str).expect(
"input error",
);
let mut sim: Simulation = input_str.parse().expect("parse error");
println!(
"The longterm closest particle is: {:?}",
sim.get_longterm_closest_particle()
);
println!("Running 100000 steps in the simulation");
for i in 0..100_000 {
if i % 10_000 == 0 {
println!("*** {}", i);
}
sim.do_step();
}
println!(
"There are {} particles left after 100000 steps.",
sim.get_num_particles()
);
}
#[cfg(test)]
mod tests {
use super::Simulation;
#[test]
fn simulation_test() {
let test_str = "p=<3,0,0>, v=<2,0,0>, a=<-1,0,0>\n\
p=<4,0,0>, v=<0,0,0>, a=<-2,0,0>";
let sim: Simulation = test_str.parse().unwrap();
assert_eq!(0, sim.get_longterm_closest_particle().0);
}
#[test]
fn crash_test() {
let test_str = "p=<-6,0,0>, v=<3,0,0>, a=<0,0,0>\n\
p=<-4,0,0>, v=<2,0,0>, a=<0,0,0>\n\
p=<-2,0,0>, v=<1,0,0>, a=<0,0,0>\n\
p=<3,0,0>, v=<-1,0,0>, a=<0,0,0>";
let mut sim: Simulation = test_str.parse().unwrap();
assert_eq!(4, sim.get_num_particles());
sim.do_step();
assert_eq!(4, sim.get_num_particles());
sim.do_step();
assert_eq!(1, sim.get_num_particles());
sim.do_step();
assert_eq!(1, sim.get_num_particles());
sim.do_step();
assert_eq!(1, sim.get_num_particles());
}
}
auth.service.ts
import {EventEmitter, Injectable} from '@angular/core';
import {Headers, Http, Response, URLSearchParams} from '@angular/http';
import {Observable} from 'rxjs/Rx';
import {Logger} from './logger.service';
@Injectable()
export class AuthService {
private locationWatcher = new EventEmitter();
private loginUrl = 'https://localhost:8443/simple-jee7/rest/login';
constructor(private http: Http, private log: Logger) { log.info('Instantiating AuthService'); }
authenticated: boolean = false;
login: string = '';
password: string = '';
permissions: string[] = [];
public isAuthenticated(): boolean { return this.authenticated }
public hasRole(roles: string[]): boolean { return false }
public doLogin(userName: string, password: string) {
this.log.info('doLogin');
let headers = new Headers();
headers.append('Authorization', 'Basic ' + btoa(userName + ':' + password));
return this.http.get(this.loginUrl, { headers: headers })
.map<boolean>((res) => this.processLoginResult.bind(this)(res, userName, password));
}
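  // A 200 response marks this service authenticated and caches the
  // credentials and permissions returned by the server.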
private processLoginResult(res: Response, userName: string, password) {
    let data = res.json();
    if (res.status === 200) {
      this.authenticated = true;
this.login = userName;
this.password = password;
// store permissions
this.permissions = data.permissions;
}
return this.authenticated;
}
public subscribe(
onNext: (value: any) => void, onThrow?: (exception: any) => void, onReturn?: () => void) {
return this.locationWatcher.subscribe(onNext, onThrow, onReturn);
}
}
base64.js
// Base 64 encoding
const BASE_64_CHARS = "ABCDEFGHIJKLMNOPQRSTUVWXYZabcdefghijklmnopqrstuvwxyz0123456789+/";
const BASE_64_VALS = Object.create(null);
const getChar = val => BASE_64_CHARS.charAt(val);
const getVal = ch => ch === '=' ? -1 : BASE_64_VALS[ch];
for (let i = 0; i < BASE_64_CHARS.length; i++) {
BASE_64_VALS[getChar(i)] = i;
}
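// Encode a byte array (or an ASCII string) as base64, consuming three input
// bytes per group of four output characters.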
const encode = array => {
if (typeof array === "string") {
const str = array;
array = newBinary(str.length);
    for (let i = 0; i < str.length; i++) {
      const ch = str.charCodeAt(i);
      if (ch > 0xFF) {
        throw new Error(
          "Not ascii. Base64.encode can only take ascii strings.");
}
array[i] = ch;
}
}
const answer = [];
let a = null;
let b = null;
let c = null;
let d = null;
for (let i = 0; i < array.length; i++) {
switch (i % 3) {
case 0:
a = (array[i] >> 2) & 0x3F;
b = (array[i] & 0x03) << 4;
break;
case 1:
b = b | (array[i] >> 4) & 0xF;
c = (array[i] & 0xF) << 2;
break;
case 2:
c = c | (array[i] >> 6) & 0x03;
d = array[i] & 0x3F;
answer.push(getChar(a));
answer.push(getChar(b));
answer.push(getChar(c));
answer.push(getChar(d));
a = null;
b = null;
c = null;
d = null;
break;
}
}
if (a != null) {
answer.push(getChar(a));
answer.push(getChar(b));
if (c == null) {
answer.push('=');
} else {
answer.push(getChar(c));
}
if (d == null) {
answer.push('=');
}
}
return answer.join("");
};
// XXX This is a weird place for this to live, but it's used both by
// this package and 'ejson', and we can't put it in 'ejson' without
// introducing a circular dependency. It should probably be in its own
// package or as a helper in a package that both 'base64' and 'ejson'
// use.
const newBinary = len => {
if (typeof Uint8Array === 'undefined' || typeof ArrayBuffer === 'undefined') {
const ret = [];
for (let i = 0; i < len; i++) {
ret.push(0);
}
ret.$Uint8ArrayPolyfill = true;
return ret;
}
return new Uint8Array(new ArrayBuffer(len));
};
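// Decode a base64 string back into a byte array; '=' padding reduces the
// length of the output accordingly.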
const decode = str => {
let len = Math.floor((str.length * 3) / 4);
if (str.charAt(str.length - 1) == '=') {
len--;
if (str.charAt(str.length - 2) == '=') {
len--;
}
}
const arr = newBinary(len);
let one = null;
let two = null;
let three = null;
let j = 0;
for (let i = 0; i < str.length; i++) {
const c = str.charAt(i);
const v = getVal(c);
switch (i % 4) {
case 0:
if (v < 0) {
throw new Error('invalid base64 string');
}
one = v << 2;
break;
case 1:
if (v < 0) {
throw new Error('invalid base64 string');
}
one = one | (v >> 4);
arr[j++] = one;
two = (v & 0x0F) << 4;
break;
case 2:
if (v >= 0) {
two = two | (v >> 2);
arr[j++] = two;
three = (v & 0x03) << 6;
}
break;
case 3:
if (v >= 0) {
arr[j++] = three | v;
}
break;
}
}
return arr;
};
export const Base64 = { encode, decode, newBinary };
movie_clip.js
.us/
*
* Copyright 2011, Chris Smoak
* Released under the MIT License.
* http://www.opensource.org/licenses/mit-license.php
*/
define(function(require, exports, module) {
var ext = require('lib/ext')
var env = require('lib/env')
var DisplayObject = require('player/display_object').DisplayObject
var Element = require('dom/element').Element
var Namespace = require('dom/namespace').Namespace
var as2_MovieClip = require('as2/movie_clip').MovieClip
var DisplayList = require('player/display_list').DisplayList
var MovieClip = function(def, loader, parent, renderer) {
this.def = def
this.loader = loader
this.parent = parent
this.renderer = renderer
DisplayObject.call(this)
this.timeline = def.timeline
this.playhead = null
this.onEnterFrameCallback = ext.bind(this.onEnterFrame, this)
this.displayList = new DisplayList()
this.$isTimelineControlled = !!this.timeline
this.renderer.setMovieClip(this)
}
ext.inherits(MovieClip, DisplayObject)
ext.add(MovieClip.prototype, {
getPlayhead: function() {
return this.playhead
},
getFrameCount: function() {
return this.timeline ? this.timeline.getFrameCount() : 1
},
// TODO: implement when we have async loading
frameReady : function(frame) {
return true
},
enterFrame: function() {
// update playhead
var lastPlayhead = this.playhead
if (lastPlayhead === null) {
// first frame
this.playing = (this.getFrameCount() > 1)
this.playhead = 0
}
else {
// after first frame
if (this.playing) {
this.playhead += 1
if (this.playhead >= this.getFrameCount()) {
this.playhead = 0
}
}
}
this.$processFrameChange(lastPlayhead, true)
},
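  // Applies a playhead change: removes and updates timeline children, queues
  // onEnterFrame and frame-script actions, then creates newly added children.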
$processFrameChange: function(lastPlayhead, enterFrame) {
var toAdd = null
if (this.playhead !== lastPlayhead) {
if (this.timeline) {
// update timeline entries
var changes = this.timeline.getChangedSpans(lastPlayhead, this.playhead)
var toRemove = changes.toRemove
toAdd = changes.toAdd
var toUpdate = changes.toUpdate
// children are removed
var removeLength = toRemove.length
for (var i = 0; i < removeLength; i++) {
var entry = toRemove[i]
this.removeChildAtDepth(entry.getDepth())
}
// children are updated
var updateLength = toUpdate.length
for (var i = 0; i < updateLength; i++) {
var entry = toUpdate[i]
this.$updateChild(entry)
}
}
}
// update all existing children
var displayObjects = this.displayList.getDisplayObjects()
for (var depth in displayObjects) {
if (displayObjects.hasOwnProperty(depth)) {
if (displayObjects[depth].enterFrame) {
displayObjects[depth].enterFrame()
}
}
}
// queue up onEnterFrame if this is not the first time we're called
if (enterFrame && lastPlayhead !== null) {
this.loader.addAction(this.onEnterFrameCallback)
}
// queue up frame script if we've changed frames
if (this.playhead !== lastPlayhead) {
var callback = ext.bind(this.frameAction, this, [this.playhead])
this.loader.addAction(callback)
}
// new children are inited
if (toAdd) {
var addLength = toAdd.length
for (var i = 0; i < addLength; i++) {
var entry = toAdd[i]
var obj = this.loader.createDisplayObject(entry.getInstance().tag, this)
// TODO: limit when we need to update here
this.$updateChild(entry)
//obj.spanid = entry.getInstance().id
}
}
},
$updateChild: function(entry) {
var obj = this.displayList.getAtDepth(entry.getDepth())
if (obj && obj.isTimelineControlled()) {
obj.timelineUpdate(entry.tag)
}
},
removeChildAtDepth: function(depth) {
var obj = this.displayList.removeAtDepth(depth)
if (obj) {
this.renderer.removeChild(obj)
this.childChangedName(obj, obj.getName(), null)
}
},
insertChild: function(obj, depth) {
var displayList = this.displayList
// if we have an existing object at this depth, move it
var existing = displayList.getAtDepth(depth)
if (existing) {
var newDepth = displayList.getNextHighestDepth()
existing.setDepth(newDepth)
displayList.removeAtDepth(depth)
displayList.setAtDepth(existing, newDepth)
}
// add the new object
obj.setDepth(depth)
displayList.setAtDepth(obj, depth)
this.renderer.addChild(obj)
this.childChangedName(obj, null, obj.getName())
},
frameAction: function(frame) {
if (!this.timeline) {
return
}
var tags = this.timeline.getFrame(frame).tags
for (var i = 0; i < tags.length; i++) {
var tag = tags[i]
// TODO: cleanup following
// button children's tags don't have headers
if (!tag.header) {
continue
}
//var t = fljs.swf.TagTypes
switch (tag.header.TagType) {
case 12://t.DoAction:
this.loader.doAction(this, tag)
break
case 9://t.SetBackgroundColor:
// TODO
break
case 15://t.StartSound:
case 19://t.SoundStreamBlock:
case 18://t.SoundStreamHead:
case 45://t.SoundStreamHead2:
break
default:
// [nop]
}
}
},
onEnterFrame: function() {
// TODO: rename _as2Object
if (this._as2Object && this._as2Object.onEnterFrame) {
this._as2Object.onEnterFrame()
}
},
play: function() {
this.playing = true
},
stop: function() {
this.playing = false
},
gotoAndPlay: function(frame) {
this.playing = true
this.$gotoFrame(frame)
},
gotoAndStop: function(frame) {
this.playing = false
this.$gotoFrame(frame)
},
$gotoFrame: function(frame) {
// are we specifying a frame by label?
if (typeof frame == 'string') {
if (this.timeline) {
frame = this.timeline.getFrameNumberForLabel(frame)
}
}
// or by frame number?
else {
// we're 0-based, as2 is 1-based
frame -= 1
}
if (frame === null || frame < 0) {
frame = 0
}
if (frame >= this.getFrameCount()) {
frame = this.getFrameCount() - 1
}
var lastPlayhead = this.playhead
this.playhead = frame
this.$processFrameChange(lastPlayhead, false)
},
as2Object : function() {
if (!this._as2Object) {
var mc = this._as2Object = new as2_MovieClip()
mc.__dispObj = this
var objs = this.displayList.getDisplayObjects()
for (var depth in objs) {
if (objs.hasOwnProperty(depth)) {
var obj = objs[depth]
var name = obj.getName()
if (name) {
mc[name] = obj.as2Object()
}
}
}
if (this.getName()) {
mc[this.getName()] = mc
}
}
return this._as2Object
},
childChangedName: function(child, oldName, newName) {
if (!oldName && !newName) {
return
}
var mc = this._as2Object
if (mc) {
var target = child.as2Object()
if (oldName && mc[oldName] === target) {
delete mc[oldName]
}
if (newName) {
mc[newName] = target
}
}
},
/**
* DisplayObject override
*/
calcBbox : function() {
var bbox
var objs = this.displayList.getDisplayObjects()
for (var depth in objs) {
if (objs.hasOwnProperty(depth)) {
var obj = objs[depth]
var objBbox = obj.placeBbox()
if (!bbox) {
bbox = objBbox.slice(0)
}
else {
bbox = [
bbox[0] < objBbox[0] ? bbox[0] : objBbox[0],
bbox[1] < objBbox[1] ? bbox[1] : objBbox[1],
bbox[2] > objBbox[2] ? bbox[2] : objBbox[2],
            bbox[3] > objBbox[3] ? bbox[3] : objBbox[3]
          ]
        }
      }
    }
    return bbox
  }
})
exports.MovieClip = MovieClip
})
cef_process_message.rs
// Copyright (c) 2014 Marshall A. Greenblatt. All rights reserved.
//
// Redistribution and use in source and binary forms, with or without
// modification, are permitted provided that the following conditions are
// met:
//
// * Redistributions of source code must retain the above copyright
// notice, this list of conditions and the following disclaimer.
// * Redistributions in binary form must reproduce the above
// copyright notice, this list of conditions and the following disclaimer
// in the documentation and/or other materials provided with the
// distribution.
// * Neither the name of Google Inc. nor the name Chromium Embedded
// Framework nor the names of its contributors may be used to endorse
// or promote products derived from this software without specific prior
// written permission.
//
// THIS SOFTWARE IS PROVIDED BY THE COPYRIGHT HOLDERS AND CONTRIBUTORS
// "AS IS" AND ANY EXPRESS OR IMPLIED WARRANTIES, INCLUDING, BUT NOT
// LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR
// A PARTICULAR PURPOSE ARE DISCLAIMED. IN NO EVENT SHALL THE COPYRIGHT
// OWNER OR CONTRIBUTORS BE LIABLE FOR ANY DIRECT, INDIRECT, INCIDENTAL,
// SPECIAL, EXEMPLARY, OR CONSEQUENTIAL DAMAGES (INCLUDING, BUT NOT
// LIMITED TO, PROCUREMENT OF SUBSTITUTE GOODS OR SERVICES; LOSS OF USE,
// DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY
// THEORY OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT
// (INCLUDING NEGLIGENCE OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE
// OF THIS SOFTWARE, EVEN IF ADVISED OF THE POSSIBILITY OF SUCH DAMAGE.
//
// ---------------------------------------------------------------------------
//
// This file was generated by the CEF translator tool and should not be edited
// by hand. See the translator.README.txt file in the tools directory for
// more information.
//
#![allow(non_snake_case, unused_imports)]
use eutil;
use interfaces;
use types;
use wrappers::CefWrap;
use libc;
use std::collections::HashMap;
use std::ptr;
//
// Structure representing a message. Can be used on any process and thread.
//
#[repr(C)]
pub struct _cef_process_message_t {
//
// Base structure.
//
pub base: types::cef_base_t,
//
// Returns true (1) if this object is valid. Do not call any other functions
// if this function returns false (0).
//
pub is_valid: Option<extern "C" fn(
this: *mut cef_process_message_t) -> libc::c_int>,
//
// Returns true (1) if the values of this object are read-only. Some APIs may
// expose read-only objects.
//
pub is_read_only: Option<extern "C" fn(
this: *mut cef_process_message_t) -> libc::c_int>,
//
// Returns a writable copy of this object.
//
pub copy: Option<extern "C" fn(
this: *mut cef_process_message_t) -> *mut interfaces::cef_process_message_t>,
//
// Returns the message name.
//
// The resulting string must be freed by calling cef_string_userfree_free().
pub get_name: Option<extern "C" fn(
this: *mut cef_process_message_t) -> types::cef_string_userfree_t>,
//
// Returns the list of arguments.
//
pub get_argument_list: Option<extern "C" fn(
this: *mut cef_process_message_t) -> *mut interfaces::cef_list_value_t>,
//
// The reference count. This will only be present for Rust instances!
//
pub ref_count: usize,
//
// Extra data. This will only be present for Rust instances!
//
pub extra: u8,
}
pub type cef_process_message_t = _cef_process_message_t;
//
// Structure representing a message. Can be used on any process and thread.
//
pub struct CefProcessMessage {
c_object: *mut cef_process_message_t,
}
impl Clone for CefProcessMessage {
fn clone(&self) -> CefProcessMessage{
unsafe {
if !self.c_object.is_null() {
((*self.c_object).base.add_ref.unwrap())(&mut (*self.c_object).base);
}
CefProcessMessage {
c_object: self.c_object,
}
}
}
}
impl Drop for CefProcessMessage {
  fn drop(&mut self) {
    unsafe {
      if !self.c_object.is_null() {
        ((*self.c_object).base.release.unwrap())(&mut (*self.c_object).base);
      }
    }
  }
}
impl CefProcessMessage {
pub unsafe fn from_c_object(c_object: *mut cef_process_message_t) -> CefProcessMessage {
CefProcessMessage {
c_object: c_object,
}
}
pub unsafe fn from_c_object_addref(c_object: *mut cef_process_message_t) -> CefProcessMessage {
if !c_object.is_null() {
((*c_object).base.add_ref.unwrap())(&mut (*c_object).base);
}
CefProcessMessage {
c_object: c_object,
}
}
pub fn c_object(&self) -> *mut cef_process_message_t {
self.c_object
}
pub fn c_object_addrefed(&self) -> *mut cef_process_message_t {
unsafe {
if !self.c_object.is_null() {
eutil::add_ref(self.c_object as *mut types::cef_base_t);
}
self.c_object
}
}
pub fn is_null_cef_object(&self) -> bool {
self.c_object.is_null()
}
pub fn is_not_null_cef_object(&self) -> bool {
!self.c_object.is_null()
}
//
// Returns true (1) if this object is valid. Do not call any other functions
// if this function returns false (0).
//
pub fn is_valid(&self) -> libc::c_int {
if self.c_object.is_null() {
panic!("called a CEF method on a null object")
}
unsafe {
CefWrap::to_rust(
((*self.c_object).is_valid.unwrap())(
self.c_object))
}
}
//
// Returns true (1) if the values of this object are read-only. Some APIs may
// expose read-only objects.
//
pub fn is_read_only(&self) -> libc::c_int {
if self.c_object.is_null() {
panic!("called a CEF method on a null object")
}
unsafe {
CefWrap::to_rust(
((*self.c_object).is_read_only.unwrap())(
self.c_object))
}
}
//
// Returns a writable copy of this object.
//
pub fn copy(&self) -> interfaces::CefProcessMessage {
if self.c_object.is_null() {
panic!("called a CEF method on a null object")
}
unsafe {
CefWrap::to_rust(
((*self.c_object).copy.unwrap())(
self.c_object))
}
}
//
// Returns the message name.
//
// The resulting string must be freed by calling cef_string_userfree_free().
pub fn get_name(&self) -> String {
if self.c_object.is_null() {
panic!("called a CEF method on a null object")
}
unsafe {
CefWrap::to_rust(
((*self.c_object).get_name.unwrap())(
self.c_object))
}
}
//
// Returns the list of arguments.
//
pub fn get_argument_list(&self) -> interfaces::CefListValue {
if self.c_object.is_null() {
panic!("called a CEF method on a null object")
}
unsafe {
CefWrap::to_rust(
((*self.c_object).get_argument_list.unwrap())(
self.c_object))
}
}
//
// Create a new cef_process_message_t object with the specified name.
//
pub fn create(name: &[u16]) -> interfaces::CefProcessMessage {
unsafe {
CefWrap::to_rust(
::process_message::cef_process_message_create(
CefWrap::to_c(name)))
}
}
}
impl CefWrap<*mut cef_process_message_t> for CefProcessMessage {
fn to_c(rust_object: CefProcessMessage) -> *mut cef_process_message_t {
rust_object.c_object_addrefed()
}
unsafe fn to_rust(c_object: *mut cef_process_message_t) -> CefProcessMessage {
CefProcessMessage::from_c_object_addref(c_object)
}
}
impl CefWrap<*mut cef_process_message_t> for Option<CefProcessMessage> {
fn to_c(rust_object: Option<CefProcessMessage>) -> *mut cef_process_message_t {
match rust_object {
None => ptr::null_mut(),
Some(rust_object) => rust_object.c_object_addrefed(),
}
}
unsafe fn to_rust(c_object: *mut cef_process_message_t) -> Option<CefProcessMessage> {
if c_object.is_null() {
None
} else {
Some(CefProcessMessage::from_c_object_addref(c_object))
}
}
}
gulpfile.js
var gulp = require('gulp');
var gutil = require("gulp-util");
var webpack = require("webpack");
var WebpackDevServer = require("webpack-dev-server");
var path = require('path');
var Server = require('karma').Server;
var config = {
entry: path.resolve(__dirname, 'app/main.js'),
output: {
path: path.resolve(__dirname, 'build'),
filename: 'main.js'
},
module: {
loaders: [{
test: /\.jsx?$/, // A regexp to test the require path. accepts either js or jsx
loader: 'babel' // The module to load. "babel" is short for "babel-loader"
}]
}
}
gulp.task("webpack", function(callback) {
// run webpack
webpack(config, function(err, stats) {
if(err) throw new gutil.PluginError("webpack", err);
gutil.log("[webpack]", stats.toString({
// output options
}));
callback();
});
});
gulp.task("tests", function(callback) {
// console.log(path.resolve(__dirname + 'test/karma.conf.js'))
new Server({
configFile: path.resolve(__dirname, 'test/karma.config.js'),
singleRun: false
}, callback).start();
});
gulp.task("webpack-dev-server", function(callback) {
// Start a webpack-dev-server
var compiler = webpack(config);
new WebpackDevServer(compiler, {
// server and middleware options
}).listen(8080, "localhost", function(err) {
if(err) throw new gutil.PluginError("webpack-dev-server", err);
// Server listening
gutil.log("[webpack-dev-server]", "http://localhost:8080/webpack-dev-server/index.html");
    // keep the server alive or continue?
    callback();
});
});
org.py
# Copyright (c) 2014-present PlatformIO <[email protected]>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# pylint: disable=unused-argument
import json
import click
from tabulate import tabulate
from platformio.clients.account import AccountClient
from platformio.commands.account import validate_email, validate_username
@click.group("org", short_help="Manage Organizations")
def cli():
pass
def validate_orgname(value):
return validate_username(value, "Organization name")
@cli.command("create", short_help="Create a new organization")
@click.argument(
"orgname", callback=lambda _, __, value: validate_orgname(value),
)
@click.option(
"--email", callback=lambda _, __, value: validate_email(value) if value else value
)
@click.option("--displayname",)
def org_create(orgname, email, displayname):
client = AccountClient()
client.create_org(orgname, email, displayname)
return click.secho(
"The organization %s has been successfully created." % orgname, fg="green",
)
@cli.command("list", short_help="List organizations")
@click.option("--json-output", is_flag=True)
def org_list(json_output):
client = AccountClient()
orgs = client.list_orgs()
if json_output:
return click.echo(json.dumps(orgs))
if not orgs:
return click.echo("You do not have any organizations")
for org in orgs:
click.echo()
click.secho(org.get("orgname"), fg="cyan")
click.echo("-" * len(org.get("orgname")))
data = []
if org.get("displayname"):
data.append(("Display Name:", org.get("displayname")))
if org.get("email"):
data.append(("Email:", org.get("email")))
data.append(
(
"Owners:",
", ".join((owner.get("username") for owner in org.get("owners"))),
)
)
click.echo(tabulate(data, tablefmt="plain"))
return click.echo()
@cli.command("update", short_help="Update organization")
@click.argument("orgname")
@click.option(
"--new-orgname", callback=lambda _, __, value: validate_orgname(value),
)
@click.option("--email")
@click.option("--displayname",)
def org_update(orgname, **kwargs):
client = AccountClient()
org = client.get_org(orgname)
del org["owners"]
new_org = org.copy()
if not any(kwargs.values()):
for field in org:
new_org[field] = click.prompt(
field.replace("_", " ").capitalize(), default=org[field]
)
if field == "email":
validate_email(new_org[field])
if field == "orgname":
validate_orgname(new_org[field])
else:
new_org.update(
{key.replace("new_", ""): value for key, value in kwargs.items() if value}
)
client.update_org(orgname, new_org)
return click.secho(
"The organization %s has been successfully updated." % orgname, fg="green",
)
@cli.command("destroy", short_help="Destroy organization")
@click.argument("orgname")
def account_destroy(orgname):
client = AccountClient()
click.confirm(
"Are you sure you want to delete the %s organization account?\n"
"Warning! All linked data will be permanently removed and can not be restored."
% orgname,
abort=True,
)
client.destroy_org(orgname)
return click.secho("Organization %s has been destroyed." % orgname, fg="green",)
@cli.command("add", short_help="Add a new owner to organization")
@click.argument("orgname",)
@click.argument("username",)
def org_add_owner(orgname, username):
client = AccountClient()
client.add_org_owner(orgname, username)
return click.secho(
"The new owner %s has been successfully added to the %s organization."
% (username, orgname),
fg="green",
)
@cli.command("remove", short_help="Remove an owner from organization")
@click.argument("orgname",)
@click.argument("username",)
def org_remove_owner(orgname, username):
client = AccountClient()
client.remove_org_owner(orgname, username)
return click.secho(
"The %s owner has been successfully removed from the %s organization."
% (username, orgname),
fg="green",
    )
org.py | # Copyright (c) 2014-present PlatformIO <[email protected]>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# pylint: disable=unused-argument
import json
import click
from tabulate import tabulate
from platformio.clients.account import AccountClient
from platformio.commands.account import validate_email, validate_username
@click.group("org", short_help="Manage Organizations")
def cli():
pass
def validate_orgname(value):
return validate_username(value, "Organization name")
@cli.command("create", short_help="Create a new organization")
@click.argument(
"orgname", callback=lambda _, __, value: validate_orgname(value),
)
@click.option(
"--email", callback=lambda _, __, value: validate_email(value) if value else value
)
@click.option("--displayname",)
def org_create(orgname, email, displayname):
client = AccountClient()
client.create_org(orgname, email, displayname)
return click.secho(
"The organization %s has been successfully created." % orgname, fg="green",
)
@cli.command("list", short_help="List organizations")
@click.option("--json-output", is_flag=True)
def org_list(json_output):
client = AccountClient()
orgs = client.list_orgs()
if json_output:
return click.echo(json.dumps(orgs))
if not orgs:
return click.echo("You do not have any organizations")
for org in orgs:
click.echo()
click.secho(org.get("orgname"), fg="cyan")
click.echo("-" * len(org.get("orgname")))
data = []
if org.get("displayname"):
data.append(("Display Name:", org.get("displayname")))
if org.get("email"):
|
data.append(
(
"Owners:",
", ".join((owner.get("username") for owner in org.get("owners"))),
)
)
click.echo(tabulate(data, tablefmt="plain"))
return click.echo()
@cli.command("update", short_help="Update organization")
@click.argument("orgname")
@click.option(
"--new-orgname", callback=lambda _, __, value: validate_orgname(value),
)
@click.option("--email")
@click.option("--displayname",)
def org_update(orgname, **kwargs):
client = AccountClient()
org = client.get_org(orgname)
del org["owners"]
new_org = org.copy()
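# With no option flags, prompt interactively for each field (validating
# email and orgname); otherwise apply only the flags that were passed.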
if not any(kwargs.values()):
for field in org:
new_org[field] = click.prompt(
field.replace("_", " ").capitalize(), default=org[field]
)
if field == "email":
validate_email(new_org[field])
if field == "orgname":
validate_orgname(new_org[field])
else:
new_org.update(
{key.replace("new_", ""): value for key, value in kwargs.items() if value}
)
client.update_org(orgname, new_org)
return click.secho(
"The organization %s has been successfully updated." % orgname, fg="green",
)
@cli.command("destroy", short_help="Destroy organization")
@click.argument("orgname")
def account_destroy(orgname):
client = AccountClient()
click.confirm(
"Are you sure you want to delete the %s organization account?\n"
"Warning! All linked data will be permanently removed and can not be restored."
% orgname,
abort=True,
)
client.destroy_org(orgname)
return click.secho("Organization %s has been destroyed." % orgname, fg="green",)
@cli.command("add", short_help="Add a new owner to organization")
@click.argument("orgname",)
@click.argument("username",)
def org_add_owner(orgname, username):
client = AccountClient()
client.add_org_owner(orgname, username)
return click.secho(
"The new owner %s has been successfully added to the %s organization."
% (username, orgname),
fg="green",
)
@cli.command("remove", short_help="Remove an owner from organization")
@click.argument("orgname",)
@click.argument("username",)
def org_remove_owner(orgname, username):
client = AccountClient()
client.remove_org_owner(orgname, username)
return click.secho(
"The %s owner has been successfully removed from the %s organization."
% (username, orgname),
fg="green",
)
| data.append(("Email:", org.get("email"))) | conditional_block |
org.py | # Copyright (c) 2014-present PlatformIO <[email protected]>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# pylint: disable=unused-argument
import json
import click
from tabulate import tabulate
from platformio.clients.account import AccountClient
from platformio.commands.account import validate_email, validate_username
@click.group("org", short_help="Manage Organizations")
def cli():
pass
def validate_orgname(value):
|
@cli.command("create", short_help="Create a new organization")
@click.argument(
"orgname", callback=lambda _, __, value: validate_orgname(value),
)
@click.option(
"--email", callback=lambda _, __, value: validate_email(value) if value else value
)
@click.option("--displayname",)
def org_create(orgname, email, displayname):
client = AccountClient()
client.create_org(orgname, email, displayname)
return click.secho(
"The organization %s has been successfully created." % orgname, fg="green",
)
@cli.command("list", short_help="List organizations")
@click.option("--json-output", is_flag=True)
def org_list(json_output):
client = AccountClient()
orgs = client.list_orgs()
if json_output:
return click.echo(json.dumps(orgs))
if not orgs:
return click.echo("You do not have any organizations")
for org in orgs:
click.echo()
click.secho(org.get("orgname"), fg="cyan")
click.echo("-" * len(org.get("orgname")))
data = []
if org.get("displayname"):
data.append(("Display Name:", org.get("displayname")))
if org.get("email"):
data.append(("Email:", org.get("email")))
data.append(
(
"Owners:",
", ".join((owner.get("username") for owner in org.get("owners"))),
)
)
click.echo(tabulate(data, tablefmt="plain"))
return click.echo()
@cli.command("update", short_help="Update organization")
@click.argument("orgname")
@click.option(
"--new-orgname", callback=lambda _, __, value: validate_orgname(value),
)
@click.option("--email")
@click.option("--displayname",)
def org_update(orgname, **kwargs):
client = AccountClient()
org = client.get_org(orgname)
del org["owners"]
new_org = org.copy()
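# With no option flags, prompt interactively for each field (validating
# email and orgname); otherwise apply only the flags that were passed.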
if not any(kwargs.values()):
for field in org:
new_org[field] = click.prompt(
field.replace("_", " ").capitalize(), default=org[field]
)
if field == "email":
validate_email(new_org[field])
if field == "orgname":
validate_orgname(new_org[field])
else:
new_org.update(
{key.replace("new_", ""): value for key, value in kwargs.items() if value}
)
client.update_org(orgname, new_org)
return click.secho(
"The organization %s has been successfully updated." % orgname, fg="green",
)
@cli.command("destroy", short_help="Destroy organization")
@click.argument("orgname")
def account_destroy(orgname):
client = AccountClient()
click.confirm(
"Are you sure you want to delete the %s organization account?\n"
"Warning! All linked data will be permanently removed and can not be restored."
% orgname,
abort=True,
)
client.destroy_org(orgname)
return click.secho("Organization %s has been destroyed." % orgname, fg="green",)
@cli.command("add", short_help="Add a new owner to organization")
@click.argument("orgname",)
@click.argument("username",)
def org_add_owner(orgname, username):
client = AccountClient()
client.add_org_owner(orgname, username)
return click.secho(
"The new owner %s has been successfully added to the %s organization."
% (username, orgname),
fg="green",
)
@cli.command("remove", short_help="Remove an owner from organization")
@click.argument("orgname",)
@click.argument("username",)
def org_remove_owner(orgname, username):
client = AccountClient()
client.remove_org_owner(orgname, username)
return click.secho(
"The %s owner has been successfully removed from the %s organization."
% (username, orgname),
fg="green",
)
| return validate_username(value, "Organization name") | identifier_body |
org.py | # Copyright (c) 2014-present PlatformIO <[email protected]>
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# pylint: disable=unused-argument
import json
import click
from tabulate import tabulate
from platformio.clients.account import AccountClient
from platformio.commands.account import validate_email, validate_username
@click.group("org", short_help="Manage Organizations")
def cli():
pass
def validate_orgname(value):
return validate_username(value, "Organization name")
@cli.command("create", short_help="Create a new organization")
@click.argument(
"orgname", callback=lambda _, __, value: validate_orgname(value),
)
@click.option(
"--email", callback=lambda _, __, value: validate_email(value) if value else value
)
@click.option("--displayname",)
def org_create(orgname, email, displayname):
client = AccountClient()
client.create_org(orgname, email, displayname)
return click.secho(
"The organization %s has been successfully created." % orgname, fg="green",
)
@cli.command("list", short_help="List organizations")
@click.option("--json-output", is_flag=True)
def org_list(json_output):
client = AccountClient()
orgs = client.list_orgs()
if json_output:
return click.echo(json.dumps(orgs))
if not orgs:
return click.echo("You do not have any organizations")
for org in orgs:
click.echo()
click.secho(org.get("orgname"), fg="cyan")
click.echo("-" * len(org.get("orgname")))
data = []
if org.get("displayname"):
data.append(("Display Name:", org.get("displayname")))
if org.get("email"):
data.append(("Email:", org.get("email")))
data.append(
(
"Owners:",
", ".join((owner.get("username") for owner in org.get("owners"))),
)
)
click.echo(tabulate(data, tablefmt="plain"))
return click.echo()
@cli.command("update", short_help="Update organization")
@click.argument("orgname")
@click.option(
"--new-orgname", callback=lambda _, __, value: validate_orgname(value),
)
@click.option("--email")
@click.option("--displayname",)
def org_update(orgname, **kwargs):
client = AccountClient()
org = client.get_org(orgname)
del org["owners"]
new_org = org.copy()
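# With no option flags, prompt interactively for each field (validating
# email and orgname); otherwise apply only the flags that were passed.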
if not any(kwargs.values()):
for field in org:
new_org[field] = click.prompt(
field.replace("_", " ").capitalize(), default=org[field]
)
if field == "email":
validate_email(new_org[field])
if field == "orgname":
validate_orgname(new_org[field])
else:
new_org.update(
{key.replace("new_", ""): value for key, value in kwargs.items() if value}
)
client.update_org(orgname, new_org)
return click.secho(
"The organization %s has been successfully updated." % orgname, fg="green",
)
@cli.command("destroy", short_help="Destroy organization")
@click.argument("orgname")
def account_destroy(orgname):
client = AccountClient()
click.confirm(
"Are you sure you want to delete the %s organization account?\n"
"Warning! All linked data will be permanently removed and can not be restored."
% orgname,
abort=True,
)
client.destroy_org(orgname)
return click.secho("Organization %s has been destroyed." % orgname, fg="green",)
| @click.argument("orgname",)
@click.argument("username",)
def org_add_owner(orgname, username):
client = AccountClient()
client.add_org_owner(orgname, username)
return click.secho(
"The new owner %s has been successfully added to the %s organization."
% (username, orgname),
fg="green",
)
@cli.command("remove", short_help="Remove an owner from organization")
@click.argument("orgname",)
@click.argument("username",)
def org_remove_owner(orgname, username):
client = AccountClient()
client.remove_org_owner(orgname, username)
return click.secho(
"The %s owner has been successfully removed from the %s organization."
% (username, orgname),
fg="green",
) |
@cli.command("add", short_help="Add a new owner to organization") | random_line_split |
videotracklist.rs | /* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at https://mozilla.org/MPL/2.0/. */
use crate::dom::bindings::cell::DomRefCell;
use crate::dom::bindings::codegen::Bindings::VideoTrackListBinding::{self, VideoTrackListMethods};
use crate::dom::bindings::inheritance::Castable;
use crate::dom::bindings::refcounted::Trusted;
use crate::dom::bindings::reflector::{reflect_dom_object, DomObject};
use crate::dom::bindings::root::Dom;
use crate::dom::bindings::root::DomRoot;
use crate::dom::bindings::str::DOMString;
use crate::dom::eventtarget::EventTarget;
use crate::dom::htmlmediaelement::HTMLMediaElement;
use crate::dom::videotrack::VideoTrack;
use crate::dom::window::Window;
use crate::task_source::TaskSource;
use dom_struct::dom_struct;
#[dom_struct]
pub struct VideoTrackList {
eventtarget: EventTarget,
tracks: DomRefCell<Vec<Dom<VideoTrack>>>,
media_element: Option<Dom<HTMLMediaElement>>,
}
impl VideoTrackList {
pub fn new_inherited(
tracks: &[&VideoTrack],
media_element: Option<&HTMLMediaElement>,
) -> VideoTrackList {
VideoTrackList {
eventtarget: EventTarget::new_inherited(),
tracks: DomRefCell::new(tracks.iter().map(|track| Dom::from_ref(&**track)).collect()),
media_element: media_element.map(|m| Dom::from_ref(m)),
}
}
pub fn new(
window: &Window,
tracks: &[&VideoTrack],
media_element: Option<&HTMLMediaElement>,
) -> DomRoot<VideoTrackList> {
reflect_dom_object(
Box::new(VideoTrackList::new_inherited(tracks, media_element)),
window,
VideoTrackListBinding::Wrap,
)
}
pub fn len(&self) -> usize {
self.tracks.borrow().len()
}
pub fn find(&self, track: &VideoTrack) -> Option<usize> {
self.tracks.borrow().iter().position(|t| &**t == track)
}
pub fn item(&self, idx: usize) -> Option<DomRoot<VideoTrack>> {
self.tracks
.borrow()
.get(idx)
.map(|track| DomRoot::from_ref(&**track))
}
pub fn selected_index(&self) -> Option<usize> {
self.tracks
.borrow()
.iter()
.position(|track| track.selected())
}
pub fn set_selected(&self, idx: usize, value: bool) {
let track = match self.item(idx) {
Some(t) => t,
None => return,
};
// If the chosen track's selected status is the same as the new status, return early.
if track.selected() == value {
return;
}
let global = &self.global();
let this = Trusted::new(self);
let (source, canceller) = global
.as_window()
.task_manager()
.media_element_task_source_with_canceller();
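// Track selection is exclusive: deselect the currently selected track before applying the new state.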
if let Some(current) = self.selected_index() {
self.tracks.borrow()[current].set_selected(false);
}
track.set_selected(value);
if let Some(media_element) = self.media_element.as_ref() {
media_element.set_video_track(idx, value);
}
let _ = source.queue_with_canceller(
task!(media_track_change: move || {
let this = this.root();
this.upcast::<EventTarget>().fire_event(atom!("change"));
}),
&canceller,
);
}
pub fn add(&self, track: &VideoTrack) {
self.tracks.borrow_mut().push(Dom::from_ref(track));
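// An added track that is already selected becomes the new single selection.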
if track.selected() {
if let Some(idx) = self.selected_index() |
}
track.add_track_list(self);
}
pub fn clear(&self) {
self.tracks
.borrow()
.iter()
.for_each(|t| t.remove_track_list());
self.tracks.borrow_mut().clear();
}
}
impl VideoTrackListMethods for VideoTrackList {
// https://html.spec.whatwg.org/multipage/#dom-videotracklist-length
fn Length(&self) -> u32 {
self.len() as u32
}
// https://html.spec.whatwg.org/multipage/#dom-tracklist-item
fn IndexedGetter(&self, idx: u32) -> Option<DomRoot<VideoTrack>> {
self.item(idx as usize)
}
// https://html.spec.whatwg.org/multipage/#dom-videotracklist-gettrackbyid
fn GetTrackById(&self, id: DOMString) -> Option<DomRoot<VideoTrack>> {
self.tracks
.borrow()
.iter()
.find(|track| track.id() == id)
.map(|track| DomRoot::from_ref(&**track))
}
// https://html.spec.whatwg.org/multipage/#dom-videotrack-selected
fn SelectedIndex(&self) -> i32 {
if let Some(idx) = self.selected_index() {
return idx as i32;
}
return -1;
}
// https://html.spec.whatwg.org/multipage/#handler-tracklist-onchange
event_handler!(change, GetOnchange, SetOnchange);
// https://html.spec.whatwg.org/multipage/#handler-tracklist-onaddtrack
event_handler!(addtrack, GetOnaddtrack, SetOnaddtrack);
// https://html.spec.whatwg.org/multipage/#handler-tracklist-onremovetrack
event_handler!(removetrack, GetOnremovetrack, SetOnremovetrack);
}
| {
self.set_selected(idx, false);
} | conditional_block |
videotracklist.rs | /* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at https://mozilla.org/MPL/2.0/. */
use crate::dom::bindings::cell::DomRefCell;
use crate::dom::bindings::codegen::Bindings::VideoTrackListBinding::{self, VideoTrackListMethods};
use crate::dom::bindings::inheritance::Castable;
use crate::dom::bindings::refcounted::Trusted;
use crate::dom::bindings::reflector::{reflect_dom_object, DomObject};
use crate::dom::bindings::root::Dom;
use crate::dom::bindings::root::DomRoot;
use crate::dom::bindings::str::DOMString;
use crate::dom::eventtarget::EventTarget;
use crate::dom::htmlmediaelement::HTMLMediaElement;
use crate::dom::videotrack::VideoTrack;
use crate::dom::window::Window;
use crate::task_source::TaskSource;
use dom_struct::dom_struct;
#[dom_struct]
pub struct VideoTrackList {
eventtarget: EventTarget,
tracks: DomRefCell<Vec<Dom<VideoTrack>>>,
media_element: Option<Dom<HTMLMediaElement>>,
}
impl VideoTrackList {
pub fn new_inherited(
tracks: &[&VideoTrack],
media_element: Option<&HTMLMediaElement>,
) -> VideoTrackList {
VideoTrackList {
eventtarget: EventTarget::new_inherited(),
tracks: DomRefCell::new(tracks.iter().map(|track| Dom::from_ref(&**track)).collect()),
media_element: media_element.map(|m| Dom::from_ref(m)),
}
}
pub fn new(
window: &Window,
tracks: &[&VideoTrack],
media_element: Option<&HTMLMediaElement>,
) -> DomRoot<VideoTrackList> {
reflect_dom_object(
Box::new(VideoTrackList::new_inherited(tracks, media_element)),
window,
VideoTrackListBinding::Wrap,
)
}
pub fn len(&self) -> usize {
self.tracks.borrow().len()
}
pub fn find(&self, track: &VideoTrack) -> Option<usize> {
self.tracks.borrow().iter().position(|t| &**t == track)
}
pub fn item(&self, idx: usize) -> Option<DomRoot<VideoTrack>> {
self.tracks
.borrow()
.get(idx)
.map(|track| DomRoot::from_ref(&**track))
}
pub fn selected_index(&self) -> Option<usize> {
self.tracks
.borrow()
.iter()
.position(|track| track.selected())
}
pub fn set_selected(&self, idx: usize, value: bool) {
let track = match self.item(idx) {
Some(t) => t,
None => return,
};
// If the chosen track's selected status is the same as the new status, return early.
if track.selected() == value {
return;
}
let global = &self.global(); | let this = Trusted::new(self);
let (source, canceller) = global
.as_window()
.task_manager()
.media_element_task_source_with_canceller();
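// Track selection is exclusive: deselect the currently selected track before applying the new state.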
if let Some(current) = self.selected_index() {
self.tracks.borrow()[current].set_selected(false);
}
track.set_selected(value);
if let Some(media_element) = self.media_element.as_ref() {
media_element.set_video_track(idx, value);
}
let _ = source.queue_with_canceller(
task!(media_track_change: move || {
let this = this.root();
this.upcast::<EventTarget>().fire_event(atom!("change"));
}),
&canceller,
);
}
pub fn add(&self, track: &VideoTrack) {
self.tracks.borrow_mut().push(Dom::from_ref(track));
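// An added track that is already selected becomes the new single selection.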
if track.selected() {
if let Some(idx) = self.selected_index() {
self.set_selected(idx, false);
}
}
track.add_track_list(self);
}
pub fn clear(&self) {
self.tracks
.borrow()
.iter()
.for_each(|t| t.remove_track_list());
self.tracks.borrow_mut().clear();
}
}
impl VideoTrackListMethods for VideoTrackList {
// https://html.spec.whatwg.org/multipage/#dom-videotracklist-length
fn Length(&self) -> u32 {
self.len() as u32
}
// https://html.spec.whatwg.org/multipage/#dom-tracklist-item
fn IndexedGetter(&self, idx: u32) -> Option<DomRoot<VideoTrack>> {
self.item(idx as usize)
}
// https://html.spec.whatwg.org/multipage/#dom-videotracklist-gettrackbyid
fn GetTrackById(&self, id: DOMString) -> Option<DomRoot<VideoTrack>> {
self.tracks
.borrow()
.iter()
.find(|track| track.id() == id)
.map(|track| DomRoot::from_ref(&**track))
}
// https://html.spec.whatwg.org/multipage/#dom-videotrack-selected
fn SelectedIndex(&self) -> i32 {
if let Some(idx) = self.selected_index() {
return idx as i32;
}
return -1;
}
// https://html.spec.whatwg.org/multipage/#handler-tracklist-onchange
event_handler!(change, GetOnchange, SetOnchange);
// https://html.spec.whatwg.org/multipage/#handler-tracklist-onaddtrack
event_handler!(addtrack, GetOnaddtrack, SetOnaddtrack);
// https://html.spec.whatwg.org/multipage/#handler-tracklist-onremovetrack
event_handler!(removetrack, GetOnremovetrack, SetOnremovetrack);
} | random_line_split |
|
videotracklist.rs | /* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at https://mozilla.org/MPL/2.0/. */
use crate::dom::bindings::cell::DomRefCell;
use crate::dom::bindings::codegen::Bindings::VideoTrackListBinding::{self, VideoTrackListMethods};
use crate::dom::bindings::inheritance::Castable;
use crate::dom::bindings::refcounted::Trusted;
use crate::dom::bindings::reflector::{reflect_dom_object, DomObject};
use crate::dom::bindings::root::Dom;
use crate::dom::bindings::root::DomRoot;
use crate::dom::bindings::str::DOMString;
use crate::dom::eventtarget::EventTarget;
use crate::dom::htmlmediaelement::HTMLMediaElement;
use crate::dom::videotrack::VideoTrack;
use crate::dom::window::Window;
use crate::task_source::TaskSource;
use dom_struct::dom_struct;
#[dom_struct]
pub struct VideoTrackList {
eventtarget: EventTarget,
tracks: DomRefCell<Vec<Dom<VideoTrack>>>,
media_element: Option<Dom<HTMLMediaElement>>,
}
impl VideoTrackList {
pub fn new_inherited(
tracks: &[&VideoTrack],
media_element: Option<&HTMLMediaElement>,
) -> VideoTrackList {
VideoTrackList {
eventtarget: EventTarget::new_inherited(),
tracks: DomRefCell::new(tracks.iter().map(|track| Dom::from_ref(&**track)).collect()),
media_element: media_element.map(|m| Dom::from_ref(m)),
}
}
pub fn new(
window: &Window,
tracks: &[&VideoTrack],
media_element: Option<&HTMLMediaElement>,
) -> DomRoot<VideoTrackList> {
reflect_dom_object(
Box::new(VideoTrackList::new_inherited(tracks, media_element)),
window,
VideoTrackListBinding::Wrap,
)
}
pub fn len(&self) -> usize |
pub fn find(&self, track: &VideoTrack) -> Option<usize> {
self.tracks.borrow().iter().position(|t| &**t == track)
}
pub fn item(&self, idx: usize) -> Option<DomRoot<VideoTrack>> {
self.tracks
.borrow()
.get(idx)
.map(|track| DomRoot::from_ref(&**track))
}
pub fn selected_index(&self) -> Option<usize> {
self.tracks
.borrow()
.iter()
.position(|track| track.selected())
}
pub fn set_selected(&self, idx: usize, value: bool) {
let track = match self.item(idx) {
Some(t) => t,
None => return,
};
// If the chosen track's selected status is the same as the new status, return early.
if track.selected() == value {
return;
}
let global = &self.global();
let this = Trusted::new(self);
let (source, canceller) = global
.as_window()
.task_manager()
.media_element_task_source_with_canceller();
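// Track selection is exclusive: deselect the currently selected track before applying the new state.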
if let Some(current) = self.selected_index() {
self.tracks.borrow()[current].set_selected(false);
}
track.set_selected(value);
if let Some(media_element) = self.media_element.as_ref() {
media_element.set_video_track(idx, value);
}
let _ = source.queue_with_canceller(
task!(media_track_change: move || {
let this = this.root();
this.upcast::<EventTarget>().fire_event(atom!("change"));
}),
&canceller,
);
}
pub fn add(&self, track: &VideoTrack) {
self.tracks.borrow_mut().push(Dom::from_ref(track));
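// An added track that is already selected becomes the new single selection.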
if track.selected() {
if let Some(idx) = self.selected_index() {
self.set_selected(idx, false);
}
}
track.add_track_list(self);
}
pub fn clear(&self) {
self.tracks
.borrow()
.iter()
.for_each(|t| t.remove_track_list());
self.tracks.borrow_mut().clear();
}
}
impl VideoTrackListMethods for VideoTrackList {
// https://html.spec.whatwg.org/multipage/#dom-videotracklist-length
fn Length(&self) -> u32 {
self.len() as u32
}
// https://html.spec.whatwg.org/multipage/#dom-tracklist-item
fn IndexedGetter(&self, idx: u32) -> Option<DomRoot<VideoTrack>> {
self.item(idx as usize)
}
// https://html.spec.whatwg.org/multipage/#dom-videotracklist-gettrackbyid
fn GetTrackById(&self, id: DOMString) -> Option<DomRoot<VideoTrack>> {
self.tracks
.borrow()
.iter()
.find(|track| track.id() == id)
.map(|track| DomRoot::from_ref(&**track))
}
// https://html.spec.whatwg.org/multipage/#dom-videotrack-selected
fn SelectedIndex(&self) -> i32 {
if let Some(idx) = self.selected_index() {
return idx as i32;
}
return -1;
}
// https://html.spec.whatwg.org/multipage/#handler-tracklist-onchange
event_handler!(change, GetOnchange, SetOnchange);
// https://html.spec.whatwg.org/multipage/#handler-tracklist-onaddtrack
event_handler!(addtrack, GetOnaddtrack, SetOnaddtrack);
// https://html.spec.whatwg.org/multipage/#handler-tracklist-onremovetrack
event_handler!(removetrack, GetOnremovetrack, SetOnremovetrack);
}
| {
self.tracks.borrow().len()
} | identifier_body |
videotracklist.rs | /* This Source Code Form is subject to the terms of the Mozilla Public
* License, v. 2.0. If a copy of the MPL was not distributed with this
* file, You can obtain one at https://mozilla.org/MPL/2.0/. */
use crate::dom::bindings::cell::DomRefCell;
use crate::dom::bindings::codegen::Bindings::VideoTrackListBinding::{self, VideoTrackListMethods};
use crate::dom::bindings::inheritance::Castable;
use crate::dom::bindings::refcounted::Trusted;
use crate::dom::bindings::reflector::{reflect_dom_object, DomObject};
use crate::dom::bindings::root::Dom;
use crate::dom::bindings::root::DomRoot;
use crate::dom::bindings::str::DOMString;
use crate::dom::eventtarget::EventTarget;
use crate::dom::htmlmediaelement::HTMLMediaElement;
use crate::dom::videotrack::VideoTrack;
use crate::dom::window::Window;
use crate::task_source::TaskSource;
use dom_struct::dom_struct;
#[dom_struct]
pub struct | {
eventtarget: EventTarget,
tracks: DomRefCell<Vec<Dom<VideoTrack>>>,
media_element: Option<Dom<HTMLMediaElement>>,
}
impl VideoTrackList {
pub fn new_inherited(
tracks: &[&VideoTrack],
media_element: Option<&HTMLMediaElement>,
) -> VideoTrackList {
VideoTrackList {
eventtarget: EventTarget::new_inherited(),
tracks: DomRefCell::new(tracks.iter().map(|track| Dom::from_ref(&**track)).collect()),
media_element: media_element.map(|m| Dom::from_ref(m)),
}
}
pub fn new(
window: &Window,
tracks: &[&VideoTrack],
media_element: Option<&HTMLMediaElement>,
) -> DomRoot<VideoTrackList> {
reflect_dom_object(
Box::new(VideoTrackList::new_inherited(tracks, media_element)),
window,
VideoTrackListBinding::Wrap,
)
}
pub fn len(&self) -> usize {
self.tracks.borrow().len()
}
pub fn find(&self, track: &VideoTrack) -> Option<usize> {
self.tracks.borrow().iter().position(|t| &**t == track)
}
pub fn item(&self, idx: usize) -> Option<DomRoot<VideoTrack>> {
self.tracks
.borrow()
.get(idx)
.map(|track| DomRoot::from_ref(&**track))
}
pub fn selected_index(&self) -> Option<usize> {
self.tracks
.borrow()
.iter()
.position(|track| track.selected())
}
pub fn set_selected(&self, idx: usize, value: bool) {
let track = match self.item(idx) {
Some(t) => t,
None => return,
};
// If the chosen track's selected status is the same as the new status, return early.
if track.selected() == value {
return;
}
let global = &self.global();
let this = Trusted::new(self);
let (source, canceller) = global
.as_window()
.task_manager()
.media_element_task_source_with_canceller();
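// Track selection is exclusive: deselect the currently selected track before applying the new state.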
if let Some(current) = self.selected_index() {
self.tracks.borrow()[current].set_selected(false);
}
track.set_selected(value);
if let Some(media_element) = self.media_element.as_ref() {
media_element.set_video_track(idx, value);
}
let _ = source.queue_with_canceller(
task!(media_track_change: move || {
let this = this.root();
this.upcast::<EventTarget>().fire_event(atom!("change"));
}),
&canceller,
);
}
pub fn add(&self, track: &VideoTrack) {
self.tracks.borrow_mut().push(Dom::from_ref(track));
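// An added track that is already selected becomes the new single selection.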
if track.selected() {
if let Some(idx) = self.selected_index() {
self.set_selected(idx, false);
}
}
track.add_track_list(self);
}
pub fn clear(&self) {
self.tracks
.borrow()
.iter()
.for_each(|t| t.remove_track_list());
self.tracks.borrow_mut().clear();
}
}
impl VideoTrackListMethods for VideoTrackList {
// https://html.spec.whatwg.org/multipage/#dom-videotracklist-length
fn Length(&self) -> u32 {
self.len() as u32
}
// https://html.spec.whatwg.org/multipage/#dom-tracklist-item
fn IndexedGetter(&self, idx: u32) -> Option<DomRoot<VideoTrack>> {
self.item(idx as usize)
}
// https://html.spec.whatwg.org/multipage/#dom-videotracklist-gettrackbyid
fn GetTrackById(&self, id: DOMString) -> Option<DomRoot<VideoTrack>> {
self.tracks
.borrow()
.iter()
.find(|track| track.id() == id)
.map(|track| DomRoot::from_ref(&**track))
}
// https://html.spec.whatwg.org/multipage/#dom-videotrack-selected
fn SelectedIndex(&self) -> i32 {
if let Some(idx) = self.selected_index() {
return idx as i32;
}
return -1;
}
// https://html.spec.whatwg.org/multipage/#handler-tracklist-onchange
event_handler!(change, GetOnchange, SetOnchange);
// https://html.spec.whatwg.org/multipage/#handler-tracklist-onaddtrack
event_handler!(addtrack, GetOnaddtrack, SetOnaddtrack);
// https://html.spec.whatwg.org/multipage/#handler-tracklist-onremovetrack
event_handler!(removetrack, GetOnremovetrack, SetOnremovetrack);
}
| VideoTrackList | identifier_name |
lib.rs | #![crate_name = "nfc"]
#![crate_type = "dylib"]
extern crate libc;
pub mod ffi;
pub mod initiator;
pub mod target;
pub mod device;
pub mod context;
pub mod error;
pub mod misc;
pub mod to_str;
use libc::size_t;
// Library initialization/deinitialization
// See http://www.libnfc.org/api/group__lib.html
/// Registers an NFC device driver with libnfc
pub fn register_driver(ndr: *const ffi::nfc_driver) -> i32 {
unsafe { ffi::nfc_register_driver(ndr) }
}
/// Initializes libnfc. This function must be called before calling any other libnfc function
pub fn init(context: *mut *mut ffi::nfc_context) {
unsafe { ffi::nfc_init(context); }
}
/// Deinitializes libnfc. Should be called after closing all open devices and before your application terminates
pub fn exit(context: *mut ffi::nfc_context) {
unsafe { ffi::nfc_exit(context); }
}
// NFC Device/Hardware manipulation
// http://www.libnfc.org/api/group__dev.html
/// Open an NFC device
pub fn open(context: *mut ffi::nfc_context, connstring: *const ffi::nfc_connstring) -> *mut ffi::nfc_device {
unsafe { ffi::nfc_open(context, connstring) }
}
/// Close from a NFC device
pub fn close(pnd: *mut ffi::nfc_device) |
/// Scan for discoverable supported devices
pub fn list_devices(context: *mut ffi::nfc_context, connstrings: *mut ffi::nfc_connstring, constrings_len: size_t) -> size_t {
unsafe { ffi::nfc_list_devices(context, connstrings, constrings_len) }
}
/// Switches the NFC device to idle mode
pub fn idle(pnd: *mut ffi::nfc_device) -> i32 {
unsafe { ffi::nfc_idle(pnd) }
}
/// Aborts current running command
pub fn abort_command(pnd: *mut ffi::nfc_device) -> i32 {
unsafe { ffi::nfc_abort_command(pnd) }
}
| {
unsafe { ffi::nfc_close(pnd); }
} | identifier_body |
lib.rs | #![crate_name = "nfc"]
#![crate_type = "dylib"]
extern crate libc; | pub mod target;
pub mod device;
pub mod context;
pub mod error;
pub mod misc;
pub mod to_str;
use libc::size_t;
// Library initialization/deinitialization
// See http://www.libnfc.org/api/group__lib.html
/// Registers an NFC device driver with libnfc
pub fn register_driver(ndr: *const ffi::nfc_driver) -> i32 {
unsafe { ffi::nfc_register_driver(ndr) }
}
/// Initializes libnfc. This function must be called before calling any other libnfc function
pub fn init(context: *mut *mut ffi::nfc_context) {
unsafe { ffi::nfc_init(context); }
}
/// Deinitializes libnfc. Should be called after closing all open devices and before your application terminates
pub fn exit(context: *mut ffi::nfc_context) {
unsafe { ffi::nfc_exit(context); }
}
// NFC Device/Hardware manipulation
// http://www.libnfc.org/api/group__dev.html
/// Open an NFC device
pub fn open(context: *mut ffi::nfc_context, connstring: *const ffi::nfc_connstring) -> *mut ffi::nfc_device {
unsafe { ffi::nfc_open(context, connstring) }
}
/// Close from a NFC device
pub fn close(pnd: *mut ffi::nfc_device) {
unsafe { ffi::nfc_close(pnd); }
}
/// Scan for discoverable supported devices
pub fn list_devices(context: *mut ffi::nfc_context, connstrings: *mut ffi::nfc_connstring, constrings_len: size_t) -> size_t {
unsafe { ffi::nfc_list_devices(context, connstrings, constrings_len) }
}
/// Switches the NFC device to idle mode
pub fn idle(pnd: *mut ffi::nfc_device) -> i32 {
unsafe { ffi::nfc_idle(pnd) }
}
/// Aborts current running command
pub fn abort_command(pnd: *mut ffi::nfc_device) -> i32 {
unsafe { ffi::nfc_abort_command(pnd) }
} |
pub mod ffi;
pub mod initiator; | random_line_split |
lib.rs | #![crate_name = "nfc"]
#![crate_type = "dylib"]
extern crate libc;
pub mod ffi;
pub mod initiator;
pub mod target;
pub mod device;
pub mod context;
pub mod error;
pub mod misc;
pub mod to_str;
use libc::size_t;
// Library initialization/deinitialization
// See http://www.libnfc.org/api/group__lib.html
/// Registers an NFC device driver with libnfc
pub fn | (ndr: *const ffi::nfc_driver) -> i32 {
unsafe { ffi::nfc_register_driver(ndr) }
}
/// Initializes libnfc. This function must be called before calling any other libnfc function
pub fn init(context: *mut *mut ffi::nfc_context) {
unsafe { ffi::nfc_init(context); }
}
/// Deinitializes libnfc. Should be called after closing all open devices and before your application terminates
pub fn exit(context: *mut ffi::nfc_context) {
unsafe { ffi::nfc_exit(context); }
}
// NFC Device/Hardware manipulation
// http://www.libnfc.org/api/group__dev.html
/// Open an NFC device
pub fn open(context: *mut ffi::nfc_context, connstring: *const ffi::nfc_connstring) -> *mut ffi::nfc_device {
unsafe { ffi::nfc_open(context, connstring) }
}
/// Close from a NFC device
pub fn close(pnd: *mut ffi::nfc_device) {
unsafe { ffi::nfc_close(pnd); }
}
/// Scan for discoverable supported devices
pub fn list_devices(context: *mut ffi::nfc_context, connstrings: *mut ffi::nfc_connstring, constrings_len: size_t) -> size_t {
unsafe { ffi::nfc_list_devices(context, connstrings, constrings_len) }
}
/// Switches the NFC device to idle mode
pub fn idle(pnd: *mut ffi::nfc_device) -> i32 {
unsafe { ffi::nfc_idle(pnd) }
}
/// Aborts current running command
pub fn abort_command(pnd: *mut ffi::nfc_device) -> i32 {
unsafe { ffi::nfc_abort_command(pnd) }
}
| register_driver | identifier_name |
connection.js | var EventEmitter = require('events').EventEmitter;
var util = require('util');
var MongoClient = require('mongodb').MongoClient;
var Channel = require('./channel');
/**
* Connection constructor.
*
* @param {String|Db} uri string or Db instance
* @param {Object} mongo driver options
* @api public
*/
function Connection(uri, options) | this.destroyed = false;
this.channels = {};
}
module.exports = Connection;
util.inherits(Connection, EventEmitter);
/**
* Current connection state.
*
* @type {String}
* @api public
*/
Object.defineProperty(Connection.prototype, 'state', {
enumerable: true,
get: function () {
var state;
// Using 'destroyed' to be compatible with the driver.
if (this.destroyed) {
state = 'destroyed';
}
else if (this.db) {
state = this.db.serverConfig.isConnected()
? 'connected' : 'disconnected';
} else {
state = 'connecting';
}
return state;
}
});
/**
* Creates or returns a channel with the passed name.
*
* @see Channel
* @return {Channel}
* @api public
*/
Connection.prototype.channel = function (name, options) {
if (typeof name === 'object') {
options = name;
name = 'mubsub';
}
if (!this.channels[name] || this.channels[name].closed) {
this.channels[name] = new Channel(this, name, options);
}
return this.channels[name];
};
/**
* Close connection.
*
* @param {Function} [callback]
* @return {Connection} this
* @api public
*/
Connection.prototype.close = function (callback) {
this.destroyed = true;
this.emit('close');
this.db.close(callback);
return this;
};
| {
var self = this;
options || (options = {});
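// Default autoReconnect to true unless the caller set it explicitly.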
options.autoReconnect != null || (options.autoReconnect = true);
// It's a Db instance.
if (uri.collection) {
this.db = uri;
} else {
MongoClient.connect(uri, options, function (err, db) {
if (err) return self.emit('error', err);
self.db = db;
self.emit('connect', db);
db.on('error', function (err) {
self.emit('error', err);
});
});
}
| identifier_body |
connection.js | var EventEmitter = require('events').EventEmitter;
var util = require('util');
var MongoClient = require('mongodb').MongoClient;
var Channel = require('./channel');
/**
* Connection constructor.
*
* @param {String|Db} uri string or Db instance
* @param {Object} mongo driver options
* @api public
*/
function Connection(uri, options) {
var self = this;
options || (options = {});
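// Default autoReconnect to true unless the caller set it explicitly.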
options.autoReconnect != null || (options.autoReconnect = true);
// It's a Db instance.
if (uri.collection) {
this.db = uri;
} else {
MongoClient.connect(uri, options, function (err, db) {
if (err) return self.emit('error', err);
self.db = db;
self.emit('connect', db);
db.on('error', function (err) {
self.emit('error', err);
});
});
}
this.destroyed = false;
this.channels = {};
}
module.exports = Connection;
util.inherits(Connection, EventEmitter);
/**
* Current connection state.
*
* @type {String}
* @api public
*/
Object.defineProperty(Connection.prototype, 'state', {
enumerable: true,
| var state;
// Using 'destroyed' to be compatible with the driver.
if (this.destroyed) {
state = 'destroyed';
}
else if (this.db) {
state = this.db.serverConfig.isConnected()
? 'connected' : 'disconnected';
} else {
state = 'connecting';
}
return state;
}
});
/**
* Creates or returns a channel with the passed name.
*
* @see Channel
* @return {Channel}
* @api public
*/
Connection.prototype.channel = function (name, options) {
if (typeof name === 'object') {
options = name;
name = 'mubsub';
}
if (!this.channels[name] || this.channels[name].closed) {
this.channels[name] = new Channel(this, name, options);
}
return this.channels[name];
};
/**
* Close connection.
*
* @param {Function} [callback]
* @return {Connection} this
* @api public
*/
Connection.prototype.close = function (callback) {
this.destroyed = true;
this.emit('close');
this.db.close(callback);
return this;
}; | get: function () { | random_line_split |
connection.js | var EventEmitter = require('events').EventEmitter;
var util = require('util');
var MongoClient = require('mongodb').MongoClient;
var Channel = require('./channel');
/**
* Connection constructor.
*
* @param {String|Db} uri string or Db instance
* @param {Object} mongo driver options
* @api public
*/
function Connection(uri, options) {
var self = this;
options || (options = {});
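// Default autoReconnect to true unless the caller set it explicitly.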
options.autoReconnect != null || (options.autoReconnect = true);
// It's a Db instance.
if (uri.collection) {
this.db = uri;
} else {
MongoClient.connect(uri, options, function (err, db) {
if (err) return self.emit('error', err);
self.db = db;
self.emit('connect', db);
db.on('error', function (err) {
self.emit('error', err);
});
});
}
this.destroyed = false;
this.channels = {};
}
module.exports = Connection;
util.inherits(Connection, EventEmitter);
/**
* Current connection state.
*
* @type {String}
* @api public
*/
Object.defineProperty(Connection.prototype, 'state', {
enumerable: true,
get: function () {
var state;
// Using 'destroyed' to be compatible with the driver.
if (this.destroyed) {
state = 'destroyed';
}
else if (this.db) | else {
state = 'connecting';
}
return state;
}
});
/**
* Creates or returns a channel with the passed name.
*
* @see Channel
* @return {Channel}
* @api public
*/
Connection.prototype.channel = function (name, options) {
if (typeof name === 'object') {
options = name;
name = 'mubsub';
}
if (!this.channels[name] || this.channels[name].closed) {
this.channels[name] = new Channel(this, name, options);
}
return this.channels[name];
};
/**
* Close connection.
*
* @param {Function} [callback]
* @return {Connection} this
* @api public
*/
Connection.prototype.close = function (callback) {
this.destroyed = true;
this.emit('close');
this.db.close(callback);
return this;
};
| {
state = this.db.serverConfig.isConnected()
? 'connected' : 'disconnected';
} | conditional_block |
connection.js | var EventEmitter = require('events').EventEmitter;
var util = require('util');
var MongoClient = require('mongodb').MongoClient;
var Channel = require('./channel');
/**
* Connection constructor.
*
* @param {String|Db} uri string or Db instance
* @param {Object} mongo driver options
* @api public
*/
function | (uri, options) {
var self = this;
options || (options = {});
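// Default autoReconnect to true unless the caller set it explicitly.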
options.autoReconnect != null || (options.autoReconnect = true);
// It's a Db instance.
if (uri.collection) {
this.db = uri;
} else {
MongoClient.connect(uri, options, function (err, db) {
if (err) return self.emit('error', err);
self.db = db;
self.emit('connect', db);
db.on('error', function (err) {
self.emit('error', err);
});
});
}
this.destroyed = false;
this.channels = {};
}
module.exports = Connection;
util.inherits(Connection, EventEmitter);
/**
* Current connection state.
*
* @type {String}
* @api public
*/
Object.defineProperty(Connection.prototype, 'state', {
enumerable: true,
get: function () {
var state;
// Using 'destroyed' to be compatible with the driver.
if (this.destroyed) {
state = 'destroyed';
}
else if (this.db) {
state = this.db.serverConfig.isConnected()
? 'connected' : 'disconnected';
} else {
state = 'connecting';
}
return state;
}
});
/**
* Creates or returns a channel with the passed name.
*
* @see Channel
* @return {Channel}
* @api public
*/
Connection.prototype.channel = function (name, options) {
if (typeof name === 'object') {
options = name;
name = 'mubsub';
}
if (!this.channels[name] || this.channels[name].closed) {
this.channels[name] = new Channel(this, name, options);
}
return this.channels[name];
};
/**
* Close connection.
*
* @param {Function} [callback]
* @return {Connection} this
* @api public
*/
Connection.prototype.close = function (callback) {
this.destroyed = true;
this.emit('close');
this.db.close(callback);
return this;
};
| Connection | identifier_name |
order.py | from ..cw_model import CWModel
class Order(CWModel):
def __init__(self, json_dict=None):
self.id = None # (Integer)
self.company = None # *(CompanyReference)
self.contact = None # (ContactReference)
self.phone = None # (String)
self.phoneExt = None # (String)
self.email = None # (String)
self.site = None # (SiteReference)
self.status = None # *(OrderStatusReference)
self.opportunity = None # (OpportunityReference)
| self.poNumber = None # (String(50))
self.locationId = None # (Integer)
self.businessUnitId = None # (Integer)
self.salesRep = None # *(MemberReference)
self.notes = None # (String)
self.billClosedFlag = None # (Boolean)
self.billShippedFlag = None # (Boolean)
self.restrictDownpaymentFlag = None # (Boolean)
self.description = None # (String)
self.topCommentFlag = None # (Boolean)
self.bottomCommentFlag = None # (Boolean)
self.shipToCompany = None # (CompanyReference)
self.shipToContact = None # (ContactReference)
self.shipToSite = None # (SiteReference)
self.billToCompany = None # (CompanyReference)
self.billToContact = None # (ContactReference)
self.billToSite = None # (SiteReference)
self.productIds = None # (Integer[])
self.documentIds = None # (Integer[])
self.invoiceIds = None # (Integer[])
self.configIds = None # (Integer[])
self.total = None # (Number)
self.taxTotal = None # (Number)
self._info = None # (Metadata)
# initialize object with json dict
super().__init__(json_dict) | self.orderDate = None # (String)
self.dueDate = None # (String)
self.billingTerms = None # (BillingTermsReference)
self.taxCode = None # (TaxCodeReference)
| random_line_split |
order.py | from ..cw_model import CWModel
class Order(CWModel):
def __init__(self, json_dict=None):
| self.restrictDownpaymentFlag = None # (Boolean)
self.description = None # (String)
self.topCommentFlag = None # (Boolean)
self.bottomCommentFlag = None # (Boolean)
self.shipToCompany = None # (CompanyReference)
self.shipToContact = None # (ContactReference)
self.shipToSite = None # (SiteReference)
self.billToCompany = None # (CompanyReference)
self.billToContact = None # (ContactReference)
self.billToSite = None # (SiteReference)
self.productIds = None # (Integer[])
self.documentIds = None # (Integer[])
self.invoiceIds = None # (Integer[])
self.configIds = None # (Integer[])
self.total = None # (Number)
self.taxTotal = None # (Number)
self._info = None # (Metadata)
# initialize object with json dict
super().__init__(json_dict)
| self.id = None # (Integer)
self.company = None # *(CompanyReference)
self.contact = None # (ContactReference)
self.phone = None # (String)
self.phoneExt = None # (String)
self.email = None # (String)
self.site = None # (SiteReference)
self.status = None # *(OrderStatusReference)
self.opportunity = None # (OpportunityReference)
self.orderDate = None # (String)
self.dueDate = None # (String)
self.billingTerms = None # (BillingTermsReference)
self.taxCode = None # (TaxCodeReference)
self.poNumber = None # (String(50))
self.locationId = None # (Integer)
self.businessUnitId = None # (Integer)
self.salesRep = None # *(MemberReference)
self.notes = None # (String)
self.billClosedFlag = None # (Boolean)
self.billShippedFlag = None # (Boolean)
| identifier_body |
order.py | from ..cw_model import CWModel
class | (CWModel):
def __init__(self, json_dict=None):
self.id = None # (Integer)
self.company = None # *(CompanyReference)
self.contact = None # (ContactReference)
self.phone = None # (String)
self.phoneExt = None # (String)
self.email = None # (String)
self.site = None # (SiteReference)
self.status = None # *(OrderStatusReference)
self.opportunity = None # (OpportunityReference)
self.orderDate = None # (String)
self.dueDate = None # (String)
self.billingTerms = None # (BillingTermsReference)
self.taxCode = None # (TaxCodeReference)
self.poNumber = None # (String(50))
self.locationId = None # (Integer)
self.businessUnitId = None # (Integer)
self.salesRep = None # *(MemberReference)
self.notes = None # (String)
self.billClosedFlag = None # (Boolean)
self.billShippedFlag = None # (Boolean)
self.restrictDownpaymentFlag = None # (Boolean)
self.description = None # (String)
self.topCommentFlag = None # (Boolean)
self.bottomCommentFlag = None # (Boolean)
self.shipToCompany = None # (CompanyReference)
self.shipToContact = None # (ContactReference)
self.shipToSite = None # (SiteReference)
self.billToCompany = None # (CompanyReference)
self.billToContact = None # (ContactReference)
self.billToSite = None # (SiteReference)
self.productIds = None # (Integer[])
self.documentIds = None # (Integer[])
self.invoiceIds = None # (Integer[])
self.configIds = None # (Integer[])
self.total = None # (Number)
self.taxTotal = None # (Number)
self._info = None # (Metadata)
# initialize object with json dict
super().__init__(json_dict)
| Order | identifier_name |
io.py | """Microscoper is a wrapper around bioformats using a forked
python-bioformats to extract the raw images from Olympus IX83
CellSense .vsi format, into a more commonly used TIFF format.
Images are bundled together according to their channels.
This code is used internally in SCB Lab, TCIS, TIFR-H.
You're free to modify it and distribute it.
"""
from __future__ import unicode_literals, print_function
import os
import collections
import bioformats as bf
import javabridge as jb
import numpy as np
import tifffile as tf
import tqdm
from .args import arguments
import xml.dom.minidom
def get_files(directory, keyword):
""" Returns all the files in the given directory
and subdirectories, filtering with the keyword.
Usage:
>>> all_vsi_files = get_files(".", ".vsi")
This will have all the .vsi files in the current
directory and all other directories in the current
directory.
"""
file_list = []
for path, subdirs, files in os.walk(directory):
for name in files:
filename = os.path.join(path, name)
if keyword in filename:
file_list.append(filename)
return sorted(file_list)
def get_metadata(filename):
"""Read the meta data and return the metadata object.
"""
meta = bf.get_omexml_metadata(filename)
metadata = bf.omexml.OMEXML(meta)
return metadata
def get_channel(metadata, channel):
"""Return the channel name from the metadata object"""
try:
channel_name = metadata.image().Pixels.Channel(channel).Name
except Exception:
return
if channel_name is None:
return
return channel_name.replace("/", "_")
def read_images(path, save_directory, big, save_separate):
"""Reads images from the .vsi and associated files.
Returns a dictionary with key as channel, and list
of images as values."""
with bf.ImageReader(path) as reader:
# Shape of the data
c_total = reader.rdr.getSizeC()
z_total = reader.rdr.getSizeZ()
t_total = reader.rdr.getSizeT()
# Since we don't support hyperstacks yet...
if 1 not in [z_total, t_total]:
raise TypeError("Only 4D images are currently supported.")
metadata = get_metadata(path)
# This is so we can manually set a description down below.
pbar_c = tqdm.tqdm(range(c_total))
for channel in pbar_c:
images = []
# Get the channel name, so we can name the file after this.
channel_name = get_channel(metadata, channel)
# Update the channel progress bar description with the
# channel name.
pbar_c.set_description(channel_name)
for time in tqdm.tqdm(range(t_total), "T"):
for z in tqdm.tqdm(range(z_total), "Z"):
image = reader.read(c=channel,
z=z,
t=time,
rescale=False)
# If there's no metadata on channel name, save channels
# with numbers, starting from 0.
if channel_name is None:
channel_name = str(channel)
images.append(image)
save_images(np.asarray(images), channel_name, save_directory, big,
save_separate)
return metadata
def save_images(images, channel, save_directory, big=False,
save_separate=False):
"""Saves the images as TIFs with channel name as the filename.
Channel names are saved as numbers when names are not available."""
# Make the output directory, if it doesn't already exist.
if not os.path.exists(save_directory):
os.makedirs(save_directory)
# Save a file for every image in a stack.
if save_separate:
filename = save_directory + str(channel) + "_{}.tif"
for num, image in enumerate(images):
with tf.TiffWriter(filename.format(num+1), bigtiff=big) as f:
f.save(image)
# Save a single .tif file for all the images in a channel.
else:
filename = save_directory + str(channel) + ".tif"
with tf.TiffWriter(filename, bigtiff=big) as f:
f.save(images)
def save_metadata(metadata, save_directory):
data = xml.dom.minidom.parseString(metadata.to_xml())
pretty_xml_as_string = data.toprettyxml()
with open(save_directory + "metadata.xml", "w") as xmlfile:
xmlfile.write(pretty_xml_as_string)
def _init_logger():
"""This is so that Javabridge doesn't spill out a lot of DEBUG messages
during runtime.
From CellProfiler/python-bioformats.
"""
rootLoggerName = jb.get_static_field("org/slf4j/Logger",
"ROOT_LOGGER_NAME",
"Ljava/lang/String;")
rootLogger = jb.static_call("org/slf4j/LoggerFactory",
"getLogger",
"(Ljava/lang/String;)Lorg/slf4j/Logger;",
rootLoggerName)
logLevel = jb.get_static_field("ch/qos/logback/classic/Level",
"WARN",
"Lch/qos/logback/classic/Level;")
jb.call(rootLogger,
"setLevel",
"(Lch/qos/logback/classic/Level;)V",
logLevel)
def run():
# Add file extensions to this to be able to read different file types.
| pbar_files = tqdm.tqdm(files)
for path in pbar_files:
if not any(_ in path for _ in extensions):
continue
file_location = os.path.dirname(os.path.realpath(path))
filename = os.path.splitext(os.path.basename(path))[0]
save_directory = file_location + "/_{}_/".format(filename)
pbar_files.set_description("..." + path[-15:])
# If the user wants to store meta data for existing data,
# the user may pass -om or --onlymetadata argument which
# will bypass read_images() and get metadata on its own.
if arg.onlymetadata:
metadata = get_metadata(path)
# The default behaviour is to store the files with the
# metadata.
else:
metadata = read_images(path, save_directory, big=arg.big,
save_separate=arg.separate)
save_metadata(metadata, save_directory)
jb.kill_vm()
| extensions = [".vsi"]
arg = arguments()
files = get_files(arg.f, arg.k)
if 0 == len(files):
print("No file matching *{}* keyword.".format(arg.k))
exit()
if arg.list:
for f in files:
print(f)
print("======================")
print("Total files found:", len(files))
print("======================")
exit()
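# Bioformats runs on the JVM, so start one (2 GB heap) before reading any files.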
jb.start_vm(class_path=bf.JARS, max_heap_size="2G")
logger = _init_logger()
| identifier_body |
io.py | """Microscoper is a wrapper around bioformats using a forked
python-bioformats to extract the raw images from Olympus IX83
CellSense .vsi format, into a more commonly used TIFF format.
Images are bundled together according to their channels.
This code is used internally in SCB Lab, TCIS, TIFR-H.
You're free to modify it and distribute it.
"""
from __future__ import unicode_literals, print_function
import os
import collections
import bioformats as bf
import javabridge as jb
import numpy as np
import tifffile as tf
import tqdm
from .args import arguments
import xml.dom.minidom
def get_files(directory, keyword):
""" Returns all the files in the given directory
and subdirectories, filtering with the keyword.
Usage:
>>> all_vsi_files = get_files(".", ".vsi")
This will have all the .vsi files in the current
directory and all other directories in the current
directory.
"""
file_list = []
for path, subdirs, files in os.walk(directory):
for name in files:
filename = os.path.join(path, name)
if keyword in filename:
file_list.append(filename)
return sorted(file_list)
def get_metadata(filename):
"""Read the meta data and return the metadata object.
"""
meta = bf.get_omexml_metadata(filename)
metadata = bf.omexml.OMEXML(meta)
return metadata
def get_channel(metadata, channel):
"""Return the channel name from the metadata object"""
try:
channel_name = metadata.image().Pixels.Channel(channel).Name
except Exception:
return
if channel_name is None:
return
return channel_name.replace("/", "_")
def read_images(path, save_directory, big, save_separate):
"""Reads images from the .vsi and associated files.
Returns a dictionary with key as channel, and list
of images as values."""
with bf.ImageReader(path) as reader:
# Shape of the data | c_total = reader.rdr.getSizeC()
z_total = reader.rdr.getSizeZ()
t_total = reader.rdr.getSizeT()
# Since we don't support hyperstacks yet...
if 1 not in [z_total, t_total]:
raise TypeError("Only 4D images are currently supported.")
metadata = get_metadata(path)
# This is so we can manually set a description down below.
pbar_c = tqdm.tqdm(range(c_total))
for channel in pbar_c:
images = []
# Get the channel name, so we can name the file after this.
channel_name = get_channel(metadata, channel)
# Update the channel progress bar description with the
# channel name.
pbar_c.set_description(channel_name)
for time in tqdm.tqdm(range(t_total), "T"):
for z in tqdm.tqdm(range(z_total), "Z"):
image = reader.read(c=channel,
z=z,
t=time,
rescale=False)
# If there's no metadata on channel name, save channels
# with numbers, starting from 0.
if channel_name is None:
channel_name = str(channel)
images.append(image)
save_images(np.asarray(images), channel_name, save_directory, big,
save_separate)
return metadata
def save_images(images, channel, save_directory, big=False,
save_separate=False):
"""Saves the images as TIFs with channel name as the filename.
Channel names are saved as numbers when names are not available."""
# Make the output directory, if it doesn't already exist.
if not os.path.exists(save_directory):
os.makedirs(save_directory)
# Save a file for every image in a stack.
if save_separate:
filename = save_directory + str(channel) + "_{}.tif"
for num, image in enumerate(images):
with tf.TiffWriter(filename.format(num+1), bigtiff=big) as f:
f.save(image)
# Save a single .tif file for all the images in a channel.
else:
filename = save_directory + str(channel) + ".tif"
with tf.TiffWriter(filename, bigtiff=big) as f:
f.save(images)
def save_metadata(metadata, save_directory):
data = xml.dom.minidom.parseString(metadata.to_xml())
pretty_xml_as_string = data.toprettyxml()
with open(save_directory + "metadata.xml", "w") as xmlfile:
xmlfile.write(pretty_xml_as_string)
def _init_logger():
"""This is so that Javabridge doesn't spill out a lot of DEBUG messages
during runtime.
From CellProfiler/python-bioformats.
"""
rootLoggerName = jb.get_static_field("org/slf4j/Logger",
"ROOT_LOGGER_NAME",
"Ljava/lang/String;")
rootLogger = jb.static_call("org/slf4j/LoggerFactory",
"getLogger",
"(Ljava/lang/String;)Lorg/slf4j/Logger;",
rootLoggerName)
logLevel = jb.get_static_field("ch/qos/logback/classic/Level",
"WARN",
"Lch/qos/logback/classic/Level;")
jb.call(rootLogger,
"setLevel",
"(Lch/qos/logback/classic/Level;)V",
logLevel)
def run():
# Add file extensions to this to be able to read different file types.
extensions = [".vsi"]
arg = arguments()
files = get_files(arg.f, arg.k)
if 0 == len(files):
print("No file matching *{}* keyword.".format(arg.k))
exit()
if arg.list:
for f in files:
print(f)
print("======================")
print("Total files found:", len(files))
print("======================")
exit()
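# Bioformats runs on the JVM, so start one (2 GB heap) before reading any files.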
jb.start_vm(class_path=bf.JARS, max_heap_size="2G")
logger = _init_logger()
pbar_files = tqdm.tqdm(files)
for path in pbar_files:
if not any(_ in path for _ in extensions):
continue
file_location = os.path.dirname(os.path.realpath(path))
filename = os.path.splitext(os.path.basename(path))[0]
save_directory = file_location + "/_{}_/".format(filename)
pbar_files.set_description("..." + path[-15:])
# If the user wants to store meta data for existing data,
# the user may pass -om or --onlymetadata argument which
# will bypass read_images() and get metadata on its own.
if arg.onlymetadata:
metadata = get_metadata(path)
# The default behaviour is to store the files with the
# metadata.
else:
metadata = read_images(path, save_directory, big=arg.big,
save_separate=arg.separate)
save_metadata(metadata, save_directory)
jb.kill_vm()
|
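# --- Added usage sketch (not part of the original module). It only calls
# helpers defined above; "example.vsi" and the output directory are
# hypothetical placeholders. ---
if __name__ == "__main__":
    jb.start_vm(class_path=bf.JARS, max_heap_size="2G")
    _init_logger()  # keep Javabridge from flooding DEBUG output
    try:
        # Convert one .vsi into per-channel TIFF stacks, then dump metadata.
        metadata = read_images("example.vsi", "./_example_/", big=False,
                               save_separate=False)
        save_metadata(metadata, "./_example_/")
    finally:
        jb.kill_vm()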
assess_unbinned_reads_across_samples.py
import os
import pandas as pd
#import seaborn as sns
total_reads = pd.read_csv('../data/sample_info/sample_read_counts.tsv', sep='\t', names = ['fastq filename', 'number of reads'])
total_reads['cryptic metagenome name'] = total_reads['fastq filename'].str.replace('.fastq.gz', '', regex=False)  # str.strip() would remove characters, not the suffix
sample_info = pd.read_csv('../data/sample_info/sample_info.tsv', sep='\t')
sample_translation = pd.read_csv('../data/sample_info/meta4_sample_names--cryptic_to_sample_number.tsv', sep='\t')
read_mappings = pd.read_csv('./data/num_reads_mapped--can_double_count_multiple_mappings.tsv', sep='\t')
reads = pd.merge(sample_info, sample_translation)
reads = pd.merge(reads, total_reads)
reads = pd.merge(reads, read_mappings)
out_path = 'total_num_reads_across_samples_with_sample_info.tsv'
out_dir = './data'
reads.to_csv(os.path.join(out_dir, out_path), sep='\t', index=False)
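# Note (added): the three pd.merge() calls above pass no "on=" argument, so
# pandas joins on whatever column names the frames share (e.g. 'cryptic
# metagenome name', 'fastq filename'). A more explicit, hedged variant --
# the key name 'sample number' is an assumption, not taken from the data:
# reads = pd.merge(sample_info, sample_translation, on='sample number')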
api.py
# -*- coding: utf-8 -*-
#
# Copyright (C)2006-2009 Edgewall Software
# All rights reserved.
#
# This software is licensed as described in the file COPYING, which
# you should have received as part of this distribution. The terms
# are also available at http://trac.edgewall.org/wiki/TracLicense.
#
# This software consists of voluntary contributions made by many
# individuals. For the exact contribution history, see the revision
# history and logs, available at http://trac.edgewall.org/log/.
from trac.core import *
class IPreferencePanelProvider(Interface):
    def get_preference_panels(req):
        """Return a list of available preference panels.
        The items returned by this function must be tuples of the form
        `(panel, label)`.
        """
    def render_preference_panel(req, panel):
        """Process a request for a preference panel.
        This function should return a tuple of the form `(template, data)`,
        where `template` is the name of the template to use and `data` is the
        data to be passed to the template.
        """
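# --- Added illustrative sketch: a hypothetical component implementing the
# interface above. Component/implements come from trac.core (pulled in by the
# star import); the panel id, label and template name are invented. ---
class AppearancePreferencePanel(Component):
    implements(IPreferencePanelProvider)

    def get_preference_panels(self, req):
        yield ('appearance', 'Appearance')  # (panel, label)

    def render_preference_panel(self, req, panel):
        return 'prefs_appearance.html', {}  # (template, data)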
development.js
'use strict';
module.exports = {
db: 'mongodb://lolo:[email protected]:10048/todo-database',
app: {
title: 'TuDu - Development Environment'
},
facebook: {
clientID: process.env.FACEBOOK_ID || 'APP_ID',
clientSecret: process.env.FACEBOOK_SECRET || 'APP_SECRET',
callbackURL: '/auth/facebook/callback'
},
twitter: {
clientID: process.env.TWITTER_KEY || 'CONSUMER_KEY',
clientSecret: process.env.TWITTER_SECRET || 'CONSUMER_SECRET',
callbackURL: '/auth/twitter/callback'
},
google: {
clientID: process.env.GOOGLE_ID || 'APP_ID',
clientSecret: process.env.GOOGLE_SECRET || 'APP_SECRET',
callbackURL: '/auth/google/callback'
},
linkedin: {
clientID: process.env.LINKEDIN_ID || 'APP_ID',
clientSecret: process.env.LINKEDIN_SECRET || 'APP_SECRET',
callbackURL: '/auth/linkedin/callback'
},
github: {
clientID: process.env.GITHUB_ID || 'APP_ID',
clientSecret: process.env.GITHUB_SECRET || 'APP_SECRET',
callbackURL: '/auth/github/callback'
},
mailer: {
from: process.env.MAILER_FROM || 'MAILER_FROM',
options: {
service: process.env.MAILER_SERVICE_PROVIDER || 'MAILER_SERVICE_PROVIDER',
auth: {
user: process.env.MAILER_EMAIL_ID || 'MAILER_EMAIL_ID',
pass: process.env.MAILER_PASSWORD || 'MAILER_PASSWORD'
}
}
}
};
LanScan.py
import subprocess
import time
import sys
import re
class checkIfUp:
__shellPings = []
__shell2Nbst = []
__ipsToCheck = []
checkedIps = 0
onlineIps = 0
unreachable = 0
timedOut = 0
upIpsAddress = []
computerName = []
completeMacAddress = []
executionTime = 0
def __init__(self,fromIp,toIp):
startTime = time.time()
self.fromIp = fromIp # from 192.168.1.x
self.toIp = toIp # to 192.168.x.x
self.__checkIfIpIsValid(fromIp)
self.__checkIfIpIsValid(toIp)
self.__getRange(fromIp,toIp)
self.__shellToQueue()
#self.__checkIfUp() # run by the shellToQueue queue organizer
self.__computerInfoInQueue()
endTime = time.time()
self.executionTime = round(endTime - startTime,3)
def __checkIfIpIsValid(self,ip):
def validateRange(val):
# valid range => 0 <-> 255
try:
val = int(val)
if val < 0 or val > 255:
print "Invalid IP Range ("+str(val)+")"
sys.exit(0)
except:
print "Invalid IP"
sys.exit(0)
ip = ip.split(".")
firstVal = validateRange(ip[0])
secondVal = validateRange(ip[1])
thirdVal = validateRange(ip[2])
fourthVal = validateRange(ip[3])
return True
def __getRange(self,fromIp,toIp):
fromIp = fromIp.split(".")
toIp = toIp.split(".")
# toIp must be > fromIp
def ip3chars(ipBlock):
# input 1; output 001
ipBlock = str(ipBlock)
while len(ipBlock) != 3:
ipBlock = "0"+ipBlock
return ipBlock
fromIpRaw = ip3chars(fromIp[0])+ip3chars(fromIp[1])+ip3chars(fromIp[2])+ip3chars(fromIp[3])
toIpRaw = ip3chars(toIp[0])+ip3chars(toIp[1])+ip3chars(toIp[2])+ip3chars(toIp[3])
if fromIpRaw > toIpRaw:
# if from is bigger switch the order
temp = fromIp
fromIp = toIp
toIp = temp
currentIp = [0,0,0,0]
# all to integers
currentIp0 = int(fromIp[0])
currentIp1 = int(fromIp[1])
currentIp2 = int(fromIp[2])
currentIp3 = int(fromIp[3])
toIp0 = int(toIp[0])
toIp1 = int(toIp[1])
toIp2 = int(toIp[2])
toIp3 = int(toIp[3])
firstIp = str(currentIp0)+"."+str(currentIp1)+"."+str(currentIp2)+"."+str(currentIp3)
self.__ipsToCheck = [firstIp]
while currentIp3 != toIp3 or currentIp2 != toIp2 or currentIp1 != toIp1 or currentIp0 != toIp0:
currentIp3 += 1
if currentIp3 > 255:
currentIp3 = 0
currentIp2 += 1
if currentIp2 > 255:
currentIp2 = 0
currentIp1 += 1
if currentIp1 > 255:
currentIp1 = 0
currentIp0 += 1
addIp = str(currentIp0)+"."+str(currentIp1)+"."+str(currentIp2)+"."+str(currentIp3)
self.__ipsToCheck.append(addIp)
def __shellToQueue(self):
# write them in the shell queue
maxPingsAtOnce = 200
currentQueuedPings = 0
for pingIp in self.__ipsToCheck:
proc = subprocess.Popen(['ping','-n','1',pingIp],stdout=subprocess.PIPE,shell=True)
self.__shellPings.append(proc)
currentQueuedPings += 1
if currentQueuedPings >= maxPingsAtOnce:
#execute shells
self.__checkIfUp()
currentQueuedPings = 0
self.__shellPings = []
self.__checkIfUp() # execute last queue
def __checkIfUp(self):
# execute the shells & determine whether the host is up or not
for shellInQueue in self.__shellPings:
pingResult = ""
shellInQueue.wait()
while True:
line = shellInQueue.stdout.readline()
if line != "":
pingResult += line
else:
break;
self.checkedIps += 1
if 'unreachable' in pingResult:
self.unreachable += 1
elif 'timed out' in pingResult:
self.timedOut += 1
else:
self.onlineIps += 1
currentIp = self.__ipsToCheck[self.checkedIps-1]
self.upIpsAddress.append(currentIp)
def __computerInfoInQueue(self):
# shell queue for online hosts
maxShellsAtOnce = 255
currentQueuedNbst = 0
for onlineIp in self.upIpsAddress:
proc = subprocess.Popen(['\\Windows\\sysnative\\nbtstat.exe','-a',onlineIp],stdout=subprocess.PIPE,shell=True)
self.__shell2Nbst.append(proc)
currentQueuedNbst += 1
if currentQueuedNbst >= maxShellsAtOnce:
# execute shells
self.__gatherComputerInfo()
currentQueuedNbst = 0
self.__shell2Nbst = []
self.__gatherComputerInfo() # execute last queue
def __gatherComputerInfo(self):
# execute the shells and find host Name and MAC
for shellInQueue in self.__shell2Nbst:
nbstResult = ""
shellInQueue.wait()
computerNameLine = ""
macAddressLine = ""
computerName = ""
macAddress = ""
while True:
line = shellInQueue.stdout.readline()
if line != "":
if '<00>' in line and 'UNIQUE' in line:
computerNameLine = line
if 'MAC Address' in line:
macAddressLine = line
else:
break;
computerName = re.findall('([ ]+)(.*?)([ ]+)<00>', computerNameLine)
macAddress = re.findall('([A-Z0-9]+)-([A-Z0-9]+)-([A-Z0-9]+)-([A-Z0-9]+)-([A-Z0-9]+)-([A-Z0-9]+)',macAddressLine)
try:
self.computerName.append(computerName[0][1])
except:
self.computerName.append("")
completeMacAddress = ""
firstMacElement = 0
try:
for macEach in macAddress[0]:
if firstMacElement == 0:
firstMacElement += 1
else:
completeMacAddress += ":"
completeMacAddress += macEach
firstMacElement = 0
except:
completeMacAddress = ""
self.completeMacAddress.append(completeMacAddress)
def readValue(self):
# debugging use only
ips = []
for ip in self.completeMacAddress:
ips.append(ip)
return ips
print "\t\t---LANScanner v1.0---\n"
# brief tutorial
print "Sample input data:"
print "FromIP: 192.168.1.50"
print "ToIP: 192.168.1.20"
print "---"
# input
fromIp = raw_input("From: ")
toIp = raw_input("To: ")
# enter values to class
userRange = checkIfUp(fromIp,toIp)
# read class values
print ""
#print userRange.readValue() # debugging use only
print "Checked",userRange.checkedIps,"IPs"
print ""
print "Online:",str(userRange.onlineIps)+"/"+str(userRange.checkedIps)
print "Unreachable:",userRange.unreachable,"Timed out:",userRange.timedOut
print "" # newline
print "Online IPs:"
print "IP\t\tNAME\t\tMAC"
counter = 0
for onlineIp in userRange.upIpsAddress:
print onlineIp+"\t"+userRange.computerName[counter]+"\t"+userRange.completeMacAddress[counter]
counter += 1
print ""
print "Took",userRange.executionTime,"seconds"
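# Note (added): the "ping -n" and "\\Windows\\sysnative\\nbtstat.exe" calls
# above are Windows-only. A hedged sketch of choosing the ping count flag on
# other platforms (illustrative, not part of the original script):
# import platform
# count_flag = "-n" if platform.system() == "Windows" else "-c"
# subprocess.Popen(["ping", count_flag, "1", pingIp], stdout=subprocess.PIPE)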
models.py
from django.db import models
from django.template.defaultfilters import truncatechars
class Setting(models.Model):
name = models.CharField(max_length=100, unique=True, db_index=True)
value = models.TextField(blank=True, default='')
value_type = models.CharField(max_length=1, choices=(('s', 'string'), ('i', 'integer'), ('f', 'float'), ('b', 'boolean')))
hide_value_in_list = models.BooleanField(default=False)
def __str__(self):
    return "%s = %s (%s)" % (self.name, "**скрыто**" if self.hide_value_in_list else truncatechars(self.value, 150), self.get_value_type_display())
def get_value(self):
    val = self.value
    types = {'s': str, 'i': int, 'b': (lambda v: v.lower() == "true"), 'f': float}
    return types[self.value_type](val)
class Meta:
    verbose_name = 'Параметр'
    verbose_name_plural = 'Параметры'
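# --- Added usage sketch: assumes a Setting row already exists; the name and
# values are hypothetical. ---
# s = Setting.objects.get(name='items_per_page')  # value='25', value_type='i'
# s.get_value()                                   # -> 25 (int, per type code 'i')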
fi.py
# -*- coding: iso-8859-1 -*-
# Text translations for Suomi (fi).
# Automatically generated - DO NOT EDIT, edit fi.po instead!
meta = {
'language': 'Suomi',
'maintainer': '***vacant***',
'encoding': 'iso-8859-1',
'direction': 'ltr',
}
text = {
'''Create this page''':
'''Luo tämä sivu''',
'''Edit "%(pagename)s"''':
'''Muokkaa "%(pagename)s"''',
'''Reduce editor size''':
'''Pienennä editointi ikkunan kokoa''',
'''Describe %s here.''':
'''Kuvaile %s tässä.''',
'''Check Spelling''':
'''Oikolue''',
'''Save Changes''':
'''Talleta muutokset''',
'''Cancel''':
'''Peruuta''',
'''Preview''':
'''Esikatsele''',
'''Edit was cancelled.''':
'''Muokkaus peruttu.''',
'''Edit''':
'''Muokkaa''',
'''Default''':
'''Oletusarvo''',
'''Name''':
'''Nimi''',
'''Password''':
'''Tunnussana''',
'''Email''':
'''Sähköposti''',
'''Editor size''':
'''Editointikentän koko''',
'''Time zone''':
'''Aikavyöhyke''',
'''Your time is''':
'''Aikasi on''',
'''Server time is''':
'''Palvelimen aika on''',
'''Date format''':
'''Päivämäärän muoto''',
'''General options''':
'''Yleiset Asetukset''',
'''General Information''':
'''Yleiset Tiedot''',
'''Revision History''':
'''Versiohistoria''',
'''Date''':
'''Päivämäärä''',
'''Size''':
'''Koko''',
'''Editor''':
'''Editori''',
'''Comment''':
'''Huomautus''',
'''view''':
'''näytä''',
'''revert''':
'''palauta''',
'''Show "%(title)s"''':
'''Näytä "%(title)s"''',
'''You are not allowed to revert this page!''':
'''Sinulla ei ole oikeutta palauttaa tätä sivua!''',
'''Python Version''':
'''Python Versio''',
'''Sycamore Version''':
'''Sycamore Versio''',
'''4Suite Version''':
'''4Suite Versio''',
'''del''':
'''poista''',
'''get''':
'''hae''',
'''edit''':
'''muokkaa''',
'''No attachments stored for %(pagename)s''':
'''Sivulla %(pagename)s ei ole liitteitä''',
'''attachment:%(filename)s of %(pagename)s''':
'''liite:%(filename)s / %(pagename)s''',
'''Page "%s" was successfully deleted!''':
'''Sivu "%s" poistettiin onnistuneesti!''',
'''Really delete this page?''':
'''Haluatko varmasti poistaa tämän sivun?''',
'''Drawing \'%(filename)s\' saved.''':
'''Piirrustus \'%(filename)s\' talletettu.''',
'''Create new drawing "%(filename)s"''':
'''Luo uusi piirrustus "%(filename)s"''',
'''date''':
'''päivämäärä''',
'''Others''':
'''Muut''',
'''Clear message''':
'''Tyhjennä viesti''',
'''Mail sent OK''':
'''Sähköposti lähetetty onnistuneesti''',
}
datastore_loader.py
#!/usr/bin/env python
"""Process that loads the datastore"""
__author__ = 'Michael Meisinger, Thomas Lennan'
"""
Possible Features
- load objects into different datastores
- load from a directory of YML files in ion-definitions
- load from a ZIP of YMLs
- load an additional directory (not under GIT control)
- change timestamp for resources
- load a subset of objects by type, etc
"""
from pyon.public import CFG, log, ImmediateProcess, iex
from pyon.datastore import datastore_admin
from pyon.core import bootstrap
from pyon.core.bootstrap import get_sys_name
class DatastoreAdmin(ImmediateProcess):
"""
bin/pycc -x ion.process.bootstrap.datastore_loader.DatastoreLoader op=clear prefix=ion
bin/pycc -x ion.process.bootstrap.datastore_loader.DatastoreLoader op=dump path=res/preload/local/my_dump
bin/pycc -fc -x ion.process.bootstrap.datastore_loader.DatastoreLoader op=load path=res/preload/local/my_dump
bin/pycc -x ion.process.bootstrap.datastore_loader.DatastoreLoader op=dumpres
"""
def on_init(self):
pass
def on_start(self):
# print env temporarily to debug cei
import os
log.info('ENV vars: %s' % str(os.environ))
op = self.CFG.get("op", None)
datastore = self.CFG.get("datastore", None)
path = self.CFG.get("path", None)
prefix = self.CFG.get("prefix", get_sys_name()).lower()
log.info("DatastoreLoader: {op=%s, datastore=%s, path=%s, prefix=%s}" % (op, datastore, path, prefix))
self.da = datastore_admin.DatastoreAdmin()
if op:
if op == "load":
self.da.load_datastore(path, datastore, ignore_errors=False)
elif op == "dump":
self.da.dump_datastore(path, datastore)
elif op == "dumpres":
from ion.util.datastore.resources import ResourceRegistryHelper
rrh = ResourceRegistryHelper()
rrh.dump_resources_as_xlsx(path)
elif op == "blame":
# TODO make generic
self.da.get_blame_objects()
elif op == "clear":
self.da.clear_datastore(datastore, prefix)
else:
raise iex.BadRequest("Operation unknown")
else:
raise iex.BadRequest("No operation specified")
def on_quit(self):
pass
DatastoreLoader = DatastoreAdmin
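# --- Added note: on_start() reads its parameters from the process config via
# self.CFG.get(...). A hypothetical spawn config matching the docstring
# examples above (key names from the code, values invented):
# {"op": "dump", "datastore": "resources",
#  "path": "res/preload/local/my_dump", "prefix": "ion"}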
IngestListController.ts
import _ from 'lodash';
import {BaseListController} from 'apps/archive/controllers';
export class IngestListController extends BaseListController {
constructor($scope, $injector, $location, api, $rootScope, search, desks) {
super($scope, $location, search, desks);
$scope.type = 'ingest';
$scope.loading = false;
$scope.repo = {
ingest: true,
archive: false,
search: 'local',
};
$scope.api = api.ingest;
$rootScope.currentModule = 'ingest';
this.fetchItems = function(criteria, next) {
$scope.loading = true;
criteria.aggregations = 1;
criteria.es_highlight = search.getElasticHighlight();
api.query('ingest', criteria).then((items) => {
$scope.items = search.mergeItems(items, $scope.items, next);
$scope.total = items._meta.total;
})
.finally(() => {
$scope.loading = false;
});
};
this.fetchItem = function(id) {
return api.ingest.getById(id);
};
var oldQuery = _.omit($location.search(), '_id');
var update = angular.bind(this, function searchUpdated() {
var newquery = _.omit($location.search(), '_id');
if (!_.isEqual(_.omit(newquery, 'page'), _.omit(oldQuery, 'page'))) {
    $location.search('page', null);
}
var query = this.getQuery($location.search());
this.fetchItems({source: query});
oldQuery = newquery;
});
$scope.$on('ingest:update', update);
$scope.$on('item:fetch', update);
$scope.$on('item:deleted', update);
$scope.$watchCollection(function getSearchWithoutId() {
return _.omit($location.search(), '_id');
}, update);
}
}
IngestListController.$inject = ['$scope', '$injector', '$location', 'api', '$rootScope', 'search', 'desks'];
test_bleuscore.py

# ----------------------------------------------------------------------------
# Copyright 2015-2016 Nervana Systems Inc.
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
#     http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ----------------------------------------------------------------------------
'''
Test BLEUScore metric against reference
'''
from neon.transforms.cost import BLEUScore


def test_bleuscore():
    # dataset with two sentences
    sentences = ["a quick brown fox jumped",
                 "the rain in spain falls mainly on the plains"]
    references = [["a fast brown fox jumped",
                   "a quick brown fox vaulted",
                   "a rapid fox of brown color jumped",
                   "the dog is running on the grass"],
                  ["the precipitation in spain falls on the plains",
                   "spanish rain falls for the most part on the plains",
                   "the rain in spain falls in the plains most of the time",
                   "it is raining today"]]

    # reference scores for the given set of reference sentences
    bleu_score_references = [92.9, 88.0, 81.5, 67.1]  # bleu1, bleu2, bleu3, bleu4

    # compute scores
    bleu_metric = BLEUScore()
    bleu_metric(sentences, references)

    # check against references
    for score, reference in zip(bleu_metric.bleu_n, bleu_score_references):
        assert round(score, 1) == reference


if __name__ == '__main__':
    test_bleuscore()
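For reference, the quantity these expected values encode: BLEU-n combines the modified n-gram precisions p_k of a candidate against its reference set with a brevity penalty BP. In the standard formulation (Papineni et al., 2002):

    \mathrm{BLEU}\text{-}n = \mathrm{BP} \cdot \exp\!\Big(\sum_{k=1}^{n} \frac{1}{n}\log p_k\Big),
    \qquad
    \mathrm{BP} = \begin{cases} 1, & c > r \\ e^{\,1 - r/c}, & c \le r \end{cases}

where c is the candidate length and r the effective reference length. The test's values are this score scaled by 100: bleu1 uses unigram precision only, while bleu4 averages log-precisions up to 4-grams. This is the textbook definition; neon's BLEUScore is assumed here to follow it, which the decreasing expected values 92.9, 88.0, 81.5, 67.1 are consistent with.
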
TrustTokensView.ts

// Copyright 2020 The Chromium Authors. All rights reserved.
// Use of this source code is governed by a BSD-style license that can be
// found in the LICENSE file.

import * as i18n from '../../../core/i18n/i18n.js';
import * as DataGrid from '../../../ui/components/data_grid/data_grid.js';
import * as ComponentHelpers from '../../../ui/components/helpers/helpers.js';
import * as IconButton from '../../../ui/components/icon_button/icon_button.js';
import * as LitHtml from '../../../ui/lit-html/lit-html.js';

import trustTokensViewStyles from './trustTokensView.css.js';
import trustTokensViewDeleteButtonStyles from './trustTokensViewDeleteButton.css.js';

import type * as Protocol from '../../../generated/protocol.js';

const UIStrings = {
  /**
   *@description Text for the issuer of an item
   */
  issuer: 'Issuer',
  /**
   *@description Column header for Trust Token table
   */
  storedTokenCount: 'Stored token count',
  /**
   *@description Hover text for an info icon in the Trust Token panel
   */
  allStoredTrustTokensAvailableIn: 'All stored Trust Tokens available in this browser instance.',
  /**
   * @description Text shown instead of a table when the table would be empty.
   */
  noTrustTokensStored: 'No Trust Tokens are currently stored.',
  /**
   * @description Each row in the Trust Token table has a delete button. This is the text shown
   * when hovering over this button. The placeholder is a normal URL, indicating the site which
   * provided the Trust Tokens that will be deleted when the button is clicked.
   * @example {https://google.com} PH1
   */
  deleteTrustTokens: 'Delete all stored Trust Tokens issued by {PH1}.',
};
const str_ = i18n.i18n.registerUIStrings('panels/application/components/TrustTokensView.ts', UIStrings);
export const i18nString = i18n.i18n.getLocalizedString.bind(undefined, str_);

interface TrustTokensDeleteButtonData {
  issuer: DataGrid.DataGridUtils.CellValue;
  deleteClickHandler: (issuerOrigin: string) => void;
}

class TrustTokensDeleteButton extends HTMLElement {
  static readonly litTagName = LitHtml.literal`devtools-trust-tokens-delete-button`;
  readonly #shadow = this.attachShadow({mode: 'open'});
  #issuer: DataGrid.DataGridUtils.CellValue|null = null;
  #deleteClickHandler: (issuerOrigin: string) => void = () => {};

  connectedCallback(): void {
    this.#shadow.adoptedStyleSheets = [trustTokensViewDeleteButtonStyles];
  }

  set data(data: TrustTokensDeleteButtonData) {
    this.#issuer = data.issuer;
    this.#deleteClickHandler = data.deleteClickHandler;
    this.#render();
  }

  #render(): void {
    if (!this.#issuer) {
      return;
    }
    // clang-format off
    LitHtml.render(LitHtml.html`
      <!-- Wrap the button in a container, otherwise we can't center it inside the column. -->
      <span class="button-container">
        <button class="delete-button"
          title=${i18nString(UIStrings.deleteTrustTokens, {PH1: this.#issuer as string})}
          @click=${(): void => this.#deleteClickHandler(this.#issuer as string)}>
          <${IconButton.Icon.Icon.litTagName} .data=${
            {iconName: 'trash_bin_icon', color: 'var(--color-text-secondary)', width: '9px', height: '14px'} as
            IconButton.Icon.IconWithName}>
          </${IconButton.Icon.Icon.litTagName}>
        </button>
      </span>`, this.#shadow, {host: this});
    // clang-format on
  }
}

export interface TrustTokensViewData {
  tokens: Protocol.Storage.TrustTokens[];
  deleteClickHandler: (issuerOrigin: string) => void;
}

export class TrustTokensView extends HTMLElement {
  static readonly litTagName = LitHtml.literal`devtools-trust-tokens-storage-view`;
  readonly #shadow = this.attachShadow({mode: 'open'});
  #tokens: Protocol.Storage.TrustTokens[] = [];
  #deleteClickHandler: (issuerOrigin: string) => void = () => {};

  connectedCallback(): void {
    this.#shadow.adoptedStyleSheets = [trustTokensViewStyles];
    this.#render();
  }

  set data(data: TrustTokensViewData) {
    this.#tokens = data.tokens;
    this.#deleteClickHandler = data.deleteClickHandler;
    this.#render();
  }

  #render(): void {
    // clang-format off
    LitHtml.render(LitHtml.html`
      <div>
        <span class="heading">Trust Tokens</span>
        <${IconButton.Icon.Icon.litTagName} class="info-icon" title=${
          i18nString(UIStrings.allStoredTrustTokensAvailableIn)}
          .data=${
            {iconName: 'ic_info_black_18dp', color: 'var(--color-link)', width: '14px'} as
            IconButton.Icon.IconWithName}>
        </${IconButton.Icon.Icon.litTagName}>
        ${this.#renderGridOrNoDataMessage()}
      </div>
    `, this.#shadow, {host: this});
    // clang-format on
  }

  #renderGridOrNoDataMessage(): LitHtml.TemplateResult {
    if (this.#tokens.length === 0) {
      return LitHtml.html`<div class="no-tt-message">${i18nString(UIStrings.noTrustTokensStored)}</div>`;
    }

    const gridData: DataGrid.DataGridController.DataGridControllerData = {
      columns: [
        {
          id: 'issuer',
          title: i18nString(UIStrings.issuer),
          widthWeighting: 10,
          hideable: false,
          visible: true,
          sortable: true,
        },
        {
          id: 'count',
          title: i18nString(UIStrings.storedTokenCount),
          widthWeighting: 5,
          hideable: false,
          visible: true,
          sortable: true,
        },
        {
          id: 'delete-button',
          title: '',
          widthWeighting: 1,
          hideable: false,
          visible: true,
          sortable: false,
        },
      ],
      rows: this.#buildRowsFromTokens(),
      initialSort: {
        columnId: 'issuer',
        direction: DataGrid.DataGridUtils.SortDirection.ASC,
      },
    };

    return LitHtml.html`
      <${DataGrid.DataGridController.DataGridController.litTagName} .data=${
        gridData as DataGrid.DataGridController.DataGridControllerData}></${
        DataGrid.DataGridController.DataGridController.litTagName}>
    `;
  }

  #buildRowsFromTokens(): DataGrid.DataGridUtils.Row[] {
    const tokens = this.#tokens.filter(token => token.count > 0);
    return tokens.map(token => ({
      cells: [
        {
          columnId: 'delete-button',
          value: removeTrailingSlash(token.issuerOrigin),
          renderer: this.#deleteButtonRendererForDataGridCell.bind(this),
        },
        {columnId: 'issuer', value: removeTrailingSlash(token.issuerOrigin)},
        {columnId: 'count', value: token.count},
      ],
    }));
  }

  #deleteButtonRendererForDataGridCell(issuer: DataGrid.DataGridUtils.CellValue): LitHtml.TemplateResult {
    // clang-format off
    return LitHtml.html`<${TrustTokensDeleteButton.litTagName}
      .data=${{issuer, deleteClickHandler: this.#deleteClickHandler} as TrustTokensDeleteButtonData}
    ></${TrustTokensDeleteButton.litTagName}>`;
    // clang-format on
  }
}

function removeTrailingSlash(s: string): string {
  return s.replace(/\/$/, '');
}

ComponentHelpers.CustomElements.defineComponent('devtools-trust-tokens-delete-button', TrustTokensDeleteButton);
ComponentHelpers.CustomElements.defineComponent('devtools-trust-tokens-storage-view', TrustTokensView);

declare global {
  // eslint-disable-next-line @typescript-eslint/no-unused-vars
  interface HTMLElementTagNameMap {
    'devtools-trust-tokens-storage-view': TrustTokensView;
    'devtools-trust-tokens-delete-button': TrustTokensDeleteButton;
  }
}
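Because the view is a self-contained custom element configured entirely through its data setter, host code can exercise it in isolation. A hypothetical sketch (inside DevTools the Application panel does this wiring, and the sample origins are made up):

const view = document.createElement('devtools-trust-tokens-storage-view') as TrustTokensView;

view.data = {
  tokens: [
    {issuerOrigin: 'https://issuer.example/', count: 12},
    // This entry is dropped by #buildRowsFromTokens, which keeps count > 0 only.
    {issuerOrigin: 'https://empty.example/', count: 0},
  ],
  deleteClickHandler: (issuerOrigin: string): void => {
    console.log('delete Trust Tokens issued by', issuerOrigin);
  },
};

document.body.appendChild(view);

Assigning data re-renders immediately, and connectedCallback adopts the component stylesheet once the element is attached.
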
log.js

/**
 * @file log.js
 */
import window from 'global/window';

/**
 * Log plain debug messages
 */
const log = function(){
  _logType(null, arguments);
};

/**
 * Keep a history of log messages
 * @type {Array}
 */
log.history = [];

/**
 * Log error messages
 */
log.error = function(){
  _logType('error', arguments);
};

/**
 * Log warning messages
 */
log.warn = function(){
  _logType('warn', arguments);
};

/**
 * Log messages to the console and history based on the type of message
 *
 * @param  {String} type The type of message, or `null` for `log`
 * @param  {Object} args The args to be passed to the log
 * @private
 * @method _logType
 */
function _logType(type, args){
  // convert args to an array to get array functions
  let argsArray = Array.prototype.slice.call(args);

  // if there's no console then don't try to output messages
  // they will still be stored in log.history
  // Was setting these once outside of this function, but containing them
  // in the function makes it easier to test cases where console doesn't exist
  let noop = function(){};

  let console = window['console'] || {
    'log': noop,
    'warn': noop,
    'error': noop
  };

  if (type) {
    // add the type to the front of the message
    argsArray.unshift(type.toUpperCase()+':');
  } else {
    // default to log with no prefix
    type = 'log';
  }

  // add to history
  log.history.push(argsArray);

  // add console prefix after adding to history
  argsArray.unshift('VIDEOJS:');

  // call appropriate log function
  if (console[type].apply) {
    console[type].apply(console, argsArray);
  } else {
    // ie8 doesn't allow error.apply, but it will just join() the array anyway
    console[type](argsArray.join(' '));
  }
}

export default log;
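A short consumer sketch (hypothetical calls; the printed prefixes follow from the two unshift calls in _logType above):

import log from './log.js';

log('player ready');           // console: VIDEOJS: player ready
log.warn('playback stalled');  // console: VIDEOJS: WARN: playback stalled
log.error('decode failed');    // console: VIDEOJS: ERROR: decode failed

// Every call is also recorded in log.history — even when no console
// object exists — so tests can assert on what was logged:
console.assert(log.history.length === 3);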