| file_name<br>large_string, lengths: 4–140 | prefix<br>large_string, lengths: 0–12.1k | suffix<br>large_string, lengths: 0–12k | middle<br>large_string, lengths: 0–7.51k | fim_type<br>large_string, classes: 4 values |
---|---|---|---|---|
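Each row below splits one source file into a prefix, a held-out middle, and a suffix, tagged with one of the four fim_type classes (conditional_block, identifier_name, identifier_body, random_line_split). A minimal sketch of how such a row can be reassembled and rendered for fill-in-the-middle training follows; the data file path and the sentinel tokens are assumptions, not part of this dataset.

```python
# Sketch: reassembling one FIM row and rendering it in PSM (prefix-suffix-middle)
# order. "fim_rows.jsonl" and the <|fim_*|> sentinels are assumptions; substitute
# whatever file and tokens your target model actually uses.
from datasets import load_dataset

ds = load_dataset("json", data_files="fim_rows.jsonl", split="train")  # hypothetical export

def reassemble(row):
    """The original file is always prefix + middle + suffix."""
    return row["prefix"] + row["middle"] + row["suffix"]

def to_psm_prompt(row):
    """Render one row as a PSM-style training string."""
    return (
        "<|fim_prefix|>" + row["prefix"]
        + "<|fim_suffix|>" + row["suffix"]
        + "<|fim_middle|>" + row["middle"]
    )

row = ds[0]
assert reassemble(row).startswith(row["prefix"])
print(row["file_name"], row["fim_type"])
```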
spotcheck.py | #!/usr/bin/python
# -*- coding: utf-8 -*-
from __future__ import print_function
from builtins import zip
import pycrfsuite
def compareTaggers(model1, model2, string_list, module_name):
"""
Compare two models. Given a list of strings, prints out tokens & tags
whenever the two taggers parse a string differently. This is for spot-checking models.
:param model1: a .crfsuite filename
:param model2: another .crfsuite filename
:param string_list: a list of strings to be checked
:param module_name: name of a parser module
"""
module = __import__(module_name)
tagger1 = pycrfsuite.Tagger()
tagger1.open(module_name+'/'+model1)
tagger2 = pycrfsuite.Tagger()
tagger2.open(module_name+'/'+model2)
count_discrepancies = 0
for string in string_list:
tokens = module.tokenize(string)
if tokens:
features = module.tokens2features(tokens)
tags1 = tagger1.tag(features)
tags2 = tagger2.tag(features)
if tags1 != tags2:
count_discrepancies += 1
print('\n')
print("%s. %s" %(count_discrepancies, string))
print('-'*75)
print_spaced('token', model1, model2)
print('-'*75)
for token in zip(tokens, tags1, tags2):
|
print("\n\n%s of %s strings were labeled differently"%(count_discrepancies, len(string_list)))
def print_spaced(s1, s2, s3):
n = 25
print(s1 + " "*(n-len(s1)) + s2 + " "*(n-len(s2)) + s3)
def validateTaggers(model1, model2, labeled_string_list, module_name):
module = __import__(module_name)
tagger1 = pycrfsuite.Tagger()
tagger1.open(module_name+'/'+model1)
tagger2 = pycrfsuite.Tagger()
tagger2.open(module_name+'/'+model2)
wrong_count_1 = 0
wrong_count_2 = 0
wrong_count_both = 0
correct_count = 0
for labeled_string in labeled_string_list:
unlabeled_string, components = labeled_string
tokens = module.tokenize(unlabeled_string)
if tokens:
features = module.tokens2features(tokens)
_, tags_true = list(zip(*components))
tags_true = list(tags_true)
tags1 = tagger1.tag(features)
tags2 = tagger2.tag(features)
if (tags1 != tags_true) and (tags2 != tags_true):
print("\nSTRING: ", unlabeled_string)
print("TRUE: ", tags_true)
print("*%s: "%model1, tags1)
print("*%s: "%model2, tags2)
wrong_count_both += 1
elif (tags1 != tags_true):
print("\nSTRING: ", unlabeled_string)
print("TRUE: ", tags_true)
print("*%s: "%model1, tags1)
print("%s: "%model2, tags2)
wrong_count_1 += 1
elif (tags2 != tags_true):
print("\nSTRING: ", unlabeled_string)
print("TRUE: ", tags_true)
print("%s: "%model1, tags1)
print("*%s: "%model2, tags2)
wrong_count_2 += 1
else:
correct_count += 1
print("\n\nBOTH WRONG: ", wrong_count_both)
print("%s WRONG: %s" %(model1, wrong_count_1))
print("%s WRONG: %s" %(model2, wrong_count_2))
print("BOTH CORRECT: ", correct_count)
| print_spaced(token[0], token[1], token[2]) | conditional_block |
spotcheck.py | #!/usr/bin/python
# -*- coding: utf-8 -*-
from __future__ import print_function
from builtins import zip
import pycrfsuite
def compareTaggers(model1, model2, string_list, module_name):
"""
Compare two models. Given a list of strings, prints out tokens & tags
whenever the two taggers parse a string differently. This is for spot-checking models.
:param model1: a .crfsuite filename
:param model2: another .crfsuite filename
:param string_list: a list of strings to be checked
:param module_name: name of a parser module
"""
module = __import__(module_name)
tagger1 = pycrfsuite.Tagger()
tagger1.open(module_name+'/'+model1)
tagger2 = pycrfsuite.Tagger()
tagger2.open(module_name+'/'+model2)
count_discrepancies = 0
for string in string_list:
tokens = module.tokenize(string)
if tokens:
features = module.tokens2features(tokens)
tags1 = tagger1.tag(features)
tags2 = tagger2.tag(features)
if tags1 != tags2:
count_discrepancies += 1
print('\n')
print("%s. %s" %(count_discrepancies, string))
print('-'*75)
print_spaced('token', model1, model2)
print('-'*75)
for token in zip(tokens, tags1, tags2):
print_spaced(token[0], token[1], token[2])
print("\n\n%s of %s strings were labeled differently"%(count_discrepancies, len(string_list)))
def print_spaced(s1, s2, s3):
n = 25
print(s1 + " "*(n-len(s1)) + s2 + " "*(n-len(s2)) + s3)
def | (model1, model2, labeled_string_list, module_name):
module = __import__(module_name)
tagger1 = pycrfsuite.Tagger()
tagger1.open(module_name+'/'+model1)
tagger2 = pycrfsuite.Tagger()
tagger2.open(module_name+'/'+model2)
wrong_count_1 = 0
wrong_count_2 = 0
wrong_count_both = 0
correct_count = 0
for labeled_string in labeled_string_list:
unlabeled_string, components = labeled_string
tokens = module.tokenize(unlabeled_string)
if tokens:
features = module.tokens2features(tokens)
_, tags_true = list(zip(*components))
tags_true = list(tags_true)
tags1 = tagger1.tag(features)
tags2 = tagger2.tag(features)
if (tags1 != tags_true) and (tags2 != tags_true):
print("\nSTRING: ", unlabeled_string)
print("TRUE: ", tags_true)
print("*%s: "%model1, tags1)
print("*%s: "%model2, tags2)
wrong_count_both += 1
elif (tags1 != tags_true):
print("\nSTRING: ", unlabeled_string)
print("TRUE: ", tags_true)
print("*%s: "%model1, tags1)
print("%s: "%model2, tags2)
wrong_count_1 += 1
elif (tags2 != tags_true):
print("\nSTRING: ", unlabeled_string)
print("TRUE: ", tags_true)
print("%s: "%model1, tags1)
print("*%s: "%model2, tags2)
wrong_count_2 += 1
else:
correct_count += 1
print("\n\nBOTH WRONG: ", wrong_count_both)
print("%s WRONG: %s" %(model1, wrong_count_1))
print("%s WRONG: %s" %(model2, wrong_count_2))
print("BOTH CORRECT: ", correct_count)
| validateTaggers | identifier_name |
spotcheck.py | #!/usr/bin/python
# -*- coding: utf-8 -*-
from __future__ import print_function
from builtins import zip
import pycrfsuite
def compareTaggers(model1, model2, string_list, module_name):
"""
Compare two models. Given a list of strings, prints out tokens & tags
whenever the two taggers parse a string differently. This is for spot-checking models.
:param model1: a .crfsuite filename
:param model2: another .crfsuite filename
:param string_list: a list of strings to be checked
:param module_name: name of a parser module
"""
module = __import__(module_name)
tagger1 = pycrfsuite.Tagger()
tagger1.open(module_name+'/'+model1)
tagger2 = pycrfsuite.Tagger()
tagger2.open(module_name+'/'+model2)
count_discrepancies = 0
for string in string_list:
tokens = module.tokenize(string)
if tokens:
features = module.tokens2features(tokens)
tags1 = tagger1.tag(features)
tags2 = tagger2.tag(features)
if tags1 != tags2:
count_discrepancies += 1
print('\n')
print("%s. %s" %(count_discrepancies, string))
print('-'*75)
print_spaced('token', model1, model2)
print('-'*75)
for token in zip(tokens, tags1, tags2):
print_spaced(token[0], token[1], token[2])
print("\n\n%s of %s strings were labeled differently"%(count_discrepancies, len(string_list)))
def print_spaced(s1, s2, s3):
n = 25
print(s1 + " "*(n-len(s1)) + s2 + " "*(n-len(s2)) + s3)
def validateTaggers(model1, model2, labeled_string_list, module_name):
module = __import__(module_name)
tagger1 = pycrfsuite.Tagger()
tagger1.open(module_name+'/'+model1)
tagger2 = pycrfsuite.Tagger()
tagger2.open(module_name+'/'+model2)
wrong_count_1 = 0
wrong_count_2 = 0
wrong_count_both = 0
correct_count = 0
for labeled_string in labeled_string_list:
unlabeled_string, components = labeled_string
tokens = module.tokenize(unlabeled_string)
if tokens:
features = module.tokens2features(tokens)
_, tags_true = list(zip(*components))
tags_true = list(tags_true)
tags1 = tagger1.tag(features)
tags2 = tagger2.tag(features)
if (tags1 != tags_true) and (tags2 != tags_true):
print("\nSTRING: ", unlabeled_string)
print("TRUE: ", tags_true)
print("*%s: "%model1, tags1)
print("*%s: "%model2, tags2)
wrong_count_both += 1
elif (tags1 != tags_true):
print("\nSTRING: ", unlabeled_string)
print("TRUE: ", tags_true)
print("*%s: "%model1, tags1)
print("%s: "%model2, tags2)
wrong_count_1 += 1
elif (tags2 != tags_true):
print("\nSTRING: ", unlabeled_string)
print("TRUE: ", tags_true) | correct_count += 1
print("\n\nBOTH WRONG: ", wrong_count_both)
print("%s WRONG: %s" %(model1, wrong_count_1))
print("%s WRONG: %s" %(model2, wrong_count_2))
print("BOTH CORRECT: ", correct_count) | print("%s: "%model1, tags1)
print("*%s: "%model2, tags2)
wrong_count_2 += 1
else: | random_line_split |
spotcheck.py | #!/usr/bin/python
# -*- coding: utf-8 -*-
from __future__ import print_function
from builtins import zip
import pycrfsuite
def compareTaggers(model1, model2, string_list, module_name):
| features = module.tokens2features(tokens)
tags1 = tagger1.tag(features)
tags2 = tagger2.tag(features)
if tags1 != tags2:
count_discrepancies += 1
print('\n')
print("%s. %s" %(count_discrepancies, string))
print('-'*75)
print_spaced('token', model1, model2)
print('-'*75)
for token in zip(tokens, tags1, tags2):
print_spaced(token[0], token[1], token[2])
print("\n\n%s of %s strings were labeled differently"%(count_discrepancies, len(string_list)))
def print_spaced(s1, s2, s3):
n = 25
print(s1 + " "*(n-len(s1)) + s2 + " "*(n-len(s2)) + s3)
def validateTaggers(model1, model2, labeled_string_list, module_name):
module = __import__(module_name)
tagger1 = pycrfsuite.Tagger()
tagger1.open(module_name+'/'+model1)
tagger2 = pycrfsuite.Tagger()
tagger2.open(module_name+'/'+model2)
wrong_count_1 = 0
wrong_count_2 = 0
wrong_count_both = 0
correct_count = 0
for labeled_string in labeled_string_list:
unlabeled_string, components = labeled_string
tokens = module.tokenize(unlabeled_string)
if tokens:
features = module.tokens2features(tokens)
_, tags_true = list(zip(*components))
tags_true = list(tags_true)
tags1 = tagger1.tag(features)
tags2 = tagger2.tag(features)
if (tags1 != tags_true) and (tags2 != tags_true):
print("\nSTRING: ", unlabeled_string)
print("TRUE: ", tags_true)
print("*%s: "%model1, tags1)
print("*%s: "%model2, tags2)
wrong_count_both += 1
elif (tags1 != tags_true):
print("\nSTRING: ", unlabeled_string)
print("TRUE: ", tags_true)
print("*%s: "%model1, tags1)
print("%s: "%model2, tags2)
wrong_count_1 += 1
elif (tags2 != tags_true):
print("\nSTRING: ", unlabeled_string)
print("TRUE: ", tags_true)
print("%s: "%model1, tags1)
print("*%s: "%model2, tags2)
wrong_count_2 += 1
else:
correct_count += 1
print("\n\nBOTH WRONG: ", wrong_count_both)
print("%s WRONG: %s" %(model1, wrong_count_1))
print("%s WRONG: %s" %(model2, wrong_count_2))
print("BOTH CORRECT: ", correct_count)
| """
Compare two models. Given a list of strings, prints out tokens & tags
whenever the two taggers parse a string differently. This is for spot-checking models.
:param model1: a .crfsuite filename
:param model2: another .crfsuite filename
:param string_list: a list of strings to be checked
:param module_name: name of a parser module
"""
module = __import__(module_name)
tagger1 = pycrfsuite.Tagger()
tagger1.open(module_name+'/'+model1)
tagger2 = pycrfsuite.Tagger()
tagger2.open(module_name+'/'+model2)
count_discrepancies = 0
for string in string_list:
tokens = module.tokenize(string)
if tokens: | identifier_body |
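The four rows above all derive from the same spotcheck.py helper. A minimal usage sketch follows; the module name, model filenames, sample strings, and tag labels are all assumptions. The parser module only needs to expose tokenize and tokens2features, as the code requires, e.g. a parserator-style module such as usaddress.

```python
# Hypothetical spot-check run comparing two trained CRF models.
# 'usaddress', the .crfsuite filenames, and the sample data are assumptions.
from spotcheck import compareTaggers, validateTaggers

test_strings = [
    "123 Main St Springfield IL 62704",
    "PO Box 42, Portland OR",
]

# Prints a token-by-token table wherever the two models disagree.
compareTaggers("learned_settings.crfsuite", "new_settings.crfsuite",
               test_strings, module_name="usaddress")

# For labeled data, each item is (raw_string, [(token, true_tag), ...]).
labeled = [
    ("123 Main St", [("123", "AddressNumber"),
                     ("Main", "StreetName"),
                     ("St", "StreetNamePostType")]),
]
validateTaggers("learned_settings.crfsuite", "new_settings.crfsuite",
                labeled, module_name="usaddress")
```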
InstapaperShareButton.js | 'use strict';
Object.defineProperty(exports, "__esModule", {
value: true
});
var _propTypes = require('prop-types');
var _propTypes2 = _interopRequireDefault(_propTypes);
var _assert = require('assert');
var _assert2 = _interopRequireDefault(_assert);
var _createShareButton = require('./utils/createShareButton');
var _createShareButton2 = _interopRequireDefault(_createShareButton);
var _objectToGetParams = require('./utils/objectToGetParams');
var _objectToGetParams2 = _interopRequireDefault(_objectToGetParams);
function _interopRequireDefault(obj) |
function instapaperLink(url, _ref) {
var title = _ref.title,
description = _ref.description;
(0, _assert2.default)(url, 'instapaper.url');
return 'http://www.instapaper.com/hello2' + (0, _objectToGetParams2.default)({
url: url,
title: title,
description: description
});
}
var InstapaperShareButton = (0, _createShareButton2.default)('instapaper', instapaperLink, function (props) {
return {
title: props.title,
description: props.description
};
}, {
title: _propTypes2.default.string,
description: _propTypes2.default.string
}, {
windowWidth: 500,
windowHeight: 500
});
exports.default = InstapaperShareButton; | { return obj && obj.__esModule ? obj : { default: obj }; } | identifier_body |
InstapaperShareButton.js | 'use strict';
Object.defineProperty(exports, "__esModule", {
value: true
});
var _propTypes = require('prop-types');
var _propTypes2 = _interopRequireDefault(_propTypes);
var _assert = require('assert');
var _assert2 = _interopRequireDefault(_assert);
var _createShareButton = require('./utils/createShareButton');
var _createShareButton2 = _interopRequireDefault(_createShareButton);
var _objectToGetParams = require('./utils/objectToGetParams');
var _objectToGetParams2 = _interopRequireDefault(_objectToGetParams);
function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }
function | (url, _ref) {
var title = _ref.title,
description = _ref.description;
(0, _assert2.default)(url, 'instapaper.url');
return 'http://www.instapaper.com/hello2' + (0, _objectToGetParams2.default)({
url: url,
title: title,
description: description
});
}
var InstapaperShareButton = (0, _createShareButton2.default)('instapaper', instapaperLink, function (props) {
return {
title: props.title,
description: props.description
};
}, {
title: _propTypes2.default.string,
description: _propTypes2.default.string
}, {
windowWidth: 500,
windowHeight: 500
});
exports.default = InstapaperShareButton; | instapaperLink | identifier_name |
InstapaperShareButton.js | 'use strict';
Object.defineProperty(exports, "__esModule", {
value: true
});
var _propTypes = require('prop-types');
var _propTypes2 = _interopRequireDefault(_propTypes);
var _assert = require('assert');
var _assert2 = _interopRequireDefault(_assert);
var _createShareButton = require('./utils/createShareButton');
var _createShareButton2 = _interopRequireDefault(_createShareButton);
| var _objectToGetParams = require('./utils/objectToGetParams');
var _objectToGetParams2 = _interopRequireDefault(_objectToGetParams);
function _interopRequireDefault(obj) { return obj && obj.__esModule ? obj : { default: obj }; }
function instapaperLink(url, _ref) {
var title = _ref.title,
description = _ref.description;
(0, _assert2.default)(url, 'instapaper.url');
return 'http://www.instapaper.com/hello2' + (0, _objectToGetParams2.default)({
url: url,
title: title,
description: description
});
}
var InstapaperShareButton = (0, _createShareButton2.default)('instapaper', instapaperLink, function (props) {
return {
title: props.title,
description: props.description
};
}, {
title: _propTypes2.default.string,
description: _propTypes2.default.string
}, {
windowWidth: 500,
windowHeight: 500
});
exports.default = InstapaperShareButton; | random_line_split |
|
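The InstapaperShareButton.js rows above are compiled react-share output. A small plain-JS usage sketch (no JSX) follows; the URL, title, and mount point are placeholders, and url is mandatory, as the assert inside instapaperLink enforces.

```js
// Plain-JS usage of the compiled button; url/title/'root' are placeholders.
var React = require('react');
var ReactDOM = require('react-dom');
var InstapaperShareButton = require('./InstapaperShareButton').default;

var button = React.createElement(
  InstapaperShareButton,
  { url: 'https://example.com/article', title: 'An article' }, // url is required
  'Save to Instapaper'
);

ReactDOM.render(button, document.getElementById('root'));
```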
dirac-rss-list-status.py | #!/usr/bin/env python
"""
dirac-rss-list-status
Script that dumps the DB information for the elements into the standard output.
It returns information concerning the StatusType and Status attributes.
Usage:
dirac-rss-list-status
--element= Element family to be Synchronized ( Site, Resource or Node )
--elementType= ElementType narrows the search; None if default
--name= ElementName; None if default
--tokenOwner= Owner of the token; None if default
--statusType= StatusType; None if default
--status= Status; None if default
Verbosity:
-o LogLevel=LEVEL NOTICE by default, levels available: INFO, DEBUG, VERBOSE..
"""
from DIRAC import gLogger, exit as DIRACExit, version
from DIRAC.Core.Base import Script
from DIRAC.ResourceStatusSystem.Client import ResourceStatusClient
from DIRAC.Core.Utilities.PrettyPrint import printTable
__RCSID__ = '$Id:$'
subLogger = None
switchDict = {}
def registerSwitches():
'''
Registers all switches that can be used while calling the script from the
command line interface.
'''
switches = (
( 'element=', 'Element family to be Synchronized ( Site, Resource or Node )' ),
( 'elementType=', 'ElementType narrows the search; None if default' ),
( 'name=', 'ElementName; None if default' ),
( 'tokenOwner=', 'Owner of the token; None if default' ),
( 'statusType=', 'StatusType; None if default' ),
( 'status=', 'Status; None if default' ),
)
for switch in switches:
Script.registerSwitch( '', switch[ 0 ], switch[ 1 ] )
def registerUsageMessage():
'''
Takes the script __doc__ and adds the DIRAC version to it
'''
hLine = ' ' + '='*78 + '\n'
usageMessage = hLine
usageMessage += ' DIRAC %s\n' % version
usageMessage += __doc__
usageMessage += '\n' + hLine
Script.setUsageMessage( usageMessage )
def parseSwitches():
'''
Parses the arguments passed by the user
'''
Script.parseCommandLine( ignoreErrors = True )
args = Script.getPositionalArgs()
if args:
subLogger.error( "Found the following positional args '%s', but we only accept switches" % args )
subLogger.error( "Please, check documentation below" )
Script.showHelp()
DIRACExit( 1 )
switches = dict( Script.getUnprocessedSwitches() )
# Default values
switches.setdefault( 'elementType', None )
switches.setdefault( 'name', None )
switches.setdefault( 'tokenOwner', None )
switches.setdefault( 'statusType', None )
switches.setdefault( 'status', None )
if 'element' not in switches:
subLogger.error( "element Switch missing" )
subLogger.error( "Please, check documentation below" )
Script.showHelp()
DIRACExit( 1 )
if not switches[ 'element' ] in ( 'Site', 'Resource', 'Node' ):
|
subLogger.debug( "The switches used are:" )
map( subLogger.debug, switches.iteritems() )
return switches
#...............................................................................
def getElements():
'''
Given the switches, gets a list of elements with their respective statustype
and status attributes.
'''
rssClient = ResourceStatusClient.ResourceStatusClient()
meta = { 'columns' : [] }
for key in ( 'Name', 'StatusType', 'Status', 'ElementType', 'TokenOwner' ):
#Transforms from upper lower case to lower upper case
if switchDict[ key[0].lower() + key[1:] ] is None:
meta[ 'columns' ].append( key )
elements = rssClient.selectStatusElement(
switchDict[ 'element' ], 'Status',
name = switchDict[ 'name' ].split(',') if switchDict['name'] else None,
statusType = switchDict[ 'statusType' ].split(',') if switchDict['statusType'] else None,
status = switchDict[ 'status' ].split(',') if switchDict['status'] else None,
elementType = switchDict[ 'elementType' ].split(',') if switchDict['elementType'] else None,
tokenOwner = switchDict[ 'tokenOwner' ].split(',') if switchDict['tokenOwner'] else None,
meta = meta )
return elements
def tabularPrint( elementsList ):
'''
Prints the list of elements in a tabular format
'''
subLogger.notice( '' )
subLogger.notice( 'Selection parameters:' )
subLogger.notice( ' %s: %s' % ( 'element'.ljust( 15 ), switchDict[ 'element' ] ) )
titles = []
for key in ( 'Name', 'StatusType', 'Status', 'ElementType', 'TokenOwner' ):
#Transforms from upper lower case to lower upper case
keyT = key[0].lower() + key[1:]
if switchDict[ keyT ] is None:
titles.append( key )
else:
subLogger.notice( ' %s: %s' % ( key.ljust( 15 ), switchDict[ keyT ] ) )
subLogger.notice( '' )
subLogger.notice( printTable( titles, elementsList, printOut = False,
numbering = False, columnSeparator = ' | ' ) )
#...............................................................................
def run():
'''
Main function of the script
'''
elements = getElements()
if not elements[ 'OK' ]:
subLogger.error( elements )
DIRACExit( 1 )
elements = elements[ 'Value' ]
tabularPrint( elements )
#...............................................................................
if __name__ == "__main__":
subLogger = gLogger.getSubLogger( __file__ )
#Script initialization
registerSwitches()
registerUsageMessage()
switchDict = parseSwitches()
#Run script
run()
#Bye
DIRACExit( 0 )
################################################################################
#EOF#EOF#EOF#EOF#EOF#EOF#EOF#EOF#EOF#EOF#EOF#EOF#EOF#EOF#EOF#EOF#EOF#EOF#EOF#EOF
| subLogger.error( "Found %s as element switch" % switches[ 'element' ] )
subLogger.error( "Please, check documentation below" )
Script.showHelp()
DIRACExit( 1 ) | conditional_block |
dirac-rss-list-status.py | #!/usr/bin/env python
"""
dirac-rss-list-status
Script that dumps the DB information for the elements into the standard output.
It returns information concerning the StatusType and Status attributes.
Usage:
dirac-rss-list-status
--element= Element family to be Synchronized ( Site, Resource or Node )
--elementType= ElementType narrows the search; None if default
--name= ElementName; None if default
--tokenOwner= Owner of the token; None if default
--statusType= StatusType; None if default
--status= Status; None if default
Verbosity:
-o LogLevel=LEVEL NOTICE by default, levels available: INFO, DEBUG, VERBOSE..
"""
from DIRAC import gLogger, exit as DIRACExit, version
from DIRAC.Core.Base import Script
from DIRAC.ResourceStatusSystem.Client import ResourceStatusClient
from DIRAC.Core.Utilities.PrettyPrint import printTable
__RCSID__ = '$Id:$'
subLogger = None
switchDict = {}
def registerSwitches():
'''
Registers all switches that can be used while calling the script from the
command line interface.
'''
switches = (
( 'element=', 'Element family to be Synchronized ( Site, Resource or Node )' ),
( 'elementType=', 'ElementType narrows the search; None if default' ),
( 'name=', 'ElementName; None if default' ),
( 'tokenOwner=', 'Owner of the token; None if default' ),
( 'statusType=', 'StatusType; None if default' ),
( 'status=', 'Status; None if default' ),
)
for switch in switches:
Script.registerSwitch( '', switch[ 0 ], switch[ 1 ] )
def registerUsageMessage():
'''
Takes the script __doc__ and adds the DIRAC version to it
'''
hLine = ' ' + '='*78 + '\n'
usageMessage = hLine
usageMessage += ' DIRAC %s\n' % version
usageMessage += __doc__
usageMessage += '\n' + hLine
Script.setUsageMessage( usageMessage )
def parseSwitches():
'''
Parses the arguments passed by the user
'''
Script.parseCommandLine( ignoreErrors = True )
args = Script.getPositionalArgs()
if args:
subLogger.error( "Found the following positional args '%s', but we only accept switches" % args )
subLogger.error( "Please, check documentation below" )
Script.showHelp()
DIRACExit( 1 )
switches = dict( Script.getUnprocessedSwitches() )
# Default values
switches.setdefault( 'elementType', None )
switches.setdefault( 'name', None )
switches.setdefault( 'tokenOwner', None )
switches.setdefault( 'statusType', None )
switches.setdefault( 'status', None )
if 'element' not in switches:
subLogger.error( "element Switch missing" )
subLogger.error( "Please, check documentation below" )
Script.showHelp()
DIRACExit( 1 )
if not switches[ 'element' ] in ( 'Site', 'Resource', 'Node' ):
subLogger.error( "Found %s as element switch" % switches[ 'element' ] )
subLogger.error( "Please, check documentation below" )
Script.showHelp()
DIRACExit( 1 )
subLogger.debug( "The switches used are:" )
map( subLogger.debug, switches.iteritems() )
return switches
#...............................................................................
def | ():
'''
Given the switches, gets a list of elements with their respective statustype
and status attributes.
'''
rssClient = ResourceStatusClient.ResourceStatusClient()
meta = { 'columns' : [] }
for key in ( 'Name', 'StatusType', 'Status', 'ElementType', 'TokenOwner' ):
#Transforms from upper lower case to lower upper case
if switchDict[ key[0].lower() + key[1:] ] is None:
meta[ 'columns' ].append( key )
elements = rssClient.selectStatusElement(
switchDict[ 'element' ], 'Status',
name = switchDict[ 'name' ].split(',') if switchDict['name'] else None,
statusType = switchDict[ 'statusType' ].split(',') if switchDict['statusType'] else None,
status = switchDict[ 'status' ].split(',') if switchDict['status'] else None,
elementType = switchDict[ 'elementType' ].split(',') if switchDict['elementType'] else None,
tokenOwner = switchDict[ 'tokenOwner' ].split(',') if switchDict['tokenOwner'] else None,
meta = meta )
return elements
def tabularPrint( elementsList ):
'''
Prints the list of elements in a tabular format
'''
subLogger.notice( '' )
subLogger.notice( 'Selection parameters:' )
subLogger.notice( ' %s: %s' % ( 'element'.ljust( 15 ), switchDict[ 'element' ] ) )
titles = []
for key in ( 'Name', 'StatusType', 'Status', 'ElementType', 'TokenOwner' ):
#Transforms from upper lower case to lower upper case
keyT = key[0].lower() + key[1:]
if switchDict[ keyT ] is None:
titles.append( key )
else:
subLogger.notice( ' %s: %s' % ( key.ljust( 15 ), switchDict[ keyT ] ) )
subLogger.notice( '' )
subLogger.notice( printTable( titles, elementsList, printOut = False,
numbering = False, columnSeparator = ' | ' ) )
#...............................................................................
def run():
'''
Main function of the script
'''
elements = getElements()
if not elements[ 'OK' ]:
subLogger.error( elements )
DIRACExit( 1 )
elements = elements[ 'Value' ]
tabularPrint( elements )
#...............................................................................
if __name__ == "__main__":
subLogger = gLogger.getSubLogger( __file__ )
#Script initialization
registerSwitches()
registerUsageMessage()
switchDict = parseSwitches()
#Run script
run()
#Bye
DIRACExit( 0 )
################################################################################
#EOF#EOF#EOF#EOF#EOF#EOF#EOF#EOF#EOF#EOF#EOF#EOF#EOF#EOF#EOF#EOF#EOF#EOF#EOF#EOF
| getElements | identifier_name |
dirac-rss-list-status.py | #!/usr/bin/env python
"""
dirac-rss-list-status
Script that dumps the DB information for the elements into the standard output.
It returns information concerning the StatusType and Status attributes.
Usage:
dirac-rss-list-status
--element= Element family to be Synchronized ( Site, Resource or Node )
--elementType= ElementType narrows the search; None if default
--name= ElementName; None if default
--tokenOwner= Owner of the token; None if default
--statusType= StatusType; None if default
--status= Status; None if default
Verbosity:
-o LogLevel=LEVEL NOTICE by default, levels available: INFO, DEBUG, VERBOSE..
"""
from DIRAC import gLogger, exit as DIRACExit, version
from DIRAC.Core.Base import Script
from DIRAC.ResourceStatusSystem.Client import ResourceStatusClient
from DIRAC.Core.Utilities.PrettyPrint import printTable
__RCSID__ = '$Id:$'
subLogger = None
switchDict = {}
def registerSwitches():
'''
Registers all switches that can be used while calling the script from the
command line interface.
'''
switches = (
( 'element=', 'Element family to be Synchronized ( Site, Resource or Node )' ),
( 'elementType=', 'ElementType narrows the search; None if default' ),
( 'name=', 'ElementName; None if default' ),
( 'tokenOwner=', 'Owner of the token; None if default' ),
( 'statusType=', 'StatusType; None if default' ),
( 'status=', 'Status; None if default' ),
)
for switch in switches:
Script.registerSwitch( '', switch[ 0 ], switch[ 1 ] )
def registerUsageMessage():
'''
Takes the script __doc__ and adds the DIRAC version to it
'''
hLine = ' ' + '='*78 + '\n'
usageMessage = hLine
usageMessage += ' DIRAC %s\n' % version
usageMessage += __doc__
usageMessage += '\n' + hLine
Script.setUsageMessage( usageMessage )
def parseSwitches():
'''
Parses the arguments passed by the user
'''
Script.parseCommandLine( ignoreErrors = True )
args = Script.getPositionalArgs()
if args:
subLogger.error( "Found the following positional args '%s', but we only accept switches" % args )
subLogger.error( "Please, check documentation below" )
Script.showHelp()
DIRACExit( 1 )
switches = dict( Script.getUnprocessedSwitches() )
# Default values
switches.setdefault( 'elementType', None )
switches.setdefault( 'name', None )
switches.setdefault( 'tokenOwner', None )
switches.setdefault( 'statusType', None )
switches.setdefault( 'status', None )
if 'element' not in switches:
subLogger.error( "element Switch missing" )
subLogger.error( "Please, check documentation below" )
Script.showHelp()
DIRACExit( 1 )
if not switches[ 'element' ] in ( 'Site', 'Resource', 'Node' ):
subLogger.error( "Found %s as element switch" % switches[ 'element' ] )
subLogger.error( "Please, check documentation below" )
Script.showHelp()
DIRACExit( 1 )
subLogger.debug( "The switches used are:" )
map( subLogger.debug, switches.iteritems() )
return switches
#...............................................................................
def getElements():
'''
Given the switches, gets a list of elements with their respective statustype
and status attributes.
'''
rssClient = ResourceStatusClient.ResourceStatusClient()
meta = { 'columns' : [] }
for key in ( 'Name', 'StatusType', 'Status', 'ElementType', 'TokenOwner' ):
#Transforms from upper lower case to lower upper case
if switchDict[ key[0].lower() + key[1:] ] is None:
meta[ 'columns' ].append( key )
elements = rssClient.selectStatusElement(
switchDict[ 'element' ], 'Status',
name = switchDict[ 'name' ].split(',') if switchDict['name'] else None,
statusType = switchDict[ 'statusType' ].split(',') if switchDict['statusType'] else None,
status = switchDict[ 'status' ].split(',') if switchDict['status'] else None,
elementType = switchDict[ 'elementType' ].split(',') if switchDict['elementType'] else None,
tokenOwner = switchDict[ 'tokenOwner' ].split(',') if switchDict['tokenOwner'] else None,
meta = meta )
return elements
def tabularPrint( elementsList ):
'''
Prints the list of elements in a tabular format
'''
subLogger.notice( '' )
subLogger.notice( 'Selection parameters:' )
subLogger.notice( ' %s: %s' % ( 'element'.ljust( 15 ), switchDict[ 'element' ] ) )
titles = []
for key in ( 'Name', 'StatusType', 'Status', 'ElementType', 'TokenOwner' ):
#Transforms from upper lower case to lower upper case
keyT = key[0].lower() + key[1:]
if switchDict[ keyT ] is None:
titles.append( key )
else:
subLogger.notice( ' %s: %s' % ( key.ljust( 15 ), switchDict[ keyT ] ) )
subLogger.notice( '' )
subLogger.notice( printTable( titles, elementsList, printOut = False,
numbering = False, columnSeparator = ' | ' ) )
#...............................................................................
def run():
|
#...............................................................................
if __name__ == "__main__":
subLogger = gLogger.getSubLogger( __file__ )
#Script initialization
registerSwitches()
registerUsageMessage()
switchDict = parseSwitches()
#Run script
run()
#Bye
DIRACExit( 0 )
################################################################################
#EOF#EOF#EOF#EOF#EOF#EOF#EOF#EOF#EOF#EOF#EOF#EOF#EOF#EOF#EOF#EOF#EOF#EOF#EOF#EOF
| '''
Main function of the script
'''
elements = getElements()
if not elements[ 'OK' ]:
subLogger.error( elements )
DIRACExit( 1 )
elements = elements[ 'Value' ]
tabularPrint( elements ) | identifier_body |
dirac-rss-list-status.py | #!/usr/bin/env python
"""
dirac-rss-list-status
Script that dumps the DB information for the elements into the standard output.
It returns information concerning the StatusType and Status attributes.
Usage:
dirac-rss-list-status
--element= Element family to be Synchronized ( Site, Resource or Node )
--elementType= ElementType narrows the search; None if default
--name= ElementName; None if default
--tokenOwner= Owner of the token; None if default
--statusType= StatusType; None if default
--status= Status; None if default
Verbosity:
-o LogLevel=LEVEL NOTICE by default, levels available: INFO, DEBUG, VERBOSE..
"""
from DIRAC import gLogger, exit as DIRACExit, version
from DIRAC.Core.Base import Script
from DIRAC.ResourceStatusSystem.Client import ResourceStatusClient
from DIRAC.Core.Utilities.PrettyPrint import printTable
__RCSID__ = '$Id:$'
subLogger = None
switchDict = {}
def registerSwitches():
'''
Registers all switches that can be used while calling the script from the
command line interface.
'''
switches = (
( 'element=', 'Element family to be Synchronized ( Site, Resource or Node )' ),
( 'elementType=', 'ElementType narrows the search; None if default' ),
( 'name=', 'ElementName; None if default' ),
( 'tokenOwner=', 'Owner of the token; None if default' ),
( 'statusType=', 'StatusType; None if default' ),
( 'status=', 'Status; None if default' ),
)
for switch in switches: |
def registerUsageMessage():
'''
Takes the script __doc__ and adds the DIRAC version to it
'''
hLine = ' ' + '='*78 + '\n'
usageMessage = hLine
usageMessage += ' DIRAC %s\n' % version
usageMessage += __doc__
usageMessage += '\n' + hLine
Script.setUsageMessage( usageMessage )
def parseSwitches():
'''
Parses the arguments passed by the user
'''
Script.parseCommandLine( ignoreErrors = True )
args = Script.getPositionalArgs()
if args:
subLogger.error( "Found the following positional args '%s', but we only accept switches" % args )
subLogger.error( "Please, check documentation below" )
Script.showHelp()
DIRACExit( 1 )
switches = dict( Script.getUnprocessedSwitches() )
# Default values
switches.setdefault( 'elementType', None )
switches.setdefault( 'name', None )
switches.setdefault( 'tokenOwner', None )
switches.setdefault( 'statusType', None )
switches.setdefault( 'status', None )
if 'element' not in switches:
subLogger.error( "element Switch missing" )
subLogger.error( "Please, check documentation below" )
Script.showHelp()
DIRACExit( 1 )
if not switches[ 'element' ] in ( 'Site', 'Resource', 'Node' ):
subLogger.error( "Found %s as element switch" % switches[ 'element' ] )
subLogger.error( "Please, check documentation below" )
Script.showHelp()
DIRACExit( 1 )
subLogger.debug( "The switches used are:" )
map( subLogger.debug, switches.iteritems() )
return switches
#...............................................................................
def getElements():
'''
Given the switches, gets a list of elements with their respective statustype
and status attributes.
'''
rssClient = ResourceStatusClient.ResourceStatusClient()
meta = { 'columns' : [] }
for key in ( 'Name', 'StatusType', 'Status', 'ElementType', 'TokenOwner' ):
#Transforms from upper lower case to lower upper case
if switchDict[ key[0].lower() + key[1:] ] is None:
meta[ 'columns' ].append( key )
elements = rssClient.selectStatusElement(
switchDict[ 'element' ], 'Status',
name = switchDict[ 'name' ].split(',') if switchDict['name'] else None,
statusType = switchDict[ 'statusType' ].split(',') if switchDict['statusType'] else None,
status = switchDict[ 'status' ].split(',') if switchDict['status'] else None,
elementType = switchDict[ 'elementType' ].split(',') if switchDict['elementType'] else None,
tokenOwner = switchDict[ 'tokenOwner' ].split(',') if switchDict['tokenOwner'] else None,
meta = meta )
return elements
def tabularPrint( elementsList ):
'''
Prints the list of elements in a tabular format
'''
subLogger.notice( '' )
subLogger.notice( 'Selection parameters:' )
subLogger.notice( ' %s: %s' % ( 'element'.ljust( 15 ), switchDict[ 'element' ] ) )
titles = []
for key in ( 'Name', 'StatusType', 'Status', 'ElementType', 'TokenOwner' ):
#Transforms from upper lower case to lower upper case
keyT = key[0].lower() + key[1:]
if switchDict[ keyT ] is None:
titles.append( key )
else:
subLogger.notice( ' %s: %s' % ( key.ljust( 15 ), switchDict[ keyT ] ) )
subLogger.notice( '' )
subLogger.notice( printTable( titles, elementsList, printOut = False,
numbering = False, columnSeparator = ' | ' ) )
#...............................................................................
def run():
'''
Main function of the script
'''
elements = getElements()
if not elements[ 'OK' ]:
subLogger.error( elements )
DIRACExit( 1 )
elements = elements[ 'Value' ]
tabularPrint( elements )
#...............................................................................
if __name__ == "__main__":
subLogger = gLogger.getSubLogger( __file__ )
#Script initialization
registerSwitches()
registerUsageMessage()
switchDict = parseSwitches()
#Run script
run()
#Bye
DIRACExit( 0 )
################################################################################
#EOF#EOF#EOF#EOF#EOF#EOF#EOF#EOF#EOF#EOF#EOF#EOF#EOF#EOF#EOF#EOF#EOF#EOF#EOF#EOF | Script.registerSwitch( '', switch[ 0 ], switch[ 1 ] ) | random_line_split |
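The four rows above share one DIRAC script. Per its own usage text, typical invocations might look like the following; the element, elementType, and tokenOwner values shown are assumptions consistent with the documented switches, not values taken from a real installation.

```
dirac-rss-list-status --element=Site --status=Active
dirac-rss-list-status --element=Resource --elementType=StorageElement --tokenOwner=rs_svc
```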
car.py | """
Created on 11.09.2014
@author: [email protected]
"""
from abc import ABCMeta, abstractmethod
import random
import datetime
class BaseCar(metaclass=ABCMeta):
"""
Represents the fundamentals of a car
"""
def __init__(self, env, tank_size):
"""
Constructor
:type tank_size: int
:type env: simulation.environment.SimulationEnvironment
"""
env.car = self
self.env = env
self._tankSize = float(tank_size)
self._tankFilling = BaseCar._random_tank_filling(self._tankSize)
self._current_position = None
self._fuel_type = 'e5'
self._driven_distance = float(0)
# self.log = logging.getLogger('spritsim.Car' + commuter_id)
@staticmethod
def _random_tank_filling(maximum):
"""
Returns a random tank filling in litre
Method for initializing a car with a random tank filling between 10 and maximum litres
:param maximum: maximum tank capacity
:return: A random filling
:rtype: float
"""
return random.uniform(10, maximum)
@property
def current_position(self):
"""Returns the nodes target ID
:rtype: int
"""
return self._current_position
@property
def driven_distance(self):
"""
The car's odometer
:return: The total distance the car has traveled
:rtype: float
"""
return self._driven_distance
@property
def fuel_type(self):
"""
The car's fuel type
:return: Type of fuel (e5|diesel)
:rtype: str
"""
return self._fuel_type
@property
def tank_size(self):
"""
:return: Size of the car's tank in litre
:rtype: float
"""
return self._tankSize
@property
def current_filling(self):
"""
:return: Current filling of the car's tank
:rtype: float
"""
return self._tankFilling
def consume_fuel(self, speed, distance, road_type):
"""
:param int speed: Maximum allowed speed
:param float distance: Length of the segment
:param simulation.routing.route.RouteClazz road_type: The type of the road
:return:
"""
self._tankFilling -= self.consumption_per_km * distance
@property
@abstractmethod
def consumption_per_km(self):
"""
:return: The fuel consumption of the car per km
:rtype: float
"""
pass
@property
def km_left(self):
"""
Returns the remaining km the car can drive
:return: Distance car is able to drive
:rtype: float
"""
return self.current_filling / self.consumption_per_km
def refilled(self):
"""Car has been refilled at a filling station"""
self._tankFilling = self._tankSize
def drive(self, ignore_refill_warning=False):
"""Lets the car drive the given route
On arrival at the destination, a CommuterAction for the route is returned or, if the car needs refilling,
the action to search for a refilling station is returned.
:param ignore_refill_warning: Tells the function not to raise a RefillWarning (default: False)
:type ignore_refill_warning: bool
:raises RefillWarning: If the tank filling is less or equal 5.0 liter
"""
for segment in self.env.route:
self._do_driving(segment)
self.env.consume_time(datetime.timedelta(seconds=segment.travel_time))
# check if driving the segment has drained the tank below the refill warning threshold
if self._tankFilling <= 5.0 and not ignore_refill_warning:
raise RefillWarning()
def _do_driving(self, segment):
"""
Drives the given route segment
Uses the segment data to simulate the driving of the car. Thereby fuel is consumed to the amount calculated
by the consume_fuel method.
:param segment: a single fragment of the route
:type segment: simulation.routing.route.RouteFragment
"""
self.consume_fuel(segment.speed_limit, segment.length, segment.road_type)
self._driven_distance += segment.length
self._current_position = segment.target
class PetrolCar(BaseCar):
def __init__(self, env):
super().__init__(env, 50)
self._fuel_type = 'e5'
@property
def consumption_per_km(self):
"""
Consumes a standard 10 litres per 100 km, equivalent to 0.1 L/km
:return: fuel consumption per 1 km in liter
:rtype: float
"""
return 0.1
class DieselCar(BaseCar):
def __init__(self, env):
super().__init__(env, 50)
self._fuel_type = 'diesel'
@property
def consumption_per_km(self):
"""
Consumes a standard 8 litres per 100 km, equivalent to 0.08 L/km
:return: fuel consumption per 1 km in liter
:rtype: float
""" | return 0.08
class RefillWarning(Exception):
pass | random_line_split |
|
car.py | """
Created on 11.09.2014
@author: [email protected]
"""
from abc import ABCMeta, abstractmethod
import random
import datetime
class BaseCar(metaclass=ABCMeta):
"""
Represents the fundamentals of a car
"""
def __init__(self, env, tank_size):
"""
Constructor
:type tank_size: int
:type env: simulation.environment.SimulationEnvironment
"""
env.car = self
self.env = env
self._tankSize = float(tank_size)
self._tankFilling = BaseCar._random_tank_filling(self._tankSize)
self._current_position = None
self._fuel_type = 'e5'
self._driven_distance = float(0)
# self.log = logging.getLogger('spritsim.Car' + commuter_id)
@staticmethod
def _random_tank_filling(maximum):
"""
Returns a random tank filling in litre
Method for initializing a car with a random tank filling between 10 and maximum litres
:param maximum: maximum tank capacity
:return: A random filling
:rtype: float
"""
return random.uniform(10, maximum)
@property
def current_position(self):
"""Returns the nodes target ID
:rtype: int
"""
return self._current_position
@property
def driven_distance(self):
"""
The car's odometer
:return: The total distance the car has traveled
:rtype: float
"""
return self._driven_distance
@property
def fuel_type(self):
"""
The car's fuel type
:return: Type of fuel (e5|diesel)
:rtype: str
"""
return self._fuel_type
@property
def tank_size(self):
"""
:return: Size of the car's tank in litre
:rtype: float
"""
return self._tankSize
@property
def current_filling(self):
"""
:return: Current filling of the car's tank
:rtype: float
"""
return self._tankFilling
def consume_fuel(self, speed, distance, road_type):
"""
:param int speed: Maximum allowed speed
:param float distance: Length of the segment
:param simulation.routing.route.RouteClazz road_type: The type of the road
:return:
"""
self._tankFilling -= self.consumption_per_km * distance
@property
@abstractmethod
def consumption_per_km(self):
"""
:return: The fuel consumption of the car per km
:rtype: float
"""
pass
@property
def km_left(self):
"""
Returns the remaining km the car can drive
:return: Distance car is able to drive
:rtype: float
"""
return self.current_filling / self.consumption_per_km
def refilled(self):
"""Car has been refilled at a filling station"""
self._tankFilling = self._tankSize
def drive(self, ignore_refill_warning=False):
"""Lets the car drive the given route
On arrival at the destination, a CommuterAction for the route is returned or, if the car needs refilling,
the action to search for a refilling station is returned.
:param ignore_refill_warning: Tells the function not to raise a RefillWarning (default: False)
:type ignore_refill_warning: bool
:raises RefillWarning: If the tank filling is less or equal 5.0 liter
"""
for segment in self.env.route:
self._do_driving(segment)
self.env.consume_time(datetime.timedelta(seconds=segment.travel_time))
# check if driving the segment has drained the tank below the refill warning threshold
if self._tankFilling <= 5.0 and not ignore_refill_warning:
raise RefillWarning()
def _do_driving(self, segment):
"""
Drives the given route segment
Uses the segment data to simulate the driving of the car. Thereby fuel is consumed to the amount calculated
by the consume_fuel method.
:param segment: a single fragment of the route
:type segment: simulation.routing.route.RouteFragment
"""
self.consume_fuel(segment.speed_limit, segment.length, segment.road_type)
self._driven_distance += segment.length
self._current_position = segment.target
class PetrolCar(BaseCar):
def __init__(self, env):
super().__init__(env, 50)
self._fuel_type = 'e5'
@property
def consumption_per_km(self):
"""
Consumes a standard 10 litres per 100 km, equivalent to 0.1 L/km
:return: fuel consumption per 1 km in liter
:rtype: float
"""
return 0.1
class DieselCar(BaseCar):
def __init__(self, env):
super().__init__(env, 50)
self._fuel_type = 'diesel'
@property
def consumption_per_km(self):
"""
Consumes a standard 8 litres per 100 km, equivalent to 0.08 L/km
:return: fuel consumption per 1 km in liter
:rtype: float
"""
return 0.08
class | (Exception):
pass
| RefillWarning | identifier_name |
car.py | """
Created on 11.09.2014
@author: [email protected]
"""
from abc import ABCMeta, abstractmethod
import random
import datetime
class BaseCar(metaclass=ABCMeta):
"""
Represents the fundamentals of a car
"""
def __init__(self, env, tank_size):
"""
Constructor
:type tank_size: int
:type env: simulation.environment.SimulationEnvironment
"""
env.car = self
self.env = env
self._tankSize = float(tank_size)
self._tankFilling = BaseCar._random_tank_filling(self._tankSize)
self._current_position = None
self._fuel_type = 'e5'
self._driven_distance = float(0)
# self.log = logging.getLogger('spritsim.Car' + commuter_id)
@staticmethod
def _random_tank_filling(maximum):
"""
Returns a random tank filling in litre
Method for initializing a car with a random tank filling between 10 and maximum litres
:param maximum: maximum tank capacity
:return: A random filling
:rtype: float
"""
return random.uniform(10, maximum)
@property
def current_position(self):
|
@property
def driven_distance(self):
"""
The car's odometer
:return: The total distance the car has traveled
:rtype: float
"""
return self._driven_distance
@property
def fuel_type(self):
"""
The car's fuel type
:return: Type of fuel (e5|diesel)
:rtype: str
"""
return self._fuel_type
@property
def tank_size(self):
"""
:return: Size of the car's tank in litre
:rtype: float
"""
return self._tankSize
@property
def current_filling(self):
"""
:return: Current filling of the car's tank
:rtype: float
"""
return self._tankFilling
def consume_fuel(self, speed, distance, road_type):
"""
:param int speed: Maximum allowed speed
:param float distance: Length of the segment
:param simulation.routing.route.RouteClazz road_type: The type of the road
:return:
"""
self._tankFilling -= self.consumption_per_km * distance
@property
@abstractmethod
def consumption_per_km(self):
"""
:return: The fuel consumption of the car per km
:rtype: float
"""
pass
@property
def km_left(self):
"""
Returns the remaining km the car can drive
:return: Distance car is able to drive
:rtype: float
"""
return self.current_filling / self.consumption_per_km
def refilled(self):
"""Car has been refilled at a filling station"""
self._tankFilling = self._tankSize
def drive(self, ignore_refill_warning=False):
"""Lets the car drive the given route
On arrival at the destination, a CommuterAction for the route is returned or, if the car needs refilling,
the action to search for a refilling station is returned.
:param ignore_refill_warning: Tells the function not to raise a RefillWarning (default: False)
:type ignore_refill_warning: bool
:raises RefillWarning: If the tank filling is less or equal 5.0 liter
"""
for segment in self.env.route:
self._do_driving(segment)
self.env.consume_time(datetime.timedelta(seconds=segment.travel_time))
# check if driving the segment has drained the tank below the refill warning threshold
if self._tankFilling <= 5.0 and not ignore_refill_warning:
raise RefillWarning()
def _do_driving(self, segment):
"""
Drives the given route segment
Uses the segment data to simulate the driving of the car. Thereby fuel is consumed to the amount calculated
by the consume_fuel method.
:param segment: a single fragment of the route
:type segment: simulation.routing.route.RouteFragment
"""
self.consume_fuel(segment.speed_limit, segment.length, segment.road_type)
self._driven_distance += segment.length
self._current_position = segment.target
class PetrolCar(BaseCar):
def __init__(self, env):
super().__init__(env, 50)
self._fuel_type = 'e5'
@property
def consumption_per_km(self):
"""
Consumes a standard 10 litres per 100 km, equivalent to 0.1 L/km
:return: fuel consumption per 1 km in liter
:rtype: float
"""
return 0.1
class DieselCar(BaseCar):
def __init__(self, env):
super().__init__(env, 50)
self._fuel_type = 'diesel'
@property
def consumption_per_km(self):
"""
Consumes a standard 8 litres per 100 km, equivalent to 0.08 L/km
:return: fuel consumption per 1 km in liter
:rtype: float
"""
return 0.08
class RefillWarning(Exception):
pass
| """Returns the nodes target ID
:rtype: int
"""
return self._current_position | identifier_body |
car.py | """
Created on 11.09.2014
@author: [email protected]
"""
from abc import ABCMeta, abstractmethod
import random
import datetime
class BaseCar(metaclass=ABCMeta):
"""
Represents the fundamentals of a car
"""
def __init__(self, env, tank_size):
"""
Constructor
:type tank_size: int
:type env: simulation.environment.SimulationEnvironment
"""
env.car = self
self.env = env
self._tankSize = float(tank_size)
self._tankFilling = BaseCar._random_tank_filling(self._tankSize)
self._current_position = None
self._fuel_type = 'e5'
self._driven_distance = float(0)
# self.log = logging.getLogger('spritsim.Car' + commuter_id)
@staticmethod
def _random_tank_filling(maximum):
"""
Returns a random tank filling in litre
Method for initializing a car with a random tank filling between 10 and maximum litres
:param maximum: maximum tank capacity
:return: A random filling
:rtype: float
"""
return random.uniform(10, maximum)
@property
def current_position(self):
"""Returns the nodes target ID
:rtype: int
"""
return self._current_position
@property
def driven_distance(self):
"""
The car's odometer
:return: The total distance the car has traveled
:rtype: float
"""
return self._driven_distance
@property
def fuel_type(self):
"""
The car's fuel type
:return: Type of fuel (e5|diesel)
:rtype: str
"""
return self._fuel_type
@property
def tank_size(self):
"""
:return: Size of the car's tank in litre
:rtype: float
"""
return self._tankSize
@property
def current_filling(self):
"""
:return: Current filling of the car's tank
:rtype: float
"""
return self._tankFilling
def consume_fuel(self, speed, distance, road_type):
"""
:param int speed: Maximum allowed speed
:param float distance: Length of the segment
:param simulation.routing.route.RouteClazz road_type: The type of the road
:return:
"""
self._tankFilling -= self.consumption_per_km * distance
@property
@abstractmethod
def consumption_per_km(self):
"""
:return: The fuel consumption of the car per km
:rtype: float
"""
pass
@property
def km_left(self):
"""
Returns the remaining km the car can drive
:return: Distance car is able to drive
:rtype: float
"""
return self.current_filling / self.consumption_per_km
def refilled(self):
"""Car has been refilled at a filling station"""
self._tankFilling = self._tankSize
def drive(self, ignore_refill_warning=False):
"""Lets the car drive the given route
On arrival at the destination, a CommuterAction for the route is returned or, if the car needs refilling,
the action to search for a refilling station is returned.
:param ignore_refill_warning: Tells the function not to raise a RefillWarning (default: False)
:type ignore_refill_warning: bool
:raises RefillWarning: If the tank filling is less or equal 5.0 liter
"""
for segment in self.env.route:
|
def _do_driving(self, segment):
"""
Drives the given route segment
Uses the segment data to simulate the driving of the car. Thereby fuel is consumed to the amount calculated
by the consume_fuel method.
:param segment: a single fragment of the route
:type segment: simulation.routing.route.RouteFragment
"""
self.consume_fuel(segment.speed_limit, segment.length, segment.road_type)
self._driven_distance += segment.length
self._current_position = segment.target
class PetrolCar(BaseCar):
def __init__(self, env):
super().__init__(env, 50)
self._fuel_type = 'e5'
@property
def consumption_per_km(self):
"""
Consumes a standard 10 litres per 100 km, equivalent to 0.1 L/km
:return: fuel consumption per 1 km in liter
:rtype: float
"""
return 0.1
class DieselCar(BaseCar):
def __init__(self, env):
super().__init__(env, 50)
self._fuel_type = 'diesel'
@property
def consumption_per_km(self):
"""
Consumes a standard 8 litres per 100 km, equivalent to 0.08 L/km
:return: fuel consumption per 1 km in liter
:rtype: float
"""
return 0.08
class RefillWarning(Exception):
pass
| self._do_driving(segment)
self.env.consume_time(datetime.timedelta(seconds=segment.travel_time))
# check if driving the segment has drained the tank below the refill warning threshold
if self._tankFilling <= 5.0 and not ignore_refill_warning:
raise RefillWarning() | conditional_block |
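A minimal sketch of driving a PetrolCar against a stubbed environment follows. SimulationEnvironment and RouteFragment are stand-ins for the real simulation classes referenced in the docstrings above, and every field shown is an assumption inferred from what drive() and _do_driving() actually touch.

```python
# Stub environment/route objects, shaped only by what BaseCar.drive() uses.
import datetime
from car import PetrolCar, RefillWarning

class StubSegment:
    def __init__(self, target, length_km, speed_limit=50, travel_time=60.0):
        self.target = target            # node id reached at the end of the segment
        self.length = length_km         # km
        self.speed_limit = speed_limit  # km/h (ignored by consume_fuel here)
        self.road_type = "urban"        # placeholder; also ignored by consume_fuel
        self.travel_time = travel_time  # seconds

class StubEnv:
    def __init__(self, route):
        self.route = route
        self.car = None                 # BaseCar.__init__ assigns itself here
        self.clock = datetime.datetime(2014, 9, 11)
    def consume_time(self, delta):
        self.clock += delta

env = StubEnv([StubSegment(target=1, length_km=60.0),
               StubSegment(target=2, length_km=45.0)])
car = PetrolCar(env)
try:
    # May raise RefillWarning, depending on the random initial tank filling.
    car.drive()
except RefillWarning:
    print("tank below 5 l after %.1f km" % car.driven_distance)
print("odometer:", car.driven_distance, "km; fuel left:",
      round(car.current_filling, 2), "l")
```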
test_settings.py | # encoding: utf-8
import logging
# directory from which per-instance configuration files are loaded
INSTANCES_DIR = '/etc/jormungandr.d'
# Start the thread at startup, True in production, False for test environments
START_MONITORING_THREAD = False
# PostgreSQL connection string for the jormungandr database
SQLALCHEMY_DATABASE_URI = 'postgresql://navitia:navitia@localhost/jormun_test'
# disable authentication
PUBLIC = True
REDIS_HOST = 'localhost'
REDIS_PORT = 6379
# index of the Redis database to use, an integer from 0 to 15 by default |
REDIS_PASSWORD = None
# disable use of the cache, and therefore of Redis
CACHE_DISABLED = False
# lifetime of the authentication info in the cache, in seconds
AUTH_CACHE_TTL = 300
ERROR_HANDLER_FILE = 'jormungandr.log'
ERROR_HANDLER_TYPE = 'rotating' # can be timedrotating
ERROR_HANDLER_PARAMS = {'maxBytes': 20000000, 'backupCount': 5}
LOG_LEVEL = logging.DEBUG | REDIS_DB = 0 | random_line_split |
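Jormungandr is a Flask application, so a settings module like the one above is typically consumed through Flask's config-from-object mechanism. A sketch follows, assuming the module is importable as test_settings; the app name is a placeholder.

```python
# Sketch: loading the settings module into a Flask app config.
# 'test_settings' must be importable; the app itself is a placeholder.
from flask import Flask

app = Flask("jormungandr")
app.config.from_object("test_settings")

assert app.config["PUBLIC"] is True         # authentication disabled
assert app.config["AUTH_CACHE_TTL"] == 300  # seconds
print(app.config["SQLALCHEMY_DATABASE_URI"])
```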
lib.rs | #![cfg_attr(feature = "nightly-testing", plugin(clippy))]
#![cfg_attr(feature = "nightly-testing", feature(plugin))]
#![cfg_attr(feature = "nightly-testing", allow(used_underscore_binding))]
#![cfg_attr(not(feature = "with-syntex"), feature(rustc_private, plugin))]
#![cfg_attr(not(feature = "with-syntex"), plugin(quasi_macros))]
extern crate aster;
extern crate quasi;
#[cfg(feature = "with-syntex")]
extern crate syntex;
#[cfg(feature = "with-syntex")]
#[macro_use]
extern crate syntex_syntax as syntax;
#[cfg(not(feature = "with-syntex"))]
#[macro_use]
extern crate syntax;
#[cfg(not(feature = "with-syntex"))]
extern crate rustc_plugin;
#[cfg(not(feature = "with-syntex"))]
use syntax::feature_gate::AttributeType;
#[cfg(feature = "with-syntex")]
include!(concat!(env!("OUT_DIR"), "/lib.rs"));
#[cfg(not(feature = "with-syntex"))]
include!("lib.rs.in");
#[cfg(feature = "with-syntex")]
pub fn register(reg: &mut syntex::Registry) {
use syntax::{ast, fold};
/// Strip the serde attributes from the crate.
#[cfg(feature = "with-syntex")]
fn strip_attributes(krate: ast::Crate) -> ast::Crate {
/// Helper folder that strips the serde attributes after the extensions have been expanded.
struct StripAttributeFolder;
impl fold::Folder for StripAttributeFolder {
fn fold_attribute(&mut self, attr: ast::Attribute) -> Option<ast::Attribute> {
match attr.node.value.node {
ast::MetaItemKind::List(ref n, _) if n == &"serde" => { return None; }
_ => {}
}
Some(attr)
}
fn fold_mac(&mut self, mac: ast::Mac) -> ast::Mac {
fold::noop_fold_mac(mac, self)
}
} | reg.add_attr("feature(custom_attribute)");
reg.add_decorator("derive_Serialize", ser::expand_derive_serialize);
reg.add_decorator("derive_Deserialize", de::expand_derive_deserialize);
reg.add_post_expansion_pass(strip_attributes);
}
#[cfg(not(feature = "with-syntex"))]
pub fn register(reg: &mut rustc_plugin::Registry) {
reg.register_syntax_extension(
syntax::parse::token::intern("derive_Serialize"),
syntax::ext::base::MultiDecorator(
Box::new(ser::expand_derive_serialize)));
reg.register_syntax_extension(
syntax::parse::token::intern("derive_Deserialize"),
syntax::ext::base::MultiDecorator(
Box::new(de::expand_derive_deserialize)));
reg.register_attribute("serde".to_owned(), AttributeType::Normal);
} |
fold::Folder::fold_crate(&mut StripAttributeFolder, krate)
}
reg.add_attr("feature(custom_derive)"); | random_line_split |
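This lib.rs follows the serde_codegen/syntex pattern from pre-macros-1.1 Rust: on stable toolchains, a consumer crate expands src/lib.rs.in into $OUT_DIR/lib.rs at build time through the register function above. A sketch of the corresponding build.rs follows; the serde_codegen crate name and the file paths are assumptions standing in for whatever this crate is published as.

```rust
// build.rs sketch for the with-syntex feature on stable Rust.
// `serde_codegen` stands in for the crate defining `register` above.
extern crate syntex;
extern crate serde_codegen;

use std::env;
use std::path::Path;

fn main() {
    let out_dir = env::var_os("OUT_DIR").expect("OUT_DIR is set by cargo");
    let src = Path::new("src/lib.rs.in");
    let dst = Path::new(&out_dir).join("lib.rs");

    let mut registry = syntex::Registry::new();
    serde_codegen::register(&mut registry); // the `register` shown above
    registry.expand("", &src, &dst).expect("syntex expansion failed");
}
```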
lib.rs | #![cfg_attr(feature = "nightly-testing", plugin(clippy))]
#![cfg_attr(feature = "nightly-testing", feature(plugin))]
#![cfg_attr(feature = "nightly-testing", allow(used_underscore_binding))]
#![cfg_attr(not(feature = "with-syntex"), feature(rustc_private, plugin))]
#![cfg_attr(not(feature = "with-syntex"), plugin(quasi_macros))]
extern crate aster;
extern crate quasi;
#[cfg(feature = "with-syntex")]
extern crate syntex;
#[cfg(feature = "with-syntex")]
#[macro_use]
extern crate syntex_syntax as syntax;
#[cfg(not(feature = "with-syntex"))]
#[macro_use]
extern crate syntax;
#[cfg(not(feature = "with-syntex"))]
extern crate rustc_plugin;
#[cfg(not(feature = "with-syntex"))]
use syntax::feature_gate::AttributeType;
#[cfg(feature = "with-syntex")]
include!(concat!(env!("OUT_DIR"), "/lib.rs"));
#[cfg(not(feature = "with-syntex"))]
include!("lib.rs.in");
#[cfg(feature = "with-syntex")]
pub fn register(reg: &mut syntex::Registry) | fold::noop_fold_mac(mac, self)
}
}
fold::Folder::fold_crate(&mut StripAttributeFolder, krate)
}
reg.add_attr("feature(custom_derive)");
reg.add_attr("feature(custom_attribute)");
reg.add_decorator("derive_Serialize", ser::expand_derive_serialize);
reg.add_decorator("derive_Deserialize", de::expand_derive_deserialize);
reg.add_post_expansion_pass(strip_attributes);
}
#[cfg(not(feature = "with-syntex"))]
pub fn register(reg: &mut rustc_plugin::Registry) {
reg.register_syntax_extension(
syntax::parse::token::intern("derive_Serialize"),
syntax::ext::base::MultiDecorator(
Box::new(ser::expand_derive_serialize)));
reg.register_syntax_extension(
syntax::parse::token::intern("derive_Deserialize"),
syntax::ext::base::MultiDecorator(
Box::new(de::expand_derive_deserialize)));
reg.register_attribute("serde".to_owned(), AttributeType::Normal);
}
| {
use syntax::{ast, fold};
/// Strip the serde attributes from the crate.
#[cfg(feature = "with-syntex")]
fn strip_attributes(krate: ast::Crate) -> ast::Crate {
/// Helper folder that strips the serde attributes after the extensions have been expanded.
struct StripAttributeFolder;
impl fold::Folder for StripAttributeFolder {
fn fold_attribute(&mut self, attr: ast::Attribute) -> Option<ast::Attribute> {
match attr.node.value.node {
ast::MetaItemKind::List(ref n, _) if n == &"serde" => { return None; }
_ => {}
}
Some(attr)
}
fn fold_mac(&mut self, mac: ast::Mac) -> ast::Mac { | identifier_body |
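This second copy of lib.rs is an identifier_body sample: the masked middle runs from the opening brace of register through the signature of fold_mac, so the span is brace-to-brace rather than one clean function body. For Python sources the analogous extraction can lean on the standard ast module (a Python analogue for illustration -- the Rust rows would need a Rust parser, and end_lineno requires Python 3.8+):

import ast

def body_span(source, name):
    # Return (prefix, middle, suffix) where middle is the body of `name`.
    tree = ast.parse(source)
    lines = source.splitlines(keepends=True)
    for node in ast.walk(tree):
        if isinstance(node, (ast.FunctionDef, ast.AsyncFunctionDef)) and node.name == name:
            start = node.body[0].lineno - 1  # first statement of the body
            end = node.body[-1].end_lineno   # last statement of the body
            return "".join(lines[:start]), "".join(lines[start:end]), "".join(lines[end:])
    raise KeyError(name)

p, m, s = body_span("def f():\n    x = 1\n    return x\n\nprint(f())\n", "f")
assert m == "    x = 1\n    return x\n"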
lib.rs | #![cfg_attr(feature = "nightly-testing", plugin(clippy))]
#![cfg_attr(feature = "nightly-testing", feature(plugin))]
#![cfg_attr(feature = "nightly-testing", allow(used_underscore_binding))]
#![cfg_attr(not(feature = "with-syntex"), feature(rustc_private, plugin))]
#![cfg_attr(not(feature = "with-syntex"), plugin(quasi_macros))]
extern crate aster;
extern crate quasi;
#[cfg(feature = "with-syntex")]
extern crate syntex;
#[cfg(feature = "with-syntex")]
#[macro_use]
extern crate syntex_syntax as syntax;
#[cfg(not(feature = "with-syntex"))]
#[macro_use]
extern crate syntax;
#[cfg(not(feature = "with-syntex"))]
extern crate rustc_plugin;
#[cfg(not(feature = "with-syntex"))]
use syntax::feature_gate::AttributeType;
#[cfg(feature = "with-syntex")]
include!(concat!(env!("OUT_DIR"), "/lib.rs"));
#[cfg(not(feature = "with-syntex"))]
include!("lib.rs.in");
#[cfg(feature = "with-syntex")]
pub fn register(reg: &mut syntex::Registry) {
use syntax::{ast, fold};
/// Strip the serde attributes from the crate.
#[cfg(feature = "with-syntex")]
fn strip_attributes(krate: ast::Crate) -> ast::Crate {
/// Helper folder that strips the serde attributes after the extensions have been expanded.
struct StripAttributeFolder;
impl fold::Folder for StripAttributeFolder {
fn fold_attribute(&mut self, attr: ast::Attribute) -> Option<ast::Attribute> {
match attr.node.value.node {
ast::MetaItemKind::List(ref n, _) if n == &"serde" => |
_ => {}
}
Some(attr)
}
fn fold_mac(&mut self, mac: ast::Mac) -> ast::Mac {
fold::noop_fold_mac(mac, self)
}
}
fold::Folder::fold_crate(&mut StripAttributeFolder, krate)
}
reg.add_attr("feature(custom_derive)");
reg.add_attr("feature(custom_attribute)");
reg.add_decorator("derive_Serialize", ser::expand_derive_serialize);
reg.add_decorator("derive_Deserialize", de::expand_derive_deserialize);
reg.add_post_expansion_pass(strip_attributes);
}
#[cfg(not(feature = "with-syntex"))]
pub fn register(reg: &mut rustc_plugin::Registry) {
reg.register_syntax_extension(
syntax::parse::token::intern("derive_Serialize"),
syntax::ext::base::MultiDecorator(
Box::new(ser::expand_derive_serialize)));
reg.register_syntax_extension(
syntax::parse::token::intern("derive_Deserialize"),
syntax::ext::base::MultiDecorator(
Box::new(de::expand_derive_deserialize)));
reg.register_attribute("serde".to_owned(), AttributeType::Normal);
}
| { return None; } | conditional_block |
lib.rs | #![cfg_attr(feature = "nightly-testing", plugin(clippy))]
#![cfg_attr(feature = "nightly-testing", feature(plugin))]
#![cfg_attr(feature = "nightly-testing", allow(used_underscore_binding))]
#![cfg_attr(not(feature = "with-syntex"), feature(rustc_private, plugin))]
#![cfg_attr(not(feature = "with-syntex"), plugin(quasi_macros))]
extern crate aster;
extern crate quasi;
#[cfg(feature = "with-syntex")]
extern crate syntex;
#[cfg(feature = "with-syntex")]
#[macro_use]
extern crate syntex_syntax as syntax;
#[cfg(not(feature = "with-syntex"))]
#[macro_use]
extern crate syntax;
#[cfg(not(feature = "with-syntex"))]
extern crate rustc_plugin;
#[cfg(not(feature = "with-syntex"))]
use syntax::feature_gate::AttributeType;
#[cfg(feature = "with-syntex")]
include!(concat!(env!("OUT_DIR"), "/lib.rs"));
#[cfg(not(feature = "with-syntex"))]
include!("lib.rs.in");
#[cfg(feature = "with-syntex")]
pub fn register(reg: &mut syntex::Registry) {
use syntax::{ast, fold};
/// Strip the serde attributes from the crate.
#[cfg(feature = "with-syntex")]
fn strip_attributes(krate: ast::Crate) -> ast::Crate {
/// Helper folder that strips the serde attributes after the extensions have been expanded.
struct | ;
impl fold::Folder for StripAttributeFolder {
fn fold_attribute(&mut self, attr: ast::Attribute) -> Option<ast::Attribute> {
match attr.node.value.node {
ast::MetaItemKind::List(ref n, _) if n == &"serde" => { return None; }
_ => {}
}
Some(attr)
}
fn fold_mac(&mut self, mac: ast::Mac) -> ast::Mac {
fold::noop_fold_mac(mac, self)
}
}
fold::Folder::fold_crate(&mut StripAttributeFolder, krate)
}
reg.add_attr("feature(custom_derive)");
reg.add_attr("feature(custom_attribute)");
reg.add_decorator("derive_Serialize", ser::expand_derive_serialize);
reg.add_decorator("derive_Deserialize", de::expand_derive_deserialize);
reg.add_post_expansion_pass(strip_attributes);
}
#[cfg(not(feature = "with-syntex"))]
pub fn register(reg: &mut rustc_plugin::Registry) {
reg.register_syntax_extension(
syntax::parse::token::intern("derive_Serialize"),
syntax::ext::base::MultiDecorator(
Box::new(ser::expand_derive_serialize)));
reg.register_syntax_extension(
syntax::parse::token::intern("derive_Deserialize"),
syntax::ext::base::MultiDecorator(
Box::new(de::expand_derive_deserialize)));
reg.register_attribute("serde".to_owned(), AttributeType::Normal);
}
| StripAttributeFolder | identifier_name |
graph.py | v.component_id = None
else:
v.repl_info.cost = 0
v.root = v
v.component_id = v
v.repl_info.interval = 0
v.repl_info.options = 0xFFFFFFFF
# repl_info.schedule == None means "always".
v.repl_info.schedule = None
v.repl_info.duration = 84 * 8
v.demoted = False
def dijkstra(graph, edge_type, include_black):
"""Perform Dijkstra's algorithm on an intersite graph.
:param graph: an IntersiteGraph object
:param edge_type: a transport type GUID
:param include_black: boolean, whether to include black vertices
:return: None
"""
queue = setup_dijkstra(graph, edge_type, include_black)
while len(queue) > 0:
cost, guid, vertex = heapq.heappop(queue)
for edge in vertex.edges:
for v in edge.vertices:
if v is not vertex:
# add new path from vertex to v
try_new_path(graph, queue, vertex, edge, v)
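# Illustrative aside (not part of the original module): the loop above is a
# standard heap-based Dijkstra specialised to this multigraph. The same
# pattern over a plain adjacency dict, for comparison:
def _dijkstra_reference(adj, sources):
    # adj: {node: [(cost, neighbour), ...]}; sources: iterable of seed nodes.
    import heapq
    dist = {s: 0 for s in sources}
    queue = [(0, s) for s in sources]
    heapq.heapify(queue)
    while queue:
        d, node = heapq.heappop(queue)
        if d > dist.get(node, float('inf')):
            continue  # stale entry; the KCC version guards in try_new_path instead
        for cost, neighbour in adj.get(node, ()):
            nd = d + cost
            if nd < dist.get(neighbour, float('inf')):
                dist[neighbour] = nd
                heapq.heappush(queue, (nd, neighbour))
    return dist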
def setup_dijkstra(graph, edge_type, include_black):
"""Create a vertex queue for Dijksta's algorithm.
:param graph: an IntersiteGraph object
:param edge_type: a transport type GUID
:param include_black: boolean, whether to include black vertices
:return: A heap queue of vertices
"""
queue = []
setup_vertices(graph)
for vertex in graph.vertices:
if vertex.is_white():
continue
if (((vertex.is_black() and not include_black)
or edge_type not in vertex.accept_black
or edge_type not in vertex.accept_red_red)):
vertex.repl_info.cost = MAX_DWORD
vertex.root = None # NULL GUID
vertex.demoted = True # Demoted appears not to be used
else:
heapq.heappush(queue, (vertex.repl_info.cost, vertex.guid, vertex))
return queue
def try_new_path(graph, queue, vfrom, edge, vto):
"""Helper function for Dijksta's algorithm.
:param graph: an IntersiteGraph object
:param queue: the empty queue to initialise.
:param vfrom: Vertex we are coming from
:param edge: an edge to try
:param vto: the other Vertex
:return: None
"""
new_repl_info = combine_repl_info(vfrom.repl_info, edge.repl_info)
# Cheaper or longer schedule goes in the heap
if (new_repl_info.cost < vto.repl_info.cost or
new_repl_info.duration > vto.repl_info.duration):
vto.root = vfrom.root
vto.component_id = vfrom.component_id
vto.repl_info = new_repl_info
heapq.heappush(queue, (vto.repl_info.cost, vto.guid, vto))
def check_demote_vertex(vertex, edge_type):
"""Demote non-white vertices that accept only white edges
This makes them seem temporarily like white vertices.
:param vertex: a Vertex()
:param edge_type: a transport type GUID
:return: None
"""
if vertex.is_white():
return
# Accepts neither red-red nor black edges, demote
if ((edge_type not in vertex.accept_black and
edge_type not in vertex.accept_red_red)):
vertex.repl_info.cost = MAX_DWORD
vertex.root = None
vertex.demoted = True # Demoted appears not to be used
def undemote_vertex(vertex):
"""Un-demote non-white vertices
    Set a vertex to an undemoted state.
:param vertex: a Vertex()
:return: None
"""
if vertex.is_white():
return
vertex.repl_info.cost = 0
vertex.root = vertex
vertex.demoted = False
def process_edge_set(graph, e_set, internal_edges):
"""Find internal edges to pass to Kruskal's algorithm
:param graph: an IntersiteGraph object
:param e_set: an edge set
:param internal_edges: a set that internal edges get added to
:return: None
"""
if e_set is None:
for edge in graph.edges:
for vertex in edge.vertices:
check_demote_vertex(vertex, edge.con_type)
process_edge(graph, edge, internal_edges)
for vertex in edge.vertices:
undemote_vertex(vertex)
else:
for edge in e_set.edges:
process_edge(graph, edge, internal_edges)
def process_edge(graph, examine, internal_edges):
"""Find the set of all vertices touching an edge to examine
:param graph: an IntersiteGraph object
:param examine: an edge
:param internal_edges: a set that internal edges get added to
:return: None
"""
vertices = []
for v in examine.vertices:
# Append a 4-tuple of color, repl cost, guid and vertex
vertices.append((v.color, v.repl_info.cost, v.ndrpacked_guid, v))
# Sort by color, lower
DEBUG("vertices is %s" % vertices)
vertices.sort()
color, cost, guid, bestv = vertices[0]
# Add to internal edges an edge from every colored vertex to bestV
for v in examine.vertices:
if v.component_id is None or v.root is None:
continue
# Only add edge if valid inter-tree edge - needs a root and
# different components
if ((bestv.component_id is not None and
bestv.root is not None and
v.component_id is not None and
v.root is not None and
bestv.component_id != v.component_id)):
add_int_edge(graph, internal_edges, examine, bestv, v)
def add_int_edge(graph, internal_edges, examine, v1, v2):
"""Add edges between compatible red and black vertices
Internal edges form the core of the tree -- white and RODC
vertices attach to it as leaf nodes. An edge needs to have black
or red endpoints with compatible replication schedules to be
accepted as an internal edge.
Here we examine an edge and add it to the set of internal edges if
it looks good.
:param graph: the graph object.
:param internal_edges: a set of internal edges
:param examine: an edge to examine for suitability.
:param v1: a Vertex
:param v2: the other Vertex
"""
root1 = v1.root
root2 = v2.root
red_red = root1.is_red() and root2.is_red()
if red_red:
if (examine.con_type not in root1.accept_red_red
or examine.con_type not in root2.accept_red_red):
return
elif (examine.con_type not in root1.accept_black
or examine.con_type not in root2.accept_black):
return
# Create the transitive replInfo for the two trees and this edge
ri = combine_repl_info(v1.repl_info, v2.repl_info)
if ri.duration == 0:
return
ri2 = combine_repl_info(ri, examine.repl_info)
if ri2.duration == 0:
return
# Order by vertex guid
if root1.ndrpacked_guid > root2.ndrpacked_guid:
root1, root2 = root2, root1
newIntEdge = InternalEdge(root1, root2, red_red, ri2, examine.con_type,
examine.site_link)
internal_edges.add(newIntEdge)
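# Illustrative aside (not part of the original module): the duration == 0
# checks above reject edges whose replication windows never intersect. With
# availability flattened to an hourly bitmask, the overlap test is a single
# bitwise AND -- a simplified stand-in for what combine_repl_info computes
# over the real schedule structures:
def _windows_overlap(schedule_a, schedule_b):
    # schedule_a/b: ints whose bit i means "replication allowed in hour i".
    return (schedule_a & schedule_b) != 0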
def kruskal(graph, edges):
"""Perform Kruskal's algorithm using the given set of edges
The input edges are "internal edges" -- between red and black
nodes. The output edges are a minimal spanning tree.
:param graph: the graph object.
:param edges: a set of edges
:return: a tuple of a list of edges, and the number of components
"""
for v in graph.vertices:
v.edges = []
components = set([x for x in graph.vertices if not x.is_white()])
edges = list(edges)
# Sorted based on internal comparison function of internal edge
edges.sort()
#XXX expected_num_tree_edges is never used
expected_num_tree_edges = 0 # TODO this value makes little sense
count_edges = 0
output_edges = []
index = 0
while index < len(edges): # TODO and num_components > 1
e = edges[index]
parent1 = find_component(e.v1)
parent2 = find_component(e.v2)
if parent1 is not parent2:
count_edges += 1
add_out_edge(graph, output_edges, e)
parent1.component_id = parent2
components.discard(parent1)
index += 1
return output_edges, len(components)
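# Illustrative aside (not part of the original module): the same algorithm
# over plain (weight, u, v) tuples, to compare the union-find shape above
# against a textbook version:
def _kruskal_reference(nodes, weighted_edges):
    parent = {n: n for n in nodes}
    def find(n):
        while parent[n] != n:
            parent[n] = parent[parent[n]]  # path halving
            n = parent[n]
        return n
    tree = []
    for w, u, v in sorted(weighted_edges):
        ru, rv = find(u), find(v)
        if ru != rv:
            parent[ru] = rv
            tree.append((w, u, v))
    return tree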
def find_component(vertex):
"""Kruskal helper to find the component a vertex belongs to.
:param vertex: a Vertex
:return: the Vertex object representing the component
"""
if vertex.component_id is vertex:
return vertex
current = vertex
while current.component_id is not current:
| current = current.component_id | conditional_block |
|
graph.py | the internal edges
output_edges, components = kruskal(graph, internal_edges)
# This recalculates the cost for the path connecting the
# closest red vertex. Ignoring types is fine because NO
# suboptimal edge should exist in the graph
dijkstra(graph, "EDGE_TYPE_ALL", False) # TODO rename
# Phase 3: Process the output
for v in graph.vertices:
if v.is_red():
v.dist_to_red = 0
else:
v.dist_to_red = v.repl_info.cost
if verify or dot_file_dir is not None:
graph_edges = [(e.v1.site.site_dnstr, e.v2.site.site_dnstr)
for e in internal_edges]
graph_nodes = [v.site.site_dnstr for v in graph.vertices]
verify_properties = ('multi_edge_forest',)
verify_and_dot('postkruskal', graph_edges, graph_nodes,
label=label, properties=verify_properties,
debug=DEBUG, verify=verify,
dot_file_dir=dot_file_dir)
# Ensure only one-way connections for partial-replicas,
# and make sure they point the right way.
edge_list = []
for edge in output_edges:
# We know these edges only have two endpoints because we made
# them.
v, w = edge.vertices
if v.site is my_site or w.site is my_site:
if (((v.is_black() or w.is_black()) and
v.dist_to_red != MAX_DWORD)):
edge.directed = True
if w.dist_to_red < v.dist_to_red:
edge.vertices[:] = w, v
edge_list.append(edge)
if verify or dot_file_dir is not None:
graph_edges = [[x.site.site_dnstr for x in e.vertices]
for e in edge_list]
#add the reverse edge if not directed.
graph_edges.extend([x.site.site_dnstr
for x in reversed(e.vertices)]
for e in edge_list if not e.directed)
graph_nodes = [x.site.site_dnstr for x in graph.vertices]
verify_properties = ()
verify_and_dot('post-one-way-partial', graph_edges, graph_nodes,
label=label, properties=verify_properties,
debug=DEBUG, verify=verify,
directed=True,
dot_file_dir=dot_file_dir)
# count the components
return edge_list, components
def create_edge(con_type, site_link, guid_to_vertex):
"""Set up a MultiEdge for the intersite graph
A MultiEdge can have multiple vertices.
From MS-ADTS 6.2.2.3.4.4
:param con_type: a transport type GUID
:param site_link: a kcc.kcc_utils.SiteLink object
:param guid_to_vertex: a mapping between GUIDs and vertices
:return: a MultiEdge
"""
e = MultiEdge()
e.site_link = site_link
e.vertices = []
for site_guid in site_link.site_list:
if str(site_guid) in guid_to_vertex:
e.vertices.extend(guid_to_vertex.get(str(site_guid)))
e.repl_info.cost = site_link.cost
e.repl_info.options = site_link.options
e.repl_info.interval = site_link.interval
e.repl_info.set_repltimes_from_schedule(site_link.schedule)
e.con_type = con_type
e.directed = False
return e
def create_auto_edge_set(graph, transport_guid):
"""Set up an automatic MultiEdgeSet for the intersite graph
From within MS-ADTS 6.2.2.3.4.4
:param graph: the intersite graph object
:param transport_guid: a transport type GUID
:return: a MultiEdgeSet
"""
e_set = MultiEdgeSet()
# use a NULL guid, not associated with a SiteLinkBridge object
e_set.guid = misc.GUID()
for site_link in graph.edges:
if site_link.con_type == transport_guid:
e_set.edges.append(site_link)
return e_set
def setup_vertices(graph):
"""Initialise vertices in the graph for the Dijkstra's run.
Part of MS-ADTS 6.2.2.3.4.4
The schedule and options are set to all-on, so that intersections
with real data defer to that data.
Refer to the convert_schedule_to_repltimes() docstring for an
explanation of the repltimes schedule values.
:param graph: an IntersiteGraph object
:return: None
"""
for v in graph.vertices:
if v.is_white():
v.repl_info.cost = MAX_DWORD
v.root = None
v.component_id = None
else:
v.repl_info.cost = 0
v.root = v
v.component_id = v
v.repl_info.interval = 0
v.repl_info.options = 0xFFFFFFFF
# repl_info.schedule == None means "always".
v.repl_info.schedule = None
v.repl_info.duration = 84 * 8
v.demoted = False
def dijkstra(graph, edge_type, include_black):
"""Perform Dijkstra's algorithm on an intersite graph.
:param graph: an IntersiteGraph object
:param edge_type: a transport type GUID
:param include_black: boolean, whether to include black vertices
:return: None
"""
queue = setup_dijkstra(graph, edge_type, include_black)
while len(queue) > 0:
cost, guid, vertex = heapq.heappop(queue)
for edge in vertex.edges:
for v in edge.vertices:
if v is not vertex:
# add new path from vertex to v
try_new_path(graph, queue, vertex, edge, v)
def setup_dijkstra(graph, edge_type, include_black):
"""Create a vertex queue for Dijksta's algorithm.
:param graph: an IntersiteGraph object
:param edge_type: a transport type GUID
:param include_black: boolean, whether to include black vertices
:return: A heap queue of vertices
"""
queue = []
setup_vertices(graph)
for vertex in graph.vertices:
if vertex.is_white():
continue
if (((vertex.is_black() and not include_black)
or edge_type not in vertex.accept_black
or edge_type not in vertex.accept_red_red)):
vertex.repl_info.cost = MAX_DWORD
vertex.root = None # NULL GUID
vertex.demoted = True # Demoted appears not to be used
else:
heapq.heappush(queue, (vertex.repl_info.cost, vertex.guid, vertex))
return queue
def try_new_path(graph, queue, vfrom, edge, vto):
"""Helper function for Dijksta's algorithm.
:param graph: an IntersiteGraph object
:param queue: the empty queue to initialise.
:param vfrom: Vertex we are coming from
:param edge: an edge to try
:param vto: the other Vertex
:return: None
"""
new_repl_info = combine_repl_info(vfrom.repl_info, edge.repl_info)
# Cheaper or longer schedule goes in the heap
if (new_repl_info.cost < vto.repl_info.cost or
new_repl_info.duration > vto.repl_info.duration):
vto.root = vfrom.root
vto.component_id = vfrom.component_id
vto.repl_info = new_repl_info
heapq.heappush(queue, (vto.repl_info.cost, vto.guid, vto))
def check_demote_vertex(vertex, edge_type):
"""Demote non-white vertices that accept only white edges
This makes them seem temporarily like white vertices.
:param vertex: a Vertex()
:param edge_type: a transport type GUID
:return: None
"""
if vertex.is_white():
return
# Accepts neither red-red nor black edges, demote
if ((edge_type not in vertex.accept_black and
edge_type not in vertex.accept_red_red)):
vertex.repl_info.cost = MAX_DWORD
vertex.root = None
vertex.demoted = True # Demoted appears not to be used
def undemote_vertex(vertex):
"""Un-demote non-white vertices
    Set a vertex to an undemoted state.
:param vertex: a Vertex()
:return: None
"""
if vertex.is_white():
return
vertex.repl_info.cost = 0
vertex.root = vertex
vertex.demoted = False
def process_edge_set(graph, e_set, internal_edges):
"""Find internal edges to pass to Kruskal's algorithm
:param graph: an IntersiteGraph object
:param e_set: an edge set
:param internal_edges: a set that internal edges get added to
:return: None
"""
if e_set is None:
for edge in graph.edges:
for vertex in edge.vertices:
check_demote_vertex(vertex, edge.con_type)
process_edge(graph, edge, internal_edges)
for vertex in edge.vertices:
undemote_vertex(vertex)
else:
for edge in e_set.edges:
process_edge(graph, edge, internal_edges)
def | process_edge | identifier_name |
|
graph.py | 1: Run Dijkstra's to get a list of internal edges, which are
# just the shortest-paths connecting colored vertices
internal_edges = set()
for e_set in graph.edge_set:
edgeType = None
for v in graph.vertices:
v.edges = []
# All con_type in an edge set is the same
for e in e_set.edges:
edgeType = e.con_type
for v in e.vertices:
v.edges.append(e)
if verify or dot_file_dir is not None:
graph_edges = [(a.site.site_dnstr, b.site.site_dnstr)
for a, b in
itertools.chain(
*(itertools.combinations(edge.vertices, 2)
for edge in e_set.edges))]
graph_nodes = [v.site.site_dnstr for v in graph.vertices]
if dot_file_dir is not None:
write_dot_file('edgeset_%s' % (edgeType,), graph_edges,
vertices=graph_nodes, label=label)
if verify:
verify_graph('spanning tree edge set %s' % edgeType,
graph_edges, vertices=graph_nodes,
properties=('complete', 'connected'),
debug=DEBUG)
# Run dijkstra's algorithm with just the red vertices as seeds
# Seed from the full replicas
dijkstra(graph, edgeType, False)
# Process edge set
process_edge_set(graph, e_set, internal_edges)
# Run dijkstra's algorithm with red and black vertices as the seeds
# Seed from both full and partial replicas
dijkstra(graph, edgeType, True)
# Process edge set
process_edge_set(graph, e_set, internal_edges)
# All vertices have root/component as itself
setup_vertices(graph)
process_edge_set(graph, None, internal_edges)
if verify or dot_file_dir is not None:
graph_edges = [(e.v1.site.site_dnstr, e.v2.site.site_dnstr)
for e in internal_edges]
graph_nodes = [v.site.site_dnstr for v in graph.vertices]
verify_properties = ('multi_edge_forest',)
verify_and_dot('prekruskal', graph_edges, graph_nodes, label=label,
properties=verify_properties, debug=DEBUG,
verify=verify, dot_file_dir=dot_file_dir)
# Phase 2: Run Kruskal's on the internal edges
output_edges, components = kruskal(graph, internal_edges)
# This recalculates the cost for the path connecting the
# closest red vertex. Ignoring types is fine because NO
# suboptimal edge should exist in the graph
dijkstra(graph, "EDGE_TYPE_ALL", False) # TODO rename
# Phase 3: Process the output
for v in graph.vertices:
if v.is_red():
v.dist_to_red = 0
else:
v.dist_to_red = v.repl_info.cost
if verify or dot_file_dir is not None:
graph_edges = [(e.v1.site.site_dnstr, e.v2.site.site_dnstr)
for e in internal_edges]
graph_nodes = [v.site.site_dnstr for v in graph.vertices]
verify_properties = ('multi_edge_forest',)
verify_and_dot('postkruskal', graph_edges, graph_nodes,
label=label, properties=verify_properties,
debug=DEBUG, verify=verify,
dot_file_dir=dot_file_dir)
# Ensure only one-way connections for partial-replicas,
# and make sure they point the right way.
edge_list = []
for edge in output_edges:
# We know these edges only have two endpoints because we made
# them.
v, w = edge.vertices
if v.site is my_site or w.site is my_site:
if (((v.is_black() or w.is_black()) and
v.dist_to_red != MAX_DWORD)):
edge.directed = True
if w.dist_to_red < v.dist_to_red:
edge.vertices[:] = w, v
edge_list.append(edge)
if verify or dot_file_dir is not None:
graph_edges = [[x.site.site_dnstr for x in e.vertices]
for e in edge_list]
#add the reverse edge if not directed.
graph_edges.extend([x.site.site_dnstr
for x in reversed(e.vertices)]
for e in edge_list if not e.directed)
graph_nodes = [x.site.site_dnstr for x in graph.vertices]
verify_properties = ()
verify_and_dot('post-one-way-partial', graph_edges, graph_nodes,
label=label, properties=verify_properties,
debug=DEBUG, verify=verify,
directed=True,
dot_file_dir=dot_file_dir)
# count the components
return edge_list, components
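# Illustrative aside (not part of the original module): write_dot_file and
# verify_and_dot live elsewhere in samba.kcc. A minimal stand-in that emits
# Graphviz for edge lists like the ones built above could look like this
# (signature assumed, not taken from the real helper):
def _write_dot_sketch(path, edges, directed=False):
    arrow = ' -> ' if directed else ' -- '
    kind = 'digraph' if directed else 'graph'
    with open(path, 'w') as f:
        f.write(kind + ' g {\n')
        for a, b in edges:
            f.write('  "%s"%s"%s";\n' % (a, arrow, b))
        f.write('}\n')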
def create_edge(con_type, site_link, guid_to_vertex):
"""Set up a MultiEdge for the intersite graph
A MultiEdge can have multiple vertices.
From MS-ADTS 6.2.2.3.4.4
:param con_type: a transport type GUID
:param site_link: a kcc.kcc_utils.SiteLink object
:param guid_to_vertex: a mapping between GUIDs and vertices
:return: a MultiEdge
"""
e = MultiEdge()
e.site_link = site_link
e.vertices = []
for site_guid in site_link.site_list:
if str(site_guid) in guid_to_vertex:
e.vertices.extend(guid_to_vertex.get(str(site_guid)))
e.repl_info.cost = site_link.cost
e.repl_info.options = site_link.options
e.repl_info.interval = site_link.interval
e.repl_info.set_repltimes_from_schedule(site_link.schedule)
e.con_type = con_type
e.directed = False
return e
def create_auto_edge_set(graph, transport_guid):
"""Set up an automatic MultiEdgeSet for the intersite graph
From within MS-ADTS 6.2.2.3.4.4
:param graph: the intersite graph object
:param transport_guid: a transport type GUID
:return: a MultiEdgeSet
"""
e_set = MultiEdgeSet()
# use a NULL guid, not associated with a SiteLinkBridge object
e_set.guid = misc.GUID()
for site_link in graph.edges:
if site_link.con_type == transport_guid:
e_set.edges.append(site_link)
return e_set
def setup_vertices(graph):
"""Initialise vertices in the graph for the Dijkstra's run.
Part of MS-ADTS 6.2.2.3.4.4
The schedule and options are set to all-on, so that intersections
with real data defer to that data.
Refer to the convert_schedule_to_repltimes() docstring for an
explanation of the repltimes schedule values.
:param graph: an IntersiteGraph object
:return: None
"""
for v in graph.vertices:
if v.is_white():
v.repl_info.cost = MAX_DWORD
v.root = None
v.component_id = None
else:
v.repl_info.cost = 0
v.root = v
v.component_id = v
v.repl_info.interval = 0
v.repl_info.options = 0xFFFFFFFF
# repl_info.schedule == None means "always".
v.repl_info.schedule = None
v.repl_info.duration = 84 * 8
v.demoted = False
def dijkstra(graph, edge_type, include_black):
"""Perform Dijkstra's algorithm on an intersite graph.
:param graph: an IntersiteGraph object
:param edge_type: a transport type GUID
:param include_black: boolean, whether to include black vertices
:return: None
"""
queue = setup_dijkstra(graph, edge_type, include_black)
while len(queue) > 0:
cost, guid, vertex = heapq.heappop(queue)
for edge in vertex.edges:
for v in edge.vertices:
if v is not vertex:
# add new path from vertex to v
try_new_path(graph, queue, vertex, edge, v) |
:param graph: an IntersiteGraph object
:param edge_type: a transport type GUID
:param include_black: boolean, whether to include black vertices
:return: A heap queue of vertices
"""
queue = []
setup_vertices(graph)
for vertex in graph.vertices:
if vertex.is_white():
continue
if (((vertex.is_black() and not include_black)
or edge_type not in vertex.accept_black
or edge_type not in vertex.accept_red_red)):
vertex.repl_info.cost = MAX_DWORD
vertex.root = None # NULL GUID
vertex.demoted = True # Demoted appears not to be used
else:
heapq.heappush(queue, (vertex.repl_info.cost, vertex.guid, vertex))
return queue
def try_new_path(graph, queue, vfrom, edge, vto):
"""Helper function for Dijksta's algorithm.
:param graph: an IntersiteGraph object
:param queue: the empty queue to initialise.
:param vfrom: Vertex we are coming from
|
def setup_dijkstra(graph, edge_type, include_black):
"""Create a vertex queue for Dijksta's algorithm. | random_line_split |
graph.py | get added to
:return: None
"""
vertices = []
for v in examine.vertices:
# Append a 4-tuple of color, repl cost, guid and vertex
vertices.append((v.color, v.repl_info.cost, v.ndrpacked_guid, v))
# Sort by color, lower
DEBUG("vertices is %s" % vertices)
vertices.sort()
color, cost, guid, bestv = vertices[0]
# Add to internal edges an edge from every colored vertex to bestV
for v in examine.vertices:
if v.component_id is None or v.root is None:
continue
# Only add edge if valid inter-tree edge - needs a root and
# different components
if ((bestv.component_id is not None and
bestv.root is not None and
v.component_id is not None and
v.root is not None and
bestv.component_id != v.component_id)):
add_int_edge(graph, internal_edges, examine, bestv, v)
def add_int_edge(graph, internal_edges, examine, v1, v2):
"""Add edges between compatible red and black vertices
Internal edges form the core of the tree -- white and RODC
vertices attach to it as leaf nodes. An edge needs to have black
or red endpoints with compatible replication schedules to be
accepted as an internal edge.
Here we examine an edge and add it to the set of internal edges if
it looks good.
:param graph: the graph object.
:param internal_edges: a set of internal edges
:param examine: an edge to examine for suitability.
:param v1: a Vertex
:param v2: the other Vertex
"""
root1 = v1.root
root2 = v2.root
red_red = root1.is_red() and root2.is_red()
if red_red:
if (examine.con_type not in root1.accept_red_red
or examine.con_type not in root2.accept_red_red):
return
elif (examine.con_type not in root1.accept_black
or examine.con_type not in root2.accept_black):
return
# Create the transitive replInfo for the two trees and this edge
ri = combine_repl_info(v1.repl_info, v2.repl_info)
if ri.duration == 0:
return
ri2 = combine_repl_info(ri, examine.repl_info)
if ri2.duration == 0:
return
# Order by vertex guid
if root1.ndrpacked_guid > root2.ndrpacked_guid:
root1, root2 = root2, root1
newIntEdge = InternalEdge(root1, root2, red_red, ri2, examine.con_type,
examine.site_link)
internal_edges.add(newIntEdge)
def kruskal(graph, edges):
"""Perform Kruskal's algorithm using the given set of edges
The input edges are "internal edges" -- between red and black
nodes. The output edges are a minimal spanning tree.
:param graph: the graph object.
:param edges: a set of edges
:return: a tuple of a list of edges, and the number of components
"""
for v in graph.vertices:
v.edges = []
components = set([x for x in graph.vertices if not x.is_white()])
edges = list(edges)
# Sorted based on internal comparison function of internal edge
edges.sort()
#XXX expected_num_tree_edges is never used
expected_num_tree_edges = 0 # TODO this value makes little sense
count_edges = 0
output_edges = []
index = 0
while index < len(edges): # TODO and num_components > 1
e = edges[index]
parent1 = find_component(e.v1)
parent2 = find_component(e.v2)
if parent1 is not parent2:
count_edges += 1
add_out_edge(graph, output_edges, e)
parent1.component_id = parent2
components.discard(parent1)
index += 1
return output_edges, len(components)
def find_component(vertex):
"""Kruskal helper to find the component a vertex belongs to.
:param vertex: a Vertex
:return: the Vertex object representing the component
"""
if vertex.component_id is vertex:
return vertex
current = vertex
while current.component_id is not current:
current = current.component_id
root = current
current = vertex
while current.component_id is not root:
n = current.component_id
current.component_id = root
current = n
return root
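# Illustrative aside (not part of the original module): find_component is the
# classic two-pass find with full path compression -- the first loop walks to
# the root, the second repoints every vertex on the chain directly at it, so
# a chain like a -> b -> c -> c becomes a -> c, b -> c, c -> c and later
# lookups on those vertices terminate in one step.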
def add_out_edge(graph, output_edges, e):
"""Kruskal helper to add output edges
:param graph: the InterSiteGraph
:param output_edges: the list of spanning tree edges
:param e: the edge to be added
:return: None
"""
v1 = e.v1
v2 = e.v2
# This multi-edge is a 'real' undirected 2-vertex edge with no
# GUID. XXX It is not really the same thing at all as the
# multi-vertex edges relating to site-links. We shouldn't really
# be using the same class or storing them in the same list as the
# other ones. But we do. Historical reasons.
ee = MultiEdge()
ee.directed = False
ee.site_link = e.site_link
ee.vertices.append(v1)
ee.vertices.append(v2)
ee.con_type = e.e_type
ee.repl_info = e.repl_info
output_edges.append(ee)
v1.edges.append(ee)
v2.edges.append(ee)
def setup_graph(part, site_table, transport_guid, sitelink_table,
bridges_required):
"""Set up an IntersiteGraph based on intersite topology
The graph will have a Vertex for each site, a MultiEdge for each
siteLink object, and a MultiEdgeSet for each siteLinkBridge object
(or implied siteLinkBridge).
:param part: the partition we are dealing with
:param site_table: a mapping of guids to sites (KCC.site_table)
:param transport_guid: the GUID of the IP transport
:param sitelink_table: a mapping of dnstrs to sitelinks
:param bridges_required: boolean, asking in vain for something to do
with site link bridges
:return: a new IntersiteGraph
"""
guid_to_vertex = {}
# Create graph
g = IntersiteGraph()
# Add vertices
for site_guid, site in site_table.items():
vertex = Vertex(site, part)
vertex.guid = site_guid
vertex.ndrpacked_guid = ndr_pack(site.site_guid)
g.vertices.add(vertex)
guid_vertices = guid_to_vertex.setdefault(site_guid, [])
guid_vertices.append(vertex)
connected_vertices = set()
for site_link_dn, site_link in sitelink_table.items():
new_edge = create_edge(transport_guid, site_link,
guid_to_vertex)
connected_vertices.update(new_edge.vertices)
g.edges.add(new_edge)
# XXX we are ignoring the bridges_required option and indeed the
# whole concept of SiteLinkBridge objects.
if bridges_required:
WARN("Samba KCC ignores the bridges required option")
g.edge_set.add(create_auto_edge_set(g, transport_guid))
g.connected_vertices = connected_vertices
return g
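# Illustrative aside (not part of the original module): setup_graph is
# typically driven from KCC state roughly like
#
#   graph = setup_graph(part, kcc.site_table, ip_transport_guid,
#                       kcc.sitelink_table, bridges_required=False)
#
# where site_table maps GUID -> Site and sitelink_table maps DN -> SiteLink.
# This is a sketch of the caller (ip_transport_guid is a placeholder), not
# code from this module.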
class VertexColor(object):
"""Enumeration of vertex colours"""
(red, black, white, unknown) = range(0, 4)
class Vertex(object):
"""intersite graph representation of a Site.
There is a separate vertex for each partition.
:param site: the site to make a vertex of.
:param part: the partition.
"""
def __init__(self, site, part):
self.site = site
self.part = part
self.color = VertexColor.unknown
self.edges = []
self.accept_red_red = []
self.accept_black = []
self.repl_info = ReplInfo()
self.root = self
self.guid = None
self.component_id = self
self.demoted = False
self.options = 0
self.interval = 0
def color_vertex(self):
"""Color to indicate which kind of NC replica the vertex contains
"""
# IF s contains one or more DCs with full replicas of the
# NC cr!nCName
# SET v.Color to COLOR.RED
# ELSEIF s contains one or more partial replicas of the NC
# SET v.Color to COLOR.BLACK
#ELSE
# SET v.Color to COLOR.WHITE
# set to minimum (no replica)
self.color = VertexColor.white
for dnstr, dsa in self.site.dsa_table.items():
rep = dsa.get_current_replica(self.part.nc_dnstr)
if rep is None:
continue
# We have a full replica which is the largest
# value so exit
if not rep.is_partial():
self.color = VertexColor.red
break
else:
self.color = VertexColor.black
def is_red(self):
| assert(self.color != VertexColor.unknown)
return (self.color == VertexColor.red) | identifier_body |
|
test_cert_setup.py | # vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright 2012 OpenStack LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
import shutil
from keystone.common import openssl
from keystone import exception
from keystone import test
from keystone import token
import default_fixtures
ROOTDIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
SSLDIR = "%s/tests/ssl/" % ROOTDIR
CONF = test.CONF
DEFAULT_DOMAIN_ID = CONF.identity.default_domain_id
def rootdir(*p):
return os.path.join(SSLDIR, *p)
CERTDIR = rootdir("certs")
KEYDIR = rootdir("private")
class CertSetupTestCase(test.TestCase):
def setUp(self):
super(CertSetupTestCase, self).setUp()
CONF.signing.certfile = os.path.join(CERTDIR, 'signing_cert.pem')
CONF.signing.ca_certs = os.path.join(CERTDIR, "ca.pem")
CONF.signing.ca_key = os.path.join(CERTDIR, "cakey.pem")
CONF.signing.keyfile = os.path.join(KEYDIR, "signing_key.pem")
CONF.ssl.ca_certs = CONF.signing.ca_certs
CONF.ssl.ca_key = CONF.signing.ca_key
CONF.ssl.certfile = os.path.join(CERTDIR, 'keystone.pem')
CONF.ssl.keyfile = os.path.join(KEYDIR, 'keystonekey.pem')
self.load_backends()
self.load_fixtures(default_fixtures)
self.controller = token.controllers.Auth()
def test_can_handle_missing_certs(self):
self.opt_in_group('signing', token_format='PKI')
self.opt_in_group('signing', certfile='invalid')
user = {
'id': 'fake1',
'name': 'fake1',
'password': 'fake1',
'domain_id': DEFAULT_DOMAIN_ID
}
body_dict = {
'passwordCredentials': {
'userId': user['id'],
'password': user['password'],
},
}
self.identity_api.create_user(user['id'], user)
self.assertRaises(exception.UnexpectedError,
self.controller.authenticate,
{}, body_dict)
def | (self):
pki = openssl.ConfigurePKI(None, None)
pki.run()
self.assertTrue(os.path.exists(CONF.signing.certfile))
self.assertTrue(os.path.exists(CONF.signing.ca_certs))
self.assertTrue(os.path.exists(CONF.signing.keyfile))
def test_create_ssl_certs(self):
ssl = openssl.ConfigureSSL(None, None)
ssl.run()
self.assertTrue(os.path.exists(CONF.ssl.ca_certs))
self.assertTrue(os.path.exists(CONF.ssl.certfile))
self.assertTrue(os.path.exists(CONF.ssl.keyfile))
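    def _assert_readable_pem(self, path):
        # Illustrative helper (not in the original test): a generated PEM
        # should at least parse as a certificate; the stdlib ssl module can
        # check that without a full handshake. Unused by the tests above.
        import ssl
        with open(path) as f:
            pem = f.read()
        self.assertIn('BEGIN', pem)
        ssl.PEM_cert_to_DER_cert(pem)  # raises ValueError on malformed PEM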
def tearDown(self):
try:
shutil.rmtree(rootdir(SSLDIR))
except OSError:
pass
super(CertSetupTestCase, self).tearDown()
| test_create_pki_certs | identifier_name |
test_cert_setup.py | # vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright 2012 OpenStack LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import os
import shutil
from keystone.common import openssl
from keystone import exception
from keystone import test
from keystone import token
import default_fixtures
ROOTDIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
SSLDIR = "%s/tests/ssl/" % ROOTDIR
CONF = test.CONF
DEFAULT_DOMAIN_ID = CONF.identity.default_domain_id
def rootdir(*p):
return os.path.join(SSLDIR, *p)
CERTDIR = rootdir("certs")
KEYDIR = rootdir("private")
class CertSetupTestCase(test.TestCase):
def setUp(self):
super(CertSetupTestCase, self).setUp()
CONF.signing.certfile = os.path.join(CERTDIR, 'signing_cert.pem')
CONF.signing.ca_certs = os.path.join(CERTDIR, "ca.pem")
CONF.signing.ca_key = os.path.join(CERTDIR, "cakey.pem")
CONF.signing.keyfile = os.path.join(KEYDIR, "signing_key.pem")
CONF.ssl.ca_certs = CONF.signing.ca_certs
CONF.ssl.ca_key = CONF.signing.ca_key
CONF.ssl.certfile = os.path.join(CERTDIR, 'keystone.pem')
CONF.ssl.keyfile = os.path.join(KEYDIR, 'keystonekey.pem')
self.load_backends()
self.load_fixtures(default_fixtures)
self.controller = token.controllers.Auth()
def test_can_handle_missing_certs(self):
|
def test_create_pki_certs(self):
pki = openssl.ConfigurePKI(None, None)
pki.run()
self.assertTrue(os.path.exists(CONF.signing.certfile))
self.assertTrue(os.path.exists(CONF.signing.ca_certs))
self.assertTrue(os.path.exists(CONF.signing.keyfile))
def test_create_ssl_certs(self):
ssl = openssl.ConfigureSSL(None, None)
ssl.run()
self.assertTrue(os.path.exists(CONF.ssl.ca_certs))
self.assertTrue(os.path.exists(CONF.ssl.certfile))
self.assertTrue(os.path.exists(CONF.ssl.keyfile))
def tearDown(self):
try:
shutil.rmtree(rootdir(SSLDIR))
except OSError:
pass
super(CertSetupTestCase, self).tearDown()
| self.opt_in_group('signing', token_format='PKI')
self.opt_in_group('signing', certfile='invalid')
user = {
'id': 'fake1',
'name': 'fake1',
'password': 'fake1',
'domain_id': DEFAULT_DOMAIN_ID
}
body_dict = {
'passwordCredentials': {
'userId': user['id'],
'password': user['password'],
},
}
self.identity_api.create_user(user['id'], user)
self.assertRaises(exception.UnexpectedError,
self.controller.authenticate,
{}, body_dict) | identifier_body |
test_cert_setup.py | # vim: tabstop=4 shiftwidth=4 softtabstop=4
# Copyright 2012 OpenStack LLC
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or
# implied.
# See the License for the specific language governing permissions and
# limitations under the License. | import os
import shutil
from keystone.common import openssl
from keystone import exception
from keystone import test
from keystone import token
import default_fixtures
ROOTDIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
SSLDIR = "%s/tests/ssl/" % ROOTDIR
CONF = test.CONF
DEFAULT_DOMAIN_ID = CONF.identity.default_domain_id
def rootdir(*p):
return os.path.join(SSLDIR, *p)
CERTDIR = rootdir("certs")
KEYDIR = rootdir("private")
class CertSetupTestCase(test.TestCase):
def setUp(self):
super(CertSetupTestCase, self).setUp()
CONF.signing.certfile = os.path.join(CERTDIR, 'signing_cert.pem')
CONF.signing.ca_certs = os.path.join(CERTDIR, "ca.pem")
CONF.signing.ca_key = os.path.join(CERTDIR, "cakey.pem")
CONF.signing.keyfile = os.path.join(KEYDIR, "signing_key.pem")
CONF.ssl.ca_certs = CONF.signing.ca_certs
CONF.ssl.ca_key = CONF.signing.ca_key
CONF.ssl.certfile = os.path.join(CERTDIR, 'keystone.pem')
CONF.ssl.keyfile = os.path.join(KEYDIR, 'keystonekey.pem')
self.load_backends()
self.load_fixtures(default_fixtures)
self.controller = token.controllers.Auth()
def test_can_handle_missing_certs(self):
self.opt_in_group('signing', token_format='PKI')
self.opt_in_group('signing', certfile='invalid')
user = {
'id': 'fake1',
'name': 'fake1',
'password': 'fake1',
'domain_id': DEFAULT_DOMAIN_ID
}
body_dict = {
'passwordCredentials': {
'userId': user['id'],
'password': user['password'],
},
}
self.identity_api.create_user(user['id'], user)
self.assertRaises(exception.UnexpectedError,
self.controller.authenticate,
{}, body_dict)
def test_create_pki_certs(self):
pki = openssl.ConfigurePKI(None, None)
pki.run()
self.assertTrue(os.path.exists(CONF.signing.certfile))
self.assertTrue(os.path.exists(CONF.signing.ca_certs))
self.assertTrue(os.path.exists(CONF.signing.keyfile))
def test_create_ssl_certs(self):
ssl = openssl.ConfigureSSL(None, None)
ssl.run()
self.assertTrue(os.path.exists(CONF.ssl.ca_certs))
self.assertTrue(os.path.exists(CONF.ssl.certfile))
self.assertTrue(os.path.exists(CONF.ssl.keyfile))
def tearDown(self):
try:
shutil.rmtree(rootdir(SSLDIR))
except OSError:
pass
super(CertSetupTestCase, self).tearDown() | random_line_split |
|
farmer-wife.tsx | import React from 'react'
export default function FarmerWife() | } | {
return (
<symbol
id='maphubs-icon-farmer-wife'
viewBox='0 0 124.836 308.19'
preserveAspectRatio='xMidYMid meet'
>
<g>
<path d='M40.3,38.587c0.089-0.605,0.197-1.211,0.338-1.817C43.01,26.586,51.958,19.475,62.4,19.475 c1.705,0,3.417,0.197,5.089,0.586c9.255,2.155,15.823,9.805,17.071,18.696c21.771-1.595,37.216-5.59,37.216-10.269 c0-5.103-18.362-15.919-43.273-19.271C78.265,4.103,71.659,0,63.526,0c-8.133,0-14.739,4.103-14.978,9.217 C23.639,12.569,5.276,23.385,5.276,28.488C5.276,33.008,19.691,36.891,40.3,38.587z' />
<ellipse
transform='matrix(0.2268 -0.9739 0.9739 0.2268 7.511 93.1425)'
cx='62.418'
cy='41.841'
rx='20.362'
ry='20.362'
/>
<path d='M5.86,158.108c3.813,0.631,7.417-1.95,8.049-5.765c4.038-24.668,9.4-43.387,17.028-55.036 c2.391-3.671,4.949-6.65,7.754-9.043C20.199,128.981,20.087,213.5,20.087,213.5c0,4.608,8.522,7.752,20.155,9.436V237.5 c0,5,4.052,9.052,9.052,9.052c4.998,0,9.052-4.052,9.052-9.052v-13.151c2.708,0.06,5.438,0.06,8.147,0V237.5 c0,5,4.052,9.052,9.052,9.052c4.998,0,9.052-4.052,9.052-9.052v-14.564c11.63-1.685,20.152-4.828,20.152-9.436 c0,0-0.109-84.518-18.601-125.234c5.05,4.308,9.327,10.537,13.061,19.087c4.939,11.326,8.671,26.497,11.72,44.991 c0.567,3.428,3.534,5.859,6.897,5.86c0.38,0,0.764-0.031,1.151-0.095c3.815-0.631,6.396-4.233,5.765-8.048 c-4.242-25.417-9.59-45.622-19.101-60.374c-4.762-7.351-10.706-13.324-18.007-17.339c-7.292-4.029-15.777-5.999-25.216-5.989 c-9.437-0.01-17.924,1.96-25.216,5.989c-10.99,6.056-18.748,16.368-24.409,29.421c-5.69,13.107-9.535,29.221-12.697,48.292 C-0.536,153.875,2.045,157.478,5.86,158.108z' />
</g>
</symbol>
) | identifier_body |
farmer-wife.tsx | import React from 'react'
export default function | () {
return (
<symbol
id='maphubs-icon-farmer-wife'
viewBox='0 0 124.836 308.19'
preserveAspectRatio='xMidYMid meet'
>
<g>
<path d='M40.3,38.587c0.089-0.605,0.197-1.211,0.338-1.817C43.01,26.586,51.958,19.475,62.4,19.475 c1.705,0,3.417,0.197,5.089,0.586c9.255,2.155,15.823,9.805,17.071,18.696c21.771-1.595,37.216-5.59,37.216-10.269 c0-5.103-18.362-15.919-43.273-19.271C78.265,4.103,71.659,0,63.526,0c-8.133,0-14.739,4.103-14.978,9.217 C23.639,12.569,5.276,23.385,5.276,28.488C5.276,33.008,19.691,36.891,40.3,38.587z' />
<ellipse
transform='matrix(0.2268 -0.9739 0.9739 0.2268 7.511 93.1425)'
cx='62.418'
cy='41.841'
rx='20.362'
ry='20.362'
/>
<path d='M5.86,158.108c3.813,0.631,7.417-1.95,8.049-5.765c4.038-24.668,9.4-43.387,17.028-55.036 c2.391-3.671,4.949-6.65,7.754-9.043C20.199,128.981,20.087,213.5,20.087,213.5c0,4.608,8.522,7.752,20.155,9.436V237.5 c0,5,4.052,9.052,9.052,9.052c4.998,0,9.052-4.052,9.052-9.052v-13.151c2.708,0.06,5.438,0.06,8.147,0V237.5 c0,5,4.052,9.052,9.052,9.052c4.998,0,9.052-4.052,9.052-9.052v-14.564c11.63-1.685,20.152-4.828,20.152-9.436 c0,0-0.109-84.518-18.601-125.234c5.05,4.308,9.327,10.537,13.061,19.087c4.939,11.326,8.671,26.497,11.72,44.991 c0.567,3.428,3.534,5.859,6.897,5.86c0.38,0,0.764-0.031,1.151-0.095c3.815-0.631,6.396-4.233,5.765-8.048 c-4.242-25.417-9.59-45.622-19.101-60.374c-4.762-7.351-10.706-13.324-18.007-17.339c-7.292-4.029-15.777-5.999-25.216-5.989 c-9.437-0.01-17.924,1.96-25.216,5.989c-10.99,6.056-18.748,16.368-24.409,29.421c-5.69,13.107-9.535,29.221-12.697,48.292 C-0.536,153.875,2.045,157.478,5.86,158.108z' />
</g>
</symbol>
)
} | FarmerWife | identifier_name |
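Both farmer-wife.tsx rows mask the same declaration, once by body and once by name. An identifier_name-style mask only needs to locate a declared name; a regex sketch in Python covering the 'def NAME(' / 'function NAME(' shapes seen in this dump (a sketch, not a parser):

import re

def mask_identifier(source, name):
    # Produce an identifier_name-style (prefix, middle, suffix) triple.
    m = re.search(r'\b(?:def|function|struct|class)\s+(' + re.escape(name) + r')\b', source)
    if m is None:
        raise KeyError(name)
    start, end = m.span(1)
    return source[:start], source[start:end], source[end:]

p, mid, s = mask_identifier("export default function FarmerWife() {}", "FarmerWife")
assert mid == "FarmerWife"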
HeroCard.ts | import Card from "./Card";
export default class HeroCard extends Card {
public premium = false;
public bodyTextColor = "black";
public bodyTextCoords = {
dx: 143,
dy: 627,
dWidth: 376,
dHeight: 168,
sWidth: 376,
sHeight: 168,
};
public cardFoundationAsset = null;
public cardFoundationCoords = null;
public baseCardFrameAsset = "frame-hero-";
public baseCardFrameCoords = {
sWidth: 527,
sHeight: 795,
dx: 70,
dy: 87,
dWidth: 527,
dHeight: 795,
};
public baseRarityGemAsset = "rarity-";
public eliteDragonAsset = "elite-hero";
public eliteDragonCoords = {
dx: 172,
dy: 40,
dWidth: 444,
dHeight: 298,
sWidth: 444,
sHeight: 298,
};
public nameBannerAsset = "name-banner-hero";
public nameBannerCoords = {
sWidth: 490,
sHeight: 122,
dx: 91,
dy: 458,
dWidth: 490,
dHeight: 122,
};
public nameTextCurve = {
pathMiddle: 0.5,
maxWidth: 420,
curve: [{x: 24, y: 98}, {x: 170, y: 36}, {x: 294, y: 36}, {x: 438, y: 96}],
};
public rarityGemCoords = {dx: 311, dy: 529};
public artCoords = {
sWidth: 346,
sHeight: 346,
dx: 161,
dy: 137,
dWidth: 346,
dHeight: 346,
};
public artClipPolygon = [
{x: 334, y: 134},
{x: 369, y: 143},
{x: 406, y: 164},
{x: 435, y: 187},
{x: 453, y: 213},
{x: 469, y: 245},
{x: 479, y: 270},
{x: 481, y: 290},
{x: 483, y: 332},
{x: 483, y: 380},
{x: 483, y: 438},
{x: 484, y: 485},
{x: 435, y: 473},
{x: 389, y: 467},
{x: 346, y: 465},
{x: 297, y: 466},
{x: 240, y: 473},
{x: 185, y: 486},
{x: 184, y: 445},
{x: 182, y: 357},
{x: 184, y: 302},
{x: 188, y: 271},
{x: 198, y: 240},
{x: 210, y: 217},
{x: 222, y: 198},
{x: 239, y: 178},
{x: 262, y: 160},
{x: 291, y: 145},
];
public getHealthGemAsset() {
return this.cardDef.armor ? "armor" : "health";
}
public getHealthGemCoords() {
if (this.cardDef.armor) {
return {
sWidth: 115,
sHeight: 135,
dx: 498,
dy: 752,
dWidth: 115,
dHeight: 135,
};
} else {
return {
sWidth: 109,
sHeight: 164,
dx: 504,
dy: 728,
dWidth: 109,
dHeight: 164,
};
}
}
public getHealthTextCoords() {
if (this.cardDef.armor) {
return {dx: 554, dy: 822};
} else {
return {dx: 556, dy: 825};
}
}
public getWatermarkCoords() |
}
| {
return {
dx: 247,
dy: 625,
dWidth: 170,
dHeight: 170,
};
} | identifier_body |
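Across the dump exactly four fim_type classes appear -- random_line_split, identifier_name, identifier_body, conditional_block -- matching the "large_stringclasses 4" note in the header. A sampler dispatching between them might look like this (uniform weights are an assumption; the dump does not reveal the real mix):

import random

FIM_TYPES = ("random_line_split", "identifier_name",
             "identifier_body", "conditional_block")

def sample_fim_type(rng, weights=None):
    return rng.choices(FIM_TYPES, weights=weights, k=1)[0]

rng = random.Random(42)
counts = {t: 0 for t in FIM_TYPES}
for _ in range(1000):
    counts[sample_fim_type(rng)] += 1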
HeroCard.ts | import Card from "./Card";
export default class HeroCard extends Card {
public premium = false;
public bodyTextColor = "black";
public bodyTextCoords = {
dx: 143,
dy: 627,
dWidth: 376,
dHeight: 168,
sWidth: 376,
sHeight: 168,
};
public cardFoundationAsset = null;
public cardFoundationCoords = null;
public baseCardFrameAsset = "frame-hero-";
public baseCardFrameCoords = {
sWidth: 527,
sHeight: 795,
dx: 70,
dy: 87,
dWidth: 527,
dHeight: 795,
};
public baseRarityGemAsset = "rarity-";
public eliteDragonAsset = "elite-hero";
public eliteDragonCoords = {
dx: 172,
dy: 40,
dWidth: 444,
dHeight: 298,
sWidth: 444,
sHeight: 298,
};
public nameBannerAsset = "name-banner-hero";
public nameBannerCoords = {
sWidth: 490,
sHeight: 122,
dx: 91,
dy: 458,
dWidth: 490,
dHeight: 122,
};
public nameTextCurve = {
pathMiddle: 0.5,
maxWidth: 420,
curve: [{x: 24, y: 98}, {x: 170, y: 36}, {x: 294, y: 36}, {x: 438, y: 96}],
};
public rarityGemCoords = {dx: 311, dy: 529};
public artCoords = {
sWidth: 346,
sHeight: 346,
dx: 161,
dy: 137,
dWidth: 346,
dHeight: 346,
};
public artClipPolygon = [
{x: 334, y: 134},
{x: 369, y: 143},
{x: 406, y: 164},
{x: 435, y: 187},
{x: 453, y: 213},
{x: 469, y: 245},
{x: 479, y: 270},
{x: 481, y: 290},
{x: 483, y: 332},
{x: 483, y: 380},
{x: 483, y: 438},
{x: 484, y: 485},
{x: 435, y: 473},
{x: 389, y: 467},
{x: 346, y: 465},
{x: 297, y: 466},
{x: 240, y: 473},
{x: 185, y: 486},
{x: 184, y: 445},
{x: 182, y: 357},
{x: 184, y: 302},
{x: 188, y: 271},
{x: 198, y: 240},
{x: 210, y: 217},
{x: 222, y: 198},
{x: 239, y: 178},
{x: 262, y: 160},
{x: 291, y: 145},
];
public getHealthGemAsset() {
return this.cardDef.armor ? "armor" : "health";
}
public getHealthGemCoords() {
if (this.cardDef.armor) {
return {
sWidth: 115,
sHeight: 135,
dx: 498,
dy: 752,
dWidth: 115,
dHeight: 135,
};
} else {
return {
sWidth: 109,
sHeight: 164,
dx: 504,
dy: 728,
dWidth: 109,
dHeight: 164,
};
}
}
public getHealthTextCoords() {
if (this.cardDef.armor) | else {
return {dx: 556, dy: 825};
}
}
public getWatermarkCoords() {
return {
dx: 247,
dy: 625,
dWidth: 170,
dHeight: 170,
};
}
}
| {
return {dx: 554, dy: 822};
} | conditional_block |
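The conditional_block row ending above masks a brace-delimited branch ({ return {dx: 554, dy: 822}; }). For the curly-brace languages in this dump, a brace-balancing scan is enough to recover such a span (a naive Python sketch that ignores braces inside strings and comments; real tooling would use a parser):

def brace_block(source, open_index):
    # Return the balanced {...} block starting at source[open_index].
    assert source[open_index] == "{"
    depth = 0
    for i in range(open_index, len(source)):
        if source[i] == "{":
            depth += 1
        elif source[i] == "}":
            depth -= 1
            if depth == 0:
                return source[open_index:i + 1]
    raise ValueError("unbalanced braces")

src = "if (x) { return {dx: 1}; } else {}"
assert brace_block(src, src.index("{")) == "{ return {dx: 1}; }"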
HeroCard.ts | import Card from "./Card";
export default class HeroCard extends Card {
public premium = false;
public bodyTextColor = "black";
public bodyTextCoords = {
dx: 143,
dy: 627,
dWidth: 376,
dHeight: 168,
sWidth: 376,
sHeight: 168,
};
public cardFoundationAsset = null;
public cardFoundationCoords = null;
public baseCardFrameAsset = "frame-hero-";
public baseCardFrameCoords = {
sWidth: 527,
sHeight: 795,
dx: 70,
dy: 87,
dWidth: 527,
dHeight: 795,
};
public baseRarityGemAsset = "rarity-";
public eliteDragonAsset = "elite-hero";
public eliteDragonCoords = {
dx: 172,
dy: 40,
dWidth: 444,
dHeight: 298,
sWidth: 444,
sHeight: 298,
};
public nameBannerAsset = "name-banner-hero";
public nameBannerCoords = {
sWidth: 490,
sHeight: 122,
dx: 91,
dy: 458,
dWidth: 490,
dHeight: 122,
};
public nameTextCurve = {
pathMiddle: 0.5,
maxWidth: 420,
curve: [{x: 24, y: 98}, {x: 170, y: 36}, {x: 294, y: 36}, {x: 438, y: 96}],
};
public rarityGemCoords = {dx: 311, dy: 529};
public artCoords = {
sWidth: 346,
sHeight: 346,
dx: 161,
dy: 137,
dWidth: 346,
dHeight: 346, | public artClipPolygon = [
{x: 334, y: 134},
{x: 369, y: 143},
{x: 406, y: 164},
{x: 435, y: 187},
{x: 453, y: 213},
{x: 469, y: 245},
{x: 479, y: 270},
{x: 481, y: 290},
{x: 483, y: 332},
{x: 483, y: 380},
{x: 483, y: 438},
{x: 484, y: 485},
{x: 435, y: 473},
{x: 389, y: 467},
{x: 346, y: 465},
{x: 297, y: 466},
{x: 240, y: 473},
{x: 185, y: 486},
{x: 184, y: 445},
{x: 182, y: 357},
{x: 184, y: 302},
{x: 188, y: 271},
{x: 198, y: 240},
{x: 210, y: 217},
{x: 222, y: 198},
{x: 239, y: 178},
{x: 262, y: 160},
{x: 291, y: 145},
];
public getHealthGemAsset() {
return this.cardDef.armor ? "armor" : "health";
}
public getHealthGemCoords() {
if (this.cardDef.armor) {
return {
sWidth: 115,
sHeight: 135,
dx: 498,
dy: 752,
dWidth: 115,
dHeight: 135,
};
} else {
return {
sWidth: 109,
sHeight: 164,
dx: 504,
dy: 728,
dWidth: 109,
dHeight: 164,
};
}
}
public getHealthTextCoords() {
if (this.cardDef.armor) {
return {dx: 554, dy: 822};
} else {
return {dx: 556, dy: 825};
}
}
public getWatermarkCoords() {
return {
dx: 247,
dy: 625,
dWidth: 170,
dHeight: 170,
};
}
} | }; | random_line_split |
HeroCard.ts | import Card from "./Card";
export default class | extends Card {
public premium = false;
public bodyTextColor = "black";
public bodyTextCoords = {
dx: 143,
dy: 627,
dWidth: 376,
dHeight: 168,
sWidth: 376,
sHeight: 168,
};
public cardFoundationAsset = null;
public cardFoundationCoords = null;
public baseCardFrameAsset = "frame-hero-";
public baseCardFrameCoords = {
sWidth: 527,
sHeight: 795,
dx: 70,
dy: 87,
dWidth: 527,
dHeight: 795,
};
public baseRarityGemAsset = "rarity-";
public eliteDragonAsset = "elite-hero";
public eliteDragonCoords = {
dx: 172,
dy: 40,
dWidth: 444,
dHeight: 298,
sWidth: 444,
sHeight: 298,
};
public nameBannerAsset = "name-banner-hero";
public nameBannerCoords = {
sWidth: 490,
sHeight: 122,
dx: 91,
dy: 458,
dWidth: 490,
dHeight: 122,
};
public nameTextCurve = {
pathMiddle: 0.5,
maxWidth: 420,
curve: [{x: 24, y: 98}, {x: 170, y: 36}, {x: 294, y: 36}, {x: 438, y: 96}],
};
public rarityGemCoords = {dx: 311, dy: 529};
public artCoords = {
sWidth: 346,
sHeight: 346,
dx: 161,
dy: 137,
dWidth: 346,
dHeight: 346,
};
public artClipPolygon = [
{x: 334, y: 134},
{x: 369, y: 143},
{x: 406, y: 164},
{x: 435, y: 187},
{x: 453, y: 213},
{x: 469, y: 245},
{x: 479, y: 270},
{x: 481, y: 290},
{x: 483, y: 332},
{x: 483, y: 380},
{x: 483, y: 438},
{x: 484, y: 485},
{x: 435, y: 473},
{x: 389, y: 467},
{x: 346, y: 465},
{x: 297, y: 466},
{x: 240, y: 473},
{x: 185, y: 486},
{x: 184, y: 445},
{x: 182, y: 357},
{x: 184, y: 302},
{x: 188, y: 271},
{x: 198, y: 240},
{x: 210, y: 217},
{x: 222, y: 198},
{x: 239, y: 178},
{x: 262, y: 160},
{x: 291, y: 145},
];
public getHealthGemAsset() {
return this.cardDef.armor ? "armor" : "health";
}
public getHealthGemCoords() {
if (this.cardDef.armor) {
return {
sWidth: 115,
sHeight: 135,
dx: 498,
dy: 752,
dWidth: 115,
dHeight: 135,
};
} else {
return {
sWidth: 109,
sHeight: 164,
dx: 504,
dy: 728,
dWidth: 109,
dHeight: 164,
};
}
}
public getHealthTextCoords() {
if (this.cardDef.armor) {
return {dx: 554, dy: 822};
} else {
return {dx: 556, dy: 825};
}
}
public getWatermarkCoords() {
return {
dx: 247,
dy: 625,
dWidth: 170,
dHeight: 170,
};
}
}
| HeroCard | identifier_name |
clients.rs | // Learn stuff about our users.
+ my name is *
- <set name=<formal>>Nice to meet you, <get name>.
- <set name=<formal>><get name>, nice to meet you.
+ my name is <bot master>
- <set name=<bot master>>That's my master's name too.
+ my name is <bot name>
- <set name=<bot name>>What a coincidence! That's my name too!
- <set name=<bot name>>That's my name too!
+ call me *
- <set name=<formal>><get name>, I will call you that from now on.
+ i am * years old
- <set age=<star>>A lot of people are <get age>, you're not alone.
- <set age=<star>>Cool, I'm <bot age> myself.{weight=49}
+ i am a (@malenoun)
- <set sex=male>Alright, you're a <star>.
+ i am a (@femalenoun)
- <set sex=female>Alright, you're female.
+ i (am from|live in) *
- <set location=<formal>>I've spoken to people from <get location> before.
|
+ i have a girlfriend
- <set status=girlfriend>What's her name?
+ i have a boyfriend
- <set status=boyfriend>What's his name?
+ *
% whats her name
- <set spouse=<formal>>That's a pretty name.
+ *
% whats his name
- <set spouse=<formal>>That's a cool name.
+ my (girlfriend|boyfriend)* name is *
- <set spouse=<formal>>That's a nice name.
+ (what is my name|who am i|do you know my name|do you know who i am){weight=10}
- Your name is <get name>.
- You told me your name is <get name>.
- Aren't you <get name>?
+ (how old am i|do you know how old i am|do you know my age){weight=10}
- You are <get age> years old.
- You're <get age>.
+ am i a (@malenoun) or a (@femalenoun){weight=10}
- You're a <get sex>.
+ am i (@malenoun) or (@femalenoun){weight=10}
- You're a <get sex>.
+ what is my favorite *{weight=10}
- Your favorite <star> is <get fav<star>>
+ who is my (boyfriend|girlfriend|spouse){weight=10}
- <get spouse> | + my favorite * is *
- <set fav<star1>=<star2>>Why is it your favorite?
+ i am single
- <set status=single><set spouse=nobody>I am too. | random_line_split |
main.py | from __future__ import unicode_literals
import json
import os
from devpi_common.request import new_requests_session
from devpi_slack import __version__
def devpiserver_indexconfig_defaults():
return {"slack_icon": None, "slack_hook": None, "slack_user": None}
def devpiserver_on_upload_sync(log, application_url, stage, project, version):
slack_hook = stage.ixconfig.get("slack_hook") or os.getenv("SLACK_HOOK")
slack_icon = stage.ixconfig.get("slack_icon") or os.getenv(
"SLACK_ICON", "http://doc.devpi.net/latest/_static/devpicat.jpg")
slack_user = stage.ixconfig.get(
"slack_user") or os.getenv("SLACK_USER", "devpi")
if not slack_hook:
return
session = new_requests_session(agent=("devpi-slack", __version__))
try:
r = session.post(
slack_hook,
data={
'payload': json.dumps({
"text": "Uploaded {}=={} to {}".format(
project,
version,
application_url
),
"icon_url": slack_icon,
"username": slack_user,
})
})
except session.Errors:
raise RuntimeError("%s: failed to send Slack notification %s",
project, slack_hook)
if 200 <= r.status_code < 300:
log.info("successfully sent Slack notification: %s", slack_hook)
else:
log.error("%s: failed to send Slack notification: %s", r.status_code,
slack_hook)
log.debug(r.content.decode('utf-8'))
raise RuntimeError("%s: failed to send Slack notification: %s",
project, slack_hook) | # -*- coding: utf-8 -*- | random_line_split |
|
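The plugin posts a form-encoded `payload` field containing JSON, the classic Slack incoming-webhook format. A standalone sketch for testing a hook outside devpi (the hook URL and package details are placeholders):

```python
# Sketch: send the same payload shape with plain requests.
import json
import requests

hook = "https://hooks.slack.com/services/T000/B000/XXXX"  # placeholder
payload = {
    "text": "Uploaded example-pkg==1.0.0 to http://localhost:3141",
    "icon_url": "http://doc.devpi.net/latest/_static/devpicat.jpg",
    "username": "devpi",
}
r = requests.post(hook, data={"payload": json.dumps(payload)})
print(r.status_code)
```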
main.py | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
import json
import os
from devpi_common.request import new_requests_session
from devpi_slack import __version__
def devpiserver_indexconfig_defaults():
return {"slack_icon": None, "slack_hook": None, "slack_user": None}
def devpiserver_on_upload_sync(log, application_url, stage, project, version):
slack_hook = stage.ixconfig.get("slack_hook") or os.getenv("SLACK_HOOK")
slack_icon = stage.ixconfig.get("slack_icon") or os.getenv(
"SLACK_ICON", "http://doc.devpi.net/latest/_static/devpicat.jpg")
slack_user = stage.ixconfig.get(
"slack_user") or os.getenv("SLACK_USER", "devpi")
if not slack_hook:
|
session = new_requests_session(agent=("devpi-slack", __version__))
try:
r = session.post(
slack_hook,
data={
'payload': json.dumps({
"text": "Uploaded {}=={} to {}".format(
project,
version,
application_url
),
"icon_url": slack_icon,
"username": slack_user,
})
})
except session.Errors:
raise RuntimeError("%s: failed to send Slack notification %s",
project, slack_hook)
if 200 <= r.status_code < 300:
log.info("successfully sent Slack notification: %s", slack_hook)
else:
log.error("%s: failed to send Slack notification: %s", r.status_code,
slack_hook)
log.debug(r.content.decode('utf-8'))
raise RuntimeError("%s: failed to send Slack notification: %s",
project, slack_hook)
| return | conditional_block |
main.py | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
import json
import os
from devpi_common.request import new_requests_session
from devpi_slack import __version__
def devpiserver_indexconfig_defaults():
|
def devpiserver_on_upload_sync(log, application_url, stage, project, version):
slack_hook = stage.ixconfig.get("slack_hook") or os.getenv("SLACK_HOOK")
slack_icon = stage.ixconfig.get("slack_icon") or os.getenv(
"SLACK_ICON", "http://doc.devpi.net/latest/_static/devpicat.jpg")
slack_user = stage.ixconfig.get(
"slack_user") or os.getenv("SLACK_USER", "devpi")
if not slack_hook:
return
session = new_requests_session(agent=("devpi-slack", __version__))
try:
r = session.post(
slack_hook,
data={
'payload': json.dumps({
"text": "Uploaded {}=={} to {}".format(
project,
version,
application_url
),
"icon_url": slack_icon,
"username": slack_user,
})
})
except session.Errors:
raise RuntimeError("%s: failed to send Slack notification %s",
project, slack_hook)
if 200 <= r.status_code < 300:
log.info("successfully sent Slack notification: %s", slack_hook)
else:
log.error("%s: failed to send Slack notification: %s", r.status_code,
slack_hook)
log.debug(r.content.decode('utf-8'))
raise RuntimeError("%s: failed to send Slack notification: %s",
project, slack_hook)
| return {"slack_icon": None, "slack_hook": None, "slack_user": None} | identifier_body |
main.py | # -*- coding: utf-8 -*-
from __future__ import unicode_literals
import json
import os
from devpi_common.request import new_requests_session
from devpi_slack import __version__
def devpiserver_indexconfig_defaults():
return {"slack_icon": None, "slack_hook": None, "slack_user": None}
def | (log, application_url, stage, project, version):
slack_hook = stage.ixconfig.get("slack_hook") or os.getenv("SLACK_HOOK")
slack_icon = stage.ixconfig.get("slack_icon") or os.getenv(
"SLACK_ICON", "http://doc.devpi.net/latest/_static/devpicat.jpg")
slack_user = stage.ixconfig.get(
"slack_user") or os.getenv("SLACK_USER", "devpi")
if not slack_hook:
return
session = new_requests_session(agent=("devpi-slack", __version__))
try:
r = session.post(
slack_hook,
data={
'payload': json.dumps({
"text": "Uploaded {}=={} to {}".format(
project,
version,
application_url
),
"icon_url": slack_icon,
"username": slack_user,
})
})
except session.Errors:
raise RuntimeError("%s: failed to send Slack notification %s",
project, slack_hook)
if 200 <= r.status_code < 300:
log.info("successfully sent Slack notification: %s", slack_hook)
else:
log.error("%s: failed to send Slack notification: %s", r.status_code,
slack_hook)
log.debug(r.content.decode('utf-8'))
raise RuntimeError("%s: failed to send Slack notification: %s",
project, slack_hook)
| devpiserver_on_upload_sync | identifier_name |
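To exercise the hook without a running devpi server, a throwaway stage/log pair is enough; everything below besides the `ixconfig` attribute (which the plugin reads) is hypothetical test scaffolding, and the import path is an assumption:

```python
# Sketch: call the upload hook directly with stub objects.
import logging
import os

class StubStage:
    ixconfig = {"slack_hook": os.getenv("SLACK_HOOK"),
                "slack_icon": None, "slack_user": None}

log = logging.getLogger("devpi-slack-demo")
# Assumed import path; adjust to wherever the plugin module lives:
# from devpi_slack.main import devpiserver_on_upload_sync
# devpiserver_on_upload_sync(log, "http://localhost:3141", StubStage(),
#                            "example-pkg", "1.0.0")
```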
setup.py | from setuptools import setup, find_packages
setup(
name = 'django-dzenlog',
version = __import__('django_dzenlog').__version__,
description = '''Django Dzenlog is a set of models and templates which can be '''
'''used to create blogs with different kinds of media.''',
long_description = '''
Django Dzenlog is a set of models and templates which can be
used to create blogs with different kinds of media.
Dzenlog relies on a newer Django feature -- model inheritance --
so you can derive your own models from dzenlog's models and
add the actual information.
This is a very effective way to reuse code, because dzenlog
will take care of all the publishing options; all you
need to do is describe the details specific to your particular blog. | For example, you can create a blog with two post types: textual
posts and links to internet resources. In that case, all you need
to do is define two models: `TextPost` and `LinkPost`. Each of these
models should be derived from `django_dzenlog.models.GeneralPost`.
Features
========
* Simple way to add new types of posts.
* All post types can be aggregated in one feed.
* Separate feed for each post type.
* Example projects which use most features of this application.
* Tagging support.
''',
keywords = 'django apps blogging',
license = 'New BSD License',
author = 'Alexander Artemenko',
author_email = '[email protected]',
url = 'http://github.com/svetlyak40wt/django-dzenlog/',
install_requires = [],
extras_require = {
'tagging': ['tagging>=0.3-pre'],
},
dependency_links = ['http://pypi.aartemenko.com', ],
classifiers=[
'Development Status :: 3 - Alpha',
'Environment :: Plugins',
'Framework :: Django',
'Intended Audience :: Developers',
'License :: OSI Approved :: BSD License',
'Programming Language :: Python',
'Topic :: Software Development :: Libraries :: Python Modules',
],
packages = find_packages(exclude=['example*']),
package_data = {
'templates': ['*.html'],
},
include_package_data = True,
zip_safe = False,
) | random_line_split |
|
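One detail worth noting: `version = __import__('django_dzenlog').__version__` imports the package at build time, so `django_dzenlog/__init__.py` must define `__version__` and be importable without Django configured. A tiny sketch of the same lookup:

```python
# Sketch: the version lookup setup.py performs at build time.
import importlib

pkg = importlib.import_module("django_dzenlog")  # assumes the package is on sys.path
print(pkg.__version__)
```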
controllers.js | var cartApp = angular.module('cartApp', []);
cartApp.controller('cartCtrl', function($scope, $http) {
$scope.refreshCart = function(cartId) {
$http.get('/webstore/rest/cart/' + $scope.cartId)
.success(function(data) {
$scope.cart = data;
});
}; | $http.delete('/webstore/rest/cart/' + $scope.cartId)
.success(function(data) {
$scope.refreshCart($scope.cartId);
});
};
$scope.initCartId = function(cartId) {
$scope.cartId = cartId;
$scope.refreshCart($scope.cartId);
};
$scope.addToCart = function(productId) {
$http.put('/webstore/rest/cart/add/' + productId)
.success(function(data) {
alert("Product Successfully added to the Cart!");
});
};
$scope.removeFromCart = function(productId) {
$http.put('/webstore/rest/cart/remove/' + productId)
.success(function(data) {
$scope.refreshCart($scope.cartId);
});
};
}); |
$scope.clearCart = function() { | random_line_split |
getRendererFrom.ts | /* eslint-disable no-underscore-dangle */
import { OPT_OUT } from '../../shared/constants';
import {
AddonOptions,
AnyFunctionReturns,
ContextNode,
GenericProp,
PropsMap,
} from '../../shared/types.d';
/**
* @private
* Aggregate component vNodes with activated props in a descending order,
* based on the given options in the contextual environment setup.
*
* @param {function} h - the associated `createElement` vNode creator from the framework
*/
type _getAggregatedWrap = <T>(
h: AnyFunctionReturns<T>
) => (
components: ContextNode['components'],
props: GenericProp | typeof OPT_OUT,
options: AddonOptions
) => AnyFunctionReturns<T>;
export const _getAggregatedWrap: _getAggregatedWrap = h => (
components,
props,
options
) => vNode => {
const last = components.length - 1;
const isSkipped =
// when set to disable
options.disable ||
// when opt-out context
(options.cancelable && props === OPT_OUT);
return isSkipped | // reverse the array to get the correct wrapping sequence (i.e. left(right))
.reverse()
.reduce((acc, C, index) => h(C, options.deep || index === last ? props : null, acc), vNode);
};
/**
* @nosideeffects
* Aggregate aggregated-components among all contextual nodes in a descending order;
* this is the core of this addon, which is based on the general virtual DOM implementation.
*
* @param {function} h - the associated `createElement` vNode creator from the framework
*/
type getRendererFrom = <T>(
h: AnyFunctionReturns<T>
) => (contextNodes: ContextNode[], propsMap: PropsMap, getStoryVNode: AnyFunctionReturns<T>) => T;
export const getRendererFrom: getRendererFrom = h => (contextNodes, propsMap, getStoryVNode) =>
contextNodes
// map over contextual nodes to get the wrapping function
.map(({ nodeId, components, options }) =>
_getAggregatedWrap(h)(components, propsMap[nodeId], options)
)
// reverse the array to get the correct wrapping sequence (i.e. top(down))
.reverse()
// stitch everything to get the final vNode
.reduce((vNode, wrap) => wrap(vNode), getStoryVNode()); | ? vNode
: components
// shallow clone the array since .reverse() is not pure
.concat() | random_line_split |
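The core trick in `_getAggregatedWrap`/`getRendererFrom` is a right-to-left fold: reverse the component list, then reduce it so the first component ends up outermost. The same shape in Python, as a language-neutral sketch:

```python
# Sketch: compose wrapper components around a story node, outermost first.
from functools import reduce

def aggregated_wrap(components, props):
    def wrap(vnode):
        return reduce(lambda acc, c: c(props, acc), reversed(components), vnode)
    return wrap

outer = lambda p, child: ("outer", p, child)
inner = lambda p, child: ("inner", p, child)
print(aggregated_wrap([outer, inner], {"theme": "dark"})("story"))
# -> ('outer', {'theme': 'dark'}, ('inner', {'theme': 'dark'}, 'story'))
```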
commands.py | # -----------------------------------------------------------------------------
# Copyright (c) 2014--, The Qiita Development Team.
#
# Distributed under the terms of the BSD 3-clause License.
#
# The full license is in the file LICENSE, distributed with this software.
# -----------------------------------------------------------------------------
from os.path import join, isdir
from shutil import rmtree
from tarfile import open as taropen
from tempfile import mkdtemp
from os import environ
from traceback import format_exc
from moi.job import system_call
from qiita_db.artifact import Artifact
from qiita_db.logger import LogEntry
from qiita_core.qiita_settings import qiita_config
from qiita_ware.ebi import EBISubmission
from qiita_ware.exceptions import ComputeError, EBISubmissionError
def submit_EBI(preprocessed_data_id, action, send):
"""Submit a preprocessed data to EBI
Parameters
----------
preprocessed_data_id : int
The preprocesssed data id
action : %s
The action to perform with this data
send : bool
True to actually send the files
"""
# step 1: init and validate
ebi_submission = EBISubmission(preprocessed_data_id, action)
# step 2: generate demux fastq files
ebi_submission.study.ebi_submission_status = 'submitting'
try:
ebi_submission.generate_demultiplexed_fastq()
except:
error_msg = format_exc()
if isdir(ebi_submission.full_ebi_dir):
rmtree(ebi_submission.full_ebi_dir)
ebi_submission.study.ebi_submission_status = 'failed: %s' % error_msg
LogEntry.create('Runtime', error_msg,
info={'ebi_submission': preprocessed_data_id})
raise
# step 3: generate and write xml files
ebi_submission.generate_xml_files()
if send:
# step 4: sending sequences
if action != 'MODIFY':
old_ascp_pass = environ.get('ASPERA_SCP_PASS', '')
environ['ASPERA_SCP_PASS'] = qiita_config.ebi_seq_xfer_pass
LogEntry.create('Runtime',
("Submitting sequences for pre_processed_id: "
"%d" % preprocessed_data_id))
try:
for cmd in ebi_submission.generate_send_sequences_cmd():
|
finally:
environ['ASPERA_SCP_PASS'] = old_ascp_pass
LogEntry.create('Runtime',
('Submission of sequences of pre_processed_id: '
'%d completed successfully' %
preprocessed_data_id))
# step 5: sending xml and parsing answer
xmls_cmds = ebi_submission.generate_curl_command()
LogEntry.create('Runtime',
("Submitting XMLs for pre_processed_id: "
"%d" % preprocessed_data_id))
try:
xml_content, stderr, _ = system_call(xmls_cmds)
except Exception as e:
xml_content = ''
stderr = str(e)
le = LogEntry.create(
'Fatal', "Command: %s\nError: %s\n" % (cmd, str(e)),
info={'ebi_submission': preprocessed_data_id})
ebi_submission.study.ebi_submission_status = (
"failed: XML submission, log id: %d" % le.id)
raise ComputeError("EBI Submission failed! Log id: %d" % le.id)
else:
LogEntry.create('Runtime',
('Submission of sequences of pre_processed_id: '
'%d completed successfully' %
preprocessed_data_id))
finally:
open(ebi_submission.curl_reply, 'w').write(
'stdout:\n%s\n\nstderr: %s' % (xml_content, stderr))
try:
st_acc, sa_acc, bio_acc, ex_acc, run_acc = \
ebi_submission.parse_EBI_reply(xml_content)
except EBISubmissionError as e:
le = LogEntry.create(
'Fatal', "Command: %s\nError: %s\n" % (xml_content, str(e)),
info={'ebi_submission': preprocessed_data_id})
ebi_submission.study.ebi_submission_status = (
"failed: XML parsing, log id: %d" % le.id)
raise ComputeError("EBI Submission failed! Log id: %d" % le.id)
ebi_submission.study.ebi_submission_status = 'submitted'
if action == 'ADD':
if st_acc:
ebi_submission.study.ebi_study_accession = st_acc
if sa_acc:
ebi_submission.sample_template.ebi_sample_accessions = sa_acc
if bio_acc:
ebi_submission.sample_template.biosample_accessions = bio_acc
if ex_acc:
ebi_submission.prep_template.ebi_experiment_accessions = ex_acc
ebi_submission.artifact.ebi_run_accessions = run_acc
else:
st_acc, sa_acc, bio_acc, ex_acc, run_acc = None, None, None, None, None
return st_acc, sa_acc, bio_acc, ex_acc, run_acc
def submit_VAMPS(artifact_id):
"""Submit artifact to VAMPS
Parameters
----------
artifact_id : int
The artifact id
Raises
------
ComputeError
- If the artifact cannot be submitted to VAMPS
- If the artifact is associated with more than one prep template
"""
artifact = Artifact(artifact_id)
if not artifact.can_be_submitted_to_vamps:
raise ComputeError("Artifact %d cannot be submitted to VAMPS"
% artifact_id)
study = artifact.study
sample_template = study.sample_template
prep_templates = artifact.prep_templates
if len(prep_templates) > 1:
raise ComputeError(
"Multiple prep templates associated with the artifact: %s"
% artifact_id)
prep_template = prep_templates[0]
# Also need to check that is not submitting (see item in #1523)
if artifact.is_submitted_to_vamps:
raise ValueError("Cannot resubmit artifact %s to VAMPS!" % artifact_id)
# Generating a tgz
targz_folder = mkdtemp(prefix=qiita_config.working_dir)
targz_fp = join(targz_folder, '%d_%d_%d.tgz' % (study.id,
prep_template.id,
artifact_id))
targz = taropen(targz_fp, mode='w:gz')
# adding sample/prep
samp_fp = join(targz_folder, 'sample_metadata.txt')
sample_template.to_file(samp_fp)
targz.add(samp_fp, arcname='sample_metadata.txt')
prep_fp = join(targz_folder, 'prep_metadata.txt')
prep_template.to_file(prep_fp)
targz.add(prep_fp, arcname='prep_metadata.txt')
# adding preprocessed data
for _, fp, fp_type in artifact.filepaths:
if fp_type == 'preprocessed_fasta':
targz.add(fp, arcname='preprocessed_fasta.fna')
targz.close()
# submitting
cmd = ("curl -F user=%s -F pass='%s' -F uploadFile=@%s -F "
"press=UploadFile %s" % (qiita_config.vamps_user,
qiita_config.vamps_pass,
targz_fp,
qiita_config.vamps_url))
obs, _, _ = system_call(cmd)
exp = ("<html>\n<head>\n<title>Process Uploaded File</title>\n</head>\n"
"<body>\n</body>\n</html>")
if obs != exp:
return False
else:
artifact.is_submitted_to_vamps = True
return True
| try:
stdout, stderr, _ = system_call(cmd)
except Exception as e:
stdout = ''
stderr = str(e)
le = LogEntry.create(
'Fatal', "Command: %s\nError: %s\n" % (cmd,
str(e)),
info={'ebi_submission': preprocessed_data_id})
ebi_submission.study.ebi_submission_status = (
"failed: ASCP submission, log id: %d" % le.id)
raise ComputeError("EBI Submission failed! Log id: "
"%d" % le.id)
finally:
open(ebi_submission.ascp_reply, 'a').write(
'stdout:\n%s\n\nstderr: %s' % (stdout, stderr)) | conditional_block |
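`submit_VAMPS` above bundles the sample/prep templates and the preprocessed FASTA into a tgz before uploading. The same bundling step in isolation, stdlib only (sketch; the file paths are placeholders):

```python
# Sketch: the tgz bundling used by submit_VAMPS.
from os.path import join
from tarfile import open as taropen
from tempfile import mkdtemp

folder = mkdtemp()
targz_fp = join(folder, "1_2_3.tgz")
with taropen(targz_fp, mode="w:gz") as targz:
    targz.add("sample_metadata.txt", arcname="sample_metadata.txt")
    targz.add("prep_metadata.txt", arcname="prep_metadata.txt")
    targz.add("seqs.fna", arcname="preprocessed_fasta.fna")
print(targz_fp)
```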
commands.py | # -----------------------------------------------------------------------------
# Copyright (c) 2014--, The Qiita Development Team.
#
# Distributed under the terms of the BSD 3-clause License.
#
# The full license is in the file LICENSE, distributed with this software.
# -----------------------------------------------------------------------------
from os.path import join, isdir
from shutil import rmtree
from tarfile import open as taropen
from tempfile import mkdtemp
from os import environ
from traceback import format_exc
from moi.job import system_call
from qiita_db.artifact import Artifact
from qiita_db.logger import LogEntry
from qiita_core.qiita_settings import qiita_config
from qiita_ware.ebi import EBISubmission
from qiita_ware.exceptions import ComputeError, EBISubmissionError
def submit_EBI(preprocessed_data_id, action, send):
"""Submit a preprocessed data to EBI
Parameters
----------
preprocessed_data_id : int
The preprocesssed data id
action : %s
The action to perform with this data
send : bool
True to actually send the files
"""
# step 1: init and validate
ebi_submission = EBISubmission(preprocessed_data_id, action)
# step 2: generate demux fastq files
ebi_submission.study.ebi_submission_status = 'submitting'
try:
ebi_submission.generate_demultiplexed_fastq()
except:
error_msg = format_exc()
if isdir(ebi_submission.full_ebi_dir):
rmtree(ebi_submission.full_ebi_dir)
ebi_submission.study.ebi_submission_status = 'failed: %s' % error_msg
LogEntry.create('Runtime', error_msg,
info={'ebi_submission': preprocessed_data_id})
raise
# step 3: generate and write xml files
ebi_submission.generate_xml_files()
if send:
# step 4: sending sequences
if action != 'MODIFY':
old_ascp_pass = environ.get('ASPERA_SCP_PASS', '')
environ['ASPERA_SCP_PASS'] = qiita_config.ebi_seq_xfer_pass
LogEntry.create('Runtime',
("Submitting sequences for pre_processed_id: "
"%d" % preprocessed_data_id))
try:
for cmd in ebi_submission.generate_send_sequences_cmd():
try:
stdout, stderr, _ = system_call(cmd)
except Exception as e:
stdout = ''
stderr = str(e)
le = LogEntry.create(
'Fatal', "Command: %s\nError: %s\n" % (cmd,
str(e)),
info={'ebi_submission': preprocessed_data_id})
ebi_submission.study.ebi_submission_status = (
"failed: ASCP submission, log id: %d" % le.id)
raise ComputeError("EBI Submission failed! Log id: "
"%d" % le.id)
finally:
open(ebi_submission.ascp_reply, 'a').write(
'stdout:\n%s\n\nstderr: %s' % (stdout, stderr))
finally:
environ['ASPERA_SCP_PASS'] = old_ascp_pass
LogEntry.create('Runtime',
('Submission of sequences of pre_processed_id: '
'%d completed successfully' %
preprocessed_data_id))
# step 5: sending xml and parsing answer
xmls_cmds = ebi_submission.generate_curl_command()
LogEntry.create('Runtime',
("Submitting XMLs for pre_processed_id: "
"%d" % preprocessed_data_id))
try:
xml_content, stderr, _ = system_call(xmls_cmds)
except Exception as e:
xml_content = ''
stderr = str(e)
le = LogEntry.create(
'Fatal', "Command: %s\nError: %s\n" % (cmd, str(e)),
info={'ebi_submission': preprocessed_data_id})
ebi_submission.study.ebi_submission_status = (
"failed: XML submission, log id: %d" % le.id)
raise ComputeError("EBI Submission failed! Log id: %d" % le.id)
else:
LogEntry.create('Runtime',
('Submission of sequences of pre_processed_id: '
'%d completed successfully' %
preprocessed_data_id))
finally:
open(ebi_submission.curl_reply, 'w').write(
'stdout:\n%s\n\nstderr: %s' % (xml_content, stderr))
try:
st_acc, sa_acc, bio_acc, ex_acc, run_acc = \
ebi_submission.parse_EBI_reply(xml_content)
except EBISubmissionError as e:
le = LogEntry.create(
'Fatal', "Command: %s\nError: %s\n" % (xml_content, str(e)),
info={'ebi_submission': preprocessed_data_id})
ebi_submission.study.ebi_submission_status = (
"failed: XML parsing, log id: %d" % le.id)
raise ComputeError("EBI Submission failed! Log id: %d" % le.id)
ebi_submission.study.ebi_submission_status = 'submitted'
if action == 'ADD':
if st_acc:
ebi_submission.study.ebi_study_accession = st_acc
if sa_acc:
ebi_submission.sample_template.ebi_sample_accessions = sa_acc
if bio_acc:
ebi_submission.sample_template.biosample_accessions = bio_acc
if ex_acc:
ebi_submission.prep_template.ebi_experiment_accessions = ex_acc
ebi_submission.artifact.ebi_run_accessions = run_acc
else:
st_acc, sa_acc, bio_acc, ex_acc, run_acc = None, None, None, None, None
return st_acc, sa_acc, bio_acc, ex_acc, run_acc
def submit_VAMPS(artifact_id):
| if len(prep_templates) > 1:
raise ComputeError(
"Multiple prep templates associated with the artifact: %s"
% artifact_id)
prep_template = prep_templates[0]
# Also need to check that is not submitting (see item in #1523)
if artifact.is_submitted_to_vamps:
raise ValueError("Cannot resubmit artifact %s to VAMPS!" % artifact_id)
# Generating a tgz
targz_folder = mkdtemp(prefix=qiita_config.working_dir)
targz_fp = join(targz_folder, '%d_%d_%d.tgz' % (study.id,
prep_template.id,
artifact_id))
targz = taropen(targz_fp, mode='w:gz')
# adding sample/prep
samp_fp = join(targz_folder, 'sample_metadata.txt')
sample_template.to_file(samp_fp)
targz.add(samp_fp, arcname='sample_metadata.txt')
prep_fp = join(targz_folder, 'prep_metadata.txt')
prep_template.to_file(prep_fp)
targz.add(prep_fp, arcname='prep_metadata.txt')
# adding preprocessed data
for _, fp, fp_type in artifact.filepaths:
if fp_type == 'preprocessed_fasta':
targz.add(fp, arcname='preprocessed_fasta.fna')
targz.close()
# submitting
cmd = ("curl -F user=%s -F pass='%s' -F uploadFile=@%s -F "
"press=UploadFile %s" % (qiita_config.vamps_user,
qiita_config.vamps_pass,
targz_fp,
qiita_config.vamps_url))
obs, _, _ = system_call(cmd)
exp = ("<html>\n<head>\n<title>Process Uploaded File</title>\n</head>\n"
"<body>\n</body>\n</html>")
if obs != exp:
return False
else:
artifact.is_submitted_to_vamps = True
return True
| """Submit artifact to VAMPS
Parameters
----------
artifact_id : int
The artifact id
Raises
------
ComputeError
- If the artifact cannot be submitted to VAMPS
- If the artifact is associated with more than one prep template
"""
artifact = Artifact(artifact_id)
if not artifact.can_be_submitted_to_vamps:
raise ComputeError("Artifact %d cannot be submitted to VAMPS"
% artifact_id)
study = artifact.study
sample_template = study.sample_template
prep_templates = artifact.prep_templates | identifier_body |
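The curl command assembled in `submit_VAMPS` is a multipart form upload; an equivalent sketch with `requests` (the form field names mirror the command above, everything else is a placeholder):

```python
# Sketch: requests-based equivalent of the curl upload in submit_VAMPS.
import requests

with open("1_2_3.tgz", "rb") as fh:  # placeholder archive
    resp = requests.post(
        "https://vamps.example.org/upload",  # placeholder URL
        data={"user": "someuser", "pass": "secret", "press": "UploadFile"},
        files={"uploadFile": fh},
    )
print(resp.status_code)
```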
commands.py | # -----------------------------------------------------------------------------
# Copyright (c) 2014--, The Qiita Development Team.
#
# Distributed under the terms of the BSD 3-clause License.
#
# The full license is in the file LICENSE, distributed with this software.
# -----------------------------------------------------------------------------
from os.path import join, isdir
from shutil import rmtree
from tarfile import open as taropen
from tempfile import mkdtemp
from os import environ
from traceback import format_exc
from moi.job import system_call
from qiita_db.artifact import Artifact
from qiita_db.logger import LogEntry
from qiita_core.qiita_settings import qiita_config
from qiita_ware.ebi import EBISubmission
from qiita_ware.exceptions import ComputeError, EBISubmissionError
def submit_EBI(preprocessed_data_id, action, send):
"""Submit a preprocessed data to EBI
Parameters
----------
preprocessed_data_id : int
The preprocesssed data id
action : %s
The action to perform with this data
send : bool
True to actually send the files
"""
# step 1: init and validate
ebi_submission = EBISubmission(preprocessed_data_id, action)
# step 2: generate demux fastq files
ebi_submission.study.ebi_submission_status = 'submitting'
try:
ebi_submission.generate_demultiplexed_fastq()
except:
error_msg = format_exc()
if isdir(ebi_submission.full_ebi_dir):
rmtree(ebi_submission.full_ebi_dir)
ebi_submission.study.ebi_submission_status = 'failed: %s' % error_msg
LogEntry.create('Runtime', error_msg,
info={'ebi_submission': preprocessed_data_id})
raise
# step 3: generate and write xml files
ebi_submission.generate_xml_files()
if send:
# step 4: sending sequences
if action != 'MODIFY':
old_ascp_pass = environ.get('ASPERA_SCP_PASS', '')
environ['ASPERA_SCP_PASS'] = qiita_config.ebi_seq_xfer_pass
LogEntry.create('Runtime',
("Submitting sequences for pre_processed_id: "
"%d" % preprocessed_data_id))
try:
for cmd in ebi_submission.generate_send_sequences_cmd():
try:
stdout, stderr, _ = system_call(cmd)
except Exception as e:
stdout = ''
stderr = str(e)
le = LogEntry.create(
'Fatal', "Command: %s\nError: %s\n" % (cmd,
str(e)),
info={'ebi_submission': preprocessed_data_id})
ebi_submission.study.ebi_submission_status = (
"failed: ASCP submission, log id: %d" % le.id)
raise ComputeError("EBI Submission failed! Log id: "
"%d" % le.id)
finally:
open(ebi_submission.ascp_reply, 'a').write(
'stdout:\n%s\n\nstderr: %s' % (stdout, stderr))
finally:
environ['ASPERA_SCP_PASS'] = old_ascp_pass
LogEntry.create('Runtime',
('Submission of sequences of pre_processed_id: '
'%d completed successfully' %
preprocessed_data_id))
# step 5: sending xml and parsing answer
xmls_cmds = ebi_submission.generate_curl_command()
LogEntry.create('Runtime',
("Submitting XMLs for pre_processed_id: "
"%d" % preprocessed_data_id))
try:
xml_content, stderr, _ = system_call(xmls_cmds)
except Exception as e:
xml_content = ''
stderr = str(e)
le = LogEntry.create(
'Fatal', "Command: %s\nError: %s\n" % (cmd, str(e)),
info={'ebi_submission': preprocessed_data_id})
ebi_submission.study.ebi_submission_status = (
"failed: XML submission, log id: %d" % le.id)
raise ComputeError("EBI Submission failed! Log id: %d" % le.id)
else:
LogEntry.create('Runtime',
('Submission of sequences of pre_processed_id: '
'%d completed successfully' %
preprocessed_data_id))
finally:
open(ebi_submission.curl_reply, 'w').write(
'stdout:\n%s\n\nstderr: %s' % (xml_content, stderr))
try:
st_acc, sa_acc, bio_acc, ex_acc, run_acc = \
ebi_submission.parse_EBI_reply(xml_content)
except EBISubmissionError as e:
le = LogEntry.create(
'Fatal', "Command: %s\nError: %s\n" % (xml_content, str(e)),
info={'ebi_submission': preprocessed_data_id})
ebi_submission.study.ebi_submission_status = (
"failed: XML parsing, log id: %d" % le.id)
raise ComputeError("EBI Submission failed! Log id: %d" % le.id)
| ebi_submission.study.ebi_study_accession = st_acc
if sa_acc:
ebi_submission.sample_template.ebi_sample_accessions = sa_acc
if bio_acc:
ebi_submission.sample_template.biosample_accessions = bio_acc
if ex_acc:
ebi_submission.prep_template.ebi_experiment_accessions = ex_acc
ebi_submission.artifact.ebi_run_accessions = run_acc
else:
st_acc, sa_acc, bio_acc, ex_acc, run_acc = None, None, None, None, None
return st_acc, sa_acc, bio_acc, ex_acc, run_acc
def submit_VAMPS(artifact_id):
"""Submit artifact to VAMPS
Parameters
----------
artifact_id : int
The artifact id
Raises
------
ComputeError
- If the artifact cannot be submitted to VAMPS
- If the artifact is associated with more than one prep template
"""
artifact = Artifact(artifact_id)
if not artifact.can_be_submitted_to_vamps:
raise ComputeError("Artifact %d cannot be submitted to VAMPS"
% artifact_id)
study = artifact.study
sample_template = study.sample_template
prep_templates = artifact.prep_templates
if len(prep_templates) > 1:
raise ComputeError(
"Multiple prep templates associated with the artifact: %s"
% artifact_id)
prep_template = prep_templates[0]
# Also need to check that is not submitting (see item in #1523)
if artifact.is_submitted_to_vamps:
raise ValueError("Cannot resubmit artifact %s to VAMPS!" % artifact_id)
# Generating a tgz
targz_folder = mkdtemp(prefix=qiita_config.working_dir)
targz_fp = join(targz_folder, '%d_%d_%d.tgz' % (study.id,
prep_template.id,
artifact_id))
targz = taropen(targz_fp, mode='w:gz')
# adding sample/prep
samp_fp = join(targz_folder, 'sample_metadata.txt')
sample_template.to_file(samp_fp)
targz.add(samp_fp, arcname='sample_metadata.txt')
prep_fp = join(targz_folder, 'prep_metadata.txt')
prep_template.to_file(prep_fp)
targz.add(prep_fp, arcname='prep_metadata.txt')
# adding preprocessed data
for _, fp, fp_type in artifact.filepaths:
if fp_type == 'preprocessed_fasta':
targz.add(fp, arcname='preprocessed_fasta.fna')
targz.close()
# submitting
cmd = ("curl -F user=%s -F pass='%s' -F uploadFile=@%s -F "
"press=UploadFile %s" % (qiita_config.vamps_user,
qiita_config.vamps_pass,
targz_fp,
qiita_config.vamps_url))
obs, _, _ = system_call(cmd)
exp = ("<html>\n<head>\n<title>Process Uploaded File</title>\n</head>\n"
"<body>\n</body>\n</html>")
if obs != exp:
return False
else:
artifact.is_submitted_to_vamps = True
return True | ebi_submission.study.ebi_submission_status = 'submitted'
if action == 'ADD':
if st_acc: | random_line_split |
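`moi.job.system_call` is used throughout as a shell helper returning `(stdout, stderr, return_value)`; when reading this code without moi installed, a subprocess-based stand-in behaves similarly (sketch; the real helper may differ in error handling):

```python
# Sketch: a minimal stand-in for moi.job.system_call.
import subprocess

def system_call(cmd):
    proc = subprocess.run(cmd, shell=True, capture_output=True, text=True)
    return proc.stdout, proc.stderr, proc.returncode

out, err, rc = system_call("echo hello")
print(out.strip(), rc)
```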
commands.py | # -----------------------------------------------------------------------------
# Copyright (c) 2014--, The Qiita Development Team.
#
# Distributed under the terms of the BSD 3-clause License.
#
# The full license is in the file LICENSE, distributed with this software.
# -----------------------------------------------------------------------------
from os.path import join, isdir
from shutil import rmtree
from tarfile import open as taropen
from tempfile import mkdtemp
from os import environ
from traceback import format_exc
from moi.job import system_call
from qiita_db.artifact import Artifact
from qiita_db.logger import LogEntry
from qiita_core.qiita_settings import qiita_config
from qiita_ware.ebi import EBISubmission
from qiita_ware.exceptions import ComputeError, EBISubmissionError
def submit_EBI(preprocessed_data_id, action, send):
"""Submit a preprocessed data to EBI
Parameters
----------
preprocessed_data_id : int
The preprocesssed data id
action : %s
The action to perform with this data
send : bool
True to actually send the files
"""
# step 1: init and validate
ebi_submission = EBISubmission(preprocessed_data_id, action)
# step 2: generate demux fastq files
ebi_submission.study.ebi_submission_status = 'submitting'
try:
ebi_submission.generate_demultiplexed_fastq()
except:
error_msg = format_exc()
if isdir(ebi_submission.full_ebi_dir):
rmtree(ebi_submission.full_ebi_dir)
ebi_submission.study.ebi_submission_status = 'failed: %s' % error_msg
LogEntry.create('Runtime', error_msg,
info={'ebi_submission': preprocessed_data_id})
raise
# step 3: generate and write xml files
ebi_submission.generate_xml_files()
if send:
# step 4: sending sequences
if action != 'MODIFY':
old_ascp_pass = environ.get('ASPERA_SCP_PASS', '')
environ['ASPERA_SCP_PASS'] = qiita_config.ebi_seq_xfer_pass
LogEntry.create('Runtime',
("Submitting sequences for pre_processed_id: "
"%d" % preprocessed_data_id))
try:
for cmd in ebi_submission.generate_send_sequences_cmd():
try:
stdout, stderr, _ = system_call(cmd)
except Exception as e:
stdout = ''
stderr = str(e)
le = LogEntry.create(
'Fatal', "Command: %s\nError: %s\n" % (cmd,
str(e)),
info={'ebi_submission': preprocessed_data_id})
ebi_submission.study.ebi_submission_status = (
"failed: ASCP submission, log id: %d" % le.id)
raise ComputeError("EBI Submission failed! Log id: "
"%d" % le.id)
finally:
open(ebi_submission.ascp_reply, 'a').write(
'stdout:\n%s\n\nstderr: %s' % (stdout, stderr))
finally:
environ['ASPERA_SCP_PASS'] = old_ascp_pass
LogEntry.create('Runtime',
('Submission of sequences of pre_processed_id: '
'%d completed successfully' %
preprocessed_data_id))
# step 5: sending xml and parsing answer
xmls_cmds = ebi_submission.generate_curl_command()
LogEntry.create('Runtime',
("Submitting XMLs for pre_processed_id: "
"%d" % preprocessed_data_id))
try:
xml_content, stderr, _ = system_call(xmls_cmds)
except Exception as e:
xml_content = ''
stderr = str(e)
le = LogEntry.create(
'Fatal', "Command: %s\nError: %s\n" % (cmd, str(e)),
info={'ebi_submission': preprocessed_data_id})
ebi_submission.study.ebi_submission_status = (
"failed: XML submission, log id: %d" % le.id)
raise ComputeError("EBI Submission failed! Log id: %d" % le.id)
else:
LogEntry.create('Runtime',
('Submission of sequences of pre_processed_id: '
'%d completed successfully' %
preprocessed_data_id))
finally:
open(ebi_submission.curl_reply, 'w').write(
'stdout:\n%s\n\nstderr: %s' % (xml_content, stderr))
try:
st_acc, sa_acc, bio_acc, ex_acc, run_acc = \
ebi_submission.parse_EBI_reply(xml_content)
except EBISubmissionError as e:
le = LogEntry.create(
'Fatal', "Command: %s\nError: %s\n" % (xml_content, str(e)),
info={'ebi_submission': preprocessed_data_id})
ebi_submission.study.ebi_submission_status = (
"failed: XML parsing, log id: %d" % le.id)
raise ComputeError("EBI Submission failed! Log id: %d" % le.id)
ebi_submission.study.ebi_submission_status = 'submitted'
if action == 'ADD':
if st_acc:
ebi_submission.study.ebi_study_accession = st_acc
if sa_acc:
ebi_submission.sample_template.ebi_sample_accessions = sa_acc
if bio_acc:
ebi_submission.sample_template.biosample_accessions = bio_acc
if ex_acc:
ebi_submission.prep_template.ebi_experiment_accessions = ex_acc
ebi_submission.artifact.ebi_run_accessions = run_acc
else:
st_acc, sa_acc, bio_acc, ex_acc, run_acc = None, None, None, None, None
return st_acc, sa_acc, bio_acc, ex_acc, run_acc
def | (artifact_id):
"""Submit artifact to VAMPS
Parameters
----------
artifact_id : int
The artifact id
Raises
------
ComputeError
- If the artifact cannot be submitted to VAMPS
- If the artifact is associated with more than one prep template
"""
artifact = Artifact(artifact_id)
if not artifact.can_be_submitted_to_vamps:
raise ComputeError("Artifact %d cannot be submitted to VAMPS"
% artifact_id)
study = artifact.study
sample_template = study.sample_template
prep_templates = artifact.prep_templates
if len(prep_templates) > 1:
raise ComputeError(
"Multiple prep templates associated with the artifact: %s"
% artifact_id)
prep_template = prep_templates[0]
# Also need to check that is not submitting (see item in #1523)
if artifact.is_submitted_to_vamps:
raise ValueError("Cannot resubmit artifact %s to VAMPS!" % artifact_id)
# Generating a tgz
targz_folder = mkdtemp(prefix=qiita_config.working_dir)
targz_fp = join(targz_folder, '%d_%d_%d.tgz' % (study.id,
prep_template.id,
artifact_id))
targz = taropen(targz_fp, mode='w:gz')
# adding sample/prep
samp_fp = join(targz_folder, 'sample_metadata.txt')
sample_template.to_file(samp_fp)
targz.add(samp_fp, arcname='sample_metadata.txt')
prep_fp = join(targz_folder, 'prep_metadata.txt')
prep_template.to_file(prep_fp)
targz.add(prep_fp, arcname='prep_metadata.txt')
# adding preprocessed data
for _, fp, fp_type in artifact.filepaths:
if fp_type == 'preprocessed_fasta':
targz.add(fp, arcname='preprocessed_fasta.fna')
targz.close()
# submitting
cmd = ("curl -F user=%s -F pass='%s' -F uploadFile=@%s -F "
"press=UploadFile %s" % (qiita_config.vamps_user,
qiita_config.vamps_pass,
targz_fp,
qiita_config.vamps_url))
obs, _, _ = system_call(cmd)
exp = ("<html>\n<head>\n<title>Process Uploaded File</title>\n</head>\n"
"<body>\n</body>\n</html>")
if obs != exp:
return False
else:
artifact.is_submitted_to_vamps = True
return True
| submit_VAMPS | identifier_name |
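The save/overwrite/restore dance around `ASPERA_SCP_PASS` in `submit_EBI` is a pattern worth factoring out; a reusable sketch with `contextlib`:

```python
# Sketch: temporarily override an environment variable, restoring it afterwards.
import os
from contextlib import contextmanager

@contextmanager
def env_var(name, value):
    old = os.environ.get(name, "")
    os.environ[name] = value
    try:
        yield
    finally:
        os.environ[name] = old

with env_var("ASPERA_SCP_PASS", "s3cret"):
    pass  # run the transfer commands here
```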
utils.py | # -*- coding: utf-8 -*-
import re
from django.utils.translation import ugettext_lazy as _
from .router import Patterns
HTTP_STATUS_CODES = (
# Informational
(100, _('Continue')),
(101, _('Switching Protocols')),
(102, _('Processing (WebDAV)')),
# Success
(200, _('OK')),
(201, _('Created')),
(202, _('Accepted')),
(203, _('Non-Authoritative Information')),
(204, _('No Content')),
(205, _('Reset Content')),
(206, _('Partial Content')),
(207, _('Multi-Status (WebDAV)')),
# Redirection
(300, _('Multiple Choices')),
(301, _('Moved Permanently')),
(302, _('Found')),
(303, _('See Other')),
(304, _('Not Modified')),
(305, _('Use Proxy')),
(306, _('Switch Proxy')), # No longer used
(307, _('Temporary Redirect')),
# Client Error
(400, _('Bad Request')),
(401, _('Unauthorized')),
(402, _('Payment Required')),
(403, _('Forbidden')),
(404, _('Not Found')),
(405, _('Method Not Allowed')),
(406, _('Not Acceptable')),
(407, _('Proxy Authentication Required')),
(408, _('Request Timeout')),
(409, _('Conflict')),
(410, _('Gone')),
(411, _('Length Required')),
(412, _('Precondition Failed')),
(413, _('Request Entity Too Large')),
(414, _('Request-URI Too Long')),
(415, _('Unsupported Media Type')),
(416, _('Requested Range Not Satisfiable')),
(417, _('Expectation Failed')),
(418, _('I\'m a teapot')), # April Fools
(422, _('Unprocessable Entity (WebDAV)')),
(423, _('Locked (WebDAV)')),
(424, _('Failed Dependency (WebDAV)')),
(425, _('Unordered Collection')),
(426, _('Upgrade Required')),
(449, _('Retry With')),
# Server Error
(500, _('Internal Server Error')),
(501, _('Not Implemented')),
(502, _('Bad Gateway')),
(503, _('Service Unavailable')),
(504, _('Gateway Timeout')),
(505, _('HTTP Version Not Supported')),
(506, _('Variant Also Negotiates')),
(507, _('Insufficient Storage (WebDAV)')),
(509, _('Bandwidth Limit Exceeded')),
(510, _('Not Extended')),
)
browsers = Patterns(
('Unknown', {}),
# Browsers
(r'AOL (?P<version>[\d+\.\d+]+)', 'AOL'),
(
r'Mozilla/(?P<mozilla_version>[-.\w]+) \(compatible; ( ?)MSIE (?P<msie_version>[-.\w]+); ' +
r'( ?)( ?)America Online Browser (?P<version>[-.\w]+);',
'AOL',
),
(r'Camino/(?P<version>[-.\w]+)', 'Camino'),
(r'Chrome/(?P<version>[-.\w]+)', 'Google Chrome'),
(r'Firefox(/(?P<version>[-.\w]+)?)', 'Firefox'),
(
r'Mozilla/(?P<mozilla_version>[-.\w]+) \(compatible; ( ?)MSIE (?P<version>[-.\w]+); ' +
r'( ?)( ?)(Win|Mac)',
'Internet Explorer',
),
(r'Konqueror/(?P<version>[-.\w]+)', 'Konqueror'),
(r'Opera( |/)(?P<version>[-.\w]+)', 'Opera'),
(r'OmniWeb(/(?P<version>[-.\w]+)?)', 'OmniWeb'),
(r'Safari/(?P<version>[-.\w]+)', 'Safari'),
(r'(Netscape([\d]?)|Navigator)/(?P<version>[-.\w]+)', 'Netscape'),
(r'Wget/(?P<version>[-.\w]+)', 'Wget'),
(r'Minefield(/(?P<version>[-.\w]+)?)', 'Firefox'), # Firefox nightly trunk builds
(r'Shiretoko(/(?P<version>[-.\w]+)?)', 'Firefox'), # Firefox testing browser
(r'GranParadiso(/(?P<version>[-.\w]+)?)', 'Firefox'), # Firefox testing browser
(r'Iceweasel(/(?P<version>[-.\w]+)?)', 'Firefox'), # Debian re-branded firefox
# RSS Reader
(r'(NetNewsWire|NewsGatorOnline)/(?P<version>[-.\w]+)', 'NetNewsWire'),
(r'Feedfetcher-Google', 'Google Reader'),
# Bots
(r'Googlebot', 'Google'),
(r'Yahoo! Slurp', 'Yahoo'),
(r'msnbot', 'MSN Bot'),
(r'(Baiduspider|BaiduImagespider)', 'Baiduspider'),
(r'Ask Jeeves', 'Ask Jeeves'),
(r'FollowSite', 'FollowSite'),
(r'WebAlta Crawler', 'WebAlta Crawler'),
(r'ScoutJet', 'ScoutJet'),
(r'SurveyBot', 'domaintools.com'),
(r'Gigabot', 'Gigabot'),
(r'Speedy Spider', 'entireweb'),
(r'discobot', 'Discovery Engine'),
(r'Purebot(/(?P<version>[-.\w]+)?);', 'Purity search'),
(r'Yandex(/(?P<version>[-.\w]+)?)', 'Yandex'),
(r'PostRank(/(?P<version>[-.\w]+)?)', 'PostRank'),
(
r'Mozilla/(?P<mozilla_version>[-.\w]+) \(compatible; DotBot/(?P<version>[-.\w]+); ' +
r'http://www.dotnetdotcom.org/, [email protected]\)',
'Dotbot',
),
(r'IrssiUrlLog(/(?P<version>[-.\w]+)?)', 'irssi'),
(r'Linguee Bot \(http://www.linguee.com/bot; [email protected]\)', 'Linguee'),
(r'Sphider', 'Sphider'),
# Other
(r'Mediapartners-Google', 'Google Ads'),
(r'Apple-PubSub', 'Apple-PubSub'),
(r'Python-urllib', 'Python'),
)
engines = Patterns(
None,
(r'^https?:\/\/([\.\w]+)?yahoo.*(?:&|\?)p=(?P<keywords>[\+-_\w]+)', 'Yahoo'),
(r'^https?:\/\/([\.\w]+)?google.*(?:&|\?)q=(?P<keywords>[\+-_\w]+)', 'Google'),
(r'^https?:\/\/([\.\w]+)?bing.*(?:&|\?)q=(?P<keywords>[\+-_\w]+)', 'Bing'),
)
def get_verbose_name(class_name):
| '''
Calculate the verbose_name by converting from InitialCaps to
"lowercase with spaces".
'''
return re.sub(
'(((?<=[a-z])[A-Z])|([A-Z](?![A-Z]|$)))',
' \\1',
class_name,
).strip() | identifier_body |
|
utils.py | # -*- coding: utf-8 -*-
import re
from django.utils.translation import ugettext_lazy as _
from .router import Patterns
HTTP_STATUS_CODES = (
# Informational
(100, _('Continue')),
(101, _('Switching Protocols')),
(102, _('Processing (WebDAV)')),
# Success
(200, _('OK')),
(201, _('Created')),
(202, _('Accepted')),
(203, _('Non-Authoritative Information')),
(204, _('No Content')),
(205, _('Reset Content')),
(206, _('Partial Content')),
(207, _('Multi-Status (WebDAV)')),
# Redirection
(300, _('Multiple Choices')),
(301, _('Moved Permanently')),
(302, _('Found')),
(303, _('See Other')),
(304, _('Not Modified')),
(305, _('Use Proxy')),
(306, _('Switch Proxy')), # No longer used
(307, _('Temporary Redirect')),
# Client Error
(400, _('Bad Request')),
(401, _('Unauthorized')),
(402, _('Payment Required')),
(403, _('Forbidden')),
(404, _('Not Found')),
(405, _('Method Not Allowed')),
(406, _('Not Acceptable')),
(407, _('Proxy Authentication Required')),
(408, _('Request Timeout')),
(409, _('Conflict')),
(410, _('Gone')),
(411, _('Length Required')),
(412, _('Precondition Failed')),
(413, _('Request Entity Too Large')),
(414, _('Request-URI Too Long')),
(415, _('Unsupported Media Type')),
(416, _('Requested Range Not Satisfiable')),
(417, _('Expectation Failed')),
(418, _('I\'m a teapot')), # April Fools
(422, _('Unprocessable Entity (WebDAV)')),
(423, _('Locked (WebDAV)')),
(424, _('Failed Dependency (WebDAV)')),
(425, _('Unordered Collection')),
(426, _('Upgrade Required')),
(449, _('Retry With')),
# Server Error
(500, _('Internal Server Error')),
(501, _('Not Implemented')),
(502, _('Bad Gateway')),
(503, _('Service Unavailable')),
(504, _('Gateway Timeout')),
(505, _('HTTP Version Not Supported')),
(506, _('Variant Also Negotiates')),
(507, _('Insufficient Storage (WebDAV)')),
(509, _('Bandwidth Limit Exceeded')),
(510, _('Not Extended')),
)
browsers = Patterns(
('Unknown', {}),
# Browsers
(r'AOL (?P<version>[\d+\.\d+]+)', 'AOL'),
(
r'Mozilla/(?P<mozilla_version>[-.\w]+) \(compatible; ( ?)MSIE (?P<msie_version>[-.\w]+); ' +
r'( ?)( ?)America Online Browser (?P<version>[-.\w]+);',
'AOL',
),
(r'Camino/(?P<version>[-.\w]+)', 'Camino'),
(r'Chrome/(?P<version>[-.\w]+)', 'Google Chrome'),
(r'Firefox(/(?P<version>[-.\w]+)?)', 'Firefox'),
(
r'Mozilla/(?P<mozilla_version>[-.\w]+) \(compatible; ( ?)MSIE (?P<version>[-.\w]+); ' +
r'( ?)( ?)(Win|Mac)',
'Internet Explorer',
),
(r'Konqueror/(?P<version>[-.\w]+)', 'Konqueror'),
(r'Opera( |/)(?P<version>[-.\w]+)', 'Opera'),
(r'OmniWeb(/(?P<version>[-.\w]+)?)', 'OmniWeb'),
(r'Safari/(?P<version>[-.\w]+)', 'Safari'),
(r'(Netscape([\d]?)|Navigator)/(?P<version>[-.\w]+)', 'Netscape'),
(r'Wget/(?P<version>[-.\w]+)', 'Wget'),
(r'Minefield(/(?P<version>[-.\w]+)?)', 'Firefox'), # Firefox nightly trunk builds
(r'Shiretoko(/(?P<version>[-.\w]+)?)', 'Firefox'), # Firefox testing browser
(r'GranParadiso(/(?P<version>[-.\w]+)?)', 'Firefox'), # Firefox testing browser
(r'Iceweasel(/(?P<version>[-.\w]+)?)', 'Firefox'), # Debian re-branded firefox
# RSS Reader
(r'(NetNewsWire|NewsGatorOnline)/(?P<version>[-.\w]+)', 'NetNewsWire'),
(r'Feedfetcher-Google', 'Google Reader'),
# Bots
(r'Googlebot', 'Google'),
(r'Yahoo! Slurp', 'Yahoo'),
(r'msnbot', 'MSN Bot'),
(r'(Baiduspider|BaiduImagespider)', 'Baiduspider'),
(r'Ask Jeeves', 'Ask Jeeves'),
(r'FollowSite', 'FollowSite'),
(r'WebAlta Crawler', 'WebAlta Crawler'),
(r'ScoutJet', 'ScoutJet'),
(r'SurveyBot', 'domaintools.com'),
(r'Gigabot', 'Gigabot'),
(r'Speedy Spider', 'entireweb'),
(r'discobot', 'Discovery Engine'),
(r'Purebot(/(?P<version>[-.\w]+)?);', 'Purity search'),
(r'Yandex(/(?P<version>[-.\w]+)?)', 'Yandex'),
(r'PostRank(/(?P<version>[-.\w]+)?)', 'PostRank'),
(
r'Mozilla/(?P<mozilla_version>[-.\w]+) \(compatible; DotBot/(?P<version>[-.\w]+); ' +
r'http://www.dotnetdotcom.org/, [email protected]\)',
'Dotbot',
),
(r'IrssiUrlLog(/(?P<version>[-.\w]+)?)', 'irssi'),
(r'Linguee Bot \(http://www.linguee.com/bot; [email protected]\)', 'Linguee'),
(r'Sphider', 'Sphider'),
# Other
(r'Mediapartners-Google', 'Google Ads'),
(r'Apple-PubSub', 'Apple-PubSub'),
(r'Python-urllib', 'Python'),
)
engines = Patterns(
None,
(r'^https?:\/\/([\.\w]+)?yahoo.*(?:&|\?)p=(?P<keywords>[\+-_\w]+)', 'Yahoo'),
(r'^https?:\/\/([\.\w]+)?google.*(?:&|\?)q=(?P<keywords>[\+-_\w]+)', 'Google'),
(r'^https?:\/\/([\.\w]+)?bing.*(?:&|\?)q=(?P<keywords>[\+-_\w]+)', 'Bing'),
)
def | (class_name):
'''
Calculate the verbose_name by converting from InitialCaps to
"lowercase with spaces".
'''
return re.sub(
'(((?<=[a-z])[A-Z])|([A-Z](?![A-Z]|$)))',
' \\1',
class_name,
).strip()
| get_verbose_name | identifier_name |
utils.py | # -*- coding: utf-8 -*-
import re
from django.utils.translation import ugettext_lazy as _
from .router import Patterns
HTTP_STATUS_CODES = (
# Informational
(100, _('Continue')),
(101, _('Switching Protocols')),
(102, _('Processing (WebDAV)')),
# Success
(200, _('OK')),
(201, _('Created')),
(202, _('Accepted')),
(203, _('Non-Authoritative Information')),
(204, _('No Content')),
(205, _('Reset Content')),
(206, _('Partial Content')),
(207, _('Multi-Status (WebDAV)')),
# Redirection
(300, _('Multiple Choices')),
(301, _('Moved Permanently')),
(302, _('Found')),
(303, _('See Other')),
(304, _('Not Modified')),
(305, _('Use Proxy')),
(306, _('Switch Proxy')), # No longer used
(307, _('Temporary Redirect')),
# Client Error
(400, _('Bad Request')),
(401, _('Unauthorized')),
(402, _('Payment Required')),
(403, _('Forbidden')),
(404, _('Not Found')),
(405, _('Method Not Allowed')),
(406, _('Not Acceptable')),
(407, _('Proxy Authentication Required')),
(408, _('Request Timeout')),
(409, _('Conflict')),
(410, _('Gone')),
(411, _('Length Required')),
(412, _('Precondition Failed')),
(413, _('Request Entity Too Large')),
(414, _('Request-URI Too Long')), | (423, _('Locked (WebDAV)')),
(424, _('Failed Dependency (WebDAV)')),
(425, _('Unordered Collection')),
(426, _('Upgrade Required')),
(449, _('Retry With')),
# Server Error
(500, _('Internal Server Error')),
(501, _('Not Implemented')),
(502, _('Bad Gateway')),
(503, _('Service Unavailable')),
(504, _('Gateway Timeout')),
(505, _('HTTP Version Not Supported')),
(506, _('Variant Also Negotiates')),
(507, _('Insufficient Storage (WebDAV)')),
(509, _('Bandwidth Limit Exceeded')),
(510, _('Not Extended')),
)
browsers = Patterns(
('Unknown', {}),
# Browsers
(r'AOL (?P<version>[\d+\.\d+]+)', 'AOL'),
(
r'Mozilla/(?P<mozilla_version>[-.\w]+) \(compatible; ( ?)MSIE (?P<msie_version>[-.\w]+); ' +
r'( ?)( ?)America Online Browser (?P<version>[-.\w]+);',
'AOL',
),
(r'Camino/(?P<version>[-.\w]+)', 'Camino'),
(r'Chrome/(?P<version>[-.\w]+)', 'Google Chrome'),
(r'Firefox(/(?P<version>[-.\w]+)?)', 'Firefox'),
(
r'Mozilla/(?P<mozilla_version>[-.\w]+) \(compatible; ( ?)MSIE (?P<version>[-.\w]+); ' +
r'( ?)( ?)(Win|Mac)',
'Internet Explorer',
),
(r'Konqueror/(?P<version>[-.\w]+)', 'Konqueror'),
(r'Opera( |/)(?P<version>[-.\w]+)', 'Opera'),
(r'OmniWeb(/(?P<version>[-.\w]+)?)', 'OmniWeb'),
(r'Safari/(?P<version>[-.\w]+)', 'Safari'),
(r'(Netscape([\d]?)|Navigator)/(?P<version>[-.\w]+)', 'Netscape'),
(r'Wget/(?P<version>[-.\w]+)', 'Wget'),
(r'Minefield(/(?P<version>[-.\w]+)?)', 'Firefox'), # Firefox nightly trunk builds
(r'Shiretoko(/(?P<version>[-.\w]+)?)', 'Firefox'), # Firefox testing browser
(r'GranParadiso(/(?P<version>[-.\w]+)?)', 'Firefox'), # Firefox testing browser
(r'Iceweasel(/(?P<version>[-.\w]+)?)', 'Firefox'), # Debian re-branded firefox
# RSS Reader
(r'(NetNewsWire|NewsGatorOnline)/(?P<version>[-.\w]+)', 'NetNewsWire'),
(r'Feedfetcher-Google', 'Google Reader'),
# Bots
(r'Googlebot', 'Google'),
(r'Yahoo! Slurp', 'Yahoo'),
(r'msnbot', 'MSN Bot'),
(r'(Baiduspider|BaiduImagespider)', 'Baiduspider'),
(r'Ask Jeeves', 'Ask Jeeves'),
(r'FollowSite', 'FollowSite'),
(r'WebAlta Crawler', 'WebAlta Crawler'),
(r'ScoutJet', 'ScoutJet'),
(r'SurveyBot', 'domaintools.com'),
(r'Gigabot', 'Gigabot'),
(r'Speedy Spider', 'entireweb'),
(r'discobot', 'Discovery Engine'),
(r'Purebot(/(?P<version>[-.\w]+)?);', 'Purity search'),
(r'Yandex(/(?P<version>[-.\w]+)?)', 'Yandex'),
(r'PostRank(/(?P<version>[-.\w]+)?)', 'PostRank'),
(
r'Mozilla/(?P<mozilla_version>[-.\w]+) \(compatible; DotBot/(?P<version>[-.\w]+); ' +
r'http://www.dotnetdotcom.org/, [email protected]\)',
'Dotbot',
),
(r'IrssiUrlLog(/(?P<version>[-.\w]+)?)', 'irssi'),
(r'Linguee Bot \(http://www.linguee.com/bot; [email protected]\)', 'Linguee'),
(r'Sphider', 'Sphider'),
# Other
(r'Mediapartners-Google', 'Google Ads'),
(r'Apple-PubSub', 'Apple-PubSub'),
(r'Python-urllib', 'Python'),
)
engines = Patterns(
None,
(r'^https?:\/\/([\.\w]+)?yahoo.*(?:&|\?)p=(?P<keywords>[\+-_\w]+)', 'Yahoo'),
(r'^https?:\/\/([\.\w]+)?google.*(?:&|\?)q=(?P<keywords>[\+-_\w]+)', 'Google'),
(r'^https?:\/\/([\.\w]+)?bing.*(?:&|\?)q=(?P<keywords>[\+-_\w]+)', 'Bing'),
)
def get_verbose_name(class_name):
'''
Calculate the verbose_name by converting from InitialCaps to
"lowercase with spaces".
'''
return re.sub(
'(((?<=[a-z])[A-Z])|([A-Z](?![A-Z]|$)))',
' \\1',
class_name,
).strip() | (415, _('Unsupported Media Type')),
(416, _('Requested Range Not Satisfiable')),
(417, _('Expectation Failed')),
(418, _('I\'m a teapot')), # April Fools
(422, _('Unprocessable Entity (WebDAV)')), | random_line_split |
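Despite its docstring, `get_verbose_name` only inserts spaces (it never lowercases); a quick check of the regex's actual behaviour (sketch):

```python
# Sketch: what the get_verbose_name regex actually produces.
import re

def get_verbose_name(class_name):
    return re.sub('(((?<=[a-z])[A-Z])|([A-Z](?![A-Z]|$)))', ' \\1',
                  class_name).strip()

print(get_verbose_name("GeneralPost"))   # 'General Post'
print(get_verbose_name("HTTPResponse"))  # 'HTTP Response'
```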
views.py | from django.conf import settings
from django.http import HttpResponseRedirect
from django.shortcuts import render, redirect
from django.contrib.auth.views import logout_then_login, LoginView
from django.contrib.auth import authenticate, login
from django.contrib.flatpages.models import FlatPage
from django.views.decorators.cache import never_cache
from django.core.cache import cache
from accounts.utils import throttle_login, clear_throttled_login
from redis.exceptions import ConnectionError
from .forms import BrpAuthenticationForm
import re
@never_cache
def throttled_login(request):
| try:
cache.get('')
except ConnectionError:
form = {
'non_field_errors': ['Redis not connected. Unable to create session.']
}
return render(request, template_name, {
'form': form,
'is_IE': is_IE,
})
except:
raise
login_allowed = throttle_login(request)
if login_allowed:
try:
username = request.POST['email']
password = request.POST['password']
except KeyError:
username = password = None
if not username or not password:
error_message = "Please enter both username and password"
else:
user = authenticate(request,
username=username,
password=password)
if user is not None:
if user.is_active is False:
request.META['action'] = 'Login unsuccessful.'
error_message = "User is inactive. If error persists please see 'forgot password' link below for instructions"
else:
request.META['action'] = 'Login successful.'
# We know if the response is a redirect, the login
# was successful, thus we can clear the throttled login counter
clear_throttled_login(request)
login(request, user)
return redirect('#/')
else:
error_message = "Username or password is incorrect. If error persists please see 'forgot password' link below for instructions"
else:
error_message = "Too many Login attempts. Please see 'forgot password' link below for instructions"
return render(request, template_name, {
'login_not_allowed': not login_allowed,
'is_IE': is_IE,
'error': error_message,
})
@never_cache
def eula(request, readonly=True, redirect_to=None):
redirect_to = redirect_to or settings.LOGIN_REDIRECT_URL
if request.method == 'POST':
# only if these agree do we let them pass, otherwise they get logged out
if request.POST.get('decision', '').lower() == 'i agree':
request.user.profile.eula = True
request.user.profile.save()
return HttpResponseRedirect(redirect_to)
return logout_then_login(request)
flatpage = FlatPage.objects.get(url='/eula/')
return render(request, 'accounts/eula.html', {
'flatpage': flatpage,
'readonly': readonly,
})
| "Displays the login form and handles the login action."
is_IE = False
user_agent = request.META['HTTP_USER_AGENT']
error_message = None
# if the user is already logged-in, simply redirect them to the entry page
if request.user.is_authenticated:
return HttpResponseRedirect(settings.LOGIN_REDIRECT_URL)
if (re.findall(r'MSIE', user_agent) or re.findall(r'Trident', user_agent)):
is_IE = True
template_name = 'accounts/login.html'
login_allowed = request.session.get('login_allowed', True)
if request.method == 'POST':
# if the session has already been flagged to not allow login attempts, then
# simply redirect back to the login page
if not login_allowed:
return HttpResponseRedirect(settings.LOGIN_URL)
# Check if cache is available | identifier_body |
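`accounts.utils.throttle_login`/`clear_throttled_login` aren't shown in this file; a cache-backed counter in the spirit of the calls above might look like this (sketch — the key format, limit and timeout are assumptions):

```python
# Sketch: a per-address login throttle on top of Django's cache.
from django.core.cache import cache

MAX_ATTEMPTS = 5
WINDOW = 300  # seconds

def throttle_login(request):
    key = "login-attempts:%s" % request.META.get("REMOTE_ADDR", "unknown")
    cache.get_or_set(key, 0, timeout=WINDOW)
    attempts = cache.incr(key)
    return attempts <= MAX_ATTEMPTS

def clear_throttled_login(request):
    cache.delete("login-attempts:%s" % request.META.get("REMOTE_ADDR", "unknown"))
```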
views.py | from django.conf import settings
from django.http import HttpResponseRedirect
from django.shortcuts import render, redirect
from django.contrib.auth.views import logout_then_login, LoginView
from django.contrib.auth import authenticate, login
from django.contrib.flatpages.models import FlatPage
from django.views.decorators.cache import never_cache
from django.core.cache import cache
from accounts.utils import throttle_login, clear_throttled_login
from redis.exceptions import ConnectionError
from .forms import BrpAuthenticationForm
import re
@never_cache
def throttled_login(request):
"Displays the login form and handles the login action."
is_IE = False
user_agent = request.META['HTTP_USER_AGENT']
error_message = None
# if the user is already logged-in, simply redirect them to the entry page
if request.user.is_authenticated:
return HttpResponseRedirect(settings.LOGIN_REDIRECT_URL)
if (re.findall(r'MSIE', user_agent) or re.findall(r'Trident', user_agent)):
is_IE = True
template_name = 'accounts/login.html'
login_allowed = request.session.get('login_allowed', True)
if request.method == 'POST':
# if the session has already been flagged to not allow login attempts, then
# simply redirect back to the login page
if not login_allowed:
return HttpResponseRedirect(settings.LOGIN_URL)
# Check if cache is available
try:
cache.get('')
except ConnectionError:
form = {
'non_field_errors': ['Redis not connected. Unable to create session.']
}
return render(request, template_name, {
'form': form,
'is_IE': is_IE,
})
except:
raise
login_allowed = throttle_login(request)
if login_allowed:
| clear_throttled_login(request)
login(request, user)
return redirect('#/')
else:
error_message = "Username or password is incorrect. If error persists please see 'forgot password' link below for instructions"
else:
error_message = "Too many Login attempts. Please see 'forgot password' link below for instructions"
return render(request, template_name, {
'login_not_allowed': not login_allowed,
'is_IE': is_IE,
'error': error_message,
})
@never_cache
def eula(request, readonly=True, redirect_to=None):
redirect_to = redirect_to or settings.LOGIN_REDIRECT_URL
if request.method == 'POST':
# only if these agree do we let them pass, otherwise they get logged out
if request.POST.get('decision', '').lower() == 'i agree':
request.user.profile.eula = True
request.user.profile.save()
return HttpResponseRedirect(redirect_to)
return logout_then_login(request)
flatpage = FlatPage.objects.get(url='/eula/')
return render(request, 'accounts/eula.html', {
'flatpage': flatpage,
'readonly': readonly,
})
| try:
username = request.POST['email']
password = request.POST['password']
except KeyError:
username = password = None
if not username or not password:
error_message = "Please enter both username and password"
else:
user = authenticate(request,
username=username,
password=password)
if user is not None:
if user.is_active is False:
request.META['action'] = 'Login unsuccessful.'
error_message = "User is inactive. If error persists please see 'forgot password' link below for instructions"
else:
request.META['action'] = 'Login successful.'
# We know if the response is a redirect, the login
# was successful, thus we can clear the throttled login counter | conditional_block |
views.py | from django.conf import settings
from django.http import HttpResponseRedirect
from django.shortcuts import render, redirect
from django.contrib.auth.views import logout_then_login, LoginView
from django.contrib.auth import authenticate, login
from django.contrib.flatpages.models import FlatPage
from django.views.decorators.cache import never_cache
from django.core.cache import cache
from accounts.utils import throttle_login, clear_throttled_login
from redis.exceptions import ConnectionError
from .forms import BrpAuthenticationForm
import re
@never_cache
def throttled_login(request):
"Displays the login form and handles the login action."
is_IE = False
user_agent = request.META.get('HTTP_USER_AGENT', '')
error_message = None
# if the user is already logged-in, simply redirect them to the entry page
if request.user.is_authenticated:
return HttpResponseRedirect(settings.LOGIN_REDIRECT_URL)
if (re.findall(r'MSIE', user_agent) or re.findall(r'Trident', user_agent)):
is_IE = True
template_name = 'accounts/login.html'
login_allowed = request.session.get('login_allowed', True)
if request.method == 'POST':
# if the session has already been flagged to not allow login attempts, then
# simply redirect back to the login page
if not login_allowed:
return HttpResponseRedirect(settings.LOGIN_URL)
# Check if cache is available
try:
cache.get('')
except ConnectionError:
form = {
'non_field_errors': ['Redis not connected. Unable to create session.']
}
return render(request, template_name, {
'form': form,
'is_IE': is_IE,
})
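# ask the throttle helper whether this session may still attempt to log in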
login_allowed = throttle_login(request)
if login_allowed:
username = request.POST.get('email')
password = request.POST.get('password')
if not username or not password:
error_message = "Please enter both username and password"
else:
user = authenticate(request,
username=username,
password=password)
if user is not None:
if not user.is_active:
request.META['action'] = 'Login unsuccessful.'
error_message = "User is inactive. If error persists please see 'forgot password' link below for instructions"
else:
request.META['action'] = 'Login successful.'
# We know if the response is a redirect, the login
# was successful, thus we can clear the throttled login counter
clear_throttled_login(request)
login(request, user)
return redirect('#/')
else:
error_message = "Username or password is incorrect. If error persists please see 'forgot password' link below for instructions"
else:
error_message = "Too many Login attempts. Please see 'forgot password' link below for instructions"
return render(request, template_name, {
'login_not_allowed': not login_allowed,
'is_IE': is_IE,
'error': error_message,
})
@never_cache
def | (request, readonly=True, redirect_to=None):
redirect_to = redirect_to or settings.LOGIN_REDIRECT_URL
if request.method == 'POST':
# only if these agree do we let them pass, otherwise they get logged out
if request.POST.get('decision', '').lower() == 'i agree':
request.user.profile.eula = True
request.user.profile.save()
return HttpResponseRedirect(redirect_to)
return logout_then_login(request)
flatpage = FlatPage.objects.get(url='/eula/')
return render(request, 'accounts/eula.html', {
'flatpage': flatpage,
'readonly': readonly,
})
| eula | identifier_name |
views.py | from django.conf import settings
from django.http import HttpResponseRedirect
from django.shortcuts import render, redirect
from django.contrib.auth.views import logout_then_login, LoginView
from django.contrib.auth import authenticate, login
from django.contrib.flatpages.models import FlatPage
from django.views.decorators.cache import never_cache
from django.core.cache import cache
from accounts.utils import throttle_login, clear_throttled_login
from redis.exceptions import ConnectionError
from .forms import BrpAuthenticationForm |
@never_cache
def throttled_login(request):
"Displays the login form and handles the login action."
is_IE = False
user_agent = request.META.get('HTTP_USER_AGENT', '')
error_message = None
# if the user is already logged-in, simply redirect them to the entry page
if request.user.is_authenticated:
return HttpResponseRedirect(settings.LOGIN_REDIRECT_URL)
if (re.findall(r'MSIE', user_agent) or re.findall(r'Trident', user_agent)):
is_IE = True
template_name = 'accounts/login.html'
login_allowed = request.session.get('login_allowed', True)
if request.method == 'POST':
# if the session has already been flagged to not allow login attempts, then
# simply redirect back to the login page
if not login_allowed:
return HttpResponseRedirect(settings.LOGIN_URL)
# Check if cache is available
try:
cache.get('')
except ConnectionError:
form = {
'non_field_errors': ['Redis not connected. Unable to create session.']
}
return render(request, template_name, {
'form': form,
'is_IE': is_IE,
})
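# ask the throttle helper whether this session may still attempt to log in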
login_allowed = throttle_login(request)
if login_allowed:
username = request.POST.get('email')
password = request.POST.get('password')
if not username or not password:
error_message = "Please enter both username and password"
else:
user = authenticate(request,
username=username,
password=password)
if user is not None:
if not user.is_active:
request.META['action'] = 'Login unsuccessful.'
error_message = "User is inactive. If error persists please see 'forgot password' link below for instructions"
else:
request.META['action'] = 'Login successful.'
# We know if the response is a redirect, the login
# was successful, thus we can clear the throttled login counter
clear_throttled_login(request)
login(request, user)
return redirect('#/')
else:
error_message = "Username or password is incorrect. If error persists please see 'forgot password' link below for instructions"
else:
error_message = "Too many Login attempts. Please see 'forgot password' link below for instructions"
return render(request, template_name, {
'login_not_allowed': not login_allowed,
'is_IE': is_IE,
'error': error_message,
})
@never_cache
def eula(request, readonly=True, redirect_to=None):
redirect_to = redirect_to or settings.LOGIN_REDIRECT_URL
if request.method == 'POST':
# only if these agree do we let them pass, otherwise they get logged out
if request.POST.get('decision', '').lower() == 'i agree':
request.user.profile.eula = True
request.user.profile.save()
return HttpResponseRedirect(redirect_to)
return logout_then_login(request)
flatpage = FlatPage.objects.get(url='/eula/')
return render(request, 'accounts/eula.html', {
'flatpage': flatpage,
'readonly': readonly,
}) |
import re | random_line_split |
mod_dir_path.rs | // Copyright 2012-2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// run-pass
#![allow(unused_macros)]
// ignore-pretty issue #37195
mod mod_dir_simple {
#[path = "test.rs"]
pub mod syrup;
}
pub fn main() | {
assert_eq!(mod_dir_simple::syrup::foo(), 10);
#[path = "auxiliary"]
mod foo {
mod two_macros_2;
}
#[path = "auxiliary"]
mod bar {
macro_rules! m { () => { mod two_macros_2; } }
m!();
}
} | identifier_body |
|
mod_dir_path.rs | // Copyright 2012-2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// run-pass
#![allow(unused_macros)]
// ignore-pretty issue #37195
mod mod_dir_simple {
#[path = "test.rs"]
pub mod syrup;
}
pub fn | () {
assert_eq!(mod_dir_simple::syrup::foo(), 10);
#[path = "auxiliary"]
mod foo {
mod two_macros_2;
}
#[path = "auxiliary"]
mod bar {
macro_rules! m { () => { mod two_macros_2; } }
m!();
}
}
| main | identifier_name |
mod_dir_path.rs | // Copyright 2012-2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
// run-pass
#![allow(unused_macros)]
// ignore-pretty issue #37195
mod mod_dir_simple {
#[path = "test.rs"]
pub mod syrup;
} | mod foo {
mod two_macros_2;
}
#[path = "auxiliary"]
mod bar {
macro_rules! m { () => { mod two_macros_2; } }
m!();
}
} |
pub fn main() {
assert_eq!(mod_dir_simple::syrup::foo(), 10);
#[path = "auxiliary"] | random_line_split |
sensor.py | """Get ride details and liveboard details for NMBS (Belgian railway)."""
import logging
import voluptuous as vol
from homeassistant.components.sensor import PLATFORM_SCHEMA
from homeassistant.const import (
ATTR_ATTRIBUTION,
ATTR_LATITUDE,
ATTR_LONGITUDE,
CONF_NAME,
CONF_SHOW_ON_MAP,
)
import homeassistant.helpers.config_validation as cv
from homeassistant.helpers.entity import Entity
import homeassistant.util.dt as dt_util
_LOGGER = logging.getLogger(__name__)
DEFAULT_NAME = "NMBS"
DEFAULT_ICON = "mdi:train"
DEFAULT_ICON_ALERT = "mdi:alert-octagon"
CONF_STATION_FROM = "station_from"
CONF_STATION_TO = "station_to"
CONF_STATION_LIVE = "station_live"
CONF_EXCLUDE_VIAS = "exclude_vias"
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend(
{
vol.Required(CONF_STATION_FROM): cv.string,
vol.Required(CONF_STATION_TO): cv.string,
vol.Optional(CONF_STATION_LIVE): cv.string,
vol.Optional(CONF_EXCLUDE_VIAS, default=False): cv.boolean,
vol.Optional(CONF_NAME, default=DEFAULT_NAME): cv.string,
vol.Optional(CONF_SHOW_ON_MAP, default=False): cv.boolean,
}
)
def get_time_until(departure_time=None):
"""Calculate the time between now and a train's departure time."""
if departure_time is None:
return 0
delta = dt_util.utc_from_timestamp(int(departure_time)) - dt_util.now()
return round((delta.total_seconds() / 60))
def get_delay_in_minutes(delay=0):
"""Get the delay in minutes from a delay in seconds."""
return round((int(delay) / 60))
def get_ride_duration(departure_time, arrival_time, delay=0):
"""Calculate the total travel time in minutes."""
duration = dt_util.utc_from_timestamp(
int(arrival_time)
) - dt_util.utc_from_timestamp(int(departure_time))
duration_time = int(round((duration.total_seconds() / 60)))
return duration_time + get_delay_in_minutes(delay)
def setup_platform(hass, config, add_entities, discovery_info=None):
"""Set up the NMBS sensor with iRail API."""
from pyrail import iRail
api_client = iRail()
name = config[CONF_NAME]
show_on_map = config[CONF_SHOW_ON_MAP]
station_from = config[CONF_STATION_FROM]
station_to = config[CONF_STATION_TO]
station_live = config.get(CONF_STATION_LIVE)
excl_vias = config[CONF_EXCLUDE_VIAS]
sensors = [
NMBSSensor(api_client, name, show_on_map, station_from, station_to, excl_vias)
]
if station_live is not None:
sensors.append(NMBSLiveBoard(api_client, station_live))
add_entities(sensors, True)
class NMBSLiveBoard(Entity):
"""Get the next train from a station's liveboard."""
def __init__(self, api_client, live_station):
"""Initialize the sensor for getting liveboard data."""
self._station = live_station
self._api_client = api_client
self._attrs = {}
self._state = None
@property
def name(self):
"""Return the sensor default name."""
return "NMBS Live"
@property
def icon(self):
"""Return the default icon or an alert icon if delays."""
if self._attrs and int(self._attrs["delay"]) > 0:
return DEFAULT_ICON_ALERT
return DEFAULT_ICON
@property
def state(self):
"""Return sensor state."""
return self._state
@property
def device_state_attributes(self):
"""Return the sensor attributes if data is available."""
if self._state is None or not self._attrs:
return None
delay = get_delay_in_minutes(self._attrs["delay"])
departure = get_time_until(self._attrs["time"])
attrs = {
"departure": f"In {departure} minutes",
"extra_train": int(self._attrs["isExtra"]) > 0,
"vehicle_id": self._attrs["vehicle"],
"monitored_station": self._station,
ATTR_ATTRIBUTION: "https://api.irail.be/",
}
if delay > 0:
attrs["delay"] = f"{delay} minutes"
return attrs
def update(self):
"""Set the state equal to the next departure."""
liveboard = self._api_client.get_liveboard(self._station)
next_departure = liveboard["departures"]["departure"][0]
self._attrs = next_departure
self._state = "Track {} - {}".format(
next_departure["platform"], next_departure["station"]
)
class NMBSSensor(Entity):
"""Get the the total travel time for a given connection."""
def __init__(
self, api_client, name, show_on_map, station_from, station_to, excl_vias
):
"""Initialize the NMBS connection sensor."""
self._name = name
self._show_on_map = show_on_map
self._api_client = api_client
self._station_from = station_from
self._station_to = station_to
self._excl_vias = excl_vias
self._attrs = {}
self._state = None
@property
def name(self):
"""Return the name of the sensor."""
return self._name
@property
def unit_of_measurement(self):
"""Return the unit of measurement."""
return "min"
@property
def icon(self):
"""Return the sensor default icon or an alert icon if any delay."""
if self._attrs:
delay = get_delay_in_minutes(self._attrs["departure"]["delay"])
if delay > 0:
return "mdi:alert-octagon"
return "mdi:train"
@property
def device_state_attributes(self):
"""Return sensor attributes if data is available."""
if self._state is None or not self._attrs:
return None
delay = get_delay_in_minutes(self._attrs["departure"]["delay"])
departure = get_time_until(self._attrs["departure"]["time"])
attrs = {
"departure": f"In {departure} minutes",
"destination": self._station_to,
"direction": self._attrs["departure"]["direction"]["name"],
"platform_arriving": self._attrs["arrival"]["platform"],
"platform_departing": self._attrs["departure"]["platform"],
"vehicle_id": self._attrs["departure"]["vehicle"],
ATTR_ATTRIBUTION: "https://api.irail.be/",
}
if self._show_on_map and self.station_coordinates:
attrs[ATTR_LATITUDE] = self.station_coordinates[0]
attrs[ATTR_LONGITUDE] = self.station_coordinates[1]
if self.is_via_connection and not self._excl_vias:
via = self._attrs["vias"]["via"][0]
attrs["via"] = via["station"]
attrs["via_arrival_platform"] = via["arrival"]["platform"]
attrs["via_transfer_platform"] = via["departure"]["platform"]
attrs["via_transfer_time"] = get_delay_in_minutes(
via["timeBetween"]
) + get_delay_in_minutes(via["departure"]["delay"])
if delay > 0:
attrs["delay"] = f"{delay} minutes"
return attrs
@property
def | (self):
"""Return the state of the device."""
return self._state
@property
def station_coordinates(self):
"""Get the lat, long coordinates for station."""
if self._state is None or not self._attrs:
return []
latitude = float(self._attrs["departure"]["stationinfo"]["locationY"])
longitude = float(self._attrs["departure"]["stationinfo"]["locationX"])
return [latitude, longitude]
@property
def is_via_connection(self):
"""Return whether the connection goes through another station."""
if not self._attrs:
return False
return "vias" in self._attrs and int(self._attrs["vias"]["number"]) > 0
def update(self):
"""Set the state to the duration of a connection."""
connections = self._api_client.get_connections(
self._station_from, self._station_to
)
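# skip the first connection if it has already departed and fall back to the next one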
if int(connections["connection"][0]["departure"]["left"]) > 0:
next_connection = connections["connection"][1]
else:
next_connection = connections["connection"][0]
self._attrs = next_connection
if self._excl_vias and self.is_via_connection:
_LOGGER.debug(
"Skipping update of NMBSSensor \
because this connection is a via"
)
return
duration = get_ride_duration(
next_connection["departure"]["time"],
next_connection["arrival"]["time"],
next_connection["departure"]["delay"],
)
self._state = duration
| state | identifier_name |
sensor.py | """Get ride details and liveboard details for NMBS (Belgian railway)."""
import logging
import voluptuous as vol
from homeassistant.components.sensor import PLATFORM_SCHEMA
from homeassistant.const import (
ATTR_ATTRIBUTION,
ATTR_LATITUDE,
ATTR_LONGITUDE,
CONF_NAME,
CONF_SHOW_ON_MAP,
)
import homeassistant.helpers.config_validation as cv
from homeassistant.helpers.entity import Entity
import homeassistant.util.dt as dt_util
_LOGGER = logging.getLogger(__name__)
DEFAULT_NAME = "NMBS"
DEFAULT_ICON = "mdi:train"
DEFAULT_ICON_ALERT = "mdi:alert-octagon"
CONF_STATION_FROM = "station_from"
CONF_STATION_TO = "station_to"
CONF_STATION_LIVE = "station_live"
CONF_EXCLUDE_VIAS = "exclude_vias"
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend(
{
vol.Required(CONF_STATION_FROM): cv.string,
vol.Required(CONF_STATION_TO): cv.string,
vol.Optional(CONF_STATION_LIVE): cv.string,
vol.Optional(CONF_EXCLUDE_VIAS, default=False): cv.boolean,
vol.Optional(CONF_NAME, default=DEFAULT_NAME): cv.string,
vol.Optional(CONF_SHOW_ON_MAP, default=False): cv.boolean,
}
)
def get_time_until(departure_time=None):
"""Calculate the time between now and a train's departure time."""
if departure_time is None:
return 0
delta = dt_util.utc_from_timestamp(int(departure_time)) - dt_util.now()
return round((delta.total_seconds() / 60))
def get_delay_in_minutes(delay=0):
"""Get the delay in minutes from a delay in seconds."""
return round((int(delay) / 60))
def get_ride_duration(departure_time, arrival_time, delay=0):
"""Calculate the total travel time in minutes."""
duration = dt_util.utc_from_timestamp(
int(arrival_time)
) - dt_util.utc_from_timestamp(int(departure_time))
duration_time = int(round((duration.total_seconds() / 60)))
return duration_time + get_delay_in_minutes(delay)
def setup_platform(hass, config, add_entities, discovery_info=None):
"""Set up the NMBS sensor with iRail API."""
from pyrail import iRail
api_client = iRail()
name = config[CONF_NAME]
show_on_map = config[CONF_SHOW_ON_MAP]
station_from = config[CONF_STATION_FROM]
station_to = config[CONF_STATION_TO]
station_live = config.get(CONF_STATION_LIVE)
excl_vias = config[CONF_EXCLUDE_VIAS]
sensors = [
NMBSSensor(api_client, name, show_on_map, station_from, station_to, excl_vias)
]
if station_live is not None:
sensors.append(NMBSLiveBoard(api_client, station_live))
add_entities(sensors, True)
class NMBSLiveBoard(Entity):
|
return DEFAULT_ICON
@property
def state(self):
"""Return sensor state."""
return self._state
@property
def device_state_attributes(self):
"""Return the sensor attributes if data is available."""
if self._state is None or not self._attrs:
return None
delay = get_delay_in_minutes(self._attrs["delay"])
departure = get_time_until(self._attrs["time"])
attrs = {
"departure": f"In {departure} minutes",
"extra_train": int(self._attrs["isExtra"]) > 0,
"vehicle_id": self._attrs["vehicle"],
"monitored_station": self._station,
ATTR_ATTRIBUTION: "https://api.irail.be/",
}
if delay > 0:
attrs["delay"] = f"{delay} minutes"
return attrs
def update(self):
"""Set the state equal to the next departure."""
liveboard = self._api_client.get_liveboard(self._station)
next_departure = liveboard["departures"]["departure"][0]
self._attrs = next_departure
self._state = "Track {} - {}".format(
next_departure["platform"], next_departure["station"]
)
class NMBSSensor(Entity):
"""Get the the total travel time for a given connection."""
def __init__(
self, api_client, name, show_on_map, station_from, station_to, excl_vias
):
"""Initialize the NMBS connection sensor."""
self._name = name
self._show_on_map = show_on_map
self._api_client = api_client
self._station_from = station_from
self._station_to = station_to
self._excl_vias = excl_vias
self._attrs = {}
self._state = None
@property
def name(self):
"""Return the name of the sensor."""
return self._name
@property
def unit_of_measurement(self):
"""Return the unit of measurement."""
return "min"
@property
def icon(self):
"""Return the sensor default icon or an alert icon if any delay."""
if self._attrs:
delay = get_delay_in_minutes(self._attrs["departure"]["delay"])
if delay > 0:
return "mdi:alert-octagon"
return "mdi:train"
@property
def device_state_attributes(self):
"""Return sensor attributes if data is available."""
if self._state is None or not self._attrs:
return None
delay = get_delay_in_minutes(self._attrs["departure"]["delay"])
departure = get_time_until(self._attrs["departure"]["time"])
attrs = {
"departure": f"In {departure} minutes",
"destination": self._station_to,
"direction": self._attrs["departure"]["direction"]["name"],
"platform_arriving": self._attrs["arrival"]["platform"],
"platform_departing": self._attrs["departure"]["platform"],
"vehicle_id": self._attrs["departure"]["vehicle"],
ATTR_ATTRIBUTION: "https://api.irail.be/",
}
if self._show_on_map and self.station_coordinates:
attrs[ATTR_LATITUDE] = self.station_coordinates[0]
attrs[ATTR_LONGITUDE] = self.station_coordinates[1]
if self.is_via_connection and not self._excl_vias:
via = self._attrs["vias"]["via"][0]
attrs["via"] = via["station"]
attrs["via_arrival_platform"] = via["arrival"]["platform"]
attrs["via_transfer_platform"] = via["departure"]["platform"]
attrs["via_transfer_time"] = get_delay_in_minutes(
via["timeBetween"]
) + get_delay_in_minutes(via["departure"]["delay"])
if delay > 0:
attrs["delay"] = f"{delay} minutes"
return attrs
@property
def state(self):
"""Return the state of the device."""
return self._state
@property
def station_coordinates(self):
"""Get the lat, long coordinates for station."""
if self._state is None or not self._attrs:
return []
latitude = float(self._attrs["departure"]["stationinfo"]["locationY"])
longitude = float(self._attrs["departure"]["stationinfo"]["locationX"])
return [latitude, longitude]
@property
def is_via_connection(self):
"""Return whether the connection goes through another station."""
if not self._attrs:
return False
return "vias" in self._attrs and int(self._attrs["vias"]["number"]) > 0
def update(self):
"""Set the state to the duration of a connection."""
connections = self._api_client.get_connections(
self._station_from, self._station_to
)
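# skip the first connection if it has already departed and fall back to the next one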
if int(connections["connection"][0]["departure"]["left"]) > 0:
next_connection = connections["connection"][1]
else:
next_connection = connections["connection"][0]
self._attrs = next_connection
if self._excl_vias and self.is_via_connection:
_LOGGER.debug(
"Skipping update of NMBSSensor \
because this connection is a via"
)
return
duration = get_ride_duration(
next_connection["departure"]["time"],
next_connection["arrival"]["time"],
next_connection["departure"]["delay"],
)
self._state = duration
| """Get the next train from a station's liveboard."""
def __init__(self, api_client, live_station):
"""Initialize the sensor for getting liveboard data."""
self._station = live_station
self._api_client = api_client
self._attrs = {}
self._state = None
@property
def name(self):
"""Return the sensor default name."""
return "NMBS Live"
@property
def icon(self):
"""Return the default icon or an alert icon if delays."""
if self._attrs and int(self._attrs["delay"]) > 0:
return DEFAULT_ICON_ALERT | identifier_body |
sensor.py | """Get ride details and liveboard details for NMBS (Belgian railway)."""
import logging
import voluptuous as vol
from homeassistant.components.sensor import PLATFORM_SCHEMA
from homeassistant.const import (
ATTR_ATTRIBUTION,
ATTR_LATITUDE,
ATTR_LONGITUDE,
CONF_NAME,
CONF_SHOW_ON_MAP,
)
import homeassistant.helpers.config_validation as cv
from homeassistant.helpers.entity import Entity
import homeassistant.util.dt as dt_util
_LOGGER = logging.getLogger(__name__)
DEFAULT_NAME = "NMBS"
DEFAULT_ICON = "mdi:train"
DEFAULT_ICON_ALERT = "mdi:alert-octagon"
CONF_STATION_FROM = "station_from"
CONF_STATION_TO = "station_to"
CONF_STATION_LIVE = "station_live"
CONF_EXCLUDE_VIAS = "exclude_vias"
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend(
{
vol.Required(CONF_STATION_FROM): cv.string,
vol.Required(CONF_STATION_TO): cv.string,
vol.Optional(CONF_STATION_LIVE): cv.string,
vol.Optional(CONF_EXCLUDE_VIAS, default=False): cv.boolean,
vol.Optional(CONF_NAME, default=DEFAULT_NAME): cv.string,
vol.Optional(CONF_SHOW_ON_MAP, default=False): cv.boolean,
}
)
def get_time_until(departure_time=None):
"""Calculate the time between now and a train's departure time."""
if departure_time is None:
return 0
delta = dt_util.utc_from_timestamp(int(departure_time)) - dt_util.now()
return round((delta.total_seconds() / 60))
def get_delay_in_minutes(delay=0):
"""Get the delay in minutes from a delay in seconds."""
return round((int(delay) / 60))
def get_ride_duration(departure_time, arrival_time, delay=0):
"""Calculate the total travel time in minutes."""
duration = dt_util.utc_from_timestamp(
int(arrival_time)
) - dt_util.utc_from_timestamp(int(departure_time))
duration_time = int(round((duration.total_seconds() / 60)))
return duration_time + get_delay_in_minutes(delay)
def setup_platform(hass, config, add_entities, discovery_info=None):
"""Set up the NMBS sensor with iRail API."""
from pyrail import iRail
api_client = iRail()
name = config[CONF_NAME]
show_on_map = config[CONF_SHOW_ON_MAP]
station_from = config[CONF_STATION_FROM]
station_to = config[CONF_STATION_TO]
station_live = config.get(CONF_STATION_LIVE)
excl_vias = config[CONF_EXCLUDE_VIAS]
sensors = [
NMBSSensor(api_client, name, show_on_map, station_from, station_to, excl_vias)
]
if station_live is not None:
sensors.append(NMBSLiveBoard(api_client, station_live))
add_entities(sensors, True)
class NMBSLiveBoard(Entity):
"""Get the next train from a station's liveboard."""
def __init__(self, api_client, live_station):
"""Initialize the sensor for getting liveboard data."""
self._station = live_station
self._api_client = api_client
self._attrs = {}
self._state = None
@property
def name(self):
"""Return the sensor default name."""
return "NMBS Live"
@property
def icon(self):
"""Return the default icon or an alert icon if delays."""
if self._attrs and int(self._attrs["delay"]) > 0:
return DEFAULT_ICON_ALERT
return DEFAULT_ICON
@property
def state(self):
"""Return sensor state."""
return self._state
@property
def device_state_attributes(self):
"""Return the sensor attributes if data is available."""
if self._state is None or not self._attrs:
return None
delay = get_delay_in_minutes(self._attrs["delay"])
departure = get_time_until(self._attrs["time"])
attrs = {
"departure": f"In {departure} minutes",
"extra_train": int(self._attrs["isExtra"]) > 0,
"vehicle_id": self._attrs["vehicle"],
"monitored_station": self._station,
ATTR_ATTRIBUTION: "https://api.irail.be/",
}
if delay > 0:
attrs["delay"] = f"{delay} minutes"
return attrs
def update(self):
"""Set the state equal to the next departure."""
liveboard = self._api_client.get_liveboard(self._station)
next_departure = liveboard["departures"]["departure"][0]
self._attrs = next_departure
self._state = "Track {} - {}".format(
next_departure["platform"], next_departure["station"]
)
class NMBSSensor(Entity):
"""Get the the total travel time for a given connection."""
def __init__(
self, api_client, name, show_on_map, station_from, station_to, excl_vias
):
"""Initialize the NMBS connection sensor."""
self._name = name
self._show_on_map = show_on_map
self._api_client = api_client
self._station_from = station_from
self._station_to = station_to
self._excl_vias = excl_vias
self._attrs = {}
self._state = None
@property
def name(self):
"""Return the name of the sensor."""
return self._name
@property
def unit_of_measurement(self):
"""Return the unit of measurement."""
return "min"
@property
def icon(self):
"""Return the sensor default icon or an alert icon if any delay."""
if self._attrs:
delay = get_delay_in_minutes(self._attrs["departure"]["delay"])
if delay > 0:
return "mdi:alert-octagon"
return "mdi:train"
@property
def device_state_attributes(self):
"""Return sensor attributes if data is available.""" |
attrs = {
"departure": f"In {departure} minutes",
"destination": self._station_to,
"direction": self._attrs["departure"]["direction"]["name"],
"platform_arriving": self._attrs["arrival"]["platform"],
"platform_departing": self._attrs["departure"]["platform"],
"vehicle_id": self._attrs["departure"]["vehicle"],
ATTR_ATTRIBUTION: "https://api.irail.be/",
}
if self._show_on_map and self.station_coordinates:
attrs[ATTR_LATITUDE] = self.station_coordinates[0]
attrs[ATTR_LONGITUDE] = self.station_coordinates[1]
if self.is_via_connection and not self._excl_vias:
via = self._attrs["vias"]["via"][0]
attrs["via"] = via["station"]
attrs["via_arrival_platform"] = via["arrival"]["platform"]
attrs["via_transfer_platform"] = via["departure"]["platform"]
attrs["via_transfer_time"] = get_delay_in_minutes(
via["timeBetween"]
) + get_delay_in_minutes(via["departure"]["delay"])
if delay > 0:
attrs["delay"] = f"{delay} minutes"
return attrs
@property
def state(self):
"""Return the state of the device."""
return self._state
@property
def station_coordinates(self):
"""Get the lat, long coordinates for station."""
if self._state is None or not self._attrs:
return []
latitude = float(self._attrs["departure"]["stationinfo"]["locationY"])
longitude = float(self._attrs["departure"]["stationinfo"]["locationX"])
return [latitude, longitude]
@property
def is_via_connection(self):
"""Return whether the connection goes through another station."""
if not self._attrs:
return False
return "vias" in self._attrs and int(self._attrs["vias"]["number"]) > 0
def update(self):
"""Set the state to the duration of a connection."""
connections = self._api_client.get_connections(
self._station_from, self._station_to
)
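# skip the first connection if it has already departed and fall back to the next one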
if int(connections["connection"][0]["departure"]["left"]) > 0:
next_connection = connections["connection"][1]
else:
next_connection = connections["connection"][0]
self._attrs = next_connection
if self._excl_vias and self.is_via_connection:
_LOGGER.debug(
"Skipping update of NMBSSensor \
because this connection is a via"
)
return
duration = get_ride_duration(
next_connection["departure"]["time"],
next_connection["arrival"]["time"],
next_connection["departure"]["delay"],
)
self._state = duration | if self._state is None or not self._attrs:
return None
delay = get_delay_in_minutes(self._attrs["departure"]["delay"])
departure = get_time_until(self._attrs["departure"]["time"]) | random_line_split |
sensor.py | """Get ride details and liveboard details for NMBS (Belgian railway)."""
import logging
import voluptuous as vol
from homeassistant.components.sensor import PLATFORM_SCHEMA
from homeassistant.const import (
ATTR_ATTRIBUTION,
ATTR_LATITUDE,
ATTR_LONGITUDE,
CONF_NAME,
CONF_SHOW_ON_MAP,
)
import homeassistant.helpers.config_validation as cv
from homeassistant.helpers.entity import Entity
import homeassistant.util.dt as dt_util
_LOGGER = logging.getLogger(__name__)
DEFAULT_NAME = "NMBS"
DEFAULT_ICON = "mdi:train"
DEFAULT_ICON_ALERT = "mdi:alert-octagon"
CONF_STATION_FROM = "station_from"
CONF_STATION_TO = "station_to"
CONF_STATION_LIVE = "station_live"
CONF_EXCLUDE_VIAS = "exclude_vias"
PLATFORM_SCHEMA = PLATFORM_SCHEMA.extend(
{
vol.Required(CONF_STATION_FROM): cv.string,
vol.Required(CONF_STATION_TO): cv.string,
vol.Optional(CONF_STATION_LIVE): cv.string,
vol.Optional(CONF_EXCLUDE_VIAS, default=False): cv.boolean,
vol.Optional(CONF_NAME, default=DEFAULT_NAME): cv.string,
vol.Optional(CONF_SHOW_ON_MAP, default=False): cv.boolean,
}
)
def get_time_until(departure_time=None):
"""Calculate the time between now and a train's departure time."""
if departure_time is None:
return 0
delta = dt_util.utc_from_timestamp(int(departure_time)) - dt_util.now()
return round((delta.total_seconds() / 60))
def get_delay_in_minutes(delay=0):
"""Get the delay in minutes from a delay in seconds."""
return round((int(delay) / 60))
def get_ride_duration(departure_time, arrival_time, delay=0):
"""Calculate the total travel time in minutes."""
duration = dt_util.utc_from_timestamp(
int(arrival_time)
) - dt_util.utc_from_timestamp(int(departure_time))
duration_time = int(round((duration.total_seconds() / 60)))
return duration_time + get_delay_in_minutes(delay)
def setup_platform(hass, config, add_entities, discovery_info=None):
"""Set up the NMBS sensor with iRail API."""
from pyrail import iRail
api_client = iRail()
name = config[CONF_NAME]
show_on_map = config[CONF_SHOW_ON_MAP]
station_from = config[CONF_STATION_FROM]
station_to = config[CONF_STATION_TO]
station_live = config.get(CONF_STATION_LIVE)
excl_vias = config[CONF_EXCLUDE_VIAS]
sensors = [
NMBSSensor(api_client, name, show_on_map, station_from, station_to, excl_vias)
]
if station_live is not None:
sensors.append(NMBSLiveBoard(api_client, station_live))
add_entities(sensors, True)
class NMBSLiveBoard(Entity):
"""Get the next train from a station's liveboard."""
def __init__(self, api_client, live_station):
"""Initialize the sensor for getting liveboard data."""
self._station = live_station
self._api_client = api_client
self._attrs = {}
self._state = None
@property
def name(self):
"""Return the sensor default name."""
return "NMBS Live"
@property
def icon(self):
"""Return the default icon or an alert icon if delays."""
if self._attrs and int(self._attrs["delay"]) > 0:
return DEFAULT_ICON_ALERT
return DEFAULT_ICON
@property
def state(self):
"""Return sensor state."""
return self._state
@property
def device_state_attributes(self):
"""Return the sensor attributes if data is available."""
if self._state is None or not self._attrs:
return None
delay = get_delay_in_minutes(self._attrs["delay"])
departure = get_time_until(self._attrs["time"])
attrs = {
"departure": f"In {departure} minutes",
"extra_train": int(self._attrs["isExtra"]) > 0,
"vehicle_id": self._attrs["vehicle"],
"monitored_station": self._station,
ATTR_ATTRIBUTION: "https://api.irail.be/",
}
if delay > 0:
attrs["delay"] = f"{delay} minutes"
return attrs
def update(self):
"""Set the state equal to the next departure."""
liveboard = self._api_client.get_liveboard(self._station)
next_departure = liveboard["departures"]["departure"][0]
self._attrs = next_departure
self._state = "Track {} - {}".format(
next_departure["platform"], next_departure["station"]
)
class NMBSSensor(Entity):
"""Get the the total travel time for a given connection."""
def __init__(
self, api_client, name, show_on_map, station_from, station_to, excl_vias
):
"""Initialize the NMBS connection sensor."""
self._name = name
self._show_on_map = show_on_map
self._api_client = api_client
self._station_from = station_from
self._station_to = station_to
self._excl_vias = excl_vias
self._attrs = {}
self._state = None
@property
def name(self):
"""Return the name of the sensor."""
return self._name
@property
def unit_of_measurement(self):
"""Return the unit of measurement."""
return "min"
@property
def icon(self):
"""Return the sensor default icon or an alert icon if any delay."""
if self._attrs:
delay = get_delay_in_minutes(self._attrs["departure"]["delay"])
if delay > 0:
return "mdi:alert-octagon"
return "mdi:train"
@property
def device_state_attributes(self):
"""Return sensor attributes if data is available."""
if self._state is None or not self._attrs:
return None
delay = get_delay_in_minutes(self._attrs["departure"]["delay"])
departure = get_time_until(self._attrs["departure"]["time"])
attrs = {
"departure": f"In {departure} minutes",
"destination": self._station_to,
"direction": self._attrs["departure"]["direction"]["name"],
"platform_arriving": self._attrs["arrival"]["platform"],
"platform_departing": self._attrs["departure"]["platform"],
"vehicle_id": self._attrs["departure"]["vehicle"],
ATTR_ATTRIBUTION: "https://api.irail.be/",
}
if self._show_on_map and self.station_coordinates:
attrs[ATTR_LATITUDE] = self.station_coordinates[0]
attrs[ATTR_LONGITUDE] = self.station_coordinates[1]
if self.is_via_connection and not self._excl_vias:
|
if delay > 0:
attrs["delay"] = f"{delay} minutes"
return attrs
@property
def state(self):
"""Return the state of the device."""
return self._state
@property
def station_coordinates(self):
"""Get the lat, long coordinates for station."""
if self._state is None or not self._attrs:
return []
latitude = float(self._attrs["departure"]["stationinfo"]["locationY"])
longitude = float(self._attrs["departure"]["stationinfo"]["locationX"])
return [latitude, longitude]
@property
def is_via_connection(self):
"""Return whether the connection goes through another station."""
if not self._attrs:
return False
return "vias" in self._attrs and int(self._attrs["vias"]["number"]) > 0
def update(self):
"""Set the state to the duration of a connection."""
connections = self._api_client.get_connections(
self._station_from, self._station_to
)
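# skip the first connection if it has already departed and fall back to the next one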
if int(connections["connection"][0]["departure"]["left"]) > 0:
next_connection = connections["connection"][1]
else:
next_connection = connections["connection"][0]
self._attrs = next_connection
if self._excl_vias and self.is_via_connection:
_LOGGER.debug(
"Skipping update of NMBSSensor \
because this connection is a via"
)
return
duration = get_ride_duration(
next_connection["departure"]["time"],
next_connection["arrival"]["time"],
next_connection["departure"]["delay"],
)
self._state = duration
| via = self._attrs["vias"]["via"][0]
attrs["via"] = via["station"]
attrs["via_arrival_platform"] = via["arrival"]["platform"]
attrs["via_transfer_platform"] = via["departure"]["platform"]
attrs["via_transfer_time"] = get_delay_in_minutes(
via["timeBetween"]
) + get_delay_in_minutes(via["departure"]["delay"]) | conditional_block |
actions.js | * Copyright (C) 2009-2016 SonarSource SA
* mailto:contact AT sonarsource DOT com
*
* This program is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public
* License as published by the Free Software Foundation; either
* version 3 of the License, or (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public License
* along with this program; if not, write to the Free Software Foundation,
* Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
*/
import { getDefinitions, getValues, setSettingValue, resetSettingValue } from '../../../api/settings';
import { receiveValues } from './values/actions';
import { receiveDefinitions } from './definitions/actions';
import { startLoading, stopLoading } from './settingsPage/loading/actions';
import { parseError } from '../../code/utils';
import { addGlobalErrorMessage, closeAllGlobalMessages } from '../../../store/globalMessages/duck';
import { passValidation, failValidation } from './settingsPage/validationMessages/actions';
import { cancelChange } from './settingsPage/changedValues/actions';
import { isEmptyValue } from '../utils';
import { translate } from '../../../helpers/l10n';
import { getSettingsAppDefinition, getSettingsAppChangedValue } from '../../../store/rootReducer';
export const fetchSettings = componentKey => dispatch => {
return getDefinitions(componentKey)
.then(definitions => {
const withoutLicenses = definitions.filter(definition => definition.type !== 'LICENSE');
dispatch(receiveDefinitions(withoutLicenses));
const keys = withoutLicenses.map(definition => definition.key).join();
return getValues(keys, componentKey);
})
.then(settings => {
dispatch(receiveValues(settings));
dispatch(closeAllGlobalMessages());
})
.catch(e => parseError(e).then(message => dispatch(addGlobalErrorMessage(message))));
};
export const saveValue = (key, componentKey) => (dispatch, getState) => {
dispatch(startLoading(key));
const state = getState();
const definition = getSettingsAppDefinition(state, key);
const value = getSettingsAppChangedValue(state, key);
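// validate locally first so we never send an empty value to the settings API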
if (isEmptyValue(definition, value)) {
dispatch(failValidation(key, translate('settings.state.value_cant_be_empty')));
dispatch(stopLoading(key));
return Promise.reject();
}
return setSettingValue(definition, value, componentKey)
.then(() => getValues(key, componentKey))
.then(values => {
dispatch(receiveValues(values));
dispatch(cancelChange(key));
dispatch(passValidation(key));
dispatch(stopLoading(key));
})
.catch(e => {
dispatch(stopLoading(key));
parseError(e).then(message => dispatch(failValidation(key, message)));
return Promise.reject();
});
};
export const resetValue = (key, componentKey) => dispatch => {
dispatch(startLoading(key));
return resetSettingValue(key, componentKey)
.then(() => getValues(key, componentKey))
.then(values => {
if (values.length > 0) {
dispatch(receiveValues(values));
} else {
dispatch(receiveValues([{ key }]));
}
dispatch(passValidation(key));
dispatch(stopLoading(key));
})
.catch(e => {
dispatch(stopLoading(key));
parseError(e).then(message => dispatch(failValidation(key, message)));
return Promise.reject();
});
}; | /*
* SonarQube | random_line_split |
|
actions.js | /*
* SonarQube
* Copyright (C) 2009-2016 SonarSource SA
* mailto:contact AT sonarsource DOT com
*
* This program is free software; you can redistribute it and/or
* modify it under the terms of the GNU Lesser General Public
* License as published by the Free Software Foundation; either
* version 3 of the License, or (at your option) any later version.
*
* This program is distributed in the hope that it will be useful,
* but WITHOUT ANY WARRANTY; without even the implied warranty of
* MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU
* Lesser General Public License for more details.
*
* You should have received a copy of the GNU Lesser General Public License
* along with this program; if not, write to the Free Software Foundation,
* Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
*/
import { getDefinitions, getValues, setSettingValue, resetSettingValue } from '../../../api/settings';
import { receiveValues } from './values/actions';
import { receiveDefinitions } from './definitions/actions';
import { startLoading, stopLoading } from './settingsPage/loading/actions';
import { parseError } from '../../code/utils';
import { addGlobalErrorMessage, closeAllGlobalMessages } from '../../../store/globalMessages/duck';
import { passValidation, failValidation } from './settingsPage/validationMessages/actions';
import { cancelChange } from './settingsPage/changedValues/actions';
import { isEmptyValue } from '../utils';
import { translate } from '../../../helpers/l10n';
import { getSettingsAppDefinition, getSettingsAppChangedValue } from '../../../store/rootReducer';
export const fetchSettings = componentKey => dispatch => {
return getDefinitions(componentKey)
.then(definitions => {
const withoutLicenses = definitions.filter(definition => definition.type !== 'LICENSE');
dispatch(receiveDefinitions(withoutLicenses));
const keys = withoutLicenses.map(definition => definition.key).join();
return getValues(keys, componentKey);
})
.then(settings => {
dispatch(receiveValues(settings));
dispatch(closeAllGlobalMessages());
})
.catch(e => parseError(e).then(message => dispatch(addGlobalErrorMessage(message))));
};
export const saveValue = (key, componentKey) => (dispatch, getState) => {
dispatch(startLoading(key));
const state = getState();
const definition = getSettingsAppDefinition(state, key);
const value = getSettingsAppChangedValue(state, key);
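// validate locally first so we never send an empty value to the settings API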
if (isEmptyValue(definition, value)) {
dispatch(failValidation(key, translate('settings.state.value_cant_be_empty')));
dispatch(stopLoading(key));
return Promise.reject();
}
return setSettingValue(definition, value, componentKey)
.then(() => getValues(key, componentKey))
.then(values => {
dispatch(receiveValues(values));
dispatch(cancelChange(key));
dispatch(passValidation(key));
dispatch(stopLoading(key));
})
.catch(e => {
dispatch(stopLoading(key));
parseError(e).then(message => dispatch(failValidation(key, message)));
return Promise.reject();
});
};
export const resetValue = (key, componentKey) => dispatch => {
dispatch(startLoading(key));
return resetSettingValue(key, componentKey)
.then(() => getValues(key, componentKey))
.then(values => {
if (values.length > 0) | else {
dispatch(receiveValues([{ key }]));
}
dispatch(passValidation(key));
dispatch(stopLoading(key));
})
.catch(e => {
dispatch(stopLoading(key));
parseError(e).then(message => dispatch(failValidation(key, message)));
return Promise.reject();
});
};
| {
dispatch(receiveValues(values));
} | conditional_block |
model.js | /**
* Copyright 2013-present NightWorld.
*
* Licensed under the Apache License, Version 2.0 (the "License");
* you may not use this file except in compliance with the License.
* You may obtain a copy of the License at
*
* http://www.apache.org/licenses/LICENSE-2.0
*
* Unless required by applicable law or agreed to in writing, software
* distributed under the License is distributed on an "AS IS" BASIS,
* WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
* See the License for the specific language governing permissions and
* limitations under the License.
*/
var dal = require('./dal.js');
var model = module.exports;
var OAuthAccessTokenTable = "oauth2accesstoken";
var OAuthAuthCodeTable = "oauth2authcode";
var OAuthRefreshTokenTable = "oauth2refreshtoken";
var OAuthClientTable = "oauth2client";
var OAuthUserTable = "userid_map";
//
// oauth2-server callbacks
//
model.getAccessToken = function (bearerToken, callback) { | console.log('in getAccessToken (bearerToken: ' + bearerToken + ')');
dal.doGet(OAuthAccessTokenTable,
{"accessToken": {"S": bearerToken}}, true, function(err, data) {
if (data && data.expires) {
data.expires = new Date(data.expires * 1000);
}
callback(err, data);
});
};
model.getClient = function (clientId, clientSecret, callback) {
console.log('in getClient (clientId: ' + clientId + ', clientSecret: ' + clientSecret + ')');
dal.doGet(OAuthClientTable, { clientId: { S: clientId }}, true,
function(err, data) {
if (err || !data) return callback(err, data);
if (clientSecret !== null && data.clientSecret !== clientSecret) {
return callback();
}
callback(null, data);
});
};
// This will very much depend on your setup, I wouldn't advise doing anything exactly like this but
// it gives an example of how to use the method to restrict certain grant types
var authorizedClientIds = ['abc1', 'def2'];
model.grantTypeAllowed = function (clientId, grantType, callback) {
console.log('in grantTypeAllowed (clientId: ' + clientId + ', grantType: ' + grantType + ')');
if (grantType === 'password') {
return callback(false, authorizedClientIds.indexOf(clientId) >= 0);
}
callback(false, true);
};
model.saveAccessToken = function (accessToken, clientId, expires, user, callback) {
console.log('in saveAccessToken (accessToken: ' + accessToken + ', clientId: ' + clientId + ', userId: ' + user.id + ', expires: ' + expires + ')');
var token = {
accessToken: accessToken,
clientId: clientId,
userId: user.id
};
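// store the expiry as epoch seconds; the token getters convert it back to a Date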
if (expires) token.expires = parseInt(expires / 1000, 10);
console.log('saving', token);
dal.doSet(token, OAuthAccessTokenTable, { accessToken: { S: accessToken }}, callback);
};
model.saveRefreshToken = function (refreshToken, clientId, expires, user, callback) {
console.log('in saveRefreshToken (refreshToken: ' + refreshToken + ', clientId: ' + clientId + ', userId: ' + user.id + ', expires: ' + expires + ')');
var token = {
refreshToken: refreshToken,
clientId: clientId,
userId: user.id
};
if (expires) token.expires = parseInt(expires / 1000, 10);
console.log('saving', token);
dal.doSet(token, OAuthRefreshTokenTable, { refreshToken: { S: refreshToken }}, callback);
};
model.getRefreshToken = function (bearerToken, callback) {
console.log("in getRefreshToken (bearerToken: " + bearerToken + ")");
dal.doGet(OAuthRefreshTokenTable, { refreshToken: { S: bearerToken }}, true, function(err, data) {
if (data && data.expires) {
data.expires = new Date(data.expires * 1000);
}
callback(err, data);
});
};
model.revokeRefreshToken = function(bearerToken, callback) {
console.log("in revokeRefreshToken (bearerToken: " + bearerToken + ")");
dal.doDelete(OAuthRefreshTokenTable, { refreshToken: { S: bearerToken }}, callback);
};
model.getAuthCode = function (bearerCode, callback) {
console.log("in getAuthCode (bearerCode: " + bearerCode + ")");
dal.doGet(OAuthAuthCodeTable, { authCode: { S: bearerCode }}, true, function(err, data) {
if (data && data.expires) {
data.expires = new Date(data.expires * 1000);
}
callback(err, data);
});
};
model.saveAuthCode = function (authCode, clientId, expires, user, callback) {
console.log('in saveAuthCode (authCode: ' + authCode + ', clientId: ' + clientId + ', userId: ' + user.id + ', expires: ' + expires + ')');
var code = {
authCode: authCode,
clientId: clientId,
userId: user.id
};
if (expires) code.expires = parseInt(expires / 1000, 10);
console.log("saving", code);
dal.doSet(code, OAuthAuthCodeTable, { authCode: { S: authCode }}, callback);
};
/*
* Required to support password grant type
*/
model.getUser = function (username, password, callback) {
console.log('in getUser (username: ' + username + ', password: ' + password + ')');
dal.doGet(OAuthUserTable, { id: { S: "email:" + username}}, true, function(err, data) {
if (err) return callback(err);
callback(null, { id: data.userId });
});
}; | random_line_split |
|
regions-close-associated-type-into-object.rs | // Copyright 2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
#![feature(box_syntax)]
// FIXME (#22405): Replace `Box::new` with `box` here when/if possible.
trait X {}
trait Iter {
type Item: X;
fn into_item(self) -> Self::Item;
fn as_item(&self) -> &Self::Item;
}
fn bad1<T: Iter>(v: T) -> Box<X+'static>
{
let item = v.into_item();
Box::new(item) //~ ERROR associated type `<T as Iter>::Item` may not live long enough
}
fn bad2<T: Iter>(v: T) -> Box<X+'static>
where Box<T::Item> : X
{
let item: Box<_> = box v.into_item();
Box::new(item) //~ ERROR associated type `<T as Iter>::Item` may not live long enough
}
fn bad3<'a, T: Iter>(v: T) -> Box<X+'a>
{
let item = v.into_item();
Box::new(item) //~ ERROR associated type `<T as Iter>::Item` may not live long enough
}
fn bad4<'a, T: Iter>(v: T) -> Box<X+'a>
where Box<T::Item> : X
{
let item: Box<_> = box v.into_item();
Box::new(item) //~ ERROR associated type `<T as Iter>::Item` may not live long enough
}
fn | <'a, T: Iter>(v: T) -> Box<X+'a>
where T::Item : 'a
{
let item = v.into_item();
Box::new(item) // OK, T::Item : 'a is declared
}
fn ok2<'a, T: Iter>(v: &T, w: &'a T::Item) -> Box<X+'a>
where T::Item : Clone
{
let item = Clone::clone(w);
Box::new(item) // OK, T::Item : 'a is implied
}
fn ok3<'a, T: Iter>(v: &'a T) -> Box<X+'a>
where T::Item : Clone + 'a
{
let item = Clone::clone(v.as_item());
Box::new(item) // OK, T::Item : 'a was declared
}
fn meh1<'a, T: Iter>(v: &'a T) -> Box<X+'a>
where T::Item : Clone
{
// This case is kind of interesting. It's the same as `ok3` but
// without the explicit declaration. This is valid because `T: 'a
// => T::Item: 'a`, and the former we can deduce from our argument
// of type `&'a T`.
let item = Clone::clone(v.as_item());
Box::new(item)
}
fn main() {}
| ok1 | identifier_name |
regions-close-associated-type-into-object.rs | // Copyright 2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
#![feature(box_syntax)]
// FIXME (#22405): Replace `Box::new` with `box` here when/if possible.
trait X {}
trait Iter {
type Item: X;
fn into_item(self) -> Self::Item;
fn as_item(&self) -> &Self::Item;
}
fn bad1<T: Iter>(v: T) -> Box<X+'static>
{
let item = v.into_item();
Box::new(item) //~ ERROR associated type `<T as Iter>::Item` may not live long enough
}
fn bad2<T: Iter>(v: T) -> Box<X+'static>
where Box<T::Item> : X
{
let item: Box<_> = box v.into_item();
Box::new(item) //~ ERROR associated type `<T as Iter>::Item` may not live long enough
}
fn bad3<'a, T: Iter>(v: T) -> Box<X+'a>
{
let item = v.into_item();
Box::new(item) //~ ERROR associated type `<T as Iter>::Item` may not live long enough
}
fn bad4<'a, T: Iter>(v: T) -> Box<X+'a>
where Box<T::Item> : X
{
let item: Box<_> = box v.into_item();
Box::new(item) //~ ERROR associated type `<T as Iter>::Item` may not live long enough
}
fn ok1<'a, T: Iter>(v: T) -> Box<X+'a>
where T::Item : 'a
|
fn ok2<'a, T: Iter>(v: &T, w: &'a T::Item) -> Box<X+'a>
where T::Item : Clone
{
let item = Clone::clone(w);
Box::new(item) // OK, T::Item : 'a is implied
}
fn ok3<'a, T: Iter>(v: &'a T) -> Box<X+'a>
where T::Item : Clone + 'a
{
let item = Clone::clone(v.as_item());
Box::new(item) // OK, T::Item : 'a was declared
}
fn meh1<'a, T: Iter>(v: &'a T) -> Box<X+'a>
where T::Item : Clone
{
// This case is kind of interesting. It's the same as `ok3` but
// without the explicit declaration. This is valid because `T: 'a
// => T::Item: 'a`, and the former we can deduce from our argument
// of type `&'a T`.
let item = Clone::clone(v.as_item());
Box::new(item)
}
fn main() {}
| {
let item = v.into_item();
Box::new(item) // OK, T::Item : 'a is declared
} | identifier_body |
regions-close-associated-type-into-object.rs | // Copyright 2014 The Rust Project Developers. See the COPYRIGHT
// file at the top-level directory of this distribution and at
// http://rust-lang.org/COPYRIGHT.
//
// Licensed under the Apache License, Version 2.0 <LICENSE-APACHE or
// http://www.apache.org/licenses/LICENSE-2.0> or the MIT license
// <LICENSE-MIT or http://opensource.org/licenses/MIT>, at your
// option. This file may not be copied, modified, or distributed
// except according to those terms.
#![feature(box_syntax)]
// FIXME (#22405): Replace `Box::new` with `box` here when/if possible.
trait X {}
trait Iter {
type Item: X;
fn into_item(self) -> Self::Item;
fn as_item(&self) -> &Self::Item;
}
fn bad1<T: Iter>(v: T) -> Box<X+'static>
{
let item = v.into_item();
Box::new(item) //~ ERROR associated type `<T as Iter>::Item` may not live long enough
}
fn bad2<T: Iter>(v: T) -> Box<X+'static>
where Box<T::Item> : X
{
let item: Box<_> = box v.into_item();
Box::new(item) //~ ERROR associated type `<T as Iter>::Item` may not live long enough
}
fn bad3<'a, T: Iter>(v: T) -> Box<X+'a>
{
let item = v.into_item();
Box::new(item) //~ ERROR associated type `<T as Iter>::Item` may not live long enough
}
fn bad4<'a, T: Iter>(v: T) -> Box<X+'a>
where Box<T::Item> : X
{
let item: Box<_> = box v.into_item();
Box::new(item) //~ ERROR associated type `<T as Iter>::Item` may not live long enough
}
fn ok1<'a, T: Iter>(v: T) -> Box<X+'a>
where T::Item : 'a
{
let item = v.into_item();
Box::new(item) // OK, T::Item : 'a is declared
}
fn ok2<'a, T: Iter>(v: &T, w: &'a T::Item) -> Box<X+'a> | let item = Clone::clone(w);
Box::new(item) // OK, T::Item : 'a is implied
}
fn ok3<'a, T: Iter>(v: &'a T) -> Box<X+'a>
where T::Item : Clone + 'a
{
let item = Clone::clone(v.as_item());
Box::new(item) // OK, T::Item : 'a was declared
}
fn meh1<'a, T: Iter>(v: &'a T) -> Box<X+'a>
where T::Item : Clone
{
// This case is kind of interesting. It's the same as `ok3` but
// without the explicit declaration. This is valid because `T: 'a
// => T::Item: 'a`, and the former we can deduce from our argument
// of type `&'a T`.
let item = Clone::clone(v.as_item());
Box::new(item)
}
fn main() {} | where T::Item : Clone
{ | random_line_split |
object_proxy.js | /**
@module ember
@submodule ember-runtime
*/
import Ember from "ember-metal/core"; // Ember.assert
import {get} from "ember-metal/property_get";
import {set} from "ember-metal/property_set";
import {meta} from "ember-metal/utils";
import {addObserver, removeObserver, addBeforeObserver, removeBeforeObserver} from "ember-metal/observer";
import {propertyWillChange, propertyDidChange} from "ember-metal/property_events";
import {computed} from "ember-metal/computed";
import {defineProperty} from "ember-metal/properties";
import {observer} from "ember-metal/mixin";
import EmberStringUtils from "ember-runtime/system/string";
import EmberObject from "ember-runtime/system/object";
function contentPropertyWillChange(content, contentKey) {
var key = contentKey.slice(8); // remove "content."
if (key in this) { return; } // if shadowed in proxy
propertyWillChange(this, key);
}
function contentPropertyDidChange(content, contentKey) {
var key = contentKey.slice(8); // remove "content."
if (key in this) { return; } // if shadowed in proxy
propertyDidChange(this, key);
}
/**
`Ember.ObjectProxy` forwards all properties not defined by the proxy itself
to a proxied `content` object.
```javascript
object = Ember.Object.create({
name: 'Foo'
});
proxy = Ember.ObjectProxy.create({
content: object
});
// Access and change existing properties
proxy.get('name') // 'Foo'
proxy.set('name', 'Bar');
object.get('name') // 'Bar'
// Create new 'description' property on `object`
proxy.set('description', 'Foo is a whizboo baz');
object.get('description') // 'Foo is a whizboo baz'
```
While `content` is unset, setting a property to be delegated will throw an
Error.
```javascript
proxy = Ember.ObjectProxy.create({
content: null,
flag: null
});
proxy.set('flag', true);
proxy.get('flag'); // true
proxy.get('foo'); // undefined
proxy.set('foo', 'data'); // throws Error
```
Delegated properties can be bound to and will change when content is updated.
Computed properties on the proxy itself can depend on delegated properties.
```javascript
ProxyWithComputedProperty = Ember.ObjectProxy.extend({
fullName: function () {
var firstName = this.get('firstName'),
lastName = this.get('lastName');
if (firstName && lastName) {
return firstName + ' ' + lastName;
}
return firstName || lastName;
}.property('firstName', 'lastName')
});
proxy = ProxyWithComputedProperty.create();
|
proxy.get('fullName'); // 'Tom Dale'
```
@class ObjectProxy
@namespace Ember
@extends Ember.Object
*/
var ObjectProxy = EmberObject.extend({
/**
The object whose properties will be forwarded.
@property content
@type Ember.Object
@default null
*/
content: null,
_contentDidChange: observer('content', function() {
Ember.assert("Can't set ObjectProxy's content to itself", get(this, 'content') !== this);
}),
isTruthy: computed.bool('content'),
_debugContainerKey: null,
willWatchProperty: function (key) {
var contentKey = 'content.' + key;
addBeforeObserver(this, contentKey, null, contentPropertyWillChange);
addObserver(this, contentKey, null, contentPropertyDidChange);
},
didUnwatchProperty: function (key) {
var contentKey = 'content.' + key;
removeBeforeObserver(this, contentKey, null, contentPropertyWillChange);
removeObserver(this, contentKey, null, contentPropertyDidChange);
},
unknownProperty: function (key) {
var content = get(this, 'content');
if (content) {
return get(content, key);
}
},
setUnknownProperty: function (key, value) {
var m = meta(this);
if (m.proto === this) {
// if marked as prototype then just defineProperty
// rather than delegate
defineProperty(this, key, null, value);
return value;
}
var content = get(this, 'content');
Ember.assert(EmberStringUtils.fmt("Cannot delegate set('%@', %@) to the 'content' property of object proxy %@: its 'content' is undefined.", [key, value, this]), content);
return set(content, key, value);
}
});
export default ObjectProxy; | proxy.get('fullName'); // undefined
proxy.set('content', {
firstName: 'Tom', lastName: 'Dale'
}); // triggers property change for fullName on proxy | random_line_split |
object_proxy.js | /**
@module ember
@submodule ember-runtime
*/
import Ember from "ember-metal/core"; // Ember.assert
import {get} from "ember-metal/property_get";
import {set} from "ember-metal/property_set";
import {meta} from "ember-metal/utils";
import {addObserver, removeObserver, addBeforeObserver, removeBeforeObserver} from "ember-metal/observer";
import {propertyWillChange, propertyDidChange} from "ember-metal/property_events";
import {computed} from "ember-metal/computed";
import {defineProperty} from "ember-metal/properties";
import {observer} from "ember-metal/mixin";
import EmberStringUtils from "ember-runtime/system/string";
import EmberObject from "ember-runtime/system/object";
function contentPropertyWillChange(content, contentKey) {
var key = contentKey.slice(8); // remove "content."
if (key in this) { return; } // if shadowed in proxy
propertyWillChange(this, key);
}
function contentPropertyDidChange(content, contentKey) |
/**
`Ember.ObjectProxy` forwards all properties not defined by the proxy itself
to a proxied `content` object.
```javascript
object = Ember.Object.create({
name: 'Foo'
});
proxy = Ember.ObjectProxy.create({
content: object
});
// Access and change existing properties
proxy.get('name') // 'Foo'
proxy.set('name', 'Bar');
object.get('name') // 'Bar'
// Create new 'description' property on `object`
proxy.set('description', 'Foo is a whizboo baz');
object.get('description') // 'Foo is a whizboo baz'
```
While `content` is unset, setting a property to be delegated will throw an
Error.
```javascript
proxy = Ember.ObjectProxy.create({
content: null,
flag: null
});
proxy.set('flag', true);
proxy.get('flag'); // true
proxy.get('foo'); // undefined
proxy.set('foo', 'data'); // throws Error
```
Delegated properties can be bound to and will change when content is updated.
Computed properties on the proxy itself can depend on delegated properties.
```javascript
ProxyWithComputedProperty = Ember.ObjectProxy.extend({
fullName: function () {
var firstName = this.get('firstName'),
lastName = this.get('lastName');
if (firstName && lastName) {
return firstName + ' ' + lastName;
}
return firstName || lastName;
}.property('firstName', 'lastName')
});
proxy = ProxyWithComputedProperty.create();
proxy.get('fullName'); // undefined
proxy.set('content', {
firstName: 'Tom', lastName: 'Dale'
}); // triggers property change for fullName on proxy
proxy.get('fullName'); // 'Tom Dale'
```
@class ObjectProxy
@namespace Ember
@extends Ember.Object
*/
var ObjectProxy = EmberObject.extend({
/**
The object whose properties will be forwarded.
@property content
@type Ember.Object
@default null
*/
content: null,
_contentDidChange: observer('content', function() {
Ember.assert("Can't set ObjectProxy's content to itself", get(this, 'content') !== this);
}),
isTruthy: computed.bool('content'),
_debugContainerKey: null,
willWatchProperty: function (key) {
var contentKey = 'content.' + key;
addBeforeObserver(this, contentKey, null, contentPropertyWillChange);
addObserver(this, contentKey, null, contentPropertyDidChange);
},
didUnwatchProperty: function (key) {
var contentKey = 'content.' + key;
removeBeforeObserver(this, contentKey, null, contentPropertyWillChange);
removeObserver(this, contentKey, null, contentPropertyDidChange);
},
unknownProperty: function (key) {
var content = get(this, 'content');
if (content) {
return get(content, key);
}
},
setUnknownProperty: function (key, value) {
var m = meta(this);
if (m.proto === this) {
// if marked as prototype then just defineProperty
// rather than delegate
defineProperty(this, key, null, value);
return value;
}
var content = get(this, 'content');
Ember.assert(EmberStringUtils.fmt("Cannot delegate set('%@', %@) to the 'content' property of object proxy %@: its 'content' is undefined.", [key, value, this]), content);
return set(content, key, value);
}
});
export default ObjectProxy;
| {
var key = contentKey.slice(8); // remove "content."
if (key in this) { return; } // if shadowed in proxy
propertyDidChange(this, key);
} | identifier_body |
object_proxy.js | /**
@module ember
@submodule ember-runtime
*/
import Ember from "ember-metal/core"; // Ember.assert
import {get} from "ember-metal/property_get";
import {set} from "ember-metal/property_set";
import {meta} from "ember-metal/utils";
import {addObserver, removeObserver, addBeforeObserver, removeBeforeObserver} from "ember-metal/observer";
import {propertyWillChange, propertyDidChange} from "ember-metal/property_events";
import {computed} from "ember-metal/computed";
import {defineProperty} from "ember-metal/properties";
import {observer} from "ember-metal/mixin";
import EmberStringUtils from "ember-runtime/system/string";
import EmberObject from "ember-runtime/system/object";
function | (content, contentKey) {
var key = contentKey.slice(8); // remove "content."
if (key in this) { return; } // if shadowed in proxy
propertyWillChange(this, key);
}
function contentPropertyDidChange(content, contentKey) {
var key = contentKey.slice(8); // remove "content."
if (key in this) { return; } // if shadowed in proxy
propertyDidChange(this, key);
}
/**
`Ember.ObjectProxy` forwards all properties not defined by the proxy itself
to a proxied `content` object.
```javascript
object = Ember.Object.create({
name: 'Foo'
});
proxy = Ember.ObjectProxy.create({
content: object
});
// Access and change existing properties
proxy.get('name') // 'Foo'
proxy.set('name', 'Bar');
object.get('name') // 'Bar'
// Create new 'description' property on `object`
proxy.set('description', 'Foo is a whizboo baz');
object.get('description') // 'Foo is a whizboo baz'
```
While `content` is unset, setting a property to be delegated will throw an
Error.
```javascript
proxy = Ember.ObjectProxy.create({
content: null,
flag: null
});
proxy.set('flag', true);
proxy.get('flag'); // true
proxy.get('foo'); // undefined
proxy.set('foo', 'data'); // throws Error
```
Delegated properties can be bound to and will change when content is updated.
Computed properties on the proxy itself can depend on delegated properties.
```javascript
ProxyWithComputedProperty = Ember.ObjectProxy.extend({
fullName: function () {
var firstName = this.get('firstName'),
lastName = this.get('lastName');
if (firstName && lastName) {
return firstName + ' ' + lastName;
}
return firstName || lastName;
}.property('firstName', 'lastName')
});
proxy = ProxyWithComputedProperty.create();
proxy.get('fullName'); // undefined
proxy.set('content', {
firstName: 'Tom', lastName: 'Dale'
}); // triggers property change for fullName on proxy
proxy.get('fullName'); // 'Tom Dale'
```
@class ObjectProxy
@namespace Ember
@extends Ember.Object
*/
var ObjectProxy = EmberObject.extend({
/**
The object whose properties will be forwarded.
@property content
@type Ember.Object
@default null
*/
content: null,
_contentDidChange: observer('content', function() {
Ember.assert("Can't set ObjectProxy's content to itself", get(this, 'content') !== this);
}),
isTruthy: computed.bool('content'),
_debugContainerKey: null,
willWatchProperty: function (key) {
var contentKey = 'content.' + key;
addBeforeObserver(this, contentKey, null, contentPropertyWillChange);
addObserver(this, contentKey, null, contentPropertyDidChange);
},
didUnwatchProperty: function (key) {
var contentKey = 'content.' + key;
removeBeforeObserver(this, contentKey, null, contentPropertyWillChange);
removeObserver(this, contentKey, null, contentPropertyDidChange);
},
unknownProperty: function (key) {
var content = get(this, 'content');
if (content) {
return get(content, key);
}
},
setUnknownProperty: function (key, value) {
var m = meta(this);
if (m.proto === this) {
// if marked as prototype then just defineProperty
// rather than delegate
defineProperty(this, key, null, value);
return value;
}
var content = get(this, 'content');
Ember.assert(EmberStringUtils.fmt("Cannot delegate set('%@', %@) to the 'content' property of object proxy %@: its 'content' is undefined.", [key, value, this]), content);
return set(content, key, value);
}
});
export default ObjectProxy;
| contentPropertyWillChange | identifier_name |
object_proxy.js | /**
@module ember
@submodule ember-runtime
*/
import Ember from "ember-metal/core"; // Ember.assert
import {get} from "ember-metal/property_get";
import {set} from "ember-metal/property_set";
import {meta} from "ember-metal/utils";
import {addObserver, removeObserver, addBeforeObserver, removeBeforeObserver} from "ember-metal/observer";
import {propertyWillChange, propertyDidChange} from "ember-metal/property_events";
import {computed} from "ember-metal/computed";
import {defineProperty} from "ember-metal/properties";
import {observer} from "ember-metal/mixin";
import EmberStringUtils from "ember-runtime/system/string";
import EmberObject from "ember-runtime/system/object";
function contentPropertyWillChange(content, contentKey) {
var key = contentKey.slice(8); // remove "content."
if (key in this) { return; } // if shadowed in proxy
propertyWillChange(this, key);
}
function contentPropertyDidChange(content, contentKey) {
var key = contentKey.slice(8); // remove "content."
if (key in this) { return; } // if shadowed in proxy
propertyDidChange(this, key);
}
/**
`Ember.ObjectProxy` forwards all properties not defined by the proxy itself
to a proxied `content` object.
```javascript
object = Ember.Object.create({
name: 'Foo'
});
proxy = Ember.ObjectProxy.create({
content: object
});
// Access and change existing properties
proxy.get('name') // 'Foo'
proxy.set('name', 'Bar');
object.get('name') // 'Bar'
// Create new 'description' property on `object`
proxy.set('description', 'Foo is a whizboo baz');
object.get('description') // 'Foo is a whizboo baz'
```
While `content` is unset, setting a property to be delegated will throw an
Error.
```javascript
proxy = Ember.ObjectProxy.create({
content: null,
flag: null
});
proxy.set('flag', true);
proxy.get('flag'); // true
proxy.get('foo'); // undefined
proxy.set('foo', 'data'); // throws Error
```
Delegated properties can be bound to and will change when content is updated.
Computed properties on the proxy itself can depend on delegated properties.
```javascript
ProxyWithComputedProperty = Ember.ObjectProxy.extend({
fullName: function () {
var firstName = this.get('firstName'),
lastName = this.get('lastName');
if (firstName && lastName) {
return firstName + ' ' + lastName;
}
return firstName || lastName;
}.property('firstName', 'lastName')
});
proxy = ProxyWithComputedProperty.create();
proxy.get('fullName'); // undefined
proxy.set('content', {
firstName: 'Tom', lastName: 'Dale'
}); // triggers property change for fullName on proxy
proxy.get('fullName'); // 'Tom Dale'
```
@class ObjectProxy
@namespace Ember
@extends Ember.Object
*/
var ObjectProxy = EmberObject.extend({
/**
The object whose properties will be forwarded.
@property content
@type Ember.Object
@default null
*/
content: null,
_contentDidChange: observer('content', function() {
Ember.assert("Can't set ObjectProxy's content to itself", get(this, 'content') !== this);
}),
isTruthy: computed.bool('content'),
_debugContainerKey: null,
willWatchProperty: function (key) {
var contentKey = 'content.' + key;
addBeforeObserver(this, contentKey, null, contentPropertyWillChange);
addObserver(this, contentKey, null, contentPropertyDidChange);
},
didUnwatchProperty: function (key) {
var contentKey = 'content.' + key;
removeBeforeObserver(this, contentKey, null, contentPropertyWillChange);
removeObserver(this, contentKey, null, contentPropertyDidChange);
},
unknownProperty: function (key) {
var content = get(this, 'content');
if (content) {
return get(content, key);
}
},
setUnknownProperty: function (key, value) {
var m = meta(this);
if (m.proto === this) |
var content = get(this, 'content');
Ember.assert(EmberStringUtils.fmt("Cannot delegate set('%@', %@) to the 'content' property of object proxy %@: its 'content' is undefined.", [key, value, this]), content);
return set(content, key, value);
}
});
export default ObjectProxy;
| {
// if marked as prototype then just defineProperty
// rather than delegate
defineProperty(this, key, null, value);
return value;
} | conditional_block |
env.py | from __future__ import with_statement
from logging.config import fileConfig
from alembic import context
from sqlalchemy import engine_from_config, pool
# this is the Alembic Config object, which provides
# access to the values within the .ini file in use.
config = context.config
# Interpret the config file for Python logging.
# This line sets up loggers basically.
fileConfig(config.config_file_name)
# add your model's MetaData object here
# for 'autogenerate' support
# from myapp import mymodel
# target_metadata = mymodel.Base.metadata
target_metadata = None
# other values from the config, defined by the needs of env.py,
# can be acquired:
# my_important_option = config.get_main_option("my_important_option")
# ... etc.
def run_migrations_offline():
"""Run migrations in 'offline' mode.
This configures the context with just a URL
and not an Engine, though an Engine is acceptable
here as well. By skipping the Engine creation
we don't even need a DBAPI to be available.
Calls to context.execute() here emit the given string to the
script output.
"""
url = config.get_main_option("sqlalchemy.url")
context.configure(
url=url, target_metadata=target_metadata, literal_binds=True)
with context.begin_transaction():
context.run_migrations()
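# (Illustration: this offline path is what runs under an invocation such as
# `alembic upgrade head --sql`, emitting the migration SQL as a script
# instead of executing it against a live database.)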
def run_migrations_online():
"""Run migrations in 'online' mode.
In this scenario we need to create an Engine
and associate a connection with the context.
"""
connectable = engine_from_config(
config.get_section(config.config_ini_section),
prefix='sqlalchemy.',
poolclass=pool.NullPool)
with connectable.connect() as connection:
context.configure(
connection=connection,
target_metadata=target_metadata
)
with context.begin_transaction():
context.run_migrations()
if context.is_offline_mode():
run_migrations_offline()
else:
| run_migrations_online() | conditional_block |
|
env.py | from __future__ import with_statement
from logging.config import fileConfig
from alembic import context
from sqlalchemy import engine_from_config, pool
# this is the Alembic Config object, which provides | # This line sets up loggers basically.
fileConfig(config.config_file_name)
# add your model's MetaData object here
# for 'autogenerate' support
# from myapp import mymodel
# target_metadata = mymodel.Base.metadata
target_metadata = None
# other values from the config, defined by the needs of env.py,
# can be acquired:
# my_important_option = config.get_main_option("my_important_option")
# ... etc.
def run_migrations_offline():
"""Run migrations in 'offline' mode.
This configures the context with just a URL
and not an Engine, though an Engine is acceptable
here as well. By skipping the Engine creation
we don't even need a DBAPI to be available.
Calls to context.execute() here emit the given string to the
script output.
"""
url = config.get_main_option("sqlalchemy.url")
context.configure(
url=url, target_metadata=target_metadata, literal_binds=True)
with context.begin_transaction():
context.run_migrations()
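# (Illustration: this offline path is what runs under an invocation such as
# `alembic upgrade head --sql`, emitting the migration SQL as a script
# instead of executing it against a live database.)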
def run_migrations_online():
"""Run migrations in 'online' mode.
In this scenario we need to create an Engine
and associate a connection with the context.
"""
connectable = engine_from_config(
config.get_section(config.config_ini_section),
prefix='sqlalchemy.',
poolclass=pool.NullPool)
with connectable.connect() as connection:
context.configure(
connection=connection,
target_metadata=target_metadata
)
with context.begin_transaction():
context.run_migrations()
if context.is_offline_mode():
run_migrations_offline()
else:
run_migrations_online() | # access to the values within the .ini file in use.
config = context.config
# Interpret the config file for Python logging. | random_line_split |
env.py | from __future__ import with_statement
from logging.config import fileConfig
from alembic import context
from sqlalchemy import engine_from_config, pool
# this is the Alembic Config object, which provides
# access to the values within the .ini file in use.
config = context.config
# Interpret the config file for Python logging.
# This line sets up loggers basically.
fileConfig(config.config_file_name)
# add your model's MetaData object here
# for 'autogenerate' support
# from myapp import mymodel
# target_metadata = mymodel.Base.metadata
target_metadata = None
# other values from the config, defined by the needs of env.py,
# can be acquired:
# my_important_option = config.get_main_option("my_important_option")
# ... etc.
def run_migrations_offline():
"""Run migrations in 'offline' mode.
This configures the context with just a URL
and not an Engine, though an Engine is acceptable
here as well. By skipping the Engine creation
we don't even need a DBAPI to be available.
Calls to context.execute() here emit the given string to the
script output.
"""
url = config.get_main_option("sqlalchemy.url")
context.configure(
url=url, target_metadata=target_metadata, literal_binds=True)
with context.begin_transaction():
context.run_migrations()
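# (Illustration: this offline path is what runs under an invocation such as
# `alembic upgrade head --sql`, emitting the migration SQL as a script
# instead of executing it against a live database.)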
def | ():
"""Run migrations in 'online' mode.
In this scenario we need to create an Engine
and associate a connection with the context.
"""
connectable = engine_from_config(
config.get_section(config.config_ini_section),
prefix='sqlalchemy.',
poolclass=pool.NullPool)
with connectable.connect() as connection:
context.configure(
connection=connection,
target_metadata=target_metadata
)
with context.begin_transaction():
context.run_migrations()
if context.is_offline_mode():
run_migrations_offline()
else:
run_migrations_online()
| run_migrations_online | identifier_name |
env.py | from __future__ import with_statement
from logging.config import fileConfig
from alembic import context
from sqlalchemy import engine_from_config, pool
# this is the Alembic Config object, which provides
# access to the values within the .ini file in use.
config = context.config
# Interpret the config file for Python logging.
# This line sets up loggers basically.
fileConfig(config.config_file_name)
# add your model's MetaData object here
# for 'autogenerate' support
# from myapp import mymodel
# target_metadata = mymodel.Base.metadata
target_metadata = None
# other values from the config, defined by the needs of env.py,
# can be acquired:
# my_important_option = config.get_main_option("my_important_option")
# ... etc.
def run_migrations_offline():
|
def run_migrations_online():
"""Run migrations in 'online' mode.
In this scenario we need to create an Engine
and associate a connection with the context.
"""
connectable = engine_from_config(
config.get_section(config.config_ini_section),
prefix='sqlalchemy.',
poolclass=pool.NullPool)
with connectable.connect() as connection:
context.configure(
connection=connection,
target_metadata=target_metadata
)
with context.begin_transaction():
context.run_migrations()
if context.is_offline_mode():
run_migrations_offline()
else:
run_migrations_online()
| """Run migrations in 'offline' mode.
This configures the context with just a URL
and not an Engine, though an Engine is acceptable
here as well. By skipping the Engine creation
we don't even need a DBAPI to be available.
Calls to context.execute() here emit the given string to the
script output.
"""
url = config.get_main_option("sqlalchemy.url")
context.configure(
url=url, target_metadata=target_metadata, literal_binds=True)
with context.begin_transaction():
context.run_migrations() | identifier_body |
builder.rs | use num::{Integer, NumCast, Unsigned};
use std::hash::Hash;
use typenum::NonZero;
use crate::buffer::{BufferError, MeshBuffer};
use crate::builder::{FacetBuilder, MeshBuilder, SurfaceBuilder};
use crate::constant::{Constant, ToType, TypeOf};
use crate::geometry::{FromGeometry, IntoGeometry};
use crate::index::{Flat, Grouping, IndexBuffer};
use crate::primitive::Topological;
use crate::transact::{ClosedInput, Transact};
use crate::Arity;
// TODO: It should not be possible to manufacture keys without placing
// additional constraints on the type bounds of `FacetBuilder` (for
// example, `FacetBuilder<Key = usize>`). Is it important to check for
// out-of-bounds indices in `insert_facet`?
pub type VertexKey<R> = <Vec<<R as Grouping>::Group> as IndexBuffer<R>>::Index;
pub struct BufferBuilder<R, G>
where
R: Grouping,
{
indices: Vec<R::Group>,
vertices: Vec<G>,
}
impl<R, G> Default for BufferBuilder<R, G>
where
R: Grouping,
Vec<R::Group>: IndexBuffer<R>,
{
fn default() -> Self {
BufferBuilder {
indices: Default::default(),
vertices: Default::default(),
}
}
}
impl<R, G> ClosedInput for BufferBuilder<R, G>
where
R: Grouping,
Vec<R::Group>: IndexBuffer<R>,
{
type Input = ();
}
impl<K, G, const N: usize> FacetBuilder<K> for BufferBuilder<Flat<K, N>, G>
where
Constant<N>: ToType,
TypeOf<N>: NonZero,
K: Copy + Hash + Integer + Unsigned,
Vec<K>: IndexBuffer<Flat<K, N>>,
{
type Facet = ();
type Key = ();
fn insert_facet<T, U>(&mut self, keys: T, _: U) -> Result<Self::Key, Self::Error>
where
Self::Facet: FromGeometry<U>,
T: AsRef<[K]>,
|
}
impl<P, G> FacetBuilder<P::Vertex> for BufferBuilder<P, G>
where
P: Grouping<Group = P> + Topological,
P::Vertex: Copy + Hash + Integer + Unsigned,
Vec<P>: IndexBuffer<P>,
{
type Facet = ();
type Key = ();
fn insert_facet<T, U>(&mut self, keys: T, _: U) -> Result<Self::Key, Self::Error>
where
Self::Facet: FromGeometry<U>,
T: AsRef<[P::Vertex]>,
{
let arity = keys.as_ref().len();
P::try_from_slice(keys)
.ok_or(BufferError::ArityConflict {
expected: P::ARITY.into_interval().0,
actual: arity,
})
.map(|polygon| self.indices.push(polygon))
}
}
impl<R, G> MeshBuilder for BufferBuilder<R, G>
where
Self: SurfaceBuilder<Vertex = G, Facet = ()>,
R: Grouping,
VertexKey<R>: Hash,
Vec<R::Group>: IndexBuffer<R>,
{
type Builder = Self;
type Vertex = G;
type Facet = ();
fn surface_with<F, T, E>(&mut self, f: F) -> Result<T, Self::Error>
where
Self::Error: From<E>,
F: FnOnce(&mut Self::Builder) -> Result<T, E>,
{
f(self).map_err(|error| error.into())
}
}
impl<R, G> SurfaceBuilder for BufferBuilder<R, G>
where
Self: FacetBuilder<VertexKey<R>, Facet = ()>,
Self::Error: From<BufferError>, // TODO: Why is this necessary?
R: Grouping,
VertexKey<R>: Hash + NumCast,
Vec<R::Group>: IndexBuffer<R>,
{
type Builder = Self;
type Key = VertexKey<R>;
type Vertex = G;
type Facet = ();
fn facets_with<F, T, E>(&mut self, f: F) -> Result<T, Self::Error>
where
Self::Error: From<E>,
F: FnOnce(&mut Self::Builder) -> Result<T, E>,
{
f(self).map_err(|error| error.into())
}
fn insert_vertex<T>(&mut self, data: T) -> Result<Self::Key, Self::Error>
where
Self::Vertex: FromGeometry<T>,
{
let key = <VertexKey<R> as NumCast>::from(self.vertices.len())
.ok_or(BufferError::IndexOverflow)?;
self.vertices.push(data.into_geometry());
Ok(key)
}
}
impl<R, G> Transact<<Self as ClosedInput>::Input> for BufferBuilder<R, G>
where
R: Grouping,
Vec<R::Group>: IndexBuffer<R>,
{
type Commit = MeshBuffer<R, G>;
type Abort = ();
type Error = BufferError;
fn commit(self) -> Result<Self::Commit, (Self::Abort, Self::Error)> {
let BufferBuilder { indices, vertices } = self;
Ok(MeshBuffer::from_raw_buffers_unchecked(indices, vertices))
}
fn abort(self) -> Self::Abort {}
}
| {
let keys = keys.as_ref();
if keys.len() == N {
self.indices.extend(keys.iter());
Ok(())
}
else {
// TODO: These numbers do not necessarily represent arity (i.e., the
// number of edges of each topological structure). Use a
// different error variant to express this.
Err(BufferError::ArityConflict {
expected: N,
actual: keys.len(),
})
}
} | identifier_body |
builder.rs | use num::{Integer, NumCast, Unsigned};
use std::hash::Hash;
use typenum::NonZero;
use crate::buffer::{BufferError, MeshBuffer};
use crate::builder::{FacetBuilder, MeshBuilder, SurfaceBuilder};
use crate::constant::{Constant, ToType, TypeOf};
use crate::geometry::{FromGeometry, IntoGeometry};
use crate::index::{Flat, Grouping, IndexBuffer};
use crate::primitive::Topological;
use crate::transact::{ClosedInput, Transact};
use crate::Arity;
// TODO: It should not be possible to manufacture keys without placing
// additional constraints on the type bounds of `FacetBuilder` (for
// example, `FacetBuilder<Key = usize>`). Is it important to check for
// out-of-bounds indices in `insert_facet`?
pub type VertexKey<R> = <Vec<<R as Grouping>::Group> as IndexBuffer<R>>::Index;
pub struct BufferBuilder<R, G>
where
R: Grouping,
{
indices: Vec<R::Group>,
vertices: Vec<G>,
}
impl<R, G> Default for BufferBuilder<R, G>
where
R: Grouping,
Vec<R::Group>: IndexBuffer<R>,
{
fn default() -> Self {
BufferBuilder {
indices: Default::default(),
vertices: Default::default(),
}
}
}
impl<R, G> ClosedInput for BufferBuilder<R, G>
where
R: Grouping,
Vec<R::Group>: IndexBuffer<R>,
{
type Input = ();
}
impl<K, G, const N: usize> FacetBuilder<K> for BufferBuilder<Flat<K, N>, G>
where
Constant<N>: ToType,
TypeOf<N>: NonZero,
K: Copy + Hash + Integer + Unsigned,
Vec<K>: IndexBuffer<Flat<K, N>>,
{
type Facet = ();
type Key = ();
fn insert_facet<T, U>(&mut self, keys: T, _: U) -> Result<Self::Key, Self::Error>
where
Self::Facet: FromGeometry<U>,
T: AsRef<[K]>,
{
let keys = keys.as_ref();
if keys.len() == N {
self.indices.extend(keys.iter());
Ok(())
}
else {
// TODO: These numbers do not necessarily represent arity (i.e., the
// number of edges of each topological structure). Use a
// different error variant to express this.
Err(BufferError::ArityConflict {
expected: N,
actual: keys.len(),
})
}
}
}
impl<P, G> FacetBuilder<P::Vertex> for BufferBuilder<P, G>
where
P: Grouping<Group = P> + Topological,
P::Vertex: Copy + Hash + Integer + Unsigned,
Vec<P>: IndexBuffer<P>,
{
type Facet = ();
type Key = ();
fn insert_facet<T, U>(&mut self, keys: T, _: U) -> Result<Self::Key, Self::Error>
where
Self::Facet: FromGeometry<U>,
T: AsRef<[P::Vertex]>,
{
let arity = keys.as_ref().len();
P::try_from_slice(keys)
.ok_or(BufferError::ArityConflict {
expected: P::ARITY.into_interval().0,
actual: arity,
})
.map(|polygon| self.indices.push(polygon))
}
}
impl<R, G> MeshBuilder for BufferBuilder<R, G>
where
Self: SurfaceBuilder<Vertex = G, Facet = ()>,
R: Grouping,
VertexKey<R>: Hash,
Vec<R::Group>: IndexBuffer<R>,
{
type Builder = Self;
type Vertex = G;
type Facet = ();
fn surface_with<F, T, E>(&mut self, f: F) -> Result<T, Self::Error>
where
Self::Error: From<E>,
F: FnOnce(&mut Self::Builder) -> Result<T, E>,
{
f(self).map_err(|error| error.into())
}
}
impl<R, G> SurfaceBuilder for BufferBuilder<R, G>
where
Self: FacetBuilder<VertexKey<R>, Facet = ()>,
Self::Error: From<BufferError>, // TODO: Why is this necessary?
R: Grouping,
VertexKey<R>: Hash + NumCast,
Vec<R::Group>: IndexBuffer<R>,
{
type Builder = Self;
type Key = VertexKey<R>;
type Vertex = G;
type Facet = ();
fn facets_with<F, T, E>(&mut self, f: F) -> Result<T, Self::Error>
where
Self::Error: From<E>,
F: FnOnce(&mut Self::Builder) -> Result<T, E>,
{
f(self).map_err(|error| error.into())
}
fn insert_vertex<T>(&mut self, data: T) -> Result<Self::Key, Self::Error>
where
Self::Vertex: FromGeometry<T>,
{
let key = <VertexKey<R> as NumCast>::from(self.vertices.len())
.ok_or(BufferError::IndexOverflow)?;
self.vertices.push(data.into_geometry());
Ok(key)
}
}
impl<R, G> Transact<<Self as ClosedInput>::Input> for BufferBuilder<R, G>
where
R: Grouping,
Vec<R::Group>: IndexBuffer<R>,
{
type Commit = MeshBuffer<R, G>;
type Abort = ();
type Error = BufferError;
fn commit(self) -> Result<Self::Commit, (Self::Abort, Self::Error)> {
let BufferBuilder { indices, vertices } = self;
Ok(MeshBuffer::from_raw_buffers_unchecked(indices, vertices))
}
fn | (self) -> Self::Abort {}
}
| abort | identifier_name |
builder.rs | use num::{Integer, NumCast, Unsigned};
use std::hash::Hash;
use typenum::NonZero;
use crate::buffer::{BufferError, MeshBuffer};
use crate::builder::{FacetBuilder, MeshBuilder, SurfaceBuilder};
use crate::constant::{Constant, ToType, TypeOf};
use crate::geometry::{FromGeometry, IntoGeometry};
use crate::index::{Flat, Grouping, IndexBuffer};
use crate::primitive::Topological;
use crate::transact::{ClosedInput, Transact};
use crate::Arity;
// TODO: It should not be possible to manufacture keys without placing
// additional constraints on the type bounds of `FacetBuilder` (for
// example, `FacetBuilder<Key = usize>`). Is it important to check for
// out-of-bounds indices in `insert_facet`?
pub type VertexKey<R> = <Vec<<R as Grouping>::Group> as IndexBuffer<R>>::Index;
pub struct BufferBuilder<R, G>
where
R: Grouping,
{
indices: Vec<R::Group>,
vertices: Vec<G>,
}
impl<R, G> Default for BufferBuilder<R, G>
where
R: Grouping,
Vec<R::Group>: IndexBuffer<R>,
{
fn default() -> Self {
BufferBuilder {
indices: Default::default(),
vertices: Default::default(),
}
}
}
impl<R, G> ClosedInput for BufferBuilder<R, G>
where
R: Grouping,
Vec<R::Group>: IndexBuffer<R>,
{
type Input = ();
}
impl<K, G, const N: usize> FacetBuilder<K> for BufferBuilder<Flat<K, N>, G>
where
Constant<N>: ToType,
TypeOf<N>: NonZero,
K: Copy + Hash + Integer + Unsigned,
Vec<K>: IndexBuffer<Flat<K, N>>,
{
type Facet = ();
type Key = ();
fn insert_facet<T, U>(&mut self, keys: T, _: U) -> Result<Self::Key, Self::Error>
where
Self::Facet: FromGeometry<U>,
T: AsRef<[K]>,
{
let keys = keys.as_ref();
if keys.len() == N |
else {
// TODO: These numbers do not necessarily represent arity (i.e., the
// number of edges of each topological structure). Use a
// different error variant to express this.
Err(BufferError::ArityConflict {
expected: N,
actual: keys.len(),
})
}
}
}
impl<P, G> FacetBuilder<P::Vertex> for BufferBuilder<P, G>
where
P: Grouping<Group = P> + Topological,
P::Vertex: Copy + Hash + Integer + Unsigned,
Vec<P>: IndexBuffer<P>,
{
type Facet = ();
type Key = ();
fn insert_facet<T, U>(&mut self, keys: T, _: U) -> Result<Self::Key, Self::Error>
where
Self::Facet: FromGeometry<U>,
T: AsRef<[P::Vertex]>,
{
let arity = keys.as_ref().len();
P::try_from_slice(keys)
.ok_or(BufferError::ArityConflict {
expected: P::ARITY.into_interval().0,
actual: arity,
})
.map(|polygon| self.indices.push(polygon))
}
}
impl<R, G> MeshBuilder for BufferBuilder<R, G>
where
Self: SurfaceBuilder<Vertex = G, Facet = ()>,
R: Grouping,
VertexKey<R>: Hash,
Vec<R::Group>: IndexBuffer<R>,
{
type Builder = Self;
type Vertex = G;
type Facet = ();
fn surface_with<F, T, E>(&mut self, f: F) -> Result<T, Self::Error>
where
Self::Error: From<E>,
F: FnOnce(&mut Self::Builder) -> Result<T, E>,
{
f(self).map_err(|error| error.into())
}
}
impl<R, G> SurfaceBuilder for BufferBuilder<R, G>
where
Self: FacetBuilder<VertexKey<R>, Facet = ()>,
Self::Error: From<BufferError>, // TODO: Why is this necessary?
R: Grouping,
VertexKey<R>: Hash + NumCast,
Vec<R::Group>: IndexBuffer<R>,
{
type Builder = Self;
type Key = VertexKey<R>;
type Vertex = G;
type Facet = ();
fn facets_with<F, T, E>(&mut self, f: F) -> Result<T, Self::Error>
where
Self::Error: From<E>,
F: FnOnce(&mut Self::Builder) -> Result<T, E>,
{
f(self).map_err(|error| error.into())
}
fn insert_vertex<T>(&mut self, data: T) -> Result<Self::Key, Self::Error>
where
Self::Vertex: FromGeometry<T>,
{
let key = <VertexKey<R> as NumCast>::from(self.vertices.len())
.ok_or(BufferError::IndexOverflow)?;
self.vertices.push(data.into_geometry());
Ok(key)
}
}
impl<R, G> Transact<<Self as ClosedInput>::Input> for BufferBuilder<R, G>
where
R: Grouping,
Vec<R::Group>: IndexBuffer<R>,
{
type Commit = MeshBuffer<R, G>;
type Abort = ();
type Error = BufferError;
fn commit(self) -> Result<Self::Commit, (Self::Abort, Self::Error)> {
let BufferBuilder { indices, vertices } = self;
Ok(MeshBuffer::from_raw_buffers_unchecked(indices, vertices))
}
fn abort(self) -> Self::Abort {}
}
| {
self.indices.extend(keys.iter());
Ok(())
} | conditional_block |
builder.rs | use crate::builder::{FacetBuilder, MeshBuilder, SurfaceBuilder};
use crate::constant::{Constant, ToType, TypeOf};
use crate::geometry::{FromGeometry, IntoGeometry};
use crate::index::{Flat, Grouping, IndexBuffer};
use crate::primitive::Topological;
use crate::transact::{ClosedInput, Transact};
use crate::Arity;
// TODO: It should not be possible to manufacture keys without placing
// additional constraints on the type bounds of `FacetBuilder` (for
// example, `FacetBuilder<Key = usize>`). Is it important to check for
// out-of-bounds indices in `insert_facet`?
pub type VertexKey<R> = <Vec<<R as Grouping>::Group> as IndexBuffer<R>>::Index;
pub struct BufferBuilder<R, G>
where
R: Grouping,
{
indices: Vec<R::Group>,
vertices: Vec<G>,
}
impl<R, G> Default for BufferBuilder<R, G>
where
R: Grouping,
Vec<R::Group>: IndexBuffer<R>,
{
fn default() -> Self {
BufferBuilder {
indices: Default::default(),
vertices: Default::default(),
}
}
}
impl<R, G> ClosedInput for BufferBuilder<R, G>
where
R: Grouping,
Vec<R::Group>: IndexBuffer<R>,
{
type Input = ();
}
impl<K, G, const N: usize> FacetBuilder<K> for BufferBuilder<Flat<K, N>, G>
where
Constant<N>: ToType,
TypeOf<N>: NonZero,
K: Copy + Hash + Integer + Unsigned,
Vec<K>: IndexBuffer<Flat<K, N>>,
{
type Facet = ();
type Key = ();
fn insert_facet<T, U>(&mut self, keys: T, _: U) -> Result<Self::Key, Self::Error>
where
Self::Facet: FromGeometry<U>,
T: AsRef<[K]>,
{
let keys = keys.as_ref();
if keys.len() == N {
self.indices.extend(keys.iter());
Ok(())
}
else {
// TODO: These numbers do not necessarily represent arity (i.e., the
// number of edges of each topological structure). Use a
// different error variant to express this.
Err(BufferError::ArityConflict {
expected: N,
actual: keys.len(),
})
}
}
}
impl<P, G> FacetBuilder<P::Vertex> for BufferBuilder<P, G>
where
P: Grouping<Group = P> + Topological,
P::Vertex: Copy + Hash + Integer + Unsigned,
Vec<P>: IndexBuffer<P>,
{
type Facet = ();
type Key = ();
fn insert_facet<T, U>(&mut self, keys: T, _: U) -> Result<Self::Key, Self::Error>
where
Self::Facet: FromGeometry<U>,
T: AsRef<[P::Vertex]>,
{
let arity = keys.as_ref().len();
P::try_from_slice(keys)
.ok_or(BufferError::ArityConflict {
expected: P::ARITY.into_interval().0,
actual: arity,
})
.map(|polygon| self.indices.push(polygon))
}
}
impl<R, G> MeshBuilder for BufferBuilder<R, G>
where
Self: SurfaceBuilder<Vertex = G, Facet = ()>,
R: Grouping,
VertexKey<R>: Hash,
Vec<R::Group>: IndexBuffer<R>,
{
type Builder = Self;
type Vertex = G;
type Facet = ();
fn surface_with<F, T, E>(&mut self, f: F) -> Result<T, Self::Error>
where
Self::Error: From<E>,
F: FnOnce(&mut Self::Builder) -> Result<T, E>,
{
f(self).map_err(|error| error.into())
}
}
impl<R, G> SurfaceBuilder for BufferBuilder<R, G>
where
Self: FacetBuilder<VertexKey<R>, Facet = ()>,
Self::Error: From<BufferError>, // TODO: Why is this necessary?
R: Grouping,
VertexKey<R>: Hash + NumCast,
Vec<R::Group>: IndexBuffer<R>,
{
type Builder = Self;
type Key = VertexKey<R>;
type Vertex = G;
type Facet = ();
fn facets_with<F, T, E>(&mut self, f: F) -> Result<T, Self::Error>
where
Self::Error: From<E>,
F: FnOnce(&mut Self::Builder) -> Result<T, E>,
{
f(self).map_err(|error| error.into())
}
fn insert_vertex<T>(&mut self, data: T) -> Result<Self::Key, Self::Error>
where
Self::Vertex: FromGeometry<T>,
{
let key = <VertexKey<R> as NumCast>::from(self.vertices.len())
.ok_or(BufferError::IndexOverflow)?;
self.vertices.push(data.into_geometry());
Ok(key)
}
}
impl<R, G> Transact<<Self as ClosedInput>::Input> for BufferBuilder<R, G>
where
R: Grouping,
Vec<R::Group>: IndexBuffer<R>,
{
type Commit = MeshBuffer<R, G>;
type Abort = ();
type Error = BufferError;
fn commit(self) -> Result<Self::Commit, (Self::Abort, Self::Error)> {
let BufferBuilder { indices, vertices } = self;
Ok(MeshBuffer::from_raw_buffers_unchecked(indices, vertices))
}
fn abort(self) -> Self::Abort {}
} | use num::{Integer, NumCast, Unsigned};
use std::hash::Hash;
use typenum::NonZero;
use crate::buffer::{BufferError, MeshBuffer}; | random_line_split |
|
HealPetPet.py | from neolib.plots.Step import Step
from neolib.NST import NST
import time
class HealPetPet(Step):
_paths = {
'links': '//*[@id="content"]/table/tr/td[2]//a/@href',
'img': '//*[@id="content"]/table/tr/td[2]/div/img/@src',
'cert': '//area/@href',
}
_HEALS = {
'http://images.neopets.com/altador/misc/petpet_act_b_ffabe6bc57.gif': 0,
'http://images.neopets.com/altador/misc/petpet_act_a_2a605ae262.gif': 1,
'http://images.neopets.com/altador/misc/petpet_act_c_5f4438778c.gif': 2,
'http://images.neopets.com/altador/misc/petpet_act_d_42b934a33b.gif': 3,
}
def __init__(self, usr):
super().__init__(usr, '', '', False)
# Setup link
self.link = ['http://www.neopets.com/altador/petpet.phtml?ppheal=1',
'http://www.neopets.com/altador/petpet.phtml?ppheal=1&sthv=%s']
# Setup checks
self._checks = ['']
def execute(self, last_pg=None):
# Heal the PetPet 10 times to get the certificate
check = ''
for i in range(0, 11):
if check:
pg = self._usr.get_page(check)
else:
|
f = open('test.html', 'w', encoding='utf-8')
f.write(pg.content)
f.close()
if len(self._xpath('cert', pg)) > 0:
print('Found certificate!')
url = self._base_url + self._xpath('cert', pg)[0]
pg = self._usr.get_page(url)
f = open('test.html', 'w', encoding='utf-8')
f.write(pg.content)
f.close()
print('Saved page')
exit()
links = self._xpath('links', pg)
action = self._HEALS[self._xpath('img', pg)[0]]
url = self._base_url + links[action]
print('URL: ' + url)
pg = self._usr.get_page(url)
links = self._xpath('links', pg)
check = self._base_url + links[4]
f = open('test.html', 'w', encoding='utf-8')
f.write(pg.content)
f.close()
if len(self._xpath('cert', pg)) > 0:
print('Found certificate!')
url = self._base_url + self._xpath('cert', pg)[0]
pg = self._usr.get_page(url)
f = open('test.html', 'w', encoding='utf-8')
f.write(pg.content)
f.close()
print('Saved page')
exit()
# Wait till the next minute to check on the petpet
wait = (60 - NST.sec) + 1
print('Waiting ' + str(wait) + ' seconds')
time.sleep(wait)
| pg = self._usr.get_page(self.link[0]) | conditional_block |
HealPetPet.py | from neolib.plots.Step import Step
from neolib.NST import NST
import time
class HealPetPet(Step):
_paths = {
'links': '//*[@id="content"]/table/tr/td[2]//a/@href',
'img': '//*[@id="content"]/table/tr/td[2]/div/img/@src',
'cert': '//area/@href',
}
_HEALS = {
'http://images.neopets.com/altador/misc/petpet_act_b_ffabe6bc57.gif': 0,
'http://images.neopets.com/altador/misc/petpet_act_a_2a605ae262.gif': 1,
'http://images.neopets.com/altador/misc/petpet_act_c_5f4438778c.gif': 2,
'http://images.neopets.com/altador/misc/petpet_act_d_42b934a33b.gif': 3,
}
def __init__(self, usr):
super().__init__(usr, '', '', False)
# Setup link
self.link = ['http://www.neopets.com/altador/petpet.phtml?ppheal=1',
'http://www.neopets.com/altador/petpet.phtml?ppheal=1&sthv=%s']
# Setup checks
self._checks = ['']
def execute(self, last_pg=None):
# Heal the PetPet 10 times to get the certificate
| print('Saved page')
exit()
links = self._xpath('links', pg)
action = self._HEALS[self._xpath('img', pg)[0]]
url = self._base_url + links[action]
print('URL: ' + url)
pg = self._usr.get_page(url)
links = self._xpath('links', pg)
check = self._base_url + links[4]
f = open('test.html', 'w', encoding='utf-8')
f.write(pg.content)
f.close()
if len(self._xpath('cert', pg)) > 0:
print('Found certificate!')
url = self._base_url + self._xpath('cert', pg)[0]
pg = self._usr.get_page(url)
f = open('test.html', 'w', encoding='utf-8')
f.write(pg.content)
f.close()
print('Saved page')
exit()
# Wait till the next minute to check on the petpet
wait = (60 - NST.sec) + 1
print('Waiting ' + str(wait) + ' seconds')
time.sleep(wait)
| check = ''
for i in range(0, 11):
if check:
pg = self._usr.get_page(check)
else:
pg = self._usr.get_page(self.link[0])
f = open('test.html', 'w', encoding='utf-8')
f.write(pg.content)
f.close()
if len(self._xpath('cert', pg)) > 0:
print('Found certificate!')
url = self._base_url + self._xpath('cert', pg)[0]
pg = self._usr.get_page(url)
f = open('test.html', 'w', encoding='utf-8')
f.write(pg.content)
f.close()
| identifier_body |
HealPetPet.py | from neolib.plots.Step import Step
from neolib.NST import NST
import time
class HealPetPet(Step):
_paths = {
'links': '//*[@id="content"]/table/tr/td[2]//a/@href',
'img': '//*[@id="content"]/table/tr/td[2]/div/img/@src',
'cert': '//area/@href',
}
_HEALS = {
'http://images.neopets.com/altador/misc/petpet_act_b_ffabe6bc57.gif': 0,
'http://images.neopets.com/altador/misc/petpet_act_a_2a605ae262.gif': 1,
'http://images.neopets.com/altador/misc/petpet_act_c_5f4438778c.gif': 2,
'http://images.neopets.com/altador/misc/petpet_act_d_42b934a33b.gif': 3,
}
def __init__(self, usr):
super().__init__(usr, '', '', False)
# Setup link
self.link = ['http://www.neopets.com/altador/petpet.phtml?ppheal=1',
'http://www.neopets.com/altador/petpet.phtml?ppheal=1&sthv=%s']
# Setup checks
self._checks = ['']
def | (self, last_pg=None):
# Heal the PetPet 10 times to get the certificate
check = ''
for i in range(0, 11):
if check:
pg = self._usr.get_page(check)
else:
pg = self._usr.get_page(self.link[0])
f = open('test.html', 'w', encoding='utf-8')
f.write(pg.content)
f.close()
if len(self._xpath('cert', pg)) > 0:
print('Found certificate!')
url = self._base_url + self._xpath('cert', pg)[0]
pg = self._usr.get_page(url)
f = open('test.html', 'w', encoding='utf-8')
f.write(pg.content)
f.close()
print('Saved page')
exit()
links = self._xpath('links', pg)
action = self._HEALS[self._xpath('img', pg)[0]]
url = self._base_url + links[action]
print('URL: ' + url)
pg = self._usr.get_page(url)
links = self._xpath('links', pg)
check = self._base_url + links[4]
f = open('test.html', 'w', encoding='utf-8')
f.write(pg.content)
f.close()
if len(self._xpath('cert', pg)) > 0:
print('Found certificate!')
url = self._base_url + self._xpath('cert', pg)[0]
pg = self._usr.get_page(url)
f = open('test.html', 'w', encoding='utf-8')
f.write(pg.content)
f.close()
print('Saved page')
exit()
# Wait till the next minute to check on the petpet
wait = (60 - NST.sec) + 1
print('Waiting ' + str(wait) + ' seconds')
time.sleep(wait)
| execute | identifier_name |
HealPetPet.py | from neolib.plots.Step import Step
from neolib.NST import NST
import time
class HealPetPet(Step):
_paths = {
'links': '//*[@id="content"]/table/tr/td[2]//a/@href',
'img': '//*[@id="content"]/table/tr/td[2]/div/img/@src',
'cert': '//area/@href',
}
_HEALS = {
'http://images.neopets.com/altador/misc/petpet_act_b_ffabe6bc57.gif': 0,
'http://images.neopets.com/altador/misc/petpet_act_a_2a605ae262.gif': 1, | 'http://images.neopets.com/altador/misc/petpet_act_d_42b934a33b.gif': 3,
}
def __init__(self, usr):
super().__init__(usr, '', '', False)
# Setup link
self.link = ['http://www.neopets.com/altador/petpet.phtml?ppheal=1',
'http://www.neopets.com/altador/petpet.phtml?ppheal=1&sthv=%s']
# Setup checks
self._checks = ['']
def execute(self, last_pg=None):
# Heal the PetPet 10 times to get the certificate
check = ''
for i in range(0, 11):
if check:
pg = self._usr.get_page(check)
else:
pg = self._usr.get_page(self.link[0])
f = open('test.html', 'w', encoding='utf-8')
f.write(pg.content)
f.close()
if len(self._xpath('cert', pg)) > 0:
print('Found certificate!')
url = self._base_url + self._xpath('cert', pg)[0]
pg = self._usr.get_page(url)
f = open('test.html', 'w', encoding='utf-8')
f.write(pg.content)
f.close()
print('Saved page')
exit()
links = self._xpath('links', pg)
action = self._HEALS[self._xpath('img', pg)[0]]
url = self._base_url + links[action]
print('URL: ' + url)
pg = self._usr.get_page(url)
links = self._xpath('links', pg)
check = self._base_url + links[4]
f = open('test.html', 'w', encoding='utf-8')
f.write(pg.content)
f.close()
if len(self._xpath('cert', pg)) > 0:
print('Found certificate!')
url = self._base_url + self._xpath('cert', pg)[0]
pg = self._usr.get_page(url)
f = open('test.html', 'w', encoding='utf-8')
f.write(pg.content)
f.close()
print('Saved page')
exit()
# Wait till the next minute to check on the petpet
wait = (60 - NST.sec) + 1
print('Waiting ' + str(wait) + ' seconds')
time.sleep(wait) | 'http://images.neopets.com/altador/misc/petpet_act_c_5f4438778c.gif': 2, | random_line_split |
hanna.persona.ts | import { HttpErrorResponse } from '@angular/common/http';
import { Injector } from '@angular/core';
import { Scope } from '@dev/translatr-model';
import {
AccessTokenService,
errorMessage,
KeyService,
LocaleService,
MessageService,
ProjectService,
UserService
} from '@dev/translatr-sdk';
import { pickRandomly } from '@translatr/utils';
import { Observable, of } from 'rxjs';
import { catchError, concatMap, filter, map } from 'rxjs/operators';
import { chooseAccessToken } from '../access-token';
import { LoadGeneratorConfig } from '../load-generator-config';
import { selectRandomProjectAccessToken } from '../project';
import { WeightedPersona } from '../weighted-persona';
import { Persona } from './persona';
import { personas } from './personas';
const info: WeightedPersona = {
section: 'key',
type: 'update',
name: 'Hanna',
description: "I'm going to update a key of a random project of mine.",
weight: 10
};
const suffix = '.updated';
export class | extends Persona {
private readonly accessTokenService: AccessTokenService;
private readonly userService: UserService;
private readonly projectService: ProjectService;
private readonly localeService: LocaleService;
private readonly keyService: KeyService;
private readonly messageService: MessageService;
constructor(config: LoadGeneratorConfig, injector: Injector) {
super(info.name, config, injector);
this.accessTokenService = injector.get(AccessTokenService);
this.userService = injector.get(UserService);
this.projectService = injector.get(ProjectService);
this.localeService = injector.get(LocaleService);
this.keyService = injector.get(KeyService);
this.messageService = injector.get(MessageService);
}
execute(): Observable<string> {
return selectRandomProjectAccessToken(
this.accessTokenService,
this.userService,
this.projectService,
this.localeService,
this.keyService,
this.messageService
).pipe(
concatMap(({ accessToken, project }) =>
this.keyService
.find({
projectId: project.id,
access_token: chooseAccessToken(
accessToken,
this.config.accessToken,
Scope.ProjectRead,
Scope.KeyRead
)
})
.pipe(map(paged => ({ accessToken, project, key: pickRandomly(paged.list) })))
),
filter(({ key }) => Boolean(key)),
concatMap(({ accessToken, project, key }) =>
this.keyService
.update(
{
...key,
name: key.name.endsWith(suffix)
                  ? key.name.slice(0, -suffix.length)
: key.name + suffix
},
{
params: {
access_token: chooseAccessToken(
accessToken,
this.config.accessToken,
Scope.ProjectRead,
Scope.KeyWrite
)
}
}
)
.pipe(map(k => ({ project, key: k })))
),
map(
({ project, key }) =>
`key ${key.name} of project ${project.ownerUsername}/${project.name} updated`
),
catchError((err: HttpErrorResponse) => of(errorMessage(err)))
);
}
}
personas.push({
...info,
create: (config: LoadGeneratorConfig, injector: Injector) => new HannaPersona(config, injector)
});
| HannaPersona | identifier_name |
hanna.persona.ts | import { HttpErrorResponse } from '@angular/common/http';
import { Injector } from '@angular/core';
import { Scope } from '@dev/translatr-model'; | KeyService,
LocaleService,
MessageService,
ProjectService,
UserService
} from '@dev/translatr-sdk';
import { pickRandomly } from '@translatr/utils';
import { Observable, of } from 'rxjs';
import { catchError, concatMap, filter, map } from 'rxjs/operators';
import { chooseAccessToken } from '../access-token';
import { LoadGeneratorConfig } from '../load-generator-config';
import { selectRandomProjectAccessToken } from '../project';
import { WeightedPersona } from '../weighted-persona';
import { Persona } from './persona';
import { personas } from './personas';
const info: WeightedPersona = {
section: 'key',
type: 'update',
name: 'Hanna',
description: "I'm going to update a key of a random project of mine.",
weight: 10
};
const suffix = '.updated';
export class HannaPersona extends Persona {
private readonly accessTokenService: AccessTokenService;
private readonly userService: UserService;
private readonly projectService: ProjectService;
private readonly localeService: LocaleService;
private readonly keyService: KeyService;
private readonly messageService: MessageService;
constructor(config: LoadGeneratorConfig, injector: Injector) {
super(info.name, config, injector);
this.accessTokenService = injector.get(AccessTokenService);
this.userService = injector.get(UserService);
this.projectService = injector.get(ProjectService);
this.localeService = injector.get(LocaleService);
this.keyService = injector.get(KeyService);
this.messageService = injector.get(MessageService);
}
execute(): Observable<string> {
return selectRandomProjectAccessToken(
this.accessTokenService,
this.userService,
this.projectService,
this.localeService,
this.keyService,
this.messageService
).pipe(
concatMap(({ accessToken, project }) =>
this.keyService
.find({
projectId: project.id,
access_token: chooseAccessToken(
accessToken,
this.config.accessToken,
Scope.ProjectRead,
Scope.KeyRead
)
})
.pipe(map(paged => ({ accessToken, project, key: pickRandomly(paged.list) })))
),
filter(({ key }) => Boolean(key)),
concatMap(({ accessToken, project, key }) =>
this.keyService
.update(
{
...key,
name: key.name.endsWith(suffix)
                  ? key.name.slice(0, -suffix.length)
: key.name + suffix
},
{
params: {
access_token: chooseAccessToken(
accessToken,
this.config.accessToken,
Scope.ProjectRead,
Scope.KeyWrite
)
}
}
)
.pipe(map(k => ({ project, key: k })))
),
map(
({ project, key }) =>
`key ${key.name} of project ${project.ownerUsername}/${project.name} updated`
),
catchError((err: HttpErrorResponse) => of(errorMessage(err)))
);
}
}
personas.push({
...info,
create: (config: LoadGeneratorConfig, injector: Injector) => new HannaPersona(config, injector)
}); | import {
AccessTokenService,
errorMessage, | random_line_split |
util.rs | use fastrand;
use std::ffi::{OsStr, OsString};
use std::path::{Path, PathBuf};
use std::{io, iter::repeat_with};
use crate::error::IoResultExt;
fn tmpname(prefix: &OsStr, suffix: &OsStr, rand_len: usize) -> OsString {
let mut buf = OsString::with_capacity(prefix.len() + suffix.len() + rand_len);
buf.push(prefix);
let mut char_buf = [0u8; 4];
for c in repeat_with(fastrand::alphanumeric).take(rand_len) {
buf.push(c.encode_utf8(&mut char_buf));
}
buf.push(suffix);
buf
}
pub fn | <F, R>(
base: &Path,
prefix: &OsStr,
suffix: &OsStr,
random_len: usize,
f: F,
) -> io::Result<R>
where
F: Fn(PathBuf) -> io::Result<R>,
{
let num_retries = if random_len != 0 {
crate::NUM_RETRIES
} else {
1
};
for _ in 0..num_retries {
let path = base.join(tmpname(prefix, suffix, random_len));
return match f(path) {
Err(ref e) if e.kind() == io::ErrorKind::AlreadyExists => continue,
res => res,
};
}
Err(io::Error::new(
io::ErrorKind::AlreadyExists,
"too many temporary files exist",
))
.with_err_path(|| base)
}
| create_helper | identifier_name |
util.rs | use fastrand;
use std::ffi::{OsStr, OsString};
use std::path::{Path, PathBuf};
use std::{io, iter::repeat_with};
use crate::error::IoResultExt;
fn tmpname(prefix: &OsStr, suffix: &OsStr, rand_len: usize) -> OsString {
let mut buf = OsString::with_capacity(prefix.len() + suffix.len() + rand_len);
buf.push(prefix);
let mut char_buf = [0u8; 4];
for c in repeat_with(fastrand::alphanumeric).take(rand_len) {
buf.push(c.encode_utf8(&mut char_buf));
}
buf.push(suffix);
buf
}
pub fn create_helper<F, R>(
base: &Path,
prefix: &OsStr,
suffix: &OsStr,
random_len: usize,
f: F,
) -> io::Result<R>
where
F: Fn(PathBuf) -> io::Result<R>,
{
let num_retries = if random_len != 0 {
crate::NUM_RETRIES
} else | ;
for _ in 0..num_retries {
let path = base.join(tmpname(prefix, suffix, random_len));
return match f(path) {
Err(ref e) if e.kind() == io::ErrorKind::AlreadyExists => continue,
res => res,
};
}
Err(io::Error::new(
io::ErrorKind::AlreadyExists,
"too many temporary files exist",
))
.with_err_path(|| base)
}
| {
1
} | conditional_block |
util.rs | use fastrand;
use std::ffi::{OsStr, OsString};
use std::path::{Path, PathBuf};
use std::{io, iter::repeat_with};
use crate::error::IoResultExt;
fn tmpname(prefix: &OsStr, suffix: &OsStr, rand_len: usize) -> OsString {
let mut buf = OsString::with_capacity(prefix.len() + suffix.len() + rand_len);
buf.push(prefix);
let mut char_buf = [0u8; 4];
for c in repeat_with(fastrand::alphanumeric).take(rand_len) {
buf.push(c.encode_utf8(&mut char_buf));
}
buf.push(suffix);
buf
}
pub fn create_helper<F, R>(
base: &Path,
prefix: &OsStr,
suffix: &OsStr,
random_len: usize,
f: F,
) -> io::Result<R>
where
F: Fn(PathBuf) -> io::Result<R>,
{
let num_retries = if random_len != 0 {
crate::NUM_RETRIES
} else {
1
};
for _ in 0..num_retries {
let path = base.join(tmpname(prefix, suffix, random_len));
return match f(path) {
Err(ref e) if e.kind() == io::ErrorKind::AlreadyExists => continue,
res => res, | io::ErrorKind::AlreadyExists,
"too many temporary files exist",
))
.with_err_path(|| base)
} | };
}
Err(io::Error::new( | random_line_split |
util.rs | use fastrand;
use std::ffi::{OsStr, OsString};
use std::path::{Path, PathBuf};
use std::{io, iter::repeat_with};
use crate::error::IoResultExt;
fn tmpname(prefix: &OsStr, suffix: &OsStr, rand_len: usize) -> OsString {
let mut buf = OsString::with_capacity(prefix.len() + suffix.len() + rand_len);
buf.push(prefix);
let mut char_buf = [0u8; 4];
for c in repeat_with(fastrand::alphanumeric).take(rand_len) {
buf.push(c.encode_utf8(&mut char_buf));
}
buf.push(suffix);
buf
}
pub fn create_helper<F, R>(
base: &Path,
prefix: &OsStr,
suffix: &OsStr,
random_len: usize,
f: F,
) -> io::Result<R>
where
F: Fn(PathBuf) -> io::Result<R>,
| }
| {
let num_retries = if random_len != 0 {
crate::NUM_RETRIES
} else {
1
};
for _ in 0..num_retries {
let path = base.join(tmpname(prefix, suffix, random_len));
return match f(path) {
Err(ref e) if e.kind() == io::ErrorKind::AlreadyExists => continue,
res => res,
};
}
Err(io::Error::new(
io::ErrorKind::AlreadyExists,
"too many temporary files exist",
))
.with_err_path(|| base) | identifier_body |
378_kth_smallest_element_in_a_sorted_matrix.py | # 378. Kth Smallest Element in a Sorted Matrix
#
# Given a n x n matrix where each of the rows and columns are sorted in ascending order, find the kth smallest element in the matrix.
#
# Note that it is the kth smallest element in the sorted order, not the kth distinct element.
#
# Example:
#
# matrix = [
# [ 1, 5, 9],
# [10, 11, 13],
# [12, 13, 15]
# ],
# k = 8,
#
# return 13.
#
# Note:
# You may assume k is always valid, 1 <= k <= n2.
# https://nb4799.neu.edu/wordpress/?p=2017
from heapq import *
class Solution:
def | (self, matrix, k):
"""
:type matrix: List[List[int]]
:type k: int
:rtype: int
"""
# heapq.merge: Merge multiple sorted inputs into a single sorted output
# (for example, merge timestamped entries from multiple log files).
# Returns an iterator over the sorted values.
return list(merge(*matrix))[k-1]
# Maintain a min-heap with k elements, initialized with the elements of the first row.
# Since it is a min-heap and rows and columns are already sorted in ascending order,
# the heap root after popping k-1 times is the k-th smallest element of the whole matrix.
# When popping the heap, we also need to push the necessary matrix elements onto the heap.
# Time complexity is O(k log k), since every heap operation takes O(log k).
def kthSmallest(self, matrix, k):
# element in the heap: (val, x coord, y coord)
h = []
for i in range(min(len(matrix[0]), k)):
heappush(h, (matrix[0][i], 0, i))
# pop k-1 times
for i in range(k-1):
val, x, y = heappop(h)
if x < len(matrix) - 1:
heappush(h, (matrix[x+1][y], x+1, y))
return h[0][0] # smallest element in heap. 0th index in tuple
# binary search
# We can eventually find the k-th smallest element by shrinking the search range in binary search.
# Binary search is feasible for this problem since left, right, and mid in binary search are integers
# and we know that matrix elements are integers.
# The algorithm takes O(n log N) time (N is the range of matrix[0][0] ~ matrix[n-1][n-1]) and O(1) space.
# Time complexity analysis: the outer loop executes at most O(log N) times,
# and the staircase scan over the matrix takes O(n) time per outer iteration.
def kthSmallest(self, matrix, k):
n = len(matrix)
L = matrix[0][0]
R = matrix[n-1][n-1]
while L < R:
mid = L + ((R - L) >> 1)
count = 0
j = n - 1
for i in range(n):
while j >= 0 and matrix[i][j] > mid:
j -= 1
count += j+1
if count >= k:
R = mid
else:
L = mid + 1
return L
sol = Solution()
matrix = [
[ 1, 5, 9],
[10, 11, 13],
[12, 13, 15]
]
k = 8
print(sol.kthSmallest(matrix, k))
| kthSmallest | identifier_name |
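The heapq.merge one-liner in the record above materializes all n*n elements before indexing. A lazy variant stops after k elements, which cuts the work to O(k log n); this is an editorial illustration, not dataset content.

# Sketch: itertools.islice keeps the k-way merge lazy, stopping after k
# elements, for O(k log n) work instead of O(n^2 log n). (Editorial
# illustration of the heapq.merge approach in the record above.)
from heapq import merge
from itertools import islice

def kth_smallest_lazy(matrix, k):
    return next(islice(merge(*matrix), k - 1, None))

matrix = [[1, 5, 9], [10, 11, 13], [12, 13, 15]]
print(kth_smallest_lazy(matrix, 8))  # 13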
378_kth_smallest_element_in_a_sorted_matrix.py | # 378. Kth Smallest Element in a Sorted Matrix
#
# Given a n x n matrix where each of the rows and columns are sorted in ascending order, find the kth smallest element in the matrix.
#
# Note that it is the kth smallest element in the sorted order, not the kth distinct element.
#
# Example:
#
# matrix = [
# [ 1, 5, 9],
# [10, 11, 13],
# [12, 13, 15]
# ],
# k = 8,
#
# return 13.
#
# Note:
# You may assume k is always valid, 1 <= k <= n2.
# https://nb4799.neu.edu/wordpress/?p=2017
from heapq import *
class Solution:
def kthSmallest(self, matrix, k):
|
# Maintain a min-heap with k elements, initialized with the elements of the first row.
# Since it is a min-heap and rows and columns are already sorted in ascending order,
# the heap root after popping k-1 times is the k-th smallest element of the whole matrix.
# When popping the heap, we also need to push the necessary matrix elements onto the heap.
# Time complexity is O(k log k), since every heap operation takes O(log k).
def kthSmallest(self, matrix, k):
# element in the heap: (val, x coord, y coord)
h = []
for i in range(min(len(matrix[0]), k)):
heappush(h, (matrix[0][i], 0, i))
# pop k-1 times
for i in range(k-1):
val, x, y = heappop(h)
if x < len(matrix) - 1:
heappush(h, (matrix[x+1][y], x+1, y))
return h[0][0] # smallest element in heap. 0th index in tuple
# binary search
# We can eventually find the k-th smallest element by shrinking the search range in binary search.
# Binary search is feasible for this problem since left, right, and mid in binary search are integers
# and we know that matrix elements are integers.
# The algorithm takes O(n log N) time (N is the range of matrix[0][0] ~ matrix[n-1][n-1]) and O(1) space.
# Time complexity analysis: the outer loop executes at most O(log N) times,
# and the staircase scan over the matrix takes O(n) time per outer iteration.
def kthSmallest(self, matrix, k):
n = len(matrix)
L = matrix[0][0]
R = matrix[n-1][n-1]
while L < R:
mid = L + ((R - L) >> 1)
count = 0
j = n - 1
for i in range(n):
while j >= 0 and matrix[i][j] > mid:
j -= 1
count += j+1
if count >= k:
R = mid
else:
L = mid + 1
return L
sol = Solution()
matrix = [
[ 1, 5, 9],
[10, 11, 13],
[12, 13, 15]
]
k = 8
print(sol.kthSmallest(matrix, k))
| """
:type matrix: List[List[int]]
:type k: int
:rtype: int
"""
# heapq.merge: Merge multiple sorted inputs into a single sorted output
# (for example, merge timestamped entries from multiple log files).
# Returns an iterator over the sorted values.
return list(merge(*matrix))[k-1] | identifier_body |
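The binary-search solution that the following records split apart hinges on one step: counting how many matrix entries are less than or equal to a candidate value. A short sketch of that counting step in two equivalent forms (editorial illustration, not dataset content):

# Two ways to count entries <= mid, as used by the binary-search solution:
# bisect per row is O(n log n); the staircase walk is O(n) because the
# column pointer only ever moves left. (Editorial illustration.)
from bisect import bisect_right

def count_leq_bisect(matrix, mid):
    return sum(bisect_right(row, mid) for row in matrix)

def count_leq_staircase(matrix, mid):
    n = len(matrix)
    count, j = 0, n - 1
    for i in range(n):
        while j >= 0 and matrix[i][j] > mid:
            j -= 1
        count += j + 1
    return count

m = [[1, 5, 9], [10, 11, 13], [12, 13, 15]]
assert count_leq_bisect(m, 11) == count_leq_staircase(m, 11) == 5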
378_kth_smallest_element_in_a_sorted_matrix.py | # 378. Kth Smallest Element in a Sorted Matrix
#
# Given a n x n matrix where each of the rows and columns are sorted in ascending order, find the kth smallest element in the matrix.
#
# Note that it is the kth smallest element in the sorted order, not the kth distinct element.
#
# Example:
#
# matrix = [
# [ 1, 5, 9],
# [10, 11, 13],
# [12, 13, 15]
# ],
# k = 8,
#
# return 13.
#
# Note:
# You may assume k is always valid, 1 <= k <= n2.
# https://nb4799.neu.edu/wordpress/?p=2017
from heapq import *
class Solution:
def kthSmallest(self, matrix, k):
"""
:type matrix: List[List[int]]
:type k: int
:rtype: int
"""
# heapq.merge: Merge multiple sorted inputs into a single sorted output
# (for example, merge timestamped entries from multiple log files).
# Returns an iterator over the sorted values.
return list(merge(*matrix))[k-1]
# Maintain a min-heap with k elements, initialized with the elements of the first row.
# Since it is a min-heap and rows and columns are already sorted in ascending order,
# the heap root after popping k-1 times is the k-th smallest element of the whole matrix.
# When popping the heap, we also need to push the necessary matrix elements onto the heap.
# Time complexity is O(k log k), since every heap operation takes O(log k).
def kthSmallest(self, matrix, k):
# element in the heap: (val, x coord, y coord)
h = []
for i in range(min(len(matrix[0]), k)):
heappush(h, (matrix[0][i], 0, i))
# pop k-1 times
for i in range(k-1):
val, x, y = heappop(h)
if x < len(matrix) - 1:
heappush(h, (matrix[x+1][y], x+1, y))
return h[0][0] # smallest element in heap. 0th index in tuple
# binary search
# We can eventually find the k-th smallest element by shrinking the search range in binary search.
# Binary search is feasible for this problem since left, right, and mid in binary search are integers
# and we know that matrix elements are integers.
# The algorithm takes O(n log N) time (N is the range of matrix[0][0] ~ matrix[n-1][n-1]) and O(1) space.
# Time complexity analysis: the outer loop executes at most O(log N) times,
# and the staircase scan over the matrix takes O(n) time per outer iteration.
def kthSmallest(self, matrix, k):
n = len(matrix)
L = matrix[0][0]
R = matrix[n-1][n-1]
while L < R:
mid = L + ((R - L) >> 1)
count = 0
j = n - 1
for i in range(n):
while j >= 0 and matrix[i][j] > mid:
j -= 1
count += j+1
if count >= k:
R = mid
else:
|
return L
sol = Solution()
matrix = [
[ 1, 5, 9],
[10, 11, 13],
[12, 13, 15]
]
k = 8
print(sol.kthSmallest(matrix, k))
| L = mid + 1 | conditional_block |
378_kth_smallest_element_in_a_sorted_matrix.py | # 378. Kth Smallest Element in a Sorted Matrix
#
# Given a n x n matrix where each of the rows and columns are sorted in ascending order, find the kth smallest element in the matrix.
#
# Note that it is the kth smallest element in the sorted order, not the kth distinct element.
#
# Example:
#
# matrix = [
# [ 1, 5, 9],
# [10, 11, 13],
# [12, 13, 15]
# ],
# k = 8,
#
# return 13.
#
# Note:
# You may assume k is always valid, 1 <= k <= n2.
# https://nb4799.neu.edu/wordpress/?p=2017
from heapq import *
class Solution:
def kthSmallest(self, matrix, k):
"""
:type matrix: List[List[int]]
:type k: int
:rtype: int
"""
# heapq.merge: Merge multiple sorted inputs into a single sorted output
# (for example, merge timestamped entries from multiple log files).
# Returns an iterator over the sorted values.
return list(merge(*matrix))[k-1]
# Maintain a min-heap with k elements, initialized with the elements of the first row.
# Since it is a min-heap and rows and columns are already sorted in ascending order,
# the heap root after popping k-1 times is the k-th smallest element of the whole matrix.
# When popping the heap, we also need to push the necessary matrix elements onto the heap.
# Time complexity is O(k log k), since every heap operation takes O(log k).
def kthSmallest(self, matrix, k):
# element in the heap: (val, x coord, y coord)
h = []
for i in range(min(len(matrix[0]), k)):
heappush(h, (matrix[0][i], 0, i))
# pop k-1 times
for i in range(k-1):
val, x, y = heappop(h)
if x < len(matrix) - 1:
heappush(h, (matrix[x+1][y], x+1, y))
return h[0][0] # smallest element in heap. 0th index in tuple
# binary search
# We can eventually find the k-th smallest element by shrinking the search range in binary search.
# Binary search is feasible for this problem since left, right, and mid in binary search are integers
# and we know that matrix elements are integers.
# The algorithm takes O(n log N) time (N is the range of matrix[0][0] ~ matrix[n-1][n-1]) and O(1) space.
# Time complexity analysis: the outer loop executes at most O(log N) times,
# and the staircase scan over the matrix takes O(n) time per outer iteration.
def kthSmallest(self, matrix, k):
n = len(matrix)
L = matrix[0][0]
R = matrix[n-1][n-1]
while L < R:
mid = L + ((R - L) >> 1)
count = 0
j = n - 1
for i in range(n):
while j >= 0 and matrix[i][j] > mid: | j -= 1
count += j+1
if count >= k:
R = mid
else:
L = mid + 1
return L
sol = Solution()
matrix = [
[ 1, 5, 9],
[10, 11, 13],
[12, 13, 15]
]
k = 8
print(sol.kthSmallest(matrix, k)) | random_line_split |
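As a quick sanity check on the binary-search solution these records exercise, a brute-force cross-check over random row- and column-sorted matrices; the matrix construction is an assumption made for testing, and the whole block is editorial illustration.

# Editorial cross-check: binary-search answer vs. sorted flatten.
import random

def kth_binary_search(matrix, k):
    n = len(matrix)
    lo, hi = matrix[0][0], matrix[n - 1][n - 1]
    while lo < hi:
        mid = lo + ((hi - lo) >> 1)
        count, j = 0, n - 1
        for i in range(n):
            while j >= 0 and matrix[i][j] > mid:
                j -= 1
            count += j + 1
        if count >= k:
            hi = mid
        else:
            lo = mid + 1
    return lo

def brute_kth(matrix, k):
    return sorted(v for row in matrix for v in row)[k - 1]

for _ in range(100):
    n = random.randint(1, 6)
    vals = sorted(random.sample(range(100), n * n))
    # Strided slices of a sorted list keep both rows and columns sorted.
    matrix = [vals[i::n] for i in range(n)]
    k = random.randint(1, n * n)
    assert kth_binary_search(matrix, k) == brute_kth(matrix, k)
print('ok')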
|
lib.rs | : unsafe { ::std::mem::uninit() },
sparse: unsafe { ::std::mem::uninit() },
size: 0,
}
}
#[inline]
fn add(&mut self, pc: uint, groups: &Captures) {
let t = &mut self.queue[self.size];
t.pc = pc;
match self.which {
Exists => {},
Location => {
t.groups[0] = groups[0];
t.groups[1] = groups[1];
}
Submatches => {
for (slot, val) in t.groups.mut_iter().zip(groups.iter()) {
*slot = *val;
}
}
}
self.sparse[pc] = self.size;
self.size += 1;
}
#[inline]
fn add_empty(&mut self, pc: uint) {
self.queue[self.size].pc = pc;
self.sparse[pc] = self.size;
self.size += 1;
}
#[inline]
fn contains(&self, pc: uint) -> bool {
let s = self.sparse[pc];
s < self.size && self.queue[s].pc == pc
}
#[inline]
fn empty(&mut self) {
self.size = 0;
}
#[inline]
fn pc(&self, i: uint) -> uint {
self.queue[i].pc
}
#[inline]
fn groups<'r>(&'r mut self, i: uint) -> &'r mut Captures {
&'r mut self.queue[i].groups
}
}
}
::regex::Regex {
original: $regex.to_owned(),
names: vec!$cap_names,
p: ::regex::native::Native(exec),
}
})
}
// Generates code for the `add` method, which is responsible for adding
// zero-width states to the next queue of states to visit.
fn add_insts(&self) -> @ast::Expr {
let arms = self.prog.insts.iter().enumerate().map(|(pc, inst)| {
let nextpc = pc + 1;
let body = match *inst {
EmptyBegin(flags) => {
let cond =
if flags & FLAG_MULTI > 0 {
quote_expr!(self.cx,
self.chars.is_begin()
|| self.chars.prev == Some('\n')
)
} else {
quote_expr!(self.cx, self.chars.is_begin())
};
quote_expr!(self.cx, {
nlist.add_empty($pc);
if $cond { self.add(nlist, $nextpc, &mut *groups) }
})
}
EmptyEnd(flags) => {
let cond =
if flags & FLAG_MULTI > 0 {
quote_expr!(self.cx,
self.chars.is_end()
|| self.chars.cur == Some('\n')
)
} else {
quote_expr!(self.cx, self.chars.is_end())
};
quote_expr!(self.cx, {
nlist.add_empty($pc);
if $cond { self.add(nlist, $nextpc, &mut *groups) }
})
}
EmptyWordBoundary(flags) => {
let cond =
if flags & FLAG_NEGATED > 0 {
quote_expr!(self.cx, !self.chars.is_word_boundary())
} else {
quote_expr!(self.cx, self.chars.is_word_boundary())
};
quote_expr!(self.cx, {
nlist.add_empty($pc);
if $cond { self.add(nlist, $nextpc, &mut *groups) }
})
}
Save(slot) => {
let save = quote_expr!(self.cx, {
let old = groups[$slot];
groups[$slot] = Some(self.ic);
self.add(nlist, $nextpc, &mut *groups);
groups[$slot] = old;
});
let add = quote_expr!(self.cx, {
self.add(nlist, $nextpc, &mut *groups);
});
// If this is saving a submatch location but we request
// existence or only full match location, then we can skip
// right over it every time.
if slot > 1 {
quote_expr!(self.cx, {
nlist.add_empty($pc);
match self.which {
Submatches => $save,
Exists | Location => $add,
}
})
} else {
quote_expr!(self.cx, {
nlist.add_empty($pc);
match self.which {
Submatches | Location => $save,
Exists => $add,
}
})
}
}
Jump(to) => {
quote_expr!(self.cx, {
nlist.add_empty($pc);
self.add(nlist, $to, &mut *groups);
})
}
Split(x, y) => {
quote_expr!(self.cx, {
nlist.add_empty($pc);
self.add(nlist, $x, &mut *groups);
self.add(nlist, $y, &mut *groups);
})
}
// For Match, OneChar, CharClass, Any
_ => quote_expr!(self.cx, nlist.add($pc, &*groups)),
};
self.arm_inst(pc, body)
}).collect::<Vec<ast::Arm>>();
self.match_insts(arms)
}
// Generates the code for the `step` method, which processes all states
// in the current queue that consume a single character.
fn step_insts(&self) -> @ast::Expr {
let arms = self.prog.insts.iter().enumerate().map(|(pc, inst)| {
let nextpc = pc + 1;
let body = match *inst {
Match => {
quote_expr!(self.cx, {
match self.which {
Exists => {
return StepMatchEarlyReturn
}
Location => {
groups[0] = caps[0];
groups[1] = caps[1];
return StepMatch
}
Submatches => {
for (slot, val) in groups.mut_iter().zip(caps.iter()) {
*slot = *val;
}
return StepMatch
}
}
})
}
OneChar(c, flags) => {
if flags & FLAG_NOCASE > 0 {
let upc = c.to_uppercase();
quote_expr!(self.cx, {
let upc = self.chars.prev.map(|c| c.to_uppercase());
if upc == Some($upc) {
self.add(nlist, $nextpc, caps);
}
})
} else {
quote_expr!(self.cx, {
if self.chars.prev == Some($c) {
self.add(nlist, $nextpc, caps);
}
})
}
}
CharClass(ref ranges, flags) => {
let negate = flags & FLAG_NEGATED > 0;
let casei = flags & FLAG_NOCASE > 0;
let get_char =
if casei {
quote_expr!(self.cx, self.chars.prev.unwrap().to_uppercase())
} else {
quote_expr!(self.cx, self.chars.prev.unwrap())
};
let negcond =
if negate {
quote_expr!(self.cx, !found)
} else {
quote_expr!(self.cx, found)
};
let mranges = self.match_class(casei, ranges.as_slice());
quote_expr!(self.cx, {
if self.chars.prev.is_some() {
let c = $get_char;
let found = $mranges;
if $negcond {
self.add(nlist, $nextpc, caps);
}
}
})
}
Any(flags) => {
if flags & FLAG_DOTNL > 0 {
quote_expr!(self.cx, self.add(nlist, $nextpc, caps))
} else {
quote_expr!(self.cx, {
if self.chars.prev != Some('\n') {
self.add(nlist, $nextpc, caps)
}
()
})
}
}
// EmptyBegin, EmptyEnd, EmptyWordBoundary, Save, Jump, Split
_ => self.empty_block(),
};
self.arm_inst(pc, body)
}).collect::<Vec<ast::Arm>>();
self.match_insts(arms)
}
// Translates a character class into a match expression.
// This avoids a binary search (and is hopefully replaced by a jump
// table).
fn match_class(&self, casei: bool, ranges: &[(char, char)]) -> @ast::Expr {
let expr_true = quote_expr!(self.cx, true);
let mut arms = ranges.iter().map(|&(mut start, mut end)| {
if casei {
start = start.to_uppercase();
end = end.to_uppercase();
}
let pat = self.cx.pat(self.sp, ast::PatRange(quote_expr!(self.cx, $start),
quote_expr!(self.cx, $end)));
self.cx.arm(self.sp, vec!(pat), expr_true)
}).collect::<Vec<ast::Arm>>();
arms.push(self.wild_arm_expr(quote_expr!(self.cx, false)));
let match_on = quote_expr!(self.cx, c);
self.cx.expr_match(self.sp, match_on, arms)
}
// Generates code for checking a literal prefix of the search string.
// The code is only generated if the regex *has* a literal prefix.
// Otherwise, a no-op is returned.
fn | check_prefix | identifier_name |
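check_prefix, the FIM target of the record above, emits code that scans ahead for the regex's literal prefix whenever no NFA threads are alive, instead of stepping one character at a time. A Python sketch of that skip, assuming bytes.find plays the role of the crate's find_prefix (editorial illustration):

# Editorial sketch of the literal-prefix skip that check_prefix generates.
def skip_to_prefix(haystack: bytes, ic: int, prefix: bytes):
    """Return the next index at or after `ic` where `prefix` starts, or None."""
    i = haystack.find(prefix, ic)
    return None if i < 0 else i

assert skip_to_prefix(b'xxabc', 0, b'abc') == 2
assert skip_to_prefix(b'xxabc', 3, b'abc') is None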
|
lib.rs | .step_insts();
let add_insts = self.add_insts();
let regex = self.original.as_slice();
quote_expr!(self.cx, {
fn exec<'t>(which: ::regex::native::MatchKind, input: &'t str,
start: uint, end: uint) -> Vec<Option<uint>> {
#![allow(unused_imports)]
use regex::native::{
MatchKind, Exists, Location, Submatches,
StepState, StepMatchEarlyReturn, StepMatch, StepContinue,
CharReader, find_prefix,
};
return Nfa {
which: which,
input: input,
ic: 0,
chars: CharReader::new(input),
}.run(start, end);
type Captures = [Option<uint>, ..$num_cap_locs];
struct Nfa<'t> {
which: MatchKind,
input: &'t str,
ic: uint,
chars: CharReader<'t>,
}
impl<'t> Nfa<'t> {
#[allow(unused_variable)]
fn run(&mut self, start: uint, end: uint) -> Vec<Option<uint>> {
let mut matched = false;
let prefix_bytes: &[u8] = &$prefix_bytes;
let mut clist = &mut Threads::new(self.which);
let mut nlist = &mut Threads::new(self.which);
let mut groups = $init_groups;
self.ic = start;
let mut next_ic = self.chars.set(start);
while self.ic <= end {
if clist.size == 0 {
if matched {
break
}
$check_prefix
}
if clist.size == 0 || (!$prefix_anchor && !matched) {
self.add(clist, 0, &mut groups)
}
self.ic = next_ic;
next_ic = self.chars.advance();
for i in range(0, clist.size) {
let pc = clist.pc(i);
let step_state = self.step(&mut groups, nlist,
clist.groups(i), pc);
match step_state {
StepMatchEarlyReturn =>
return vec![Some(0u), Some(0u)],
StepMatch => { matched = true; break },
StepContinue => {},
}
}
::std::mem::swap(&mut clist, &mut nlist);
nlist.empty();
}
match self.which {
Exists if matched => vec![Some(0u), Some(0u)],
Exists => vec![None, None],
Location | Submatches => groups.iter().map(|x| *x).collect(),
}
}
// Sometimes `nlist` is never used (for empty regexes).
#[allow(unused_variable)]
#[inline]
fn step(&self, groups: &mut Captures, nlist: &mut Threads,
caps: &mut Captures, pc: uint) -> StepState {
$step_insts
StepContinue
}
fn add(&self, nlist: &mut Threads, pc: uint,
groups: &mut Captures) {
if nlist.contains(pc) {
return
}
$add_insts
}
}
struct Thread {
pc: uint,
groups: Captures,
}
struct Threads {
which: MatchKind,
queue: [Thread, ..$num_insts],
sparse: [uint, ..$num_insts],
size: uint,
}
impl Threads {
fn new(which: MatchKind) -> Threads {
Threads {
which: which,
// These unsafe blocks are used for performance reasons, as it
// gives us a zero-cost initialization of a sparse set. The
// trick is described in more detail here:
// http://research.swtch.com/sparse
// The idea here is to avoid initializing threads that never
// need to be initialized, particularly for larger regexes with
// a lot of instructions.
queue: unsafe { ::std::mem::uninit() },
sparse: unsafe { ::std::mem::uninit() },
size: 0,
}
}
#[inline]
fn add(&mut self, pc: uint, groups: &Captures) {
let t = &mut self.queue[self.size];
t.pc = pc;
match self.which {
Exists => {},
Location => {
t.groups[0] = groups[0];
t.groups[1] = groups[1];
}
Submatches => {
for (slot, val) in t.groups.mut_iter().zip(groups.iter()) {
*slot = *val;
}
}
}
self.sparse[pc] = self.size;
self.size += 1;
}
#[inline]
fn add_empty(&mut self, pc: uint) {
self.queue[self.size].pc = pc;
self.sparse[pc] = self.size;
self.size += 1;
}
#[inline]
fn contains(&self, pc: uint) -> bool {
let s = self.sparse[pc];
s < self.size && self.queue[s].pc == pc
}
#[inline]
fn empty(&mut self) {
self.size = 0;
}
#[inline]
fn pc(&self, i: uint) -> uint {
self.queue[i].pc
}
#[inline]
fn groups<'r>(&'r mut self, i: uint) -> &'r mut Captures {
&'r mut self.queue[i].groups
}
}
}
::regex::Regex {
original: $regex.to_owned(),
names: vec!$cap_names,
p: ::regex::native::Native(exec),
}
})
}
// Generates code for the `add` method, which is responsible for adding
// zero-width states to the next queue of states to visit.
fn add_insts(&self) -> @ast::Expr {
let arms = self.prog.insts.iter().enumerate().map(|(pc, inst)| {
let nextpc = pc + 1;
let body = match *inst {
EmptyBegin(flags) => {
let cond =
if flags & FLAG_MULTI > 0 {
quote_expr!(self.cx,
self.chars.is_begin()
|| self.chars.prev == Some('\n')
)
} else {
quote_expr!(self.cx, self.chars.is_begin())
};
quote_expr!(self.cx, {
nlist.add_empty($pc);
if $cond { self.add(nlist, $nextpc, &mut *groups) }
})
}
EmptyEnd(flags) => {
let cond =
if flags & FLAG_MULTI > 0 {
quote_expr!(self.cx,
self.chars.is_end()
|| self.chars.cur == Some('\n')
)
} else {
quote_expr!(self.cx, self.chars.is_end())
};
quote_expr!(self.cx, {
nlist.add_empty($pc);
if $cond { self.add(nlist, $nextpc, &mut *groups) }
})
}
EmptyWordBoundary(flags) => {
let cond =
if flags & FLAG_NEGATED > 0 {
quote_expr!(self.cx, !self.chars.is_word_boundary())
} else {
quote_expr!(self.cx, self.chars.is_word_boundary())
};
quote_expr!(self.cx, {
nlist.add_empty($pc);
if $cond { self.add(nlist, $nextpc, &mut *groups) }
})
}
Save(slot) => {
let save = quote_expr!(self.cx, {
let old = groups[$slot];
groups[$slot] = Some(self.ic);
self.add(nlist, $nextpc, &mut *groups);
groups[$slot] = old;
});
let add = quote_expr!(self.cx, {
self.add(nlist, $nextpc, &mut *groups);
});
// If this is saving a submatch location but we request
// existence or only full match location, then we can skip
// right over it every time.
if slot > 1 {
quote_expr!(self.cx, {
nlist.add_empty($pc);
match self.which {
Submatches => $save,
Exists | Location => $add,
}
})
} else {
quote_expr!(self.cx, {
nlist.add_empty($pc);
match self.which {
Submatches | Location => $save,
Exists => $add,
}
})
}
}
Jump(to) => {
quote_expr!(self.cx, {
nlist.add_empty($pc);
self.add(nlist, $to, &mut *groups);
})
}
Split(x, y) => {
quote_expr!(self.cx, {
nlist.add_empty($pc);
self.add(nlist, $x, &mut *groups);
self.add(nlist, $y, &mut *groups);
})
}
// For Match, OneChar, CharClass, Any
_ => quote_expr!(self.cx, nlist.add($pc, &*groups)),
};
self.arm_inst(pc, body)
}).collect::<Vec<ast::Arm>>();
self.match_insts(arms)
}
// Generates the code for the `step` method, which processes all states
// in the current queue that consume a single character.
fn step_insts(&self) -> @ast::Expr { | random_line_split |
||
lib.rs | { self.add(nlist, $nextpc, &mut *groups) }
})
}
EmptyEnd(flags) => {
let cond =
if flags & FLAG_MULTI > 0 {
quote_expr!(self.cx,
self.chars.is_end()
|| self.chars.cur == Some('\n')
)
} else {
quote_expr!(self.cx, self.chars.is_end())
};
quote_expr!(self.cx, {
nlist.add_empty($pc);
if $cond { self.add(nlist, $nextpc, &mut *groups) }
})
}
EmptyWordBoundary(flags) => {
let cond =
if flags & FLAG_NEGATED > 0 {
quote_expr!(self.cx, !self.chars.is_word_boundary())
} else {
quote_expr!(self.cx, self.chars.is_word_boundary())
};
quote_expr!(self.cx, {
nlist.add_empty($pc);
if $cond { self.add(nlist, $nextpc, &mut *groups) }
})
}
Save(slot) => {
let save = quote_expr!(self.cx, {
let old = groups[$slot];
groups[$slot] = Some(self.ic);
self.add(nlist, $nextpc, &mut *groups);
groups[$slot] = old;
});
let add = quote_expr!(self.cx, {
self.add(nlist, $nextpc, &mut *groups);
});
// If this is saving a submatch location but we request
// existence or only full match location, then we can skip
// right over it every time.
if slot > 1 {
quote_expr!(self.cx, {
nlist.add_empty($pc);
match self.which {
Submatches => $save,
Exists | Location => $add,
}
})
} else {
quote_expr!(self.cx, {
nlist.add_empty($pc);
match self.which {
Submatches | Location => $save,
Exists => $add,
}
})
}
}
Jump(to) => {
quote_expr!(self.cx, {
nlist.add_empty($pc);
self.add(nlist, $to, &mut *groups);
})
}
Split(x, y) => {
quote_expr!(self.cx, {
nlist.add_empty($pc);
self.add(nlist, $x, &mut *groups);
self.add(nlist, $y, &mut *groups);
})
}
// For Match, OneChar, CharClass, Any
_ => quote_expr!(self.cx, nlist.add($pc, &*groups)),
};
self.arm_inst(pc, body)
}).collect::<Vec<ast::Arm>>();
self.match_insts(arms)
}
// Generates the code for the `step` method, which processes all states
// in the current queue that consume a single character.
fn step_insts(&self) -> @ast::Expr {
let arms = self.prog.insts.iter().enumerate().map(|(pc, inst)| {
let nextpc = pc + 1;
let body = match *inst {
Match => {
quote_expr!(self.cx, {
match self.which {
Exists => {
return StepMatchEarlyReturn
}
Location => {
groups[0] = caps[0];
groups[1] = caps[1];
return StepMatch
}
Submatches => {
for (slot, val) in groups.mut_iter().zip(caps.iter()) {
*slot = *val;
}
return StepMatch
}
}
})
}
OneChar(c, flags) => {
if flags & FLAG_NOCASE > 0 {
let upc = c.to_uppercase();
quote_expr!(self.cx, {
let upc = self.chars.prev.map(|c| c.to_uppercase());
if upc == Some($upc) {
self.add(nlist, $nextpc, caps);
}
})
} else {
quote_expr!(self.cx, {
if self.chars.prev == Some($c) {
self.add(nlist, $nextpc, caps);
}
})
}
}
CharClass(ref ranges, flags) => {
let negate = flags & FLAG_NEGATED > 0;
let casei = flags & FLAG_NOCASE > 0;
let get_char =
if casei {
quote_expr!(self.cx, self.chars.prev.unwrap().to_uppercase())
} else {
quote_expr!(self.cx, self.chars.prev.unwrap())
};
let negcond =
if negate {
quote_expr!(self.cx, !found)
} else {
quote_expr!(self.cx, found)
};
let mranges = self.match_class(casei, ranges.as_slice());
quote_expr!(self.cx, {
if self.chars.prev.is_some() {
let c = $get_char;
let found = $mranges;
if $negcond {
self.add(nlist, $nextpc, caps);
}
}
})
}
Any(flags) => {
if flags & FLAG_DOTNL > 0 {
quote_expr!(self.cx, self.add(nlist, $nextpc, caps))
} else {
quote_expr!(self.cx, {
if self.chars.prev != Some('\n') {
self.add(nlist, $nextpc, caps)
}
()
})
}
}
// EmptyBegin, EmptyEnd, EmptyWordBoundary, Save, Jump, Split
_ => self.empty_block(),
};
self.arm_inst(pc, body)
}).collect::<Vec<ast::Arm>>();
self.match_insts(arms)
}
// Translates a character class into a match expression.
// This avoids a binary search (and is hopefully replaced by a jump
// table).
fn match_class(&self, casei: bool, ranges: &[(char, char)]) -> @ast::Expr {
let expr_true = quote_expr!(self.cx, true);
let mut arms = ranges.iter().map(|&(mut start, mut end)| {
if casei {
start = start.to_uppercase();
end = end.to_uppercase();
}
let pat = self.cx.pat(self.sp, ast::PatRange(quote_expr!(self.cx, $start),
quote_expr!(self.cx, $end)));
self.cx.arm(self.sp, vec!(pat), expr_true)
}).collect::<Vec<ast::Arm>>();
arms.push(self.wild_arm_expr(quote_expr!(self.cx, false)));
let match_on = quote_expr!(self.cx, c);
self.cx.expr_match(self.sp, match_on, arms)
}
// Generates code for checking a literal prefix of the search string.
// The code is only generated if the regex *has* a literal prefix.
// Otherwise, a no-op is returned.
fn check_prefix(&self) -> @ast::Expr {
if self.prog.prefix.len() == 0 {
self.empty_block()
} else {
quote_expr!(self.cx,
if clist.size == 0 {
let haystack = self.input.as_bytes().slice_from(self.ic);
match find_prefix(prefix_bytes, haystack) {
None => break,
Some(i) => {
self.ic += i;
next_ic = self.chars.set(self.ic);
}
}
}
)
}
}
// Builds a `match pc { ... }` expression from a list of arms, specifically
// for matching the current program counter with an instruction.
// A wild-card arm is automatically added that executes a no-op. It will
// never be used, but is added to satisfy the compiler complaining about
// non-exhaustive patterns.
fn match_insts(&self, mut arms: Vec<ast::Arm>) -> @ast::Expr {
arms.push(self.wild_arm_expr(self.empty_block()));
self.cx.expr_match(self.sp, quote_expr!(self.cx, pc), arms)
}
fn empty_block(&self) -> @ast::Expr {
quote_expr!(self.cx, {})
}
// Creates a match arm for the instruction at `pc` with the expression
// `body`.
fn arm_inst(&self, pc: uint, body: @ast::Expr) -> ast::Arm {
let pc_pat = self.cx.pat_lit(self.sp, quote_expr!(self.cx, $pc));
self.cx.arm(self.sp, vec!(pc_pat), body)
}
// Creates a wild-card match arm with the expression `body`.
fn wild_arm_expr(&self, body: @ast::Expr) -> ast::Arm {
ast::Arm {
attrs: vec!(),
pats: vec!(@ast::Pat{
id: ast::DUMMY_NODE_ID,
span: self.sp,
node: ast::PatWild,
}),
guard: None,
body: body,
}
}
// Converts `xs` to a `[x1, x2, .., xN]` expression by calling `to_expr`
// on each element in `xs`.
fn vec_expr<T, It: Iterator<T>>(&self, xs: It, to_expr: |&ExtCtxt, T| -> @ast::Expr)
-> @ast::Expr | {
let exprs = xs.map(|x| to_expr(self.cx, x)).collect();
self.cx.expr_vec(self.sp, exprs)
} | identifier_body |
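The Threads struct in the records above leans on the sparse-set trick its comments cite (http://research.swtch.com/sparse): constant-time add, membership test, and clear, with no need to trust the backing arrays' contents. A Python sketch follows; the arrays are pre-zeroed only because Python cannot leave memory uninitialized the way the Rust code does (editorial illustration).

# Editorial sketch of the sparse-set data structure behind Threads.
class SparseSet:
    def __init__(self, capacity):
        self.dense = [0] * capacity   # dense[0:size] holds the members
        self.sparse = [0] * capacity  # sparse[v] = position of v in dense
        self.size = 0

    def add(self, v):
        self.dense[self.size] = v
        self.sparse[v] = self.size
        self.size += 1

    def contains(self, v):
        s = self.sparse[v]
        return s < self.size and self.dense[s] == v

    def clear(self):
        self.size = 0  # O(1): stale array entries are never trusted

s = SparseSet(8)
s.add(5)
assert s.contains(5) and not s.contains(3)
s.clear()
assert not s.contains(5)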
|
soccer_ball.py | # Copyright 2019 The dm_control Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ============================================================================
"""A soccer ball that keeps track of ball-player contacts."""
import os
from dm_control import mjcf
from dm_control.entities import props
import numpy as np
from dm_control.utils import io as resources
_ASSETS_PATH = os.path.join(os.path.dirname(__file__), 'assets', 'soccer_ball')
# FIFA regulation parameters for a size 5 ball.
_REGULATION_RADIUS = 0.117 # Meters.
_REGULATION_MASS = 0.45 # Kilograms.
_DEFAULT_FRICTION = (0.7, 0.05, 0.04) # (slide, spin, roll).
_DEFAULT_DAMP_RATIO = 0.4
def _get_texture(name):
contents = resources.GetResource(
os.path.join(_ASSETS_PATH, '{}.png'.format(name)))
return mjcf.Asset(contents, '.png')
def regulation_soccer_ball():
return SoccerBall(
radius=_REGULATION_RADIUS,
mass=_REGULATION_MASS,
friction=_DEFAULT_FRICTION,
damp_ratio=_DEFAULT_DAMP_RATIO)
class SoccerBall(props.Primitive):
"""A soccer ball that keeps track of entities that come into contact."""
def _build(self,
radius=0.35,
mass=0.045,
friction=(0.7, 0.075, 0.075),
damp_ratio=1.0,
name='soccer_ball'):
"""Builds this soccer ball.
Args:
radius: The radius (in meters) of this target sphere.
mass: Mass (in kilograms) of the ball.
friction: Friction parameters of the ball geom with the three dimensions
corresponding to (slide, spin, roll) frictions.
damp_ratio: A real positive number. Lower implies less dampening upon
contacts.
name: The name of this entity.
"""
super()._build(geom_type='sphere', size=(radius,), name=name)
texture = self._mjcf_root.asset.add(
'texture',
name='soccer_ball',
type='cube',
fileup=_get_texture('up'),
filedown=_get_texture('down'),
filefront=_get_texture('front'),
fileback=_get_texture('back'),
fileleft=_get_texture('left'),
fileright=_get_texture('right'))
material = self._mjcf_root.asset.add(
'material', name='soccer_ball', texture=texture)
if damp_ratio < 0.0:
raise ValueError(
f'Invalid `damp_ratio` parameter ({damp_ratio} is not positive).')
self._geom.set_attributes(
pos=[0, 0, radius],
size=[radius],
condim=6,
priority=1,
mass=mass,
friction=friction,
solref=[0.02, damp_ratio],
material=material)
# Add some tracking cameras for visualization and logging.
self._mjcf_root.worldbody.add(
'camera',
name='ball_cam_near',
pos=[0, -2, 2],
zaxis=[0, -1, 1],
fovy=70,
mode='trackcom')
self._mjcf_root.worldbody.add(
'camera',
name='ball_cam',
pos=[0, -7, 7],
zaxis=[0, -1, 1],
fovy=70,
mode='trackcom')
self._mjcf_root.worldbody.add(
'camera',
name='ball_cam_far',
pos=[0, -10, 10],
zaxis=[0, -1, 1],
fovy=70,
mode='trackcom')
# Keep track of entities to team mapping.
self._players = []
# Initialize tracker attributes.
self.initialize_entity_trackers()
def register_player(self, player):
self._players.append(player)
def initialize_entity_trackers(self):
self._last_hit = None
self._hit = False
self._repossessed = False
self._intercepted = False
# Tracks distance traveled by the ball in between consecutive hits.
self._pos_at_last_step = None
self._dist_since_last_hit = None
self._dist_between_last_hits = None
def initialize_episode(self, physics, unused_random_state):
self._geom_id = physics.model.name2id(self._geom.full_identifier, 'geom')
self._geom_id_to_player = {}
for player in self._players:
geoms = player.walker.mjcf_model.find_all('geom')
for geom in geoms:
|
self.initialize_entity_trackers()
def after_substep(self, physics, unused_random_state):
"""Resolve contacts and update ball-player contact trackers."""
if self._hit:
# Ball has already registered a valid contact within step (during one of
# previous after_substep calls).
return
# Iterate through all contacts to find the first contact between the ball
# and one of the registered entities.
for contact in physics.data.contact:
# Keep contacts that involve the ball and one of the registered entities.
has_self = False
for geom_id in (contact.geom1, contact.geom2):
if geom_id == self._geom_id:
has_self = True
else:
player = self._geom_id_to_player.get(geom_id)
if has_self and player:
# Detected a contact between the ball and a registered player.
if self._last_hit is not None:
self._intercepted = player.team != self._last_hit.team
else:
self._intercepted = True
# Register repossessed before updating last_hit player.
self._repossessed = player is not self._last_hit
self._last_hit = player
# Register hit event.
self._hit = True
break
def before_step(self, physics, random_state):
super().before_step(physics, random_state)
# Reset per simulation step indicator.
self._hit = False
self._repossessed = False
self._intercepted = False
def after_step(self, physics, random_state):
super().after_step(physics, random_state)
pos = physics.bind(self._geom).xpos
if self._hit:
# SoccerBall is hit on this step. Update dist_between_last_hits
# to dist_since_last_hit before resetting dist_since_last_hit.
self._dist_between_last_hits = self._dist_since_last_hit
self._dist_since_last_hit = 0.
self._pos_at_last_step = pos.copy()
if self._dist_since_last_hit is not None:
# Accumulate distance traveled since last hit event.
self._dist_since_last_hit += np.linalg.norm(pos - self._pos_at_last_step)
self._pos_at_last_step = pos.copy()
@property
def last_hit(self):
"""The player that last came in contact with the ball or `None`."""
return self._last_hit
@property
def hit(self):
"""Indicates if the ball is hit during the last simulation step.
For a timeline shown below:
..., agent.step, simulation, agent.step, ...
Returns:
True: if the ball is hit by a registered player during simulation step.
False: if not.
"""
return self._hit
@property
def repossessed(self):
"""Indicates if the ball has been repossessed by a different player.
For a timeline shown below:
..., agent.step, simulation, agent.step, ...
Returns:
True: if the ball is hit by a registered player during simulation step
and that player is different from `last_hit`.
False: if the ball is not hit, or the ball is hit by `last_hit` player.
"""
return self._repossessed
@property
def intercepted(self):
"""Indicates if the ball has been intercepted by a different team.
For a timeline shown below:
..., agent.step, simulation, agent.step, ...
Returns:
True: if the ball is hit for the first time, or repossessed by a player
from a different team.
False: if the ball is not hit, not repossessed, or repossessed by a
teammate to `last_hit`.
"""
return self._intercepted
@property
def dist_between_last_hits(self):
"""Distance between last consecutive hits.
Returns:
Distance between last two consecutive hit events or `None` if there has
not been two consecutive hits on the ball.
"""
return self._dist_between_last_hits | geom_id = physics.model.name2id(geom.full_identifier, 'geom')
self._geom_id_to_player[geom_id] = player | conditional_block |
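The after_substep logic in the record above reduces to: find the first ball-player contact in the step, then derive the hit/repossessed/intercepted flags from how the toucher compares to last_hit. A self-contained Python sketch with stand-in types (editorial illustration, not dm_control code):

# Editorial sketch of the contact-resolution rule in after_substep.
from collections import namedtuple

Player = namedtuple('Player', ['name', 'team'])
Contact = namedtuple('Contact', ['geom1', 'geom2'])

def resolve_hit(contacts, ball_geom_id, geom_id_to_player, last_hit):
    for contact in contacts:
        has_self, player = False, None
        for geom_id in (contact.geom1, contact.geom2):
            if geom_id == ball_geom_id:
                has_self = True
            else:
                player = geom_id_to_player.get(geom_id)
        if has_self and player:
            intercepted = last_hit is None or player.team != last_hit.team
            repossessed = player is not last_hit
            return player, repossessed, intercepted
    return last_hit, False, False

home = Player('home_1', 'home')
away = Player('away_1', 'away')
mapping = {10: home, 11: away}
# Ball (geom 0) touches the away player after the home player had it last:
print(resolve_hit([Contact(0, 11)], 0, mapping, last_hit=home))
# -> (Player(name='away_1', team='away'), True, True)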
soccer_ball.py | # Copyright 2019 The dm_control Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ============================================================================
"""A soccer ball that keeps track of ball-player contacts."""
import os
from dm_control import mjcf
from dm_control.entities import props
import numpy as np
from dm_control.utils import io as resources
_ASSETS_PATH = os.path.join(os.path.dirname(__file__), 'assets', 'soccer_ball')
# FIFA regulation parameters for a size 5 ball.
_REGULATION_RADIUS = 0.117 # Meters.
_REGULATION_MASS = 0.45 # Kilograms.
_DEFAULT_FRICTION = (0.7, 0.05, 0.04) # (slide, spin, roll).
_DEFAULT_DAMP_RATIO = 0.4
def _get_texture(name):
contents = resources.GetResource(
os.path.join(_ASSETS_PATH, '{}.png'.format(name)))
return mjcf.Asset(contents, '.png')
def regulation_soccer_ball():
return SoccerBall(
radius=_REGULATION_RADIUS,
mass=_REGULATION_MASS,
friction=_DEFAULT_FRICTION,
damp_ratio=_DEFAULT_DAMP_RATIO)
class SoccerBall(props.Primitive):
"""A soccer ball that keeps track of entities that come into contact."""
def _build(self,
radius=0.35,
mass=0.045,
friction=(0.7, 0.075, 0.075),
damp_ratio=1.0,
name='soccer_ball'):
"""Builds this soccer ball.
Args:
radius: The radius (in meters) of this target sphere.
mass: Mass (in kilograms) of the ball.
friction: Friction parameters of the ball geom with the three dimensions
corresponding to (slide, spin, roll) frictions.
damp_ratio: A real positive number. Lower implies less dampening upon
contacts.
name: The name of this entity.
"""
super()._build(geom_type='sphere', size=(radius,), name=name)
texture = self._mjcf_root.asset.add(
'texture',
name='soccer_ball',
type='cube',
fileup=_get_texture('up'),
filedown=_get_texture('down'),
filefront=_get_texture('front'),
fileback=_get_texture('back'),
fileleft=_get_texture('left'),
fileright=_get_texture('right'))
material = self._mjcf_root.asset.add(
'material', name='soccer_ball', texture=texture)
if damp_ratio < 0.0:
raise ValueError(
f'Invalid `damp_ratio` parameter ({damp_ratio} is not positive).')
self._geom.set_attributes(
pos=[0, 0, radius],
size=[radius],
condim=6,
priority=1,
mass=mass,
friction=friction,
solref=[0.02, damp_ratio],
material=material)
# Add some tracking cameras for visualization and logging.
self._mjcf_root.worldbody.add(
'camera',
name='ball_cam_near',
pos=[0, -2, 2],
zaxis=[0, -1, 1],
fovy=70,
mode='trackcom')
self._mjcf_root.worldbody.add(
'camera',
name='ball_cam',
pos=[0, -7, 7],
zaxis=[0, -1, 1],
fovy=70,
mode='trackcom')
self._mjcf_root.worldbody.add(
'camera',
name='ball_cam_far',
pos=[0, -10, 10],
zaxis=[0, -1, 1],
fovy=70,
mode='trackcom')
# Keep track of entities to team mapping.
self._players = []
# Initialize tracker attributes.
self.initialize_entity_trackers()
def register_player(self, player):
self._players.append(player)
def initialize_entity_trackers(self):
self._last_hit = None
self._hit = False
self._repossessed = False
self._intercepted = False
# Tracks distance traveled by the ball in between consecutive hits.
self._pos_at_last_step = None
self._dist_since_last_hit = None
self._dist_between_last_hits = None
def initialize_episode(self, physics, unused_random_state):
|
def after_substep(self, physics, unused_random_state):
"""Resolve contacts and update ball-player contact trackers."""
if self._hit:
# Ball has already registered a valid contact within step (during one of
# previous after_substep calls).
return
# Iterate through all contacts to find the first contact between the ball
# and one of the registered entities.
for contact in physics.data.contact:
# Keep contacts that involve the ball and one of the registered entities.
has_self = False
for geom_id in (contact.geom1, contact.geom2):
if geom_id == self._geom_id:
has_self = True
else:
player = self._geom_id_to_player.get(geom_id)
if has_self and player:
# Detected a contact between the ball and a registered player.
if self._last_hit is not None:
self._intercepted = player.team != self._last_hit.team
else:
self._intercepted = True
# Register repossessed before updating last_hit player.
self._repossessed = player is not self._last_hit
self._last_hit = player
# Register hit event.
self._hit = True
break
def before_step(self, physics, random_state):
super().before_step(physics, random_state)
# Reset per simulation step indicator.
self._hit = False
self._repossessed = False
self._intercepted = False
def after_step(self, physics, random_state):
super().after_step(physics, random_state)
pos = physics.bind(self._geom).xpos
if self._hit:
# SoccerBall is hit on this step. Update dist_between_last_hits
# to dist_since_last_hit before resetting dist_since_last_hit.
self._dist_between_last_hits = self._dist_since_last_hit
self._dist_since_last_hit = 0.
self._pos_at_last_step = pos.copy()
if self._dist_since_last_hit is not None:
# Accumulate distance traveled since last hit event.
self._dist_since_last_hit += np.linalg.norm(pos - self._pos_at_last_step)
self._pos_at_last_step = pos.copy()
@property
def last_hit(self):
"""The player that last came in contact with the ball or `None`."""
return self._last_hit
@property
def hit(self):
"""Indicates if the ball is hit during the last simulation step.
For a timeline shown below:
..., agent.step, simulation, agent.step, ...
Returns:
True: if the ball is hit by a registered player during simulation step.
False: if not.
"""
return self._hit
@property
def repossessed(self):
"""Indicates if the ball has been repossessed by a different player.
For a timeline shown below:
..., agent.step, simulation, agent.step, ...
Returns:
True: if the ball is hit by a registered player during simulation step
and that player is different from `last_hit`.
False: if the ball is not hit, or the ball is hit by `last_hit` player.
"""
return self._repossessed
@property
def intercepted(self):
"""Indicates if the ball has been intercepted by a different team.
For a timeline shown below:
..., agent.step, simulation, agent.step, ...
Returns:
True: if the ball is hit for the first time, or repossessed by a player
from a different team.
False: if the ball is not hit, not repossessed, or repossessed by a
teammate to `last_hit`.
"""
return self._intercepted
@property
def dist_between_last_hits(self):
"""Distance between last consecutive hits.
Returns:
Distance between last two consecutive hit events or `None` if there has
not been two consecutive hits on the ball.
"""
return self._dist_between_last | self._geom_id = physics.model.name2id(self._geom.full_identifier, 'geom')
self._geom_id_to_player = {}
for player in self._players:
geoms = player.walker.mjcf_model.find_all('geom')
for geom in geoms:
geom_id = physics.model.name2id(geom.full_identifier, 'geom')
self._geom_id_to_player[geom_id] = player
self.initialize_entity_trackers() | identifier_body |
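The after_step bookkeeping in these records accrues distance between consecutive hits: distance accumulates while the ball rolls, and a new hit rolls the running total into dist_between_last_hits before resetting it. A minimal sketch of that odometer, assuming positions arrive as 3-vectors (editorial illustration):

# Editorial sketch of the hit-to-hit distance odometer in after_step.
import numpy as np

class BallTracker:
    def __init__(self):
        self.pos_at_last_step = None
        self.dist_since_last_hit = None
        self.dist_between_last_hits = None

    def after_step(self, pos, hit):
        pos = np.asarray(pos, dtype=float)
        if hit:
            self.dist_between_last_hits = self.dist_since_last_hit
            self.dist_since_last_hit = 0.0
            self.pos_at_last_step = pos.copy()
        if self.dist_since_last_hit is not None:
            self.dist_since_last_hit += np.linalg.norm(pos - self.pos_at_last_step)
            self.pos_at_last_step = pos.copy()

t = BallTracker()
t.after_step([0, 0, 0], hit=True)   # first touch starts the odometer
t.after_step([3, 4, 0], hit=False)  # ball rolls 5 meters
t.after_step([3, 4, 0], hit=True)   # second touch
print(t.dist_between_last_hits)     # 5.0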
soccer_ball.py | # Copyright 2019 The dm_control Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ============================================================================
"""A soccer ball that keeps track of ball-player contacts."""
import os
from dm_control import mjcf
from dm_control.entities import props
import numpy as np
from dm_control.utils import io as resources
_ASSETS_PATH = os.path.join(os.path.dirname(__file__), 'assets', 'soccer_ball')
# FIFA regulation parameters for a size 5 ball.
_REGULATION_RADIUS = 0.117 # Meters.
_REGULATION_MASS = 0.45 # Kilograms.
_DEFAULT_FRICTION = (0.7, 0.05, 0.04) # (slide, spin, roll).
_DEFAULT_DAMP_RATIO = 0.4
def _get_texture(name):
contents = resources.GetResource(
os.path.join(_ASSETS_PATH, '{}.png'.format(name)))
return mjcf.Asset(contents, '.png')
def regulation_soccer_ball():
return SoccerBall(
radius=_REGULATION_RADIUS,
mass=_REGULATION_MASS,
friction=_DEFAULT_FRICTION,
damp_ratio=_DEFAULT_DAMP_RATIO)
class SoccerBall(props.Primitive):
"""A soccer ball that keeps track of entities that come into contact."""
def _build(self,
radius=0.35,
mass=0.045,
friction=(0.7, 0.075, 0.075),
damp_ratio=1.0,
name='soccer_ball'):
"""Builds this soccer ball.
Args:
radius: The radius (in meters) of this target sphere.
mass: Mass (in kilograms) of the ball.
friction: Friction parameters of the ball geom with the three dimensions
corresponding to (slide, spin, roll) frictions.
damp_ratio: A real positive number. Lower implies less dampening upon
contacts.
name: The name of this entity.
"""
super()._build(geom_type='sphere', size=(radius,), name=name)
texture = self._mjcf_root.asset.add(
'texture',
name='soccer_ball',
type='cube',
fileup=_get_texture('up'),
filedown=_get_texture('down'),
filefront=_get_texture('front'),
fileback=_get_texture('back'),
fileleft=_get_texture('left'),
fileright=_get_texture('right'))
material = self._mjcf_root.asset.add(
'material', name='soccer_ball', texture=texture)
if damp_ratio < 0.0:
raise ValueError(
f'Invalid `damp_ratio` parameter ({damp_ratio} is not positive).')
self._geom.set_attributes(
pos=[0, 0, radius],
size=[radius],
condim=6,
priority=1,
mass=mass,
friction=friction,
solref=[0.02, damp_ratio],
material=material)
# Add some tracking cameras for visualization and logging.
self._mjcf_root.worldbody.add(
'camera',
name='ball_cam_near',
pos=[0, -2, 2],
zaxis=[0, -1, 1],
fovy=70,
mode='trackcom')
self._mjcf_root.worldbody.add(
'camera',
name='ball_cam',
pos=[0, -7, 7],
zaxis=[0, -1, 1],
fovy=70,
mode='trackcom')
self._mjcf_root.worldbody.add(
'camera',
name='ball_cam_far',
pos=[0, -10, 10],
zaxis=[0, -1, 1],
fovy=70,
mode='trackcom')
# Keep track of entities to team mapping.
self._players = []
# Initialize tracker attributes.
self.initialize_entity_trackers()
def register_player(self, player):
self._players.append(player)
def initialize_entity_trackers(self):
self._last_hit = None
self._hit = False
self._repossessed = False
self._intercepted = False
# Tracks distance traveled by the ball in between consecutive hits.
self._pos_at_last_step = None
self._dist_since_last_hit = None
self._dist_between_last_hits = None
def initialize_episode(self, physics, unused_random_state):
self._geom_id = physics.model.name2id(self._geom.full_identifier, 'geom')
self._geom_id_to_player = {}
for player in self._players:
geoms = player.walker.mjcf_model.find_all('geom')
for geom in geoms:
geom_id = physics.model.name2id(geom.full_identifier, 'geom')
self._geom_id_to_player[geom_id] = player
self.initialize_entity_trackers()
def after_substep(self, physics, unused_random_state):
"""Resolve contacts and update ball-player contact trackers."""
if self._hit:
# Ball has already registered a valid contact within step (during one of
# previous after_substep calls).
return
# Iterate through all contacts to find the first contact between the ball
# and one of the registered entities.
for contact in physics.data.contact:
# Keep contacts that involve the ball and one of the registered entities.
has_self = False
for geom_id in (contact.geom1, contact.geom2):
if geom_id == self._geom_id:
has_self = True
else:
player = self._geom_id_to_player.get(geom_id)
if has_self and player:
# Detected a contact between the ball and a registered player.
if self._last_hit is not None:
self._intercepted = player.team != self._last_hit.team
else:
self._intercepted = True
# Register repossessed before updating last_hit player.
self._repossessed = player is not self._last_hit
self._last_hit = player
# Register hit event.
self._hit = True
break
def before_step(self, physics, random_state):
super().before_step(physics, random_state)
# Reset per simulation step indicator.
self._hit = False
self._repossessed = False
self._intercepted = False
def after_step(self, physics, random_state):
super().after_step(physics, random_state)
pos = physics.bind(self._geom).xpos
if self._hit:
# SoccerBall is hit on this step. Update dist_between_last_hits
# to dist_since_last_hit before resetting dist_since_last_hit.
self._dist_between_last_hits = self._dist_since_last_hit
self._dist_since_last_hit = 0.
self._pos_at_last_step = pos.copy()
if self._dist_since_last_hit is not None:
# Accumulate distance traveled since last hit event.
self._dist_since_last_hit += np.linalg.norm(pos - self._pos_at_last_step)
self._pos_at_last_step = pos.copy()
@property
def last_hit(self):
"""The player that last came in contact with the ball or `None`."""
return self._last_hit
@property
def hit(self):
"""Indicates if the ball is hit during the last simulation step.
For a timeline shown below:
..., agent.step, simulation, agent.step, ...
Returns:
True: if the ball is hit by a registered player during simulation step.
False: if not. | return self._hit
@property
def repossessed(self):
"""Indicates if the ball has been repossessed by a different player.
For a timeline shown below:
..., agent.step, simulation, agent.step, ...
Returns:
True: if the ball is hit by a registered player during simulation step
and that player is different from `last_hit`.
False: if the ball is not hit, or the ball is hit by `last_hit` player.
"""
return self._repossessed
@property
def intercepted(self):
"""Indicates if the ball has been intercepted by a different team.
For a timeline shown below:
..., agent.step, simulation, agent.step, ...
Returns:
True: if the ball is hit for the first time, or repossessed by a player
from a different team.
False: if the ball is not hit, not repossessed, or repossessed by a
teammate to `last_hit`.
"""
return self._intercepted
@property
def dist_between_last_hits(self):
"""Distance between last consecutive hits.
Returns:
Distance between last two consecutive hit events or `None` if there has
not been two consecutive hits on the ball.
"""
return self._dist_between_last_hits | """ | random_line_split |
soccer_ball.py | # Copyright 2019 The dm_control Authors.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ============================================================================
"""A soccer ball that keeps track of ball-player contacts."""
import os
from dm_control import mjcf
from dm_control.entities import props
import numpy as np
from dm_control.utils import io as resources
_ASSETS_PATH = os.path.join(os.path.dirname(__file__), 'assets', 'soccer_ball')
# FIFA regulation parameters for a size 5 ball.
_REGULATION_RADIUS = 0.117 # Meters.
_REGULATION_MASS = 0.45 # Kilograms.
_DEFAULT_FRICTION = (0.7, 0.05, 0.04) # (slide, spin, roll).
_DEFAULT_DAMP_RATIO = 0.4
def _get_texture(name):
contents = resources.GetResource(
os.path.join(_ASSETS_PATH, '{}.png'.format(name)))
return mjcf.Asset(contents, '.png')
def regulation_soccer_ball():
return SoccerBall(
radius=_REGULATION_RADIUS,
mass=_REGULATION_MASS,
friction=_DEFAULT_FRICTION,
damp_ratio=_DEFAULT_DAMP_RATIO)
class SoccerBall(props.Primitive):
"""A soccer ball that keeps track of entities that come into contact."""
def _build(self,
radius=0.35,
mass=0.045,
friction=(0.7, 0.075, 0.075),
damp_ratio=1.0,
name='soccer_ball'):
"""Builds this soccer ball.
Args:
radius: The radius (in meters) of this target sphere.
mass: Mass (in kilograms) of the ball.
friction: Friction parameters of the ball geom with the three dimensions
corresponding to (slide, spin, roll) frictions.
damp_ratio: A real positive number. Lower implies less dampening upon
contacts.
name: The name of this entity.
"""
super()._build(geom_type='sphere', size=(radius,), name=name)
texture = self._mjcf_root.asset.add(
'texture',
name='soccer_ball',
type='cube',
fileup=_get_texture('up'),
filedown=_get_texture('down'),
filefront=_get_texture('front'),
fileback=_get_texture('back'),
fileleft=_get_texture('left'),
fileright=_get_texture('right'))
material = self._mjcf_root.asset.add(
'material', name='soccer_ball', texture=texture)
if damp_ratio < 0.0:
raise ValueError(
f'Invalid `damp_ratio` parameter ({damp_ratio} is not positive).')
self._geom.set_attributes(
pos=[0, 0, radius],
size=[radius],
condim=6,
priority=1,
mass=mass,
friction=friction,
solref=[0.02, damp_ratio],
material=material)
# Add some tracking cameras for visualization and logging.
self._mjcf_root.worldbody.add(
'camera',
name='ball_cam_near',
pos=[0, -2, 2],
zaxis=[0, -1, 1],
fovy=70,
mode='trackcom')
self._mjcf_root.worldbody.add(
'camera',
name='ball_cam',
pos=[0, -7, 7],
zaxis=[0, -1, 1],
fovy=70,
mode='trackcom')
self._mjcf_root.worldbody.add(
'camera',
name='ball_cam_far',
pos=[0, -10, 10],
zaxis=[0, -1, 1],
fovy=70,
mode='trackcom')
# Keep track of entities to team mapping.
self._players = []
# Initialize tracker attributes.
self.initialize_entity_trackers()
def register_player(self, player):
self._players.append(player)
def initialize_entity_trackers(self):
self._last_hit = None
self._hit = False
self._repossessed = False
self._intercepted = False
# Tracks distance traveled by the ball in between consecutive hits.
self._pos_at_last_step = None
self._dist_since_last_hit = None
self._dist_between_last_hits = None
def initialize_episode(self, physics, unused_random_state):
self._geom_id = physics.model.name2id(self._geom.full_identifier, 'geom')
self._geom_id_to_player = {}
for player in self._players:
geoms = player.walker.mjcf_model.find_all('geom')
for geom in geoms:
geom_id = physics.model.name2id(geom.full_identifier, 'geom')
self._geom_id_to_player[geom_id] = player
self.initialize_entity_trackers()
def after_substep(self, physics, unused_random_state):
"""Resolve contacts and update ball-player contact trackers."""
if self._hit:
# Ball has already registered a valid contact within step (during one of
# previous after_substep calls).
return
# Iterate through all contacts to find the first contact between the ball
# and one of the registered entities.
for contact in physics.data.contact:
# Keep contacts that involve the ball and one of the registered entities.
has_self = False
for geom_id in (contact.geom1, contact.geom2):
if geom_id == self._geom_id:
has_self = True
else:
player = self._geom_id_to_player.get(geom_id)
if has_self and player:
# Detected a contact between the ball and a registered player.
if self._last_hit is not None:
self._intercepted = player.team != self._last_hit.team
else:
self._intercepted = True
# Register repossessed before updating last_hit player.
self._repossessed = player is not self._last_hit
self._last_hit = player
# Register hit event.
self._hit = True
break
def before_step(self, physics, random_state):
super().before_step(physics, random_state)
# Reset per simulation step indicator.
self._hit = False
self._repossessed = False
self._intercepted = False
def | (self, physics, random_state):
super().after_step(physics, random_state)
pos = physics.bind(self._geom).xpos
if self._hit:
# SoccerBall is hit on this step. Update dist_between_last_hits
# to dist_since_last_hit before resetting dist_since_last_hit.
self._dist_between_last_hits = self._dist_since_last_hit
self._dist_since_last_hit = 0.
self._pos_at_last_step = pos.copy()
if self._dist_since_last_hit is not None:
# Accumulate distance traveled since last hit event.
self._dist_since_last_hit += np.linalg.norm(pos - self._pos_at_last_step)
self._pos_at_last_step = pos.copy()
@property
def last_hit(self):
"""The player that last came in contact with the ball or `None`."""
return self._last_hit
@property
def hit(self):
"""Indicates if the ball is hit during the last simulation step.
For a timeline shown below:
..., agent.step, simulation, agent.step, ...
Returns:
True: if the ball is hit by a registered player during simulation step.
False: if not.
"""
return self._hit
@property
def repossessed(self):
"""Indicates if the ball has been repossessed by a different player.
For a timeline shown below:
..., agent.step, simulation, agent.step, ...
Returns:
True: if the ball is hit by a registered player during simulation step
and that player is different from `last_hit`.
False: if the ball is not hit, or the ball is hit by `last_hit` player.
"""
return self._repossessed
@property
def intercepted(self):
"""Indicates if the ball has been intercepted by a different team.
For a timeline shown below:
..., agent.step, simulation, agent.step, ...
Returns:
True: if the ball is hit for the first time, or repossessed by a player
from a different team.
False: if the ball is not hit, not repossessed, or repossessed by a
teammate of `last_hit`.
"""
return self._intercepted
@property
def dist_between_last_hits(self):
"""Distance between last consecutive hits.
Returns:
Distance between last two consecutive hit events or `None` if there has
not been two consecutive hits on the ball.
"""
return self._dist_between_last_hits
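# --- Editorial sketch (not part of the original file) ---
# A minimal example of how a task might consume the trackers above after
# each environment step. `ball` is assumed to be a SoccerBall instance and
# `stats` a plain dict; both names, and the function itself, are
# hypothetical -- only the properties they read come from the class above.
def update_ball_stats(ball, stats):
  """Accumulate hit/interception counts and the last pass distance."""
  if ball.hit:
    stats['hits'] = stats.get('hits', 0) + 1
    # `intercepted` implies `repossessed`, so check it first to count an
    # opposing-team touch as an interception rather than a repossession.
    if ball.intercepted:
      stats['interceptions'] = stats.get('interceptions', 0) + 1
    elif ball.repossessed:
      stats['repossessions'] = stats.get('repossessions', 0) + 1
  if ball.dist_between_last_hits is not None:
    stats['last_pass_distance'] = ball.dist_between_last_hits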
test_lms_matlab_problem.py
# -*- coding: utf-8 -*-
"""
Test for matlab problems
"""
import time
from ...pages.lms.matlab_problem import MatlabProblemPage
from ...fixtures.course import XBlockFixtureDesc
from ...fixtures.xqueue import XQueueResponseFixture
from .test_lms_problems import ProblemsTest
from textwrap import dedent
class MatlabProblemTest(ProblemsTest):
"""
Tests that verify matlab problem "Run Code".
"""
def get_problem(self):
"""
Create a matlab problem for the test.
"""
problem_data = dedent("""
<problem markdown="null">
<text>
<p>
Write MATLAB code to create the following row vector and store it in a variable named <code>V</code>.
</p>
<table id="a0000000466" class="equation" width="100%" cellspacing="0" cellpadding="7" style="table-layout:auto">
<tr>
<td class="equation">[1 1 2 3 5 8 13]</td>
</tr>
</table>
<p>
<coderesponse queuename="matlab">
<matlabinput rows="10" cols="40" mode="" tabsize="4">
<plot_payload>
</plot_payload>
</matlabinput>
<codeparam>
<initial_display/>
<answer_display>
</answer_display>
<grader_payload>
</grader_payload>
</codeparam>
</coderesponse>
</p>
</text>
</problem>
""")
return XBlockFixtureDesc('problem', 'Test Matlab Problem', data=problem_data)
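# Note (editorial): the <coderesponse queuename="matlab"> element routes
# submissions to the external XQueue queue named "matlab". The empty
# <grader_payload> is sufficient here because the test below stubs the
# queue's response rather than running a real grader.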
def _goto_matlab_problem_page(self):
"""
Open matlab problem page with assertion.
"""
self.courseware_page.visit()
matlab_problem_page = MatlabProblemPage(self.browser)
self.assertEqual(matlab_problem_page.problem_name, 'TEST MATLAB PROBLEM')
return matlab_problem_page
def test_run_code(self):
"""
Test "Run Code" button functionality.
"""
# Enter a submission, which will trigger a pre-defined response from the XQueue stub.
self.submission = "a=1" + self.unique_id[0:5]
self.xqueue_grade_response = {'msg': self.submission}
matlab_problem_page = self._goto_matlab_problem_page()
# Configure the XQueue stub's response for the text we will submit.
if self.xqueue_grade_response is not None:
XQueueResponseFixture(self.submission, self.xqueue_grade_response).install()
matlab_problem_page.set_response(self.submission)
matlab_problem_page.click_run_code()
self.assertEqual(
u'Submitted. As soon as a response is returned, this message will be replaced by that feedback.',
matlab_problem_page.get_grader_msg(".external-grader-message")[0]
)
# Wait 5 seconds for the XQueue stub server's grader response to reach the LMS.
time.sleep(5)
self.assertEqual(u'', matlab_problem_page.get_grader_msg(".external-grader-message")[0])
self.assertEqual(
self.xqueue_grade_response.get("msg"),
matlab_problem_page.get_grader_msg(".ungraded-matlab-result")[0]
)
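# --- Editorial sketch (not part of the original test) ---
# A minimal alternative to the fixed time.sleep(5) above, assuming the
# bok_choy promise API used by these acceptance-test pages. The helper
# name and timeout value are hypothetical.
from bok_choy.promise import EmptyPromise

def wait_for_grader_response(matlab_problem_page, timeout=10):
    """Poll until the external grader message is cleared by the response."""
    EmptyPromise(
        lambda: matlab_problem_page.get_grader_msg(".external-grader-message")[0] == u'',
        "XQueue grader response received by the LMS",
        timeout=timeout,
    ).fulfill()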