Dataset columns and observed string lengths: code (75 to 104k), code_sememe (47 to 309k), token_type (215 to 214k), code_dependency (75 to 155k). Each record below gives those four fields in order: the original Python source, its sememe (AST-node) rendering, its token-type rendering, and the source re-emitted with control-dependency annotations.
def tf_next_step(self, x, iteration, deltas, improvement, last_improvement, estimated_improvement): """ Termination condition: max number of iterations, or no improvement for last step, or improvement less than acceptable ratio, or estimated value not positive. Args: x: Current solution estimate $x_t$. iteration: Current iteration counter $t$. deltas: Current difference $x_t - x'$. improvement: Current improvement $(f(x_t) - f(x')) / v'$. last_improvement: Last improvement $(f(x_{t-1}) - f(x')) / v'$. estimated_improvement: Current estimated value $v'$. Returns: True if another iteration should be performed. """ next_step = super(LineSearch, self).tf_next_step( x, iteration, deltas, improvement, last_improvement, estimated_improvement ) def undo_deltas(): value = self.fn_x([-delta for delta in deltas]) with tf.control_dependencies(control_inputs=(value,)): # Trivial operation to enforce control dependency return tf.less(x=value, y=value) # == False improved = tf.cond( pred=(improvement > last_improvement), true_fn=(lambda: True), false_fn=undo_deltas ) next_step = tf.logical_and(x=next_step, y=improved) next_step = tf.logical_and(x=next_step, y=(improvement < self.accept_ratio)) return tf.logical_and(x=next_step, y=(estimated_improvement > util.epsilon))
def function[tf_next_step, parameter[self, x, iteration, deltas, improvement, last_improvement, estimated_improvement]]: constant[ Termination condition: max number of iterations, or no improvement for last step, or improvement less than acceptable ratio, or estimated value not positive. Args: x: Current solution estimate $x_t$. iteration: Current iteration counter $t$. deltas: Current difference $x_t - x'$. improvement: Current improvement $(f(x_t) - f(x')) / v'$. last_improvement: Last improvement $(f(x_{t-1}) - f(x')) / v'$. estimated_improvement: Current estimated value $v'$. Returns: True if another iteration should be performed. ] variable[next_step] assign[=] call[call[name[super], parameter[name[LineSearch], name[self]]].tf_next_step, parameter[name[x], name[iteration], name[deltas], name[improvement], name[last_improvement], name[estimated_improvement]]] def function[undo_deltas, parameter[]]: variable[value] assign[=] call[name[self].fn_x, parameter[<ast.ListComp object at 0x7da20c992890>]] with call[name[tf].control_dependencies, parameter[]] begin[:] return[call[name[tf].less, parameter[]]] variable[improved] assign[=] call[name[tf].cond, parameter[]] variable[next_step] assign[=] call[name[tf].logical_and, parameter[]] variable[next_step] assign[=] call[name[tf].logical_and, parameter[]] return[call[name[tf].logical_and, parameter[]]]
keyword[def] identifier[tf_next_step] ( identifier[self] , identifier[x] , identifier[iteration] , identifier[deltas] , identifier[improvement] , identifier[last_improvement] , identifier[estimated_improvement] ): literal[string] identifier[next_step] = identifier[super] ( identifier[LineSearch] , identifier[self] ). identifier[tf_next_step] ( identifier[x] , identifier[iteration] , identifier[deltas] , identifier[improvement] , identifier[last_improvement] , identifier[estimated_improvement] ) keyword[def] identifier[undo_deltas] (): identifier[value] = identifier[self] . identifier[fn_x] ([- identifier[delta] keyword[for] identifier[delta] keyword[in] identifier[deltas] ]) keyword[with] identifier[tf] . identifier[control_dependencies] ( identifier[control_inputs] =( identifier[value] ,)): keyword[return] identifier[tf] . identifier[less] ( identifier[x] = identifier[value] , identifier[y] = identifier[value] ) identifier[improved] = identifier[tf] . identifier[cond] ( identifier[pred] =( identifier[improvement] > identifier[last_improvement] ), identifier[true_fn] =( keyword[lambda] : keyword[True] ), identifier[false_fn] = identifier[undo_deltas] ) identifier[next_step] = identifier[tf] . identifier[logical_and] ( identifier[x] = identifier[next_step] , identifier[y] = identifier[improved] ) identifier[next_step] = identifier[tf] . identifier[logical_and] ( identifier[x] = identifier[next_step] , identifier[y] =( identifier[improvement] < identifier[self] . identifier[accept_ratio] )) keyword[return] identifier[tf] . identifier[logical_and] ( identifier[x] = identifier[next_step] , identifier[y] =( identifier[estimated_improvement] > identifier[util] . identifier[epsilon] ))
def tf_next_step(self, x, iteration, deltas, improvement, last_improvement, estimated_improvement): """ Termination condition: max number of iterations, or no improvement for last step, or improvement less than acceptable ratio, or estimated value not positive. Args: x: Current solution estimate $x_t$. iteration: Current iteration counter $t$. deltas: Current difference $x_t - x'$. improvement: Current improvement $(f(x_t) - f(x')) / v'$. last_improvement: Last improvement $(f(x_{t-1}) - f(x')) / v'$. estimated_improvement: Current estimated value $v'$. Returns: True if another iteration should be performed. """ next_step = super(LineSearch, self).tf_next_step(x, iteration, deltas, improvement, last_improvement, estimated_improvement) def undo_deltas(): value = self.fn_x([-delta for delta in deltas]) with tf.control_dependencies(control_inputs=(value,)): # Trivial operation to enforce control dependency return tf.less(x=value, y=value) # == False # depends on [control=['with'], data=[]] improved = tf.cond(pred=improvement > last_improvement, true_fn=lambda : True, false_fn=undo_deltas) next_step = tf.logical_and(x=next_step, y=improved) next_step = tf.logical_and(x=next_step, y=improvement < self.accept_ratio) return tf.logical_and(x=next_step, y=estimated_improvement > util.epsilon)
def create_request_elements( cls, request_type, credentials, url, method='GET', params=None, headers=None, body='', secret=None, redirect_uri='', scope='', csrf='', user_state='' ): """ Creates |oauth2| request elements. """ headers = headers or {} params = params or {} consumer_key = credentials.consumer_key or '' consumer_secret = credentials.consumer_secret or '' token = credentials.token or '' refresh_token = credentials.refresh_token or credentials.token or '' # Separate url base and query parameters. url, base_params = cls._split_url(url) # Add params extracted from URL. params.update(dict(base_params)) if request_type == cls.USER_AUTHORIZATION_REQUEST_TYPE: # User authorization request. # TODO: Raise error for specific message for each missing argument. if consumer_key and redirect_uri and ( csrf or not cls.supports_csrf_protection): params['client_id'] = consumer_key params['redirect_uri'] = redirect_uri params['scope'] = scope if cls.supports_user_state: params['state'] = base64.urlsafe_b64encode( json.dumps( {"csrf": csrf, "user_state": user_state} ).encode('utf-8') ) else: params['state'] = csrf params['response_type'] = 'code' # Add authorization header headers.update(cls._authorization_header(credentials)) else: raise OAuth2Error( 'Credentials with valid consumer_key and arguments ' 'redirect_uri, scope and state are required to create ' 'OAuth 2.0 user authorization request elements!') elif request_type == cls.ACCESS_TOKEN_REQUEST_TYPE: # Access token request. if consumer_key and consumer_secret: params['code'] = token params['client_id'] = consumer_key params['client_secret'] = consumer_secret params['redirect_uri'] = redirect_uri params['grant_type'] = 'authorization_code' # TODO: Check whether all providers accept it headers.update(cls._authorization_header(credentials)) else: raise OAuth2Error( 'Credentials with valid token, consumer_key, ' 'consumer_secret and argument redirect_uri are required ' 'to create OAuth 2.0 access token request elements!') elif request_type == cls.REFRESH_TOKEN_REQUEST_TYPE: # Refresh access token request. if refresh_token and consumer_key and consumer_secret: params['refresh_token'] = refresh_token params['client_id'] = consumer_key params['client_secret'] = consumer_secret params['grant_type'] = 'refresh_token' else: raise OAuth2Error( 'Credentials with valid refresh_token, consumer_key, ' 'consumer_secret are required to create OAuth 2.0 ' 'refresh token request elements!') elif request_type == cls.PROTECTED_RESOURCE_REQUEST_TYPE: # Protected resource request. # Add Authorization header. See: # http://tools.ietf.org/html/rfc6749#section-7.1 if credentials.token_type == cls.BEARER: # http://tools.ietf.org/html/rfc6750#section-2.1 headers.update( {'Authorization': 'Bearer {0}'.format(credentials.token)}) elif token: params['access_token'] = token else: raise OAuth2Error( 'Credentials with valid token are required to create ' 'OAuth 2.0 protected resources request elements!') request_elements = core.RequestElements( url, method, params, headers, body) return cls._x_request_elements_filter( request_type, request_elements, credentials)
def function[create_request_elements, parameter[cls, request_type, credentials, url, method, params, headers, body, secret, redirect_uri, scope, csrf, user_state]]: constant[ Creates |oauth2| request elements. ] variable[headers] assign[=] <ast.BoolOp object at 0x7da1b0399b40> variable[params] assign[=] <ast.BoolOp object at 0x7da1b039a770> variable[consumer_key] assign[=] <ast.BoolOp object at 0x7da1b03987f0> variable[consumer_secret] assign[=] <ast.BoolOp object at 0x7da1b039b820> variable[token] assign[=] <ast.BoolOp object at 0x7da1b0398130> variable[refresh_token] assign[=] <ast.BoolOp object at 0x7da1b039b4f0> <ast.Tuple object at 0x7da1b03981c0> assign[=] call[name[cls]._split_url, parameter[name[url]]] call[name[params].update, parameter[call[name[dict], parameter[name[base_params]]]]] if compare[name[request_type] equal[==] name[cls].USER_AUTHORIZATION_REQUEST_TYPE] begin[:] if <ast.BoolOp object at 0x7da1b0399d20> begin[:] call[name[params]][constant[client_id]] assign[=] name[consumer_key] call[name[params]][constant[redirect_uri]] assign[=] name[redirect_uri] call[name[params]][constant[scope]] assign[=] name[scope] if name[cls].supports_user_state begin[:] call[name[params]][constant[state]] assign[=] call[name[base64].urlsafe_b64encode, parameter[call[call[name[json].dumps, parameter[dictionary[[<ast.Constant object at 0x7da1b03994e0>, <ast.Constant object at 0x7da1b0399ea0>], [<ast.Name object at 0x7da1b039bee0>, <ast.Name object at 0x7da1b03991e0>]]]].encode, parameter[constant[utf-8]]]]] call[name[params]][constant[response_type]] assign[=] constant[code] call[name[headers].update, parameter[call[name[cls]._authorization_header, parameter[name[credentials]]]]] variable[request_elements] assign[=] call[name[core].RequestElements, parameter[name[url], name[method], name[params], name[headers], name[body]]] return[call[name[cls]._x_request_elements_filter, parameter[name[request_type], name[request_elements], name[credentials]]]]
keyword[def] identifier[create_request_elements] ( identifier[cls] , identifier[request_type] , identifier[credentials] , identifier[url] , identifier[method] = literal[string] , identifier[params] = keyword[None] , identifier[headers] = keyword[None] , identifier[body] = literal[string] , identifier[secret] = keyword[None] , identifier[redirect_uri] = literal[string] , identifier[scope] = literal[string] , identifier[csrf] = literal[string] , identifier[user_state] = literal[string] ): literal[string] identifier[headers] = identifier[headers] keyword[or] {} identifier[params] = identifier[params] keyword[or] {} identifier[consumer_key] = identifier[credentials] . identifier[consumer_key] keyword[or] literal[string] identifier[consumer_secret] = identifier[credentials] . identifier[consumer_secret] keyword[or] literal[string] identifier[token] = identifier[credentials] . identifier[token] keyword[or] literal[string] identifier[refresh_token] = identifier[credentials] . identifier[refresh_token] keyword[or] identifier[credentials] . identifier[token] keyword[or] literal[string] identifier[url] , identifier[base_params] = identifier[cls] . identifier[_split_url] ( identifier[url] ) identifier[params] . identifier[update] ( identifier[dict] ( identifier[base_params] )) keyword[if] identifier[request_type] == identifier[cls] . identifier[USER_AUTHORIZATION_REQUEST_TYPE] : keyword[if] identifier[consumer_key] keyword[and] identifier[redirect_uri] keyword[and] ( identifier[csrf] keyword[or] keyword[not] identifier[cls] . identifier[supports_csrf_protection] ): identifier[params] [ literal[string] ]= identifier[consumer_key] identifier[params] [ literal[string] ]= identifier[redirect_uri] identifier[params] [ literal[string] ]= identifier[scope] keyword[if] identifier[cls] . identifier[supports_user_state] : identifier[params] [ literal[string] ]= identifier[base64] . identifier[urlsafe_b64encode] ( identifier[json] . identifier[dumps] ( { literal[string] : identifier[csrf] , literal[string] : identifier[user_state] } ). identifier[encode] ( literal[string] ) ) keyword[else] : identifier[params] [ literal[string] ]= identifier[csrf] identifier[params] [ literal[string] ]= literal[string] identifier[headers] . identifier[update] ( identifier[cls] . identifier[_authorization_header] ( identifier[credentials] )) keyword[else] : keyword[raise] identifier[OAuth2Error] ( literal[string] literal[string] literal[string] ) keyword[elif] identifier[request_type] == identifier[cls] . identifier[ACCESS_TOKEN_REQUEST_TYPE] : keyword[if] identifier[consumer_key] keyword[and] identifier[consumer_secret] : identifier[params] [ literal[string] ]= identifier[token] identifier[params] [ literal[string] ]= identifier[consumer_key] identifier[params] [ literal[string] ]= identifier[consumer_secret] identifier[params] [ literal[string] ]= identifier[redirect_uri] identifier[params] [ literal[string] ]= literal[string] identifier[headers] . identifier[update] ( identifier[cls] . identifier[_authorization_header] ( identifier[credentials] )) keyword[else] : keyword[raise] identifier[OAuth2Error] ( literal[string] literal[string] literal[string] ) keyword[elif] identifier[request_type] == identifier[cls] . identifier[REFRESH_TOKEN_REQUEST_TYPE] : keyword[if] identifier[refresh_token] keyword[and] identifier[consumer_key] keyword[and] identifier[consumer_secret] : identifier[params] [ literal[string] ]= identifier[refresh_token] identifier[params] [ literal[string] ]= identifier[consumer_key] identifier[params] [ literal[string] ]= identifier[consumer_secret] identifier[params] [ literal[string] ]= literal[string] keyword[else] : keyword[raise] identifier[OAuth2Error] ( literal[string] literal[string] literal[string] ) keyword[elif] identifier[request_type] == identifier[cls] . identifier[PROTECTED_RESOURCE_REQUEST_TYPE] : keyword[if] identifier[credentials] . identifier[token_type] == identifier[cls] . identifier[BEARER] : identifier[headers] . identifier[update] ( { literal[string] : literal[string] . identifier[format] ( identifier[credentials] . identifier[token] )}) keyword[elif] identifier[token] : identifier[params] [ literal[string] ]= identifier[token] keyword[else] : keyword[raise] identifier[OAuth2Error] ( literal[string] literal[string] ) identifier[request_elements] = identifier[core] . identifier[RequestElements] ( identifier[url] , identifier[method] , identifier[params] , identifier[headers] , identifier[body] ) keyword[return] identifier[cls] . identifier[_x_request_elements_filter] ( identifier[request_type] , identifier[request_elements] , identifier[credentials] )
def create_request_elements(cls, request_type, credentials, url, method='GET', params=None, headers=None, body='', secret=None, redirect_uri='', scope='', csrf='', user_state=''): """ Creates |oauth2| request elements. """ headers = headers or {} params = params or {} consumer_key = credentials.consumer_key or '' consumer_secret = credentials.consumer_secret or '' token = credentials.token or '' refresh_token = credentials.refresh_token or credentials.token or '' # Separate url base and query parameters. (url, base_params) = cls._split_url(url) # Add params extracted from URL. params.update(dict(base_params)) if request_type == cls.USER_AUTHORIZATION_REQUEST_TYPE: # User authorization request. # TODO: Raise error for specific message for each missing argument. if consumer_key and redirect_uri and (csrf or not cls.supports_csrf_protection): params['client_id'] = consumer_key params['redirect_uri'] = redirect_uri params['scope'] = scope if cls.supports_user_state: params['state'] = base64.urlsafe_b64encode(json.dumps({'csrf': csrf, 'user_state': user_state}).encode('utf-8')) # depends on [control=['if'], data=[]] else: params['state'] = csrf params['response_type'] = 'code' # Add authorization header headers.update(cls._authorization_header(credentials)) # depends on [control=['if'], data=[]] else: raise OAuth2Error('Credentials with valid consumer_key and arguments redirect_uri, scope and state are required to create OAuth 2.0 user authorization request elements!') # depends on [control=['if'], data=[]] elif request_type == cls.ACCESS_TOKEN_REQUEST_TYPE: # Access token request. if consumer_key and consumer_secret: params['code'] = token params['client_id'] = consumer_key params['client_secret'] = consumer_secret params['redirect_uri'] = redirect_uri params['grant_type'] = 'authorization_code' # TODO: Check whether all providers accept it headers.update(cls._authorization_header(credentials)) # depends on [control=['if'], data=[]] else: raise OAuth2Error('Credentials with valid token, consumer_key, consumer_secret and argument redirect_uri are required to create OAuth 2.0 access token request elements!') # depends on [control=['if'], data=[]] elif request_type == cls.REFRESH_TOKEN_REQUEST_TYPE: # Refresh access token request. if refresh_token and consumer_key and consumer_secret: params['refresh_token'] = refresh_token params['client_id'] = consumer_key params['client_secret'] = consumer_secret params['grant_type'] = 'refresh_token' # depends on [control=['if'], data=[]] else: raise OAuth2Error('Credentials with valid refresh_token, consumer_key, consumer_secret are required to create OAuth 2.0 refresh token request elements!') # depends on [control=['if'], data=[]] elif request_type == cls.PROTECTED_RESOURCE_REQUEST_TYPE: # Protected resource request. # Add Authorization header. See: # http://tools.ietf.org/html/rfc6749#section-7.1 if credentials.token_type == cls.BEARER: # http://tools.ietf.org/html/rfc6750#section-2.1 headers.update({'Authorization': 'Bearer {0}'.format(credentials.token)}) # depends on [control=['if'], data=[]] elif token: params['access_token'] = token # depends on [control=['if'], data=[]] else: raise OAuth2Error('Credentials with valid token are required to create OAuth 2.0 protected resources request elements!') # depends on [control=['if'], data=[]] request_elements = core.RequestElements(url, method, params, headers, body) return cls._x_request_elements_filter(request_type, request_elements, credentials)
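The user-authorization branch above mostly assembles query parameters around a redirect URL. A minimal standalone sketch of just that step, using only the standard library (the endpoint, credentials, and the helper name build_authorization_url below are hypothetical, not part of the original class):

import base64
import json
from urllib.parse import urlencode

def build_authorization_url(authorize_endpoint, consumer_key, redirect_uri,
                            scope, csrf, user_state):
    # Mirrors the USER_AUTHORIZATION_REQUEST_TYPE branch: state carries the
    # CSRF token plus arbitrary user state, JSON-encoded then base64-encoded.
    state = base64.urlsafe_b64encode(
        json.dumps({'csrf': csrf, 'user_state': user_state}).encode('utf-8')
    ).decode('ascii')
    params = {'client_id': consumer_key, 'redirect_uri': redirect_uri,
              'scope': scope, 'state': state, 'response_type': 'code'}
    return '{0}?{1}'.format(authorize_endpoint, urlencode(params))

print(build_authorization_url('https://provider.example/oauth2/auth',
                              'my-client-id', 'https://app.example/callback',
                              'email', 'csrf-token-123', 'step=signup'))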
def namespace(ns_key): '''Construct a validation schema for a given namespace. Parameters ---------- ns_key : str Namespace key identifier (eg, 'beat' or 'segment_tut') Returns ------- schema : dict JSON schema of `namespace` ''' if ns_key not in __NAMESPACE__: raise NamespaceError('Unknown namespace: {:s}'.format(ns_key)) sch = copy.deepcopy(JAMS_SCHEMA['definitions']['SparseObservation']) for key in ['value', 'confidence']: try: sch['properties'][key] = __NAMESPACE__[ns_key][key] except KeyError: pass return sch
def function[namespace, parameter[ns_key]]: constant[Construct a validation schema for a given namespace. Parameters ---------- ns_key : str Namespace key identifier (eg, 'beat' or 'segment_tut') Returns ------- schema : dict JSON schema of `namespace` ] if compare[name[ns_key] <ast.NotIn object at 0x7da2590d7190> name[__NAMESPACE__]] begin[:] <ast.Raise object at 0x7da1b00e6680> variable[sch] assign[=] call[name[copy].deepcopy, parameter[call[call[name[JAMS_SCHEMA]][constant[definitions]]][constant[SparseObservation]]]] for taget[name[key]] in starred[list[[<ast.Constant object at 0x7da1b00e6530>, <ast.Constant object at 0x7da1b00e7f10>]]] begin[:] <ast.Try object at 0x7da1b00e4e50> return[name[sch]]
keyword[def] identifier[namespace] ( identifier[ns_key] ): literal[string] keyword[if] identifier[ns_key] keyword[not] keyword[in] identifier[__NAMESPACE__] : keyword[raise] identifier[NamespaceError] ( literal[string] . identifier[format] ( identifier[ns_key] )) identifier[sch] = identifier[copy] . identifier[deepcopy] ( identifier[JAMS_SCHEMA] [ literal[string] ][ literal[string] ]) keyword[for] identifier[key] keyword[in] [ literal[string] , literal[string] ]: keyword[try] : identifier[sch] [ literal[string] ][ identifier[key] ]= identifier[__NAMESPACE__] [ identifier[ns_key] ][ identifier[key] ] keyword[except] identifier[KeyError] : keyword[pass] keyword[return] identifier[sch]
def namespace(ns_key): """Construct a validation schema for a given namespace. Parameters ---------- ns_key : str Namespace key identifier (eg, 'beat' or 'segment_tut') Returns ------- schema : dict JSON schema of `namespace` """ if ns_key not in __NAMESPACE__: raise NamespaceError('Unknown namespace: {:s}'.format(ns_key)) # depends on [control=['if'], data=['ns_key']] sch = copy.deepcopy(JAMS_SCHEMA['definitions']['SparseObservation']) for key in ['value', 'confidence']: try: sch['properties'][key] = __NAMESPACE__[ns_key][key] # depends on [control=['try'], data=[]] except KeyError: pass # depends on [control=['except'], data=[]] # depends on [control=['for'], data=['key']] return sch
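__NAMESPACE__ and JAMS_SCHEMA are module-level tables in the JAMS library; the function copies the generic observation schema and overlays any per-namespace value/confidence definitions. A self-contained sketch with tiny stand-in tables (the stub contents are illustrative, not the real JAMS schema, and KeyError stands in for the library's NamespaceError):

import copy

JAMS_SCHEMA = {'definitions': {'SparseObservation': {
    'type': 'object',
    'properties': {'time': {'type': 'number'}, 'value': {}, 'confidence': {}}}}}
__NAMESPACE__ = {'beat': {'value': {'type': 'number'}}}  # no 'confidence' override

def namespace(ns_key):
    if ns_key not in __NAMESPACE__:
        raise KeyError('Unknown namespace: {:s}'.format(ns_key))
    sch = copy.deepcopy(JAMS_SCHEMA['definitions']['SparseObservation'])
    for key in ['value', 'confidence']:
        try:
            sch['properties'][key] = __NAMESPACE__[ns_key][key]
        except KeyError:
            pass  # namespace leaves this field generic
    return sch

print(namespace('beat')['properties']['value'])  # {'type': 'number'}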
def GetRemainder(self): """Method to get the remainder of the buffered XML. This method stops the parser, sets its state to End Of File and returns the input stream with what is left that the parser did not use. The implementation is not good, the parser certainly progressed past what's left in reader->input, and there is an allocation problem. Best would be to rewrite it differently. """ ret = libxml2mod.xmlTextReaderGetRemainder(self._o) if ret is None: raise treeError('xmlTextReaderGetRemainder() failed') __tmp = inputBuffer(_obj=ret) return __tmp
def function[GetRemainder, parameter[self]]: constant[Method to get the remainder of the buffered XML. This method stops the parser, sets its state to End Of File and returns the input stream with what is left that the parser did not use. The implementation is not good, the parser certainly progressed past what's left in reader->input, and there is an allocation problem. Best would be to rewrite it differently. ] variable[ret] assign[=] call[name[libxml2mod].xmlTextReaderGetRemainder, parameter[name[self]._o]] if compare[name[ret] is constant[None]] begin[:] <ast.Raise object at 0x7da1b1fa7e80> variable[__tmp] assign[=] call[name[inputBuffer], parameter[]] return[name[__tmp]]
keyword[def] identifier[GetRemainder] ( identifier[self] ): literal[string] identifier[ret] = identifier[libxml2mod] . identifier[xmlTextReaderGetRemainder] ( identifier[self] . identifier[_o] ) keyword[if] identifier[ret] keyword[is] keyword[None] : keyword[raise] identifier[treeError] ( literal[string] ) identifier[__tmp] = identifier[inputBuffer] ( identifier[_obj] = identifier[ret] ) keyword[return] identifier[__tmp]
def GetRemainder(self): """Method to get the remainder of the buffered XML. This method stops the parser, sets its state to End Of File and returns the input stream with what is left that the parser did not use. The implementation is not good, the parser certainly progressed past what's left in reader->input, and there is an allocation problem. Best would be to rewrite it differently. """ ret = libxml2mod.xmlTextReaderGetRemainder(self._o) if ret is None: raise treeError('xmlTextReaderGetRemainder() failed') # depends on [control=['if'], data=[]] __tmp = inputBuffer(_obj=ret) return __tmp
def _branch_name(cls, version): """Defines a mapping between versions and branches. In particular, `-dev` suffixed releases always live on master. Any other (modern) release lives in a branch. """ suffix = version.public[len(version.base_version):] components = version.base_version.split('.') + [suffix] if suffix == '' or suffix.startswith('rc'): # An un-suffixed, or suffixed-with-rc version is a release from a stable branch. return '{}.{}.x'.format(*components[:2]) elif suffix.startswith('.dev'): # Suffixed `dev` release version in master. return 'master' else: raise ValueError('Unparseable pants version number: {}'.format(version))
def function[_branch_name, parameter[cls, version]]: constant[Defines a mapping between versions and branches. In particular, `-dev` suffixed releases always live on master. Any other (modern) release lives in a branch. ] variable[suffix] assign[=] call[name[version].public][<ast.Slice object at 0x7da1b1e8c8b0>] variable[components] assign[=] binary_operation[call[name[version].base_version.split, parameter[constant[.]]] + list[[<ast.Name object at 0x7da1b22a5660>]]] if <ast.BoolOp object at 0x7da1b22a4c10> begin[:] return[call[constant[{}.{}.x].format, parameter[<ast.Starred object at 0x7da1b1e5df00>]]]
keyword[def] identifier[_branch_name] ( identifier[cls] , identifier[version] ): literal[string] identifier[suffix] = identifier[version] . identifier[public] [ identifier[len] ( identifier[version] . identifier[base_version] ):] identifier[components] = identifier[version] . identifier[base_version] . identifier[split] ( literal[string] )+[ identifier[suffix] ] keyword[if] identifier[suffix] == literal[string] keyword[or] identifier[suffix] . identifier[startswith] ( literal[string] ): keyword[return] literal[string] . identifier[format] (* identifier[components] [: literal[int] ]) keyword[elif] identifier[suffix] . identifier[startswith] ( literal[string] ): keyword[return] literal[string] keyword[else] : keyword[raise] identifier[ValueError] ( literal[string] . identifier[format] ( identifier[version] ))
def _branch_name(cls, version): """Defines a mapping between versions and branches. In particular, `-dev` suffixed releases always live on master. Any other (modern) release lives in a branch. """ suffix = version.public[len(version.base_version):] components = version.base_version.split('.') + [suffix] if suffix == '' or suffix.startswith('rc'): # An un-suffixed, or suffixed-with-rc version is a release from a stable branch. return '{}.{}.x'.format(*components[:2]) # depends on [control=['if'], data=[]] elif suffix.startswith('.dev'): # Suffixed `dev` release version in master. return 'master' # depends on [control=['if'], data=[]] else: raise ValueError('Unparseable pants version number: {}'.format(version))
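The version argument here behaves like a packaging.version.Version, which provides both public and base_version; under that assumption the mapping can be exercised directly as a free function:

from packaging.version import Version

def branch_name(version):
    suffix = version.public[len(version.base_version):]
    components = version.base_version.split('.') + [suffix]
    if suffix == '' or suffix.startswith('rc'):
        return '{}.{}.x'.format(*components[:2])   # stable branch
    elif suffix.startswith('.dev'):
        return 'master'                            # dev releases live on master
    raise ValueError('Unparseable pants version number: {}'.format(version))

print(branch_name(Version('1.26.0.dev1')))  # master
print(branch_name(Version('1.25.0rc2')))    # 1.25.x
print(branch_name(Version('1.25.1')))       # 1.25.x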
def pad_version(left, right): """Returns two sequences of the same length so that they can be compared. The shorter of the two arguments is lengthened by inserting extra zeros before non-integer components. The algorithm attempts to align character components.""" pair = vcmp(left), vcmp(right) mn, mx = min(pair, key=len), max(pair, key=len) for idx, c in enumerate(mx): try: a = mx[idx] b = mn[idx] if type(a) != type(b): mn.insert(idx, 0) except IndexError: if type(c) is int: mn.append(0) elif isinstance(c, six.string_types): mn.append('') else: raise Exception("pad_version failed (%s) (%s)" % (left, right)) return pair
def function[pad_version, parameter[left, right]]: constant[Returns two sequences of the same length so that they can be compared. The shorter of the two arguments is lengthened by inserting extra zeros before non-integer components. The algorithm attempts to align character components.] variable[pair] assign[=] tuple[[<ast.Call object at 0x7da18dc9af80>, <ast.Call object at 0x7da18dc99fc0>]] <ast.Tuple object at 0x7da18dc9b7f0> assign[=] tuple[[<ast.Call object at 0x7da18dc999c0>, <ast.Call object at 0x7da18dc9bd60>]] for taget[tuple[[<ast.Name object at 0x7da18dc984c0>, <ast.Name object at 0x7da18dc99000>]]] in starred[call[name[enumerate], parameter[name[mx]]]] begin[:] <ast.Try object at 0x7da18dc99270> return[name[pair]]
keyword[def] identifier[pad_version] ( identifier[left] , identifier[right] ): literal[string] identifier[pair] = identifier[vcmp] ( identifier[left] ), identifier[vcmp] ( identifier[right] ) identifier[mn] , identifier[mx] = identifier[min] ( identifier[pair] , identifier[key] = identifier[len] ), identifier[max] ( identifier[pair] , identifier[key] = identifier[len] ) keyword[for] identifier[idx] , identifier[c] keyword[in] identifier[enumerate] ( identifier[mx] ): keyword[try] : identifier[a] = identifier[mx] [ identifier[idx] ] identifier[b] = identifier[mn] [ identifier[idx] ] keyword[if] identifier[type] ( identifier[a] )!= identifier[type] ( identifier[b] ): identifier[mn] . identifier[insert] ( identifier[idx] , literal[int] ) keyword[except] identifier[IndexError] : keyword[if] identifier[type] ( identifier[c] ) keyword[is] identifier[int] : identifier[mn] . identifier[append] ( literal[int] ) keyword[elif] identifier[isinstance] ( identifier[c] , identifier[six] . identifier[string_types] ): identifier[mn] . identifier[append] ( literal[string] ) keyword[else] : keyword[raise] identifier[Exception] ( literal[string] %( identifier[left] , identifier[right] )) keyword[return] identifier[pair]
def pad_version(left, right): """Returns two sequences of the same length so that they can be compared. The shorter of the two arguments is lengthened by inserting extra zeros before non-integer components. The algorithm attempts to align character components.""" pair = (vcmp(left), vcmp(right)) (mn, mx) = (min(pair, key=len), max(pair, key=len)) for (idx, c) in enumerate(mx): try: a = mx[idx] b = mn[idx] if type(a) != type(b): mn.insert(idx, 0) # depends on [control=['if'], data=[]] # depends on [control=['try'], data=[]] except IndexError: if type(c) is int: mn.append(0) # depends on [control=['if'], data=[]] elif isinstance(c, six.string_types): mn.append('') # depends on [control=['if'], data=[]] else: raise Exception('pad_version failed (%s) (%s)' % (left, right)) # depends on [control=['except'], data=[]] # depends on [control=['for'], data=[]] return pair
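vcmp is defined elsewhere; from its use it evidently splits a version into a list of int and str components. With an assumed minimal vcmp, and reusing pad_version as defined above (which also needs import six), the zero-padding behaviour is easy to see:

import re

def vcmp(version):
    # Assumed helper: '1.2a' -> [1, 2, 'a'] (ints for digit runs, strings otherwise).
    return [int(p) if p.isdigit() else p
            for p in re.findall(r'\d+|[A-Za-z]+', str(version))]

left, right = pad_version('1.2a', '1.2.3a')
print(left)   # [1, 2, 0, 'a'] -- a zero was inserted so the 'a' components align
print(right)  # [1, 2, 3, 'a']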
def __get_depth(root): """ return 0 if unbalanced else depth + 1 """ if root is None: return 0 left = __get_depth(root.left) right = __get_depth(root.right) if abs(left-right) > 1 or -1 in [left, right]: return -1 return 1 + max(left, right)
def function[__get_depth, parameter[root]]: constant[ return 0 if unbalanced else depth + 1 ] if compare[name[root] is constant[None]] begin[:] return[constant[0]] variable[left] assign[=] call[name[__get_depth], parameter[name[root].left]] variable[right] assign[=] call[name[__get_depth], parameter[name[root].right]] if <ast.BoolOp object at 0x7da1b1ee90c0> begin[:] return[<ast.UnaryOp object at 0x7da1b1e9a500>] return[binary_operation[constant[1] + call[name[max], parameter[name[left], name[right]]]]]
keyword[def] identifier[__get_depth] ( identifier[root] ): literal[string] keyword[if] identifier[root] keyword[is] keyword[None] : keyword[return] literal[int] identifier[left] = identifier[__get_depth] ( identifier[root] . identifier[left] ) identifier[right] = identifier[__get_depth] ( identifier[root] . identifier[right] ) keyword[if] identifier[abs] ( identifier[left] - identifier[right] )> literal[int] keyword[or] - literal[int] keyword[in] [ identifier[left] , identifier[right] ]: keyword[return] - literal[int] keyword[return] literal[int] + identifier[max] ( identifier[left] , identifier[right] )
def __get_depth(root): """ return 0 if unbalanced else depth + 1 """ if root is None: return 0 # depends on [control=['if'], data=[]] left = __get_depth(root.left) right = __get_depth(root.right) if abs(left - right) > 1 or -1 in [left, right]: return -1 # depends on [control=['if'], data=[]] return 1 + max(left, right)
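A quick self-contained exercise of the recursion, with a minimal node type (the real tree class is only assumed to expose .left and .right); a return value of -1 propagates upward as soon as any subtree is unbalanced:

class Node(object):
    def __init__(self, left=None, right=None):
        self.left, self.right = left, right

def is_balanced(root):
    return __get_depth(root) != -1

print(is_balanced(Node(Node(), Node())))      # True: both subtrees have depth 1
print(is_balanced(Node(Node(Node(Node())))))  # False: depth-3 left chain, empty right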
def cython_enums(): """generate `enum: ZMQ_CONST` block for constant_enums.pxi""" lines = [] for name in all_names: if no_prefix(name): lines.append('enum: ZMQ_{0} "{0}"'.format(name)) else: lines.append('enum: ZMQ_{0}'.format(name)) return dict(ZMQ_ENUMS='\n '.join(lines))
def function[cython_enums, parameter[]]: constant[generate `enum: ZMQ_CONST` block for constant_enums.pxi] variable[lines] assign[=] list[[]] for taget[name[name]] in starred[name[all_names]] begin[:] if call[name[no_prefix], parameter[name[name]]] begin[:] call[name[lines].append, parameter[call[constant[enum: ZMQ_{0} "{0}"].format, parameter[name[name]]]]] return[call[name[dict], parameter[]]]
keyword[def] identifier[cython_enums] (): literal[string] identifier[lines] =[] keyword[for] identifier[name] keyword[in] identifier[all_names] : keyword[if] identifier[no_prefix] ( identifier[name] ): identifier[lines] . identifier[append] ( literal[string] . identifier[format] ( identifier[name] )) keyword[else] : identifier[lines] . identifier[append] ( literal[string] . identifier[format] ( identifier[name] )) keyword[return] identifier[dict] ( identifier[ZMQ_ENUMS] = literal[string] . identifier[join] ( identifier[lines] ))
def cython_enums(): """generate `enum: ZMQ_CONST` block for constant_enums.pxi""" lines = [] for name in all_names: if no_prefix(name): lines.append('enum: ZMQ_{0} "{0}"'.format(name)) # depends on [control=['if'], data=[]] else: lines.append('enum: ZMQ_{0}'.format(name)) # depends on [control=['for'], data=['name']] return dict(ZMQ_ENUMS='\n '.join(lines))
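all_names and no_prefix live elsewhere in the pyzmq constant generator; the quoted form `enum: ZMQ_X "X"` is Cython's external-name syntax, telling Cython that the C-level symbol carries no ZMQ_ prefix. With small stand-ins the output is easy to inspect (the stub predicate below is illustrative only):

def no_prefix(name):
    # Assumed stand-in: names whose C symbol lacks the ZMQ_ prefix (e.g. errnos).
    return name.startswith('E')

all_names = ['PUB', 'SUB', 'EAGAIN']
print(cython_enums()['ZMQ_ENUMS'])
# one 'enum: ZMQ_*' entry per name; EAGAIN comes out as: enum: ZMQ_EAGAIN "EAGAIN"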
def initiate(self, request): """ Initiates a device management request, such as reboot. In case of failure it throws APIException """ url = MgmtRequests.mgmtRequests r = self._apiClient.post(url, request) if r.status_code == 202: return r.json() else: raise ApiException(r)
def function[initiate, parameter[self, request]]: constant[ Initiates a device management request, such as reboot. In case of failure it throws APIException ] variable[url] assign[=] name[MgmtRequests].mgmtRequests variable[r] assign[=] call[name[self]._apiClient.post, parameter[name[url], name[request]]] if compare[name[r].status_code equal[==] constant[202]] begin[:] return[call[name[r].json, parameter[]]]
keyword[def] identifier[initiate] ( identifier[self] , identifier[request] ): literal[string] identifier[url] = identifier[MgmtRequests] . identifier[mgmtRequests] identifier[r] = identifier[self] . identifier[_apiClient] . identifier[post] ( identifier[url] , identifier[request] ) keyword[if] identifier[r] . identifier[status_code] == literal[int] : keyword[return] identifier[r] . identifier[json] () keyword[else] : keyword[raise] identifier[ApiException] ( identifier[r] )
def initiate(self, request): """ Initiates a device management request, such as reboot. In case of failure it throws APIException """ url = MgmtRequests.mgmtRequests r = self._apiClient.post(url, request) if r.status_code == 202: return r.json() # depends on [control=['if'], data=[]] else: raise ApiException(r)
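The method is a thin wrapper over an HTTP POST that treats 202 Accepted (request queued) as success. The same pattern sketched with requests against a hypothetical endpoint and auth scheme:

import requests

def initiate_request(base_url, payload, token):
    r = requests.post('{0}/mgmt/requests'.format(base_url), json=payload,
                      headers={'Authorization': 'Bearer {0}'.format(token)})
    if r.status_code == 202:
        return r.json()  # body describes the queued management request
    raise RuntimeError('request rejected: {0} {1}'.format(r.status_code, r.text))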
def update(self, **kwargs): """When setting useProxyServer to enable we need to supply proxyServerPool value as well """ if 'useProxyServer' in kwargs and kwargs['useProxyServer'] == 'enabled': if 'proxyServerPool' not in kwargs: error = 'Missing proxyServerPool parameter value.' raise MissingUpdateParameter(error) if hasattr(self, 'useProxyServer'): if getattr(self, 'useProxyServer') == 'enabled' and 'proxyServerPool' not in self.__dict__: error = 'Missing proxyServerPool parameter value.' raise MissingUpdateParameter(error) self._update(**kwargs) return self
def function[update, parameter[self]]: constant[When setting useProxyServer to enable we need to supply proxyServerPool value as well ] if <ast.BoolOp object at 0x7da20e962dd0> begin[:] if compare[constant[proxyServerPool] <ast.NotIn object at 0x7da2590d7190> name[kwargs]] begin[:] variable[error] assign[=] constant[Missing proxyServerPool parameter value.] <ast.Raise object at 0x7da20e962680> if call[name[hasattr], parameter[name[self], constant[useProxyServer]]] begin[:] if <ast.BoolOp object at 0x7da20e9603a0> begin[:] variable[error] assign[=] constant[Missing proxyServerPool parameter value.] <ast.Raise object at 0x7da20e9608b0> call[name[self]._update, parameter[]] return[name[self]]
keyword[def] identifier[update] ( identifier[self] ,** identifier[kwargs] ): literal[string] keyword[if] literal[string] keyword[in] identifier[kwargs] keyword[and] identifier[kwargs] [ literal[string] ]== literal[string] : keyword[if] literal[string] keyword[not] keyword[in] identifier[kwargs] : identifier[error] = literal[string] keyword[raise] identifier[MissingUpdateParameter] ( identifier[error] ) keyword[if] identifier[hasattr] ( identifier[self] , literal[string] ): keyword[if] identifier[getattr] ( identifier[self] , literal[string] )== literal[string] keyword[and] literal[string] keyword[not] keyword[in] identifier[self] . identifier[__dict__] : identifier[error] = literal[string] keyword[raise] identifier[MissingUpdateParameter] ( identifier[error] ) identifier[self] . identifier[_update] (** identifier[kwargs] ) keyword[return] identifier[self]
def update(self, **kwargs): """When setting useProxyServer to enable we need to supply proxyServerPool value as well """ if 'useProxyServer' in kwargs and kwargs['useProxyServer'] == 'enabled': if 'proxyServerPool' not in kwargs: error = 'Missing proxyServerPool parameter value.' raise MissingUpdateParameter(error) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] if hasattr(self, 'useProxyServer'): if getattr(self, 'useProxyServer') == 'enabled' and 'proxyServerPool' not in self.__dict__: error = 'Missing proxyServerPool parameter value.' raise MissingUpdateParameter(error) # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] self._update(**kwargs) return self
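The guard enforces a cross-parameter invariant: proxying may only be enabled when a pool is known, whether the values arrive in kwargs or already sit on the instance. One way to express that invariant in isolation, keeping the original's two separate checks (the helper name is illustrative):

def check_proxy_invariant(current, updates):
    # current: attribute dict already on the object; updates: incoming kwargs.
    for source in (updates, current):
        if source.get('useProxyServer') == 'enabled' and 'proxyServerPool' not in source:
            raise ValueError('Missing proxyServerPool parameter value.')

check_proxy_invariant({}, {'useProxyServer': 'enabled',
                           'proxyServerPool': '/Common/proxy-pool'})  # passes
check_proxy_invariant({'useProxyServer': 'enabled'}, {})  # raises: enabled, no pool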
def auto_convert_numeric_string_cell(flagable, cell_str, position, worksheet, flags, units): ''' Handles the string containing numeric case of cell and attempts auto-conversion for auto_convert_cell. ''' def numerify_str(cell_str, flag_level='minor', flag_text=""): ''' Differentiates between int and float strings. Expects a numeric string. ''' if re.search(allregex.integer_regex, cell_str): flagable.flag_change(flags, flag_level, position, worksheet) return int(cell_str) else: flagable.flag_change(flags, flag_level, worksheet, position) return float(cell_str) def numerify_percentage_str(cell_str, flag_level='minor', flag_text=""): flagable.flag_change(flags, flag_level, position, worksheet) return float(cell_str) / 100 def convert_to_int_or_float(cell_str, flag_level='minor', flag_text=""): if not cell_str: conversion = 0 flagable.flag_change(flags, 'warning', position, worksheet, flagable.FLAGS['empty-to-zero-string']) if re.search(allregex.numerical_regex, cell_str): conversion = numerify_str(cell_str, flag_level, flag_text) # Comma separated? elif re.search(allregex.comma_sep_numerical_regex, cell_str): smashed_cell = ''.join(cell_str.split(',')) conversion = numerify_str(smashed_cell, flag_level, flag_text) # Ends in percentage sign elif re.search(allregex.percent_numerical_regex, cell_str): cell_str = allregex.percent_numerical_regex.search(cell_str).group(1) conversion = numerify_percentage_str(cell_str, flag_level, flag_text) # Ends in + or - sign (estimate)? elif re.search(allregex.estimate_numerical_regex, cell_str): cell_str = cell_str[:-1].replace(",","") conversion = numerify_str(cell_str, flag_level, flag_text) # Begins with money symbol? elif re.search(allregex.begins_with_monetary_symbol_regex, cell_str): symbol = cell_str[0] cell_str = cell_str[1:] try: conversion = convert_to_int_or_float(cell_str, 'interpreted', flagable.FLAGS['monetary-removal']) if re.search(allregex.contains_dollar_symbol_regex, symbol): units[position] = UNITS_DOLLAR elif re.search(allregex.contains_pound_symbol_regex, symbol): units[position] = UNITS_POUND elif re.search(allregex.contains_euro_symbol_regex, symbol): units[position] = UNITS_EURO except ValueError: conversion = cell_str flagable.flag_change(flags, 'warning', position, worksheet, flagable.FLAGS['failed-monetary-convert']) # Number ending in 'k'? elif re.search(allregex.ends_with_thousands_scaling_regex, cell_str): cell_str = cell_str.rstrip()[:-1] try: conversion = 1000*convert_to_int_or_float(cell_str, 'interpreted', flagable.FLAGS['thousands-convert']) except ValueError: flagable.flag_change(flags, 'warning', position, worksheet, flagable.FLAGS['failed-thousands-convert']) # Number ending in 'M' or 'MM'? elif re.search(allregex.ends_with_millions_scaling_regex, cell_str): if cell_str[-2] == "M": cell_str = cell_str[:-2] else: cell_str = cell_str[:-1] try: conversion = 1000000*convert_to_int_or_float(cell_str, 'interpreted', flagable.FLAGS['millions-convert']) except ValueError: flagable.flag_change(flags, 'warning', position, worksheet, flagable.FLAGS['failed-millions-convert']) else: raise ValueError("Cannot convert cell") return conversion # Try converting try: return convert_to_int_or_float(cell_str) # Couldn't convert? except ValueError: flagable.flag_change(flags, 'minor', position, worksheet, flagable.FLAGS['failed-convert-numeric-string']) return cell_str
def function[auto_convert_numeric_string_cell, parameter[flagable, cell_str, position, worksheet, flags, units]]: constant[ Handles the string containing numeric case of cell and attempts auto-conversion for auto_convert_cell. ] def function[numerify_str, parameter[cell_str, flag_level, flag_text]]: constant[ Differentiates between int and float strings. Expects a numeric string. ] if call[name[re].search, parameter[name[allregex].integer_regex, name[cell_str]]] begin[:] call[name[flagable].flag_change, parameter[name[flags], name[flag_level], name[position], name[worksheet]]] return[call[name[int], parameter[name[cell_str]]]] def function[numerify_percentage_str, parameter[cell_str, flag_level, flag_text]]: call[name[flagable].flag_change, parameter[name[flags], name[flag_level], name[position], name[worksheet]]] return[binary_operation[call[name[float], parameter[name[cell_str]]] / constant[100]]] def function[convert_to_int_or_float, parameter[cell_str, flag_level, flag_text]]: if <ast.UnaryOp object at 0x7da18f00c580> begin[:] variable[conversion] assign[=] constant[0] call[name[flagable].flag_change, parameter[name[flags], constant[warning], name[position], name[worksheet], call[name[flagable].FLAGS][constant[empty-to-zero-string]]]] if call[name[re].search, parameter[name[allregex].numerical_regex, name[cell_str]]] begin[:] variable[conversion] assign[=] call[name[numerify_str], parameter[name[cell_str], name[flag_level], name[flag_text]]] return[name[conversion]] <ast.Try object at 0x7da2044c3550>
keyword[def] identifier[auto_convert_numeric_string_cell] ( identifier[flagable] , identifier[cell_str] , identifier[position] , identifier[worksheet] , identifier[flags] , identifier[units] ): literal[string] keyword[def] identifier[numerify_str] ( identifier[cell_str] , identifier[flag_level] = literal[string] , identifier[flag_text] = literal[string] ): literal[string] keyword[if] identifier[re] . identifier[search] ( identifier[allregex] . identifier[integer_regex] , identifier[cell_str] ): identifier[flagable] . identifier[flag_change] ( identifier[flags] , identifier[flag_level] , identifier[position] , identifier[worksheet] ) keyword[return] identifier[int] ( identifier[cell_str] ) keyword[else] : identifier[flagable] . identifier[flag_change] ( identifier[flags] , identifier[flag_level] , identifier[worksheet] , identifier[position] ) keyword[return] identifier[float] ( identifier[cell_str] ) keyword[def] identifier[numerify_percentage_str] ( identifier[cell_str] , identifier[flag_level] = literal[string] , identifier[flag_text] = literal[string] ): identifier[flagable] . identifier[flag_change] ( identifier[flags] , identifier[flag_level] , identifier[position] , identifier[worksheet] ) keyword[return] identifier[float] ( identifier[cell_str] )/ literal[int] keyword[def] identifier[convert_to_int_or_float] ( identifier[cell_str] , identifier[flag_level] = literal[string] , identifier[flag_text] = literal[string] ): keyword[if] keyword[not] identifier[cell_str] : identifier[conversion] = literal[int] identifier[flagable] . identifier[flag_change] ( identifier[flags] , literal[string] , identifier[position] , identifier[worksheet] , identifier[flagable] . identifier[FLAGS] [ literal[string] ]) keyword[if] identifier[re] . identifier[search] ( identifier[allregex] . identifier[numerical_regex] , identifier[cell_str] ): identifier[conversion] = identifier[numerify_str] ( identifier[cell_str] , identifier[flag_level] , identifier[flag_text] ) keyword[elif] identifier[re] . identifier[search] ( identifier[allregex] . identifier[comma_sep_numerical_regex] , identifier[cell_str] ): identifier[smashed_cell] = literal[string] . identifier[join] ( identifier[cell_str] . identifier[split] ( literal[string] )) identifier[conversion] = identifier[numerify_str] ( identifier[smashed_cell] , identifier[flag_level] , identifier[flag_text] ) keyword[elif] identifier[re] . identifier[search] ( identifier[allregex] . identifier[percent_numerical_regex] , identifier[cell_str] ): identifier[cell_str] = identifier[allregex] . identifier[percent_numerical_regex] . identifier[search] ( identifier[cell_str] ). identifier[group] ( literal[int] ) identifier[conversion] = identifier[numerify_percentage_str] ( identifier[cell_str] , identifier[flag_level] , identifier[flag_text] ) keyword[elif] identifier[re] . identifier[search] ( identifier[allregex] . identifier[estimate_numerical_regex] , identifier[cell_str] ): identifier[cell_str] = identifier[cell_str] [:- literal[int] ]. identifier[replace] ( literal[string] , literal[string] ) identifier[conversion] = identifier[numerify_str] ( identifier[cell_str] , identifier[flag_level] , identifier[flag_text] ) keyword[elif] identifier[re] . identifier[search] ( identifier[allregex] . identifier[begins_with_monetary_symbol_regex] , identifier[cell_str] ): identifier[symbol] = identifier[cell_str] [ literal[int] ] identifier[cell_str] = identifier[cell_str] [ literal[int] :] keyword[try] : identifier[conversion] = identifier[convert_to_int_or_float] ( identifier[cell_str] , literal[string] , identifier[flagable] . identifier[FLAGS] [ literal[string] ]) keyword[if] identifier[re] . identifier[search] ( identifier[allregex] . identifier[contains_dollar_symbol_regex] , identifier[symbol] ): identifier[units] [ identifier[position] ]= identifier[UNITS_DOLLAR] keyword[elif] identifier[re] . identifier[search] ( identifier[allregex] . identifier[contains_pound_symbol_regex] , identifier[symbol] ): identifier[units] [ identifier[position] ]= identifier[UNITS_POUND] keyword[elif] identifier[re] . identifier[search] ( identifier[allregex] . identifier[contains_euro_symbol_regex] , identifier[symbol] ): identifier[units] [ identifier[position] ]= identifier[UNITS_EURO] keyword[except] identifier[ValueError] : identifier[conversion] = identifier[cell_str] identifier[flagable] . identifier[flag_change] ( identifier[flags] , literal[string] , identifier[position] , identifier[worksheet] , identifier[flagable] . identifier[FLAGS] [ literal[string] ]) keyword[elif] identifier[re] . identifier[search] ( identifier[allregex] . identifier[ends_with_thousands_scaling_regex] , identifier[cell_str] ): identifier[cell_str] = identifier[cell_str] . identifier[rstrip] ()[:- literal[int] ] keyword[try] : identifier[conversion] = literal[int] * identifier[convert_to_int_or_float] ( identifier[cell_str] , literal[string] , identifier[flagable] . identifier[FLAGS] [ literal[string] ]) keyword[except] identifier[ValueError] : identifier[flagable] . identifier[flag_change] ( identifier[flags] , literal[string] , identifier[position] , identifier[worksheet] , identifier[flagable] . identifier[FLAGS] [ literal[string] ]) keyword[elif] identifier[re] . identifier[search] ( identifier[allregex] . identifier[ends_with_millions_scaling_regex] , identifier[cell_str] ): keyword[if] identifier[cell_str] [- literal[int] ]== literal[string] : identifier[cell_str] = identifier[cell_str] [:- literal[int] ] keyword[else] : identifier[cell_str] = identifier[cell_str] [:- literal[int] ] keyword[try] : identifier[conversion] = literal[int] * identifier[convert_to_int_or_float] ( identifier[cell_str] , literal[string] , identifier[flagable] . identifier[FLAGS] [ literal[string] ]) keyword[except] identifier[ValueError] : identifier[flagable] . identifier[flag_change] ( identifier[flags] , literal[string] , identifier[position] , identifier[worksheet] , identifier[flagable] . identifier[FLAGS] [ literal[string] ]) keyword[else] : keyword[raise] identifier[ValueError] ( literal[string] ) keyword[return] identifier[conversion] keyword[try] : keyword[return] identifier[convert_to_int_or_float] ( identifier[cell_str] ) keyword[except] identifier[ValueError] : identifier[flagable] . identifier[flag_change] ( identifier[flags] , literal[string] , identifier[position] , identifier[worksheet] , identifier[flagable] . identifier[FLAGS] [ literal[string] ]) keyword[return] identifier[cell_str]
def auto_convert_numeric_string_cell(flagable, cell_str, position, worksheet, flags, units): """ Handles the string containing numeric case of cell and attempts auto-conversion for auto_convert_cell. """ def numerify_str(cell_str, flag_level='minor', flag_text=''): """ Differentiates between int and float strings. Expects a numeric string. """ if re.search(allregex.integer_regex, cell_str): flagable.flag_change(flags, flag_level, position, worksheet) return int(cell_str) # depends on [control=['if'], data=[]] else: flagable.flag_change(flags, flag_level, worksheet, position) return float(cell_str) def numerify_percentage_str(cell_str, flag_level='minor', flag_text=''): flagable.flag_change(flags, flag_level, position, worksheet) return float(cell_str) / 100 def convert_to_int_or_float(cell_str, flag_level='minor', flag_text=''): if not cell_str: conversion = 0 flagable.flag_change(flags, 'warning', position, worksheet, flagable.FLAGS['empty-to-zero-string']) # depends on [control=['if'], data=[]] if re.search(allregex.numerical_regex, cell_str): conversion = numerify_str(cell_str, flag_level, flag_text) # depends on [control=['if'], data=[]] # Comma separated? elif re.search(allregex.comma_sep_numerical_regex, cell_str): smashed_cell = ''.join(cell_str.split(',')) conversion = numerify_str(smashed_cell, flag_level, flag_text) # depends on [control=['if'], data=[]] # Ends in percentage sign elif re.search(allregex.percent_numerical_regex, cell_str): cell_str = allregex.percent_numerical_regex.search(cell_str).group(1) conversion = numerify_percentage_str(cell_str, flag_level, flag_text) # depends on [control=['if'], data=[]] # Ends in + or - sign (estimate)? elif re.search(allregex.estimate_numerical_regex, cell_str): cell_str = cell_str[:-1].replace(',', '') conversion = numerify_str(cell_str, flag_level, flag_text) # depends on [control=['if'], data=[]] # Begins with money symbol? elif re.search(allregex.begins_with_monetary_symbol_regex, cell_str): symbol = cell_str[0] cell_str = cell_str[1:] try: conversion = convert_to_int_or_float(cell_str, 'interpreted', flagable.FLAGS['monetary-removal']) if re.search(allregex.contains_dollar_symbol_regex, symbol): units[position] = UNITS_DOLLAR # depends on [control=['if'], data=[]] elif re.search(allregex.contains_pound_symbol_regex, symbol): units[position] = UNITS_POUND # depends on [control=['if'], data=[]] elif re.search(allregex.contains_euro_symbol_regex, symbol): units[position] = UNITS_EURO # depends on [control=['if'], data=[]] # depends on [control=['try'], data=[]] except ValueError: conversion = cell_str flagable.flag_change(flags, 'warning', position, worksheet, flagable.FLAGS['failed-monetary-convert']) # depends on [control=['except'], data=[]] # depends on [control=['if'], data=[]] # Number ending in 'k'? elif re.search(allregex.ends_with_thousands_scaling_regex, cell_str): cell_str = cell_str.rstrip()[:-1] try: conversion = 1000 * convert_to_int_or_float(cell_str, 'interpreted', flagable.FLAGS['thousands-convert']) # depends on [control=['try'], data=[]] except ValueError: flagable.flag_change(flags, 'warning', position, worksheet, flagable.FLAGS['failed-thousands-convert']) # depends on [control=['except'], data=[]] # depends on [control=['if'], data=[]] # Number ending in 'M' or 'MM'? elif re.search(allregex.ends_with_millions_scaling_regex, cell_str): if cell_str[-2] == 'M': cell_str = cell_str[:-2] # depends on [control=['if'], data=[]] else: cell_str = cell_str[:-1] try: conversion = 1000000 * convert_to_int_or_float(cell_str, 'interpreted', flagable.FLAGS['millions-convert']) # depends on [control=['try'], data=[]] except ValueError: flagable.flag_change(flags, 'warning', position, worksheet, flagable.FLAGS['failed-millions-convert']) # depends on [control=['except'], data=[]] # depends on [control=['if'], data=[]] else: raise ValueError('Cannot convert cell') return conversion # Try converting try: return convert_to_int_or_float(cell_str) # depends on [control=['try'], data=[]] # Couldn't convert? except ValueError: flagable.flag_change(flags, 'minor', position, worksheet, flagable.FLAGS['failed-convert-numeric-string']) return cell_str # depends on [control=['except'], data=[]]
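Stripped of the flagging machinery, the conversion rules above reduce to a small normaliser. A standalone approximation for intuition (the regexes in allregex are replaced by simplified checks, so edge cases will differ):

import re

def to_number(cell):
    cell = cell.strip()
    scale = 1
    if cell[:1] in ('$', '£', '€'):            # leading money symbol
        cell = cell[1:]
    if cell.endswith('%'):                     # trailing percent sign
        return float(cell[:-1].replace(',', '')) / 100
    if cell.endswith(('k', 'K')):              # thousands suffix
        cell, scale = cell[:-1], 1000
    elif cell.endswith('M'):                   # millions suffix, 'M' or 'MM'
        cell, scale = cell.rstrip('M'), 1000000
    cell = cell.rstrip('+-').replace(',', '')  # estimates and separators
    num = int(cell) if re.fullmatch(r'-?\d+', cell) else float(cell)
    return num * scale

for s in ['1,234', '12.5%', '$3M', '40k', '500+']:
    print(s, '->', to_number(s))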
def data(self): """ Return the raw data block which makes up this record as a bytestring. @rtype str @return A string that is a copy of the buffer that makes up this record. """ return self._buf[self.offset():self.offset() + self.size()]
def function[data, parameter[self]]: constant[ Return the raw data block which makes up this record as a bytestring. @rtype str @return A string that is a copy of the buffer that makes up this record. ] return[call[name[self]._buf][<ast.Slice object at 0x7da1b20e7010>]]
keyword[def] identifier[data] ( identifier[self] ): literal[string] keyword[return] identifier[self] . identifier[_buf] [ identifier[self] . identifier[offset] (): identifier[self] . identifier[offset] ()+ identifier[self] . identifier[size] ()]
def data(self): """ Return the raw data block which makes up this record as a bytestring. @rtype str @return A string that is a copy of the buffer that makes up this record. """ return self._buf[self.offset():self.offset() + self.size()]
def list_set_indent(lst: list, indent: int=1): """recurs into list for indentation""" for i in lst: if isinstance(i, indentable): i.set_indent(indent) if isinstance(i, list): list_set_indent(i, indent)
def function[list_set_indent, parameter[lst, indent]]: constant[recurs into list for indentation] for taget[name[i]] in starred[name[lst]] begin[:] if call[name[isinstance], parameter[name[i], name[indentable]]] begin[:] call[name[i].set_indent, parameter[name[indent]]] if call[name[isinstance], parameter[name[i], name[list]]] begin[:] call[name[list_set_indent], parameter[name[i], name[indent]]]
keyword[def] identifier[list_set_indent] ( identifier[lst] : identifier[list] , identifier[indent] : identifier[int] = literal[int] ): literal[string] keyword[for] identifier[i] keyword[in] identifier[lst] : keyword[if] identifier[isinstance] ( identifier[i] , identifier[indentable] ): identifier[i] . identifier[set_indent] ( identifier[indent] ) keyword[if] identifier[isinstance] ( identifier[i] , identifier[list] ): identifier[list_set_indent] ( identifier[i] , identifier[indent] )
def list_set_indent(lst: list, indent: int=1): """recurs into list for indentation""" for i in lst: if isinstance(i, indentable): i.set_indent(indent) # depends on [control=['if'], data=[]] if isinstance(i, list): list_set_indent(i, indent) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['i']]
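indentable is a class defined elsewhere; with a minimal stand-in the recursive walk can be verified. Note that it applies the same indent at every depth rather than increasing it per nesting level:

class Indentable(object):
    # Assumed stand-in for the `indentable` base class referenced above.
    def __init__(self):
        self.indent = 0
    def set_indent(self, indent):
        self.indent = indent

indentable = Indentable  # the name the isinstance() check expects
items = [Indentable(), [Indentable(), [Indentable()]]]
list_set_indent(items, indent=4)
print(items[1][1][0].indent)  # 4 -- the recursion reached the innermost element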
def _max_of_integrand(t_val, f, g, inverse_time=None, return_log=False): ''' Evaluates max_tau f(t+tau)*g(tau) or max_tau f(t-tau)g(tau) if inverse time is TRUE Parameters ----------- t_val : double Time point f : Interpolation object First multiplier in convolution g : Interpolation object Second multiplier in convolution inverse_time : bool, None time direction. If True, then the f(t-tau)*g(tau) is calculated, otherwise, f(t+tau)*g(tau) return_log : bool If True, the logarithm will be returned Returns ------- FG : Distribution The function to be integrated as Distribution object (interpolator) ''' # return log is always True FG = _convolution_integrand(t_val, f, g, inverse_time, return_log=True) if FG == ttconf.BIG_NUMBER: res = ttconf.BIG_NUMBER, 0 else: X = FG.x[FG.y.argmin()] Y = FG.y.min() res = Y, X if not return_log: res[0] = np.log(res[0]) return res
def function[_max_of_integrand, parameter[t_val, f, g, inverse_time, return_log]]: constant[ Evaluates max_tau f(t+tau)*g(tau) or max_tau f(t-tau)g(tau) if inverse time is TRUE Parameters ----------- t_val : double Time point f : Interpolation object First multiplier in convolution g : Interpolation object Second multiplier in convolution inverse_time : bool, None time direction. If True, then the f(t-tau)*g(tau) is calculated, otherwise, f(t+tau)*g(tau) return_log : bool If True, the logarithm will be returned Returns ------- FG : Distribution The function to be integrated as Distribution object (interpolator) ] variable[FG] assign[=] call[name[_convolution_integrand], parameter[name[t_val], name[f], name[g], name[inverse_time]]] if compare[name[FG] equal[==] name[ttconf].BIG_NUMBER] begin[:] variable[res] assign[=] tuple[[<ast.Attribute object at 0x7da1b02c6800>, <ast.Constant object at 0x7da1b02c6aa0>]] if <ast.UnaryOp object at 0x7da18f00d330> begin[:] call[name[res]][constant[0]] assign[=] call[name[np].log, parameter[call[name[res]][constant[0]]]] return[name[res]]
keyword[def] identifier[_max_of_integrand] ( identifier[t_val] , identifier[f] , identifier[g] , identifier[inverse_time] = keyword[None] , identifier[return_log] = keyword[False] ): literal[string] identifier[FG] = identifier[_convolution_integrand] ( identifier[t_val] , identifier[f] , identifier[g] , identifier[inverse_time] , identifier[return_log] = keyword[True] ) keyword[if] identifier[FG] == identifier[ttconf] . identifier[BIG_NUMBER] : identifier[res] =[ identifier[ttconf] . identifier[BIG_NUMBER] , literal[int] ] keyword[else] : identifier[X] = identifier[FG] . identifier[x] [ identifier[FG] . identifier[y] . identifier[argmin] ()] identifier[Y] = identifier[FG] . identifier[y] . identifier[min] () identifier[res] =[ identifier[Y] , identifier[X] ] keyword[if] keyword[not] identifier[return_log] : identifier[res] [ literal[int] ]= identifier[np] . identifier[log] ( identifier[res] [ literal[int] ]) keyword[return] identifier[res]
def _max_of_integrand(t_val, f, g, inverse_time=None, return_log=False): """ Evaluates max_tau f(t+tau)*g(tau) or max_tau f(t-tau)g(tau) if inverse time is TRUE Parameters ----------- t_val : double Time point f : Interpolation object First multiplier in convolution g : Interpolation object Second multiplier in convolution inverse_time : bool, None time direction. If True, then the f(t-tau)*g(tau) is calculated, otherwise, f(t+tau)*g(tau) return_log : bool If True, the logarithm will be returned Returns ------- FG : Distribution The function to be integrated as Distribution object (interpolator) """ # return log is always True FG = _convolution_integrand(t_val, f, g, inverse_time, return_log=True) if FG == ttconf.BIG_NUMBER: res = [ttconf.BIG_NUMBER, 0] # depends on [control=['if'], data=[]] else: X = FG.x[FG.y.argmin()] Y = FG.y.min() res = [Y, X] if not return_log: res[0] = np.log(res[0]) # depends on [control=['if'], data=[]] return res
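Since FG above is built with return_log=True, FG.y appears to hold the integrand on a (negative) log scale, which is why y.argmin() locates the maximum; a minimal sketch of that identity with a toy integrand (the grid and function below are illustrative, not from the source):
import numpy as np
x = np.linspace(-3.0, 3.0, 601)
f = np.exp(-(x - 1.0) ** 2)  # toy integrand peaking at x = 1
# minimising -log f(x) selects the same grid point as maximising f(x)
assert np.argmin(-np.log(f)) == np.argmax(f)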
def get_entropy(hsm, iterations, entropy_ratio): """ Read entropy from YubiHSM and feed it to Linux as entropy using ioctl() syscall. """ fd = os.open("/dev/random", os.O_WRONLY) # struct rand_pool_info { # int entropy_count; # int buf_size; # __u32 buf[0]; # }; fmt = 'ii%is' % (pyhsm.defines.YSM_MAX_PKT_SIZE - 1) for _ in xrange(iterations): rnd = hsm.random(pyhsm.defines.YSM_MAX_PKT_SIZE - 1) this = struct.pack(fmt, entropy_ratio * len(rnd), len(rnd), rnd) fcntl.ioctl(fd, RNDADDENTROPY, this) os.close(fd)
def function[get_entropy, parameter[hsm, iterations, entropy_ratio]]: constant[ Read entropy from YubiHSM and feed it to Linux as entropy using ioctl() syscall. ] variable[fd] assign[=] call[name[os].open, parameter[constant[/dev/random], name[os].O_WRONLY]] variable[fmt] assign[=] binary_operation[constant[ii%is] <ast.Mod object at 0x7da2590d6920> binary_operation[name[pyhsm].defines.YSM_MAX_PKT_SIZE - constant[1]]] for taget[name[_]] in starred[call[name[xrange], parameter[name[iterations]]]] begin[:] variable[rnd] assign[=] call[name[hsm].random, parameter[binary_operation[name[pyhsm].defines.YSM_MAX_PKT_SIZE - constant[1]]]] variable[this] assign[=] call[name[struct].pack, parameter[name[fmt], binary_operation[name[entropy_ratio] * call[name[len], parameter[name[rnd]]]], call[name[len], parameter[name[rnd]]], name[rnd]]] call[name[fcntl].ioctl, parameter[name[fd], name[RNDADDENTROPY], name[this]]] call[name[os].close, parameter[name[fd]]]
keyword[def] identifier[get_entropy] ( identifier[hsm] , identifier[iterations] , identifier[entropy_ratio] ): literal[string] identifier[fd] = identifier[os] . identifier[open] ( literal[string] , identifier[os] . identifier[O_WRONLY] ) identifier[fmt] = literal[string] %( identifier[pyhsm] . identifier[defines] . identifier[YSM_MAX_PKT_SIZE] - literal[int] ) keyword[for] identifier[_] keyword[in] identifier[xrange] ( identifier[iterations] ): identifier[rnd] = identifier[hsm] . identifier[random] ( identifier[pyhsm] . identifier[defines] . identifier[YSM_MAX_PKT_SIZE] - literal[int] ) identifier[this] = identifier[struct] . identifier[pack] ( identifier[fmt] , identifier[entropy_ratio] * identifier[len] ( identifier[rnd] ), identifier[len] ( identifier[rnd] ), identifier[rnd] ) identifier[fcntl] . identifier[ioctl] ( identifier[fd] , identifier[RNDADDENTROPY] , identifier[this] ) identifier[os] . identifier[close] ( identifier[fd] )
def get_entropy(hsm, iterations, entropy_ratio): """ Read entropy from YubiHSM and feed it to Linux as entropy using ioctl() syscall. """ fd = os.open('/dev/random', os.O_WRONLY) # struct rand_pool_info { # int entropy_count; # int buf_size; # __u32 buf[0]; # }; fmt = 'ii%is' % (pyhsm.defines.YSM_MAX_PKT_SIZE - 1) for _ in xrange(iterations): rnd = hsm.random(pyhsm.defines.YSM_MAX_PKT_SIZE - 1) this = struct.pack(fmt, entropy_ratio * len(rnd), len(rnd), rnd) fcntl.ioctl(fd, RNDADDENTROPY, this) # depends on [control=['for'], data=[]] os.close(fd)
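The RNDADDENTROPY request number used above comes from an enclosing scope; a sketch of how its value can be derived, assuming the standard Linux _IOW('R', 0x03, int[2]) encoding from linux/random.h:
# dir (write = 1) << 30 | size (two ints = 8 bytes) << 16 | type ('R') << 8 | nr (0x03)
RNDADDENTROPY = (1 << 30) | (8 << 16) | (ord('R') << 8) | 0x03
assert RNDADDENTROPY == 0x40085203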
def init_fundamental_types(self): """Registers all fundamental typekind handlers""" for _id in range(2, 25): setattr(self, TypeKind.from_id(_id).name, self._handle_fundamental_types)
def function[init_fundamental_types, parameter[self]]: constant[Registers all fundamental typekind handlers] for taget[name[_id]] in starred[call[name[range], parameter[constant[2], constant[25]]]] begin[:] call[name[setattr], parameter[name[self], call[name[TypeKind].from_id, parameter[name[_id]]].name, name[self]._handle_fundamental_types]]
keyword[def] identifier[init_fundamental_types] ( identifier[self] ): literal[string] keyword[for] identifier[_id] keyword[in] identifier[range] ( literal[int] , literal[int] ): identifier[setattr] ( identifier[self] , identifier[TypeKind] . identifier[from_id] ( identifier[_id] ). identifier[name] , identifier[self] . identifier[_handle_fundamental_types] )
def init_fundamental_types(self): """Registers all fundamental typekind handlers""" for _id in range(2, 25): setattr(self, TypeKind.from_id(_id).name, self._handle_fundamental_types) # depends on [control=['for'], data=['_id']]
def trigger(self, name, *args, **kwargs): """ Execute the callbacks for the listeners on the specified event with the supplied arguments. All extra arguments are passed through to each callback. :param name: the name of the event :type name: string """ for ev in self.__listeners[name]: ev(*args, **kwargs)
def function[trigger, parameter[self, name]]: constant[ Execute the callbacks for the listeners on the specified event with the supplied arguments. All extra arguments are passed through to each callback. :param name: the name of the event :type name: string ] for taget[name[ev]] in starred[call[name[self].__listeners][name[name]]] begin[:] call[name[ev], parameter[<ast.Starred object at 0x7da1b0287ac0>]]
keyword[def] identifier[trigger] ( identifier[self] , identifier[name] ,* identifier[args] ,** identifier[kwargs] ): literal[string] keyword[for] identifier[ev] keyword[in] identifier[self] . identifier[__listeners] [ identifier[name] ]: identifier[ev] (* identifier[args] ,** identifier[kwargs] )
def trigger(self, name, *args, **kwargs): """ Execute the callbacks for the listeners on the specified event with the supplied arguments. All extra arguments are passed through to each callback. :param name: the name of the event :type name: string """ for ev in self.__listeners[name]: ev(*args, **kwargs) # depends on [control=['for'], data=['ev']]
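A self-contained sketch of how the trigger method above is typically wired; the Emitter class and its on() registration method are assumptions for illustration, not part of the source row:
class Emitter:
    def __init__(self):
        self.__listeners = {'tick': []}  # event name -> list of callbacks
    def on(self, name, cb):
        self.__listeners[name].append(cb)
    def trigger(self, name, *args, **kwargs):
        for ev in self.__listeners[name]:
            ev(*args, **kwargs)

emitter = Emitter()
emitter.on('tick', lambda n: print('tick', n))
emitter.trigger('tick', 1)  # prints: tick 1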
def activation(self, activation_hash): """ Endpoint to expose an activation url, this url is sent to the user by email, when accessed the user is inserted and activated """ reg = self.appbuilder.sm.find_register_user(activation_hash) if not reg: log.error(c.LOGMSG_ERR_SEC_NO_REGISTER_HASH.format(activation_hash)) flash(as_unicode(self.false_error_message), "danger") return redirect(self.appbuilder.get_url_for_index) if not self.appbuilder.sm.add_user( username=reg.username, email=reg.email, first_name=reg.first_name, last_name=reg.last_name, role=self.appbuilder.sm.find_role( self.appbuilder.sm.auth_user_registration_role ), hashed_password=reg.password, ): flash(as_unicode(self.error_message), "danger") return redirect(self.appbuilder.get_url_for_index) else: self.appbuilder.sm.del_register_user(reg) return self.render_template( self.activation_template, username=reg.username, first_name=reg.first_name, last_name=reg.last_name, appbuilder=self.appbuilder, )
def function[activation, parameter[self, activation_hash]]: constant[ Endpoint to expose an activation url, this url is sent to the user by email, when accessed the user is inserted and activated ] variable[reg] assign[=] call[name[self].appbuilder.sm.find_register_user, parameter[name[activation_hash]]] if <ast.UnaryOp object at 0x7da20e954100> begin[:] call[name[log].error, parameter[call[name[c].LOGMSG_ERR_SEC_NO_REGISTER_HASH.format, parameter[name[activation_hash]]]]] call[name[flash], parameter[call[name[as_unicode], parameter[name[self].false_error_message]], constant[danger]]] return[call[name[redirect], parameter[name[self].appbuilder.get_url_for_index]]] if <ast.UnaryOp object at 0x7da20e9556f0> begin[:] call[name[flash], parameter[call[name[as_unicode], parameter[name[self].error_message]], constant[danger]]] return[call[name[redirect], parameter[name[self].appbuilder.get_url_for_index]]]
keyword[def] identifier[activation] ( identifier[self] , identifier[activation_hash] ): literal[string] identifier[reg] = identifier[self] . identifier[appbuilder] . identifier[sm] . identifier[find_register_user] ( identifier[activation_hash] ) keyword[if] keyword[not] identifier[reg] : identifier[log] . identifier[error] ( identifier[c] . identifier[LOGMSG_ERR_SEC_NO_REGISTER_HASH] . identifier[format] ( identifier[activation_hash] )) identifier[flash] ( identifier[as_unicode] ( identifier[self] . identifier[false_error_message] ), literal[string] ) keyword[return] identifier[redirect] ( identifier[self] . identifier[appbuilder] . identifier[get_url_for_index] ) keyword[if] keyword[not] identifier[self] . identifier[appbuilder] . identifier[sm] . identifier[add_user] ( identifier[username] = identifier[reg] . identifier[username] , identifier[email] = identifier[reg] . identifier[email] , identifier[first_name] = identifier[reg] . identifier[first_name] , identifier[last_name] = identifier[reg] . identifier[last_name] , identifier[role] = identifier[self] . identifier[appbuilder] . identifier[sm] . identifier[find_role] ( identifier[self] . identifier[appbuilder] . identifier[sm] . identifier[auth_user_registration_role] ), identifier[hashed_password] = identifier[reg] . identifier[password] , ): identifier[flash] ( identifier[as_unicode] ( identifier[self] . identifier[error_message] ), literal[string] ) keyword[return] identifier[redirect] ( identifier[self] . identifier[appbuilder] . identifier[get_url_for_index] ) keyword[else] : identifier[self] . identifier[appbuilder] . identifier[sm] . identifier[del_register_user] ( identifier[reg] ) keyword[return] identifier[self] . identifier[render_template] ( identifier[self] . identifier[activation_template] , identifier[username] = identifier[reg] . identifier[username] , identifier[first_name] = identifier[reg] . identifier[first_name] , identifier[last_name] = identifier[reg] . identifier[last_name] , identifier[appbuilder] = identifier[self] . identifier[appbuilder] , )
def activation(self, activation_hash): """ Endpoint to expose an activation url, this url is sent to the user by email, when accessed the user is inserted and activated """ reg = self.appbuilder.sm.find_register_user(activation_hash) if not reg: log.error(c.LOGMSG_ERR_SEC_NO_REGISTER_HASH.format(activation_hash)) flash(as_unicode(self.false_error_message), 'danger') return redirect(self.appbuilder.get_url_for_index) # depends on [control=['if'], data=[]] if not self.appbuilder.sm.add_user(username=reg.username, email=reg.email, first_name=reg.first_name, last_name=reg.last_name, role=self.appbuilder.sm.find_role(self.appbuilder.sm.auth_user_registration_role), hashed_password=reg.password): flash(as_unicode(self.error_message), 'danger') return redirect(self.appbuilder.get_url_for_index) # depends on [control=['if'], data=[]] else: self.appbuilder.sm.del_register_user(reg) return self.render_template(self.activation_template, username=reg.username, first_name=reg.first_name, last_name=reg.last_name, appbuilder=self.appbuilder)
def __liftover_coordinates_genomic_insertions(self, intersecting_region): """ Lift a region that overlaps the genomic occurrence of this repeat to the consensus sequence coordinates using just coordinates (not the full alignment, even if it is available), when the length of the genomic match is greater than the consensus match. We assume there are insertions in the genomic sequence (i.e. gaps in the consensus match). We uniformly distribute the gaps through the consensus match. e.g. if the genomic region is 100nt long and the consensus match is 90nt long, we will consider an insertion to occur every 10nt in the genomic sequence. The start and end coordinates of the region after lifting will be reduced by the number of genomic insertions that would have come before them. :param intersecting_region: a region that intersects this occurrence. :return: list of GenomicInterval objects. This is a list because, although the liftover can produce only one genomic interval in consensus, it might also produce none (if the interval overlaps only insertions in the genomic sequence), hence an empty list is possible. """ # should never happen, but check anyway... consensus_match_length = self.consensus_end - self.consensus_start size_dif = consensus_match_length - len(self) assert(size_dif < 0) gap_interval = len(self) / (-1 * size_dif) s_dist_to_gen_start = max(intersecting_region.start - self.start, 0) e_dist_to_gen_start = max(intersecting_region.end - self.start, 0) if self.consensus_match_strand is '+': s = s_dist_to_gen_start + self.consensus_start e = e_dist_to_gen_start + self.consensus_start s = s - (s_dist_to_gen_start / gap_interval) e = min(e - (e_dist_to_gen_start / gap_interval), self.consensus_len) else: e = self.consensus_end - s_dist_to_gen_start s = self.consensus_end - e_dist_to_gen_start s = max(s + (e_dist_to_gen_start / gap_interval), self.consensus_start) e = min(e + (s_dist_to_gen_start / gap_interval), self.consensus_end) res = [] if s == e else [GenomicInterval(self.repeat_name(), s, e, intersecting_region.name, intersecting_region.score, self.strand)] return res
def function[__liftover_coordinates_genomic_insertions, parameter[self, intersecting_region]]: constant[ Lift a region that overlaps the genomic occurrence of this repeat to the consensus sequence coordinates using just coordinates (not the full alignment, even if it is available), when the length of the genomic match is greater than the consensus match. We assume there are insertions in the genomic sequence (i.e. gaps in the consensus match). We uniformly distribute the gaps through the consensus match. e.g. if the genomic region is 100nt long and the consensus match is 90nt long, we will consider an insertion to occur every 10nt in the genomic sequence. The start and end coordinates of the region after lifting will be reduced by the number of genomic insertions that would have come before them. :param intersecting_region: a region that intersects this occurrence. :return: list of GenomicInterval objects. This is a list because, although the liftover can produce only one genomic interval in consensus, it might also produce none (if the interval overlaps only insertions in the genomic sequence), hence an empty list is possible. ] variable[consensus_match_length] assign[=] binary_operation[name[self].consensus_end - name[self].consensus_start] variable[size_dif] assign[=] binary_operation[name[consensus_match_length] - call[name[len], parameter[name[self]]]] assert[compare[name[size_dif] less[<] constant[0]]] variable[gap_interval] assign[=] binary_operation[call[name[len], parameter[name[self]]] / binary_operation[<ast.UnaryOp object at 0x7da1b1342290> * name[size_dif]]] variable[s_dist_to_gen_start] assign[=] call[name[max], parameter[binary_operation[name[intersecting_region].start - name[self].start], constant[0]]] variable[e_dist_to_gen_start] assign[=] call[name[max], parameter[binary_operation[name[intersecting_region].end - name[self].start], constant[0]]] if compare[name[self].consensus_match_strand is constant[+]] begin[:] variable[s] assign[=] binary_operation[name[s_dist_to_gen_start] + name[self].consensus_start] variable[e] assign[=] binary_operation[name[e_dist_to_gen_start] + name[self].consensus_start] variable[s] assign[=] binary_operation[name[s] - binary_operation[name[s_dist_to_gen_start] / name[gap_interval]]] variable[e] assign[=] call[name[min], parameter[binary_operation[name[e] - binary_operation[name[e_dist_to_gen_start] / name[gap_interval]]], name[self].consensus_len]] variable[res] assign[=] <ast.IfExp object at 0x7da1b1501300> return[name[res]]
keyword[def] identifier[__liftover_coordinates_genomic_insertions] ( identifier[self] , identifier[intersecting_region] ): literal[string] identifier[consensus_match_length] = identifier[self] . identifier[consensus_end] - identifier[self] . identifier[consensus_start] identifier[size_dif] = identifier[consensus_match_length] - identifier[len] ( identifier[self] ) keyword[assert] ( identifier[size_dif] < literal[int] ) identifier[gap_interval] = identifier[len] ( identifier[self] )/(- literal[int] * identifier[size_dif] ) identifier[s_dist_to_gen_start] = identifier[max] ( identifier[intersecting_region] . identifier[start] - identifier[self] . identifier[start] , literal[int] ) identifier[e_dist_to_gen_start] = identifier[max] ( identifier[intersecting_region] . identifier[end] - identifier[self] . identifier[start] , literal[int] ) keyword[if] identifier[self] . identifier[consensus_match_strand] keyword[is] literal[string] : identifier[s] = identifier[s_dist_to_gen_start] + identifier[self] . identifier[consensus_start] identifier[e] = identifier[e_dist_to_gen_start] + identifier[self] . identifier[consensus_start] identifier[s] = identifier[s] -( identifier[s_dist_to_gen_start] / identifier[gap_interval] ) identifier[e] = identifier[min] ( identifier[e] -( identifier[e_dist_to_gen_start] / identifier[gap_interval] ), identifier[self] . identifier[consensus_len] ) keyword[else] : identifier[e] = identifier[self] . identifier[consensus_end] - identifier[s_dist_to_gen_start] identifier[s] = identifier[self] . identifier[consensus_end] - identifier[e_dist_to_gen_start] identifier[s] = identifier[max] ( identifier[s] +( identifier[e_dist_to_gen_start] / identifier[gap_interval] ), identifier[self] . identifier[consensus_start] ) identifier[e] = identifier[min] ( identifier[e] +( identifier[s_dist_to_gen_start] / identifier[gap_interval] ), identifier[self] . identifier[consensus_end] ) identifier[res] =[] keyword[if] identifier[s] == identifier[e] keyword[else] [ identifier[GenomicInterval] ( identifier[self] . identifier[repeat_name] (), identifier[s] , identifier[e] , identifier[intersecting_region] . identifier[name] , identifier[intersecting_region] . identifier[score] , identifier[self] . identifier[strand] )] keyword[return] identifier[res]
def __liftover_coordinates_genomic_insertions(self, intersecting_region): """ Lift a region that overlaps the genomic occurrence of this repeat to the consensus sequence coordinates using just coordinates (not the full alignment, even if it is available), when the length of the genomic match is greater than the consensus match. We assume there are insertions in the genomic sequence (i.e. gaps in the consensus match). We uniformly distribute the gaps through the consensus match. e.g. if the genomic region is 100nt long and the consensus match is 90nt long, we will consider an insertion to occur every 10nt in the genomic sequence. The start and end coordinates of the region after lifting will be reduced by the number of genomic insertions that would have come before them. :param intersecting_region: a region that intersects this occurrence. :return: list of GenomicInterval objects. This is a list because, although the liftover can produce only one genomic interval in consensus, it might also produce none (if the interval overlaps only insertions in the genomic sequence), hence an empty list is possible. """ # should never happen, but check anyway... consensus_match_length = self.consensus_end - self.consensus_start size_dif = consensus_match_length - len(self) assert size_dif < 0 gap_interval = len(self) / (-1 * size_dif) s_dist_to_gen_start = max(intersecting_region.start - self.start, 0) e_dist_to_gen_start = max(intersecting_region.end - self.start, 0) if self.consensus_match_strand is '+': s = s_dist_to_gen_start + self.consensus_start e = e_dist_to_gen_start + self.consensus_start s = s - s_dist_to_gen_start / gap_interval e = min(e - e_dist_to_gen_start / gap_interval, self.consensus_len) # depends on [control=['if'], data=[]] else: e = self.consensus_end - s_dist_to_gen_start s = self.consensus_end - e_dist_to_gen_start s = max(s + e_dist_to_gen_start / gap_interval, self.consensus_start) e = min(e + s_dist_to_gen_start / gap_interval, self.consensus_end) res = [] if s == e else [GenomicInterval(self.repeat_name(), s, e, intersecting_region.name, intersecting_region.score, self.strand)] return res
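Working through the 100nt/90nt example from the docstring above (illustrative arithmetic only): size_dif = 90 - 100 = -10, so gap_interval = 100 / 10 = 10, i.e. one assumed insertion every 10 genomic nt, and a lifted coordinate is reduced by its distance divided by that interval:
genomic_len, consensus_len = 100, 90
gap_interval = genomic_len // (genomic_len - consensus_len)  # -> 10
# a point 47 nt into the genomic match loses 47 // 10 = 4 nt when lifted
print(47 - 47 // gap_interval)  # -> 43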
def is_json(value, schema = None, json_serializer = None, **kwargs): """Indicate whether ``value`` is a valid JSON object. .. note:: ``schema`` supports JSON Schema Drafts 3 - 7. Unless the JSON Schema indicates the meta-schema using a ``$schema`` property, the schema will be assumed to conform to Draft 7. :param value: The value to evaluate. :param schema: An optional JSON schema against which ``value`` will be validated. :type schema: :class:`dict <python:dict>` / :class:`str <python:str>` / :obj:`None <python:None>` :returns: ``True`` if ``value`` is valid, ``False`` if it is not. :rtype: :class:`bool <python:bool>` :raises SyntaxError: if ``kwargs`` contains duplicate keyword parameters or duplicates keyword parameters passed to the underlying validator """ try: value = validators.json(value, schema = schema, json_serializer = json_serializer, **kwargs) except SyntaxError as error: raise error except Exception: return False return True
def function[is_json, parameter[value, schema, json_serializer]]: constant[Indicate whether ``value`` is a valid JSON object. .. note:: ``schema`` supports JSON Schema Drafts 3 - 7. Unless the JSON Schema indicates the meta-schema using a ``$schema`` property, the schema will be assumed to conform to Draft 7. :param value: The value to evaluate. :param schema: An optional JSON schema against which ``value`` will be validated. :type schema: :class:`dict <python:dict>` / :class:`str <python:str>` / :obj:`None <python:None>` :returns: ``True`` if ``value`` is valid, ``False`` if it is not. :rtype: :class:`bool <python:bool>` :raises SyntaxError: if ``kwargs`` contains duplicate keyword parameters or duplicates keyword parameters passed to the underlying validator ] <ast.Try object at 0x7da1b07d15d0> return[constant[True]]
keyword[def] identifier[is_json] ( identifier[value] , identifier[schema] = keyword[None] , identifier[json_serializer] = keyword[None] , ** identifier[kwargs] ): literal[string] keyword[try] : identifier[value] = identifier[validators] . identifier[json] ( identifier[value] , identifier[schema] = identifier[schema] , identifier[json_serializer] = identifier[json_serializer] , ** identifier[kwargs] ) keyword[except] identifier[SyntaxError] keyword[as] identifier[error] : keyword[raise] identifier[error] keyword[except] identifier[Exception] : keyword[return] keyword[False] keyword[return] keyword[True]
def is_json(value, schema=None, json_serializer=None, **kwargs): """Indicate whether ``value`` is a valid JSON object. .. note:: ``schema`` supports JSON Schema Drafts 3 - 7. Unless the JSON Schema indicates the meta-schema using a ``$schema`` property, the schema will be assumed to conform to Draft 7. :param value: The value to evaluate. :param schema: An optional JSON schema against which ``value`` will be validated. :type schema: :class:`dict <python:dict>` / :class:`str <python:str>` / :obj:`None <python:None>` :returns: ``True`` if ``value`` is valid, ``False`` if it is not. :rtype: :class:`bool <python:bool>` :raises SyntaxError: if ``kwargs`` contains duplicate keyword parameters or duplicates keyword parameters passed to the underlying validator """ try: value = validators.json(value, schema=schema, json_serializer=json_serializer, **kwargs) # depends on [control=['try'], data=[]] except SyntaxError as error: raise error # depends on [control=['except'], data=['error']] except Exception: return False # depends on [control=['except'], data=[]] return True
def get_dimension_by_unit_id(unit_id, do_accept_unit_id_none=False, **kwargs): """ Return the physical dimension a given unit id refers to. if do_accept_unit_id_none is False, it raises an exception if unit_id is not valid or None if do_accept_unit_id_none is True, and unit_id is None, the function returns a Dimension with id None (unit_id can be none in some cases) """ if do_accept_unit_id_none == True and unit_id is None: # In this special case, the method returns a dimension with id None return get_empty_dimension() try: dimension = db.DBSession.query(Dimension).join(Unit).filter(Unit.id==unit_id).filter().one() return get_dimension(dimension.id) except NoResultFound: # The dimension does not exist raise ResourceNotFoundError("Unit %s not found"%(unit_id))
def function[get_dimension_by_unit_id, parameter[unit_id, do_accept_unit_id_none]]: constant[ Return the physical dimension a given unit id refers to. if do_accept_unit_id_none is False, it raises an exception if unit_id is not valid or None if do_accept_unit_id_none is True, and unit_id is None, the function returns a Dimension with id None (unit_id can be none in some cases) ] if <ast.BoolOp object at 0x7da18bc70520> begin[:] return[call[name[get_empty_dimension], parameter[]]] <ast.Try object at 0x7da18bc703a0>
keyword[def] identifier[get_dimension_by_unit_id] ( identifier[unit_id] , identifier[do_accept_unit_id_none] = keyword[False] ,** identifier[kwargs] ): literal[string] keyword[if] identifier[do_accept_unit_id_none] == keyword[True] keyword[and] identifier[unit_id] keyword[is] keyword[None] : keyword[return] identifier[get_empty_dimension] () keyword[try] : identifier[dimension] = identifier[db] . identifier[DBSession] . identifier[query] ( identifier[Dimension] ). identifier[join] ( identifier[Unit] ). identifier[filter] ( identifier[Unit] . identifier[id] == identifier[unit_id] ). identifier[filter] (). identifier[one] () keyword[return] identifier[get_dimension] ( identifier[dimension] . identifier[id] ) keyword[except] identifier[NoResultFound] : keyword[raise] identifier[ResourceNotFoundError] ( literal[string] %( identifier[unit_id] ))
def get_dimension_by_unit_id(unit_id, do_accept_unit_id_none=False, **kwargs): """ Return the physical dimension a given unit id refers to. if do_accept_unit_id_none is False, it raises an exception if unit_id is not valid or None if do_accept_unit_id_none is True, and unit_id is None, the function returns a Dimension with id None (unit_id can be none in some cases) """ if do_accept_unit_id_none == True and unit_id is None: # In this special case, the method returns a dimension with id None return get_empty_dimension() # depends on [control=['if'], data=[]] try: dimension = db.DBSession.query(Dimension).join(Unit).filter(Unit.id == unit_id).filter().one() return get_dimension(dimension.id) # depends on [control=['try'], data=[]] except NoResultFound: # The dimension does not exist raise ResourceNotFoundError('Unit %s not found' % unit_id) # depends on [control=['except'], data=[]]
def circ_permutation(items): """Calculate the circular permutation for a given list of items.""" permutations = [] for i in range(len(items)): permutations.append(items[i:] + items[:i]) return permutations
def function[circ_permutation, parameter[items]]: constant[Calculate the circular permutation for a given list of items.] variable[permutations] assign[=] list[[]] for taget[name[i]] in starred[call[name[range], parameter[call[name[len], parameter[name[items]]]]]] begin[:] call[name[permutations].append, parameter[binary_operation[call[name[items]][<ast.Slice object at 0x7da1b23d0c70>] + call[name[items]][<ast.Slice object at 0x7da1b24b77c0>]]]] return[name[permutations]]
keyword[def] identifier[circ_permutation] ( identifier[items] ): literal[string] identifier[permutations] =[] keyword[for] identifier[i] keyword[in] identifier[range] ( identifier[len] ( identifier[items] )): identifier[permutations] . identifier[append] ( identifier[items] [ identifier[i] :]+ identifier[items] [: identifier[i] ]) keyword[return] identifier[permutations]
def circ_permutation(items): """Calculate the circular permutation for a given list of items.""" permutations = [] for i in range(len(items)): permutations.append(items[i:] + items[:i]) # depends on [control=['for'], data=['i']] return permutations
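A quick usage check of circ_permutation above:
print(circ_permutation(['a', 'b', 'c']))
# -> [['a', 'b', 'c'], ['b', 'c', 'a'], ['c', 'a', 'b']]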
def bi_backtrace(node_a, node_b): """ Backtrace from start and end node, returns the path for bi-directional A* (including both start and end nodes) """ path_a = backtrace(node_a) path_b = backtrace(node_b) path_b.reverse() return path_a + path_b
def function[bi_backtrace, parameter[node_a, node_b]]: constant[ Backtrace from start and end node, returns the path for bi-directional A* (including both start and end nodes) ] variable[path_a] assign[=] call[name[backtrace], parameter[name[node_a]]] variable[path_b] assign[=] call[name[backtrace], parameter[name[node_b]]] call[name[path_b].reverse, parameter[]] return[binary_operation[name[path_a] + name[path_b]]]
keyword[def] identifier[bi_backtrace] ( identifier[node_a] , identifier[node_b] ): literal[string] identifier[path_a] = identifier[backtrace] ( identifier[node_a] ) identifier[path_b] = identifier[backtrace] ( identifier[node_b] ) identifier[path_b] . identifier[reverse] () keyword[return] identifier[path_a] + identifier[path_b]
def bi_backtrace(node_a, node_b): """ Backtrace from start and end node, returns the path for bi-directional A* (including both start and end nodes) """ path_a = backtrace(node_a) path_b = backtrace(node_b) path_b.reverse() return path_a + path_b
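bi_backtrace above relies on a backtrace helper that is not part of this row; a plausible sketch, assuming each node carries a .parent link (None at the start node) as in typical A* grid implementations:
def backtrace(node):
    # walk parent links back to the start, then reverse into start-to-node order
    path = [node]
    while node.parent:
        node = node.parent
        path.append(node)
    path.reverse()
    return path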
def absent(name, orgname=None, profile='grafana'): ''' Ensure the named grafana dashboard is absent. name Name of the grafana dashboard. orgname Name of the organization in which the dashboard should be present. profile Configuration profile used to connect to the Grafana instance. Default is 'grafana'. ''' ret = {'name': name, 'result': True, 'comment': '', 'changes': {}} if isinstance(profile, six.string_types): profile = __salt__['config.option'](profile) existing_dashboard = __salt__['grafana4.get_dashboard']( name, orgname, profile) if existing_dashboard: if __opts__['test']: ret['result'] = None ret['comment'] = 'Dashboard {0} is set to be deleted.'.format(name) return ret __salt__['grafana4.delete_dashboard'](name, profile=profile) ret['comment'] = 'Dashboard {0} deleted.'.format(name) ret['changes']['new'] = 'Dashboard {0} deleted.'.format(name) return ret ret['comment'] = 'Dashboard absent' return ret
def function[absent, parameter[name, orgname, profile]]: constant[ Ensure the named grafana dashboard is absent. name Name of the grafana dashboard. orgname Name of the organization in which the dashboard should be present. profile Configuration profile used to connect to the Grafana instance. Default is 'grafana'. ] variable[ret] assign[=] dictionary[[<ast.Constant object at 0x7da1b1c111b0>, <ast.Constant object at 0x7da1b1c11270>, <ast.Constant object at 0x7da1b1c11300>, <ast.Constant object at 0x7da1b1c115d0>], [<ast.Name object at 0x7da1b1c12740>, <ast.Constant object at 0x7da1b1c12860>, <ast.Constant object at 0x7da1b1c12830>, <ast.Dict object at 0x7da1b1c12770>]] if call[name[isinstance], parameter[name[profile], name[six].string_types]] begin[:] variable[profile] assign[=] call[call[name[__salt__]][constant[config.option]], parameter[name[profile]]] variable[existing_dashboard] assign[=] call[call[name[__salt__]][constant[grafana4.get_dashboard]], parameter[name[name], name[orgname], name[profile]]] if name[existing_dashboard] begin[:] if call[name[__opts__]][constant[test]] begin[:] call[name[ret]][constant[result]] assign[=] constant[None] call[name[ret]][constant[comment]] assign[=] call[constant[Dashboard {0} is set to be deleted.].format, parameter[name[name]]] return[name[ret]] call[call[name[__salt__]][constant[grafana4.delete_dashboard]], parameter[name[name]]] call[name[ret]][constant[comment]] assign[=] call[constant[Dashboard {0} deleted.].format, parameter[name[name]]] call[call[name[ret]][constant[changes]]][constant[new]] assign[=] call[constant[Dashboard {0} deleted.].format, parameter[name[name]]] return[name[ret]] call[name[ret]][constant[comment]] assign[=] constant[Dashboard absent] return[name[ret]]
keyword[def] identifier[absent] ( identifier[name] , identifier[orgname] = keyword[None] , identifier[profile] = literal[string] ): literal[string] identifier[ret] ={ literal[string] : identifier[name] , literal[string] : keyword[True] , literal[string] : literal[string] , literal[string] :{}} keyword[if] identifier[isinstance] ( identifier[profile] , identifier[six] . identifier[string_types] ): identifier[profile] = identifier[__salt__] [ literal[string] ]( identifier[profile] ) identifier[existing_dashboard] = identifier[__salt__] [ literal[string] ]( identifier[name] , identifier[orgname] , identifier[profile] ) keyword[if] identifier[existing_dashboard] : keyword[if] identifier[__opts__] [ literal[string] ]: identifier[ret] [ literal[string] ]= keyword[None] identifier[ret] [ literal[string] ]= literal[string] . identifier[format] ( identifier[name] ) keyword[return] identifier[ret] identifier[__salt__] [ literal[string] ]( identifier[name] , identifier[profile] = identifier[profile] ) identifier[ret] [ literal[string] ]= literal[string] . identifier[format] ( identifier[name] ) identifier[ret] [ literal[string] ][ literal[string] ]= literal[string] . identifier[format] ( identifier[name] ) keyword[return] identifier[ret] identifier[ret] [ literal[string] ]= literal[string] keyword[return] identifier[ret]
def absent(name, orgname=None, profile='grafana'): """ Ensure the named grafana dashboard is absent. name Name of the grafana dashboard. orgname Name of the organization in which the dashboard should be present. profile Configuration profile used to connect to the Grafana instance. Default is 'grafana'. """ ret = {'name': name, 'result': True, 'comment': '', 'changes': {}} if isinstance(profile, six.string_types): profile = __salt__['config.option'](profile) # depends on [control=['if'], data=[]] existing_dashboard = __salt__['grafana4.get_dashboard'](name, orgname, profile) if existing_dashboard: if __opts__['test']: ret['result'] = None ret['comment'] = 'Dashboard {0} is set to be deleted.'.format(name) return ret # depends on [control=['if'], data=[]] __salt__['grafana4.delete_dashboard'](name, profile=profile) ret['comment'] = 'Dashboard {0} deleted.'.format(name) ret['changes']['new'] = 'Dashboard {0} deleted.'.format(name) return ret # depends on [control=['if'], data=[]] ret['comment'] = 'Dashboard absent' return ret
def loose_search(self, asset_manager_id, query='', **kwargs): """ Asset search API. Possible kwargs: * threshold: int (default = 0) * page_no: int (default = 1) * page_size: int (default = 100) * sort_fields: list (default = []) * asset_types: list (default = []) * include_public: bool (default = True) * include_data_sources: bool (default = True) """ self.logger.info('Asset Search - Asset Manager: %s', asset_manager_id) url = '{endpoint}/assets/search/{asset_manager_id}'.format( asset_manager_id=asset_manager_id, endpoint=self.endpoint, ) params = {'query': query} for k, v in kwargs.items(): if not isinstance(v, str) and isinstance(v, Iterable): v = ','.join(str(i) for i in v) params[k] = v response = self.session.get(url, params=params) if response.ok: data = response.json() assets = [json_to_asset(json_asset) for json_asset in data.get('hits', [])] self.logger.info('Returned %s Assets.', len(assets)) return assets else: self.logger.error(response.text) response.raise_for_status()
def function[loose_search, parameter[self, asset_manager_id, query]]: constant[ Asset search API. Possible kwargs: * threshold: int (default = 0) * page_no: int (default = 1) * page_size: int (default = 100) * sort_fields: list (default = []) * asset_types: list (default = []) * include_public: bool (default = True) * include_data_sources: bool (default = True) ] call[name[self].logger.info, parameter[constant[Asset Search - Asset Manager: %s], name[asset_manager_id]]] variable[url] assign[=] call[constant[{endpoint}/assets/search/{asset_manager_id}].format, parameter[]] variable[params] assign[=] dictionary[[<ast.Constant object at 0x7da204622ad0>], [<ast.Name object at 0x7da204620f10>]] for taget[tuple[[<ast.Name object at 0x7da2046215d0>, <ast.Name object at 0x7da204621d50>]]] in starred[call[name[kwargs].items, parameter[]]] begin[:] if <ast.BoolOp object at 0x7da2046239d0> begin[:] variable[v] assign[=] call[constant[,].join, parameter[<ast.GeneratorExp object at 0x7da204621420>]] call[name[params]][name[k]] assign[=] name[v] variable[response] assign[=] call[name[self].session.get, parameter[name[url]]] if name[response].ok begin[:] variable[data] assign[=] call[name[response].json, parameter[]] variable[assets] assign[=] <ast.ListComp object at 0x7da1b09eadd0> call[name[self].logger.info, parameter[constant[Returned %s Assets.], call[name[len], parameter[name[assets]]]]] return[name[assets]]
keyword[def] identifier[loose_search] ( identifier[self] , identifier[asset_manager_id] , identifier[query] = literal[string] ,** identifier[kwargs] ): literal[string] identifier[self] . identifier[logger] . identifier[info] ( literal[string] , identifier[asset_manager_id] ) identifier[url] = literal[string] . identifier[format] ( identifier[asset_manager_id] = identifier[asset_manager_id] , identifier[endpoint] = identifier[self] . identifier[endpoint] , ) identifier[params] ={ literal[string] : identifier[query] } keyword[for] identifier[k] , identifier[v] keyword[in] identifier[kwargs] . identifier[items] (): keyword[if] keyword[not] identifier[isinstance] ( identifier[v] , identifier[str] ) keyword[and] identifier[isinstance] ( identifier[v] , identifier[Iterable] ): identifier[v] = literal[string] . identifier[join] ( identifier[str] ( identifier[i] ) keyword[for] identifier[i] keyword[in] identifier[v] ) identifier[params] [ identifier[k] ]= identifier[v] identifier[response] = identifier[self] . identifier[session] . identifier[get] ( identifier[url] , identifier[params] = identifier[params] ) keyword[if] identifier[response] . identifier[ok] : identifier[data] = identifier[response] . identifier[json] () identifier[assets] =[ identifier[json_to_asset] ( identifier[json_asset] ) keyword[for] identifier[json_asset] keyword[in] identifier[data] . identifier[get] ( literal[string] ,[])] identifier[self] . identifier[logger] . identifier[info] ( literal[string] , identifier[len] ( identifier[assets] )) keyword[return] identifier[assets] keyword[else] : identifier[self] . identifier[logger] . identifier[error] ( identifier[response] . identifier[text] ) identifier[response] . identifier[raise_for_status] ()
def loose_search(self, asset_manager_id, query='', **kwargs): """ Asset search API. Possible kwargs: * threshold: int (default = 0) * page_no: int (default = 1) * page_size: int (default = 100) * sort_fields: list (default = []) * asset_types: list (default = []) * include_public: bool (default = True) * include_data_sources: bool (default = True) """ self.logger.info('Asset Search - Asset Manager: %s', asset_manager_id) url = '{endpoint}/assets/search/{asset_manager_id}'.format(asset_manager_id=asset_manager_id, endpoint=self.endpoint) params = {'query': query} for (k, v) in kwargs.items(): if not isinstance(v, str) and isinstance(v, Iterable): v = ','.join((str(i) for i in v)) # depends on [control=['if'], data=[]] params[k] = v # depends on [control=['for'], data=[]] response = self.session.get(url, params=params) if response.ok: data = response.json() assets = [json_to_asset(json_asset) for json_asset in data.get('hits', [])] self.logger.info('Returned %s Assets.', len(assets)) return assets # depends on [control=['if'], data=[]] else: self.logger.error(response.text) response.raise_for_status()
def main(): """ NAME remanence_aniso_magic.py DESCRIPTION This program is similar to aarm_magic.py and atrm_magic.py with minor modifications. Converts magic measurement file with ATRM/AARM data to best-fit tensor (6 elements plus sigma) following Hext (1963), and calculates F-test statistics. Comments: - infield steps are marked with method codes LT-T-I:LP-AN-TRM; LT-AF-I:LP-AN-ARM - zerofield steps are marked with method codes LT-T-Z:LP-AN-TRM; LT-AF-Z:LP-AN-ARM - alteration check is marked with method codes LT-PTRM-I:LP-AN-TRM please notice; - ATRM: The program uses treatment_dc_field_phi/treatment_dc_field_theta columns to infer the direction of the applied field (this is a change from atrm_magic.py) - ATRM: zerofield (baseline) magnetization is subtructed from all infield measurements - AARM: The program uses measurement number (running number) to to infer the direction of the applied field assuming the SIO protocol for 6,9,15 measurements scheme. See cookbook for diagram and details. - AARM: zerofield (baseline) are assumed to be before any infield, and the baseline is subtructed from the subsequent infield magnetization. SYNTAX remanence_aniso_magic.py [-h] [command line options] OPTIONS -h prints help message and quits -f FILE: specify input file, default is magic_measurements.txt INPUT magic measurement file with ATRM and/or AARM data. if both types of measurements exist then the program calculates both. OUTPUT rmag_anisotropy.log -I- information -W- Warning -E- Error rmag_anistropy.txt: This file contains in addition to some some magic information the following: - anistropy tensor s1 to s6 normalized by the trace: |Mx| |s1 s4 s6| |Bx| |My| = |s4 s2 s5| . |By| |Mz| |s6 s5 s3| |Bz| - anisotropy_sigma (Hext, 1963) - anisotropy_alt (altertion check for ATRM in units of %): 100* [abs(M_first-Mlast)/max(M_first,M_last)] - rmag_results.txt: This file contains in addition to some magic information the follow(ing: - anisotropy_t1,anisotropy_t2,anisotropy_t3 : eigenvalues - anisotropy_v*_dec,anisotropy_v*_inc: declination/inclination of the eigenvectors - anisotropy_ftest,anisotropy_ftest12,anisotropy_ftest13 - (the crtical F for 95% confidence level of anistropy is given in result_description column). """ #================================================================================== meas_file="magic_measurements.txt" args=sys.argv dir_path='.' # # get name of file from command line # if '-WD' in args: ind=args.index('-WD') dir_path=args[ind+1] if "-h" in args: print(main.__doc__) sys.exit() if "-f" in args: ind=args.index("-f") meas_file=sys.argv[ind+1] else: meas_file=dir_path+'/'+meas_file WD=dir_path #====================================== # functions #====================================== def get_Data(magic_file): #------------------------------------------------ # Read magic measurement file and sort to blocks #------------------------------------------------ Data={} try: meas_data,file_type=pmag.magic_read(magic_file) except: print("-E- ERROR: Cant read magic_measurement.txt file. 
File is corrupted.") return Data # get list of unique specimen names #sids=pmag.get_specs(meas_data) # samples ID's for rec in meas_data: s=rec["er_specimen_name"] method_codes= rec["magic_method_codes"].strip('\n') method_codes.replace(" ","") methods=method_codes.split(":") if "LP-AN-TRM" in methods: if s not in list(Data.keys()): Data[s]={} if 'atrmblock' not in list(Data[s].keys()): Data[s]['atrmblock']=[] Data[s]['atrmblock'].append(rec) if "LP-AN-ARM" in methods: if s not in list(Data.keys()): Data[s]={} if 'aarmblock' not in list(Data[s].keys()): Data[s]['aarmblock']=[] Data[s]['aarmblock'].append(rec) return (Data) #====================================== # better to put this one in pmagpy #====================================== def calculate_aniso_parameters(B,K): aniso_parameters={} S_bs=dot(B,K) # normalize by trace trace=S_bs[0]+S_bs[1]+S_bs[2] S_bs=old_div(S_bs,trace) s1,s2,s3,s4,s5,s6=S_bs[0],S_bs[1],S_bs[2],S_bs[3],S_bs[4],S_bs[5] s_matrix=[[s1,s4,s6],[s4,s2,s5],[s6,s5,s3]] # calculate eigen vector, t,evectors=eig(s_matrix) # sort vectors t=list(t) t1=max(t) ix_1=t.index(t1) t3=min(t) ix_3=t.index(t3) for tt in range(3): if t[tt]!=t1 and t[tt]!=t3: t2=t[tt] ix_2=t.index(t2) v1=[evectors[0][ix_1],evectors[1][ix_1],evectors[2][ix_1]] v2=[evectors[0][ix_2],evectors[1][ix_2],evectors[2][ix_2]] v3=[evectors[0][ix_3],evectors[1][ix_3],evectors[2][ix_3]] DIR_v1=pmag.cart2dir(v1) DIR_v2=pmag.cart2dir(v2) DIR_v3=pmag.cart2dir(v3) aniso_parameters['anisotropy_s1']="%f"%s1 aniso_parameters['anisotropy_s2']="%f"%s2 aniso_parameters['anisotropy_s3']="%f"%s3 aniso_parameters['anisotropy_s4']="%f"%s4 aniso_parameters['anisotropy_s5']="%f"%s5 aniso_parameters['anisotropy_s6']="%f"%s6 aniso_parameters['anisotropy_degree']="%f"%(old_div(t1,t3)) aniso_parameters['anisotropy_t1']="%f"%t1 aniso_parameters['anisotropy_t2']="%f"%t2 aniso_parameters['anisotropy_t3']="%f"%t3 aniso_parameters['anisotropy_v1_dec']="%.1f"%DIR_v1[0] aniso_parameters['anisotropy_v1_inc']="%.1f"%DIR_v1[1] aniso_parameters['anisotropy_v2_dec']="%.1f"%DIR_v2[0] aniso_parameters['anisotropy_v2_inc']="%.1f"%DIR_v2[1] aniso_parameters['anisotropy_v3_dec']="%.1f"%DIR_v3[0] aniso_parameters['anisotropy_v3_inc']="%.1f"%DIR_v3[1] # modified from pmagpy: if old_div(len(K),3)==9 or old_div(len(K),3)==6 or old_div(len(K),3)==15: n_pos=old_div(len(K),3) tmpH = Matrices[n_pos]['tmpH'] a=s_matrix S=0. 
comp=zeros((n_pos*3),'f') for i in range(n_pos): for j in range(3): index=i*3+j compare=a[j][0]*tmpH[i][0]+a[j][1]*tmpH[i][1]+a[j][2]*tmpH[i][2] comp[index]=compare for i in range(n_pos*3): d=old_div(K[i],trace) - comp[i] # del values S+=d*d nf=float(n_pos*3-6) # number of degrees of freedom if S >0: sigma=math.sqrt(old_div(S,nf)) hpars=pmag.dohext(nf,sigma,[s1,s2,s3,s4,s5,s6]) aniso_parameters['anisotropy_sigma']="%f"%sigma aniso_parameters['anisotropy_ftest']="%f"%hpars["F"] aniso_parameters['anisotropy_ftest12']="%f"%hpars["F12"] aniso_parameters['anisotropy_ftest23']="%f"%hpars["F23"] aniso_parameters['result_description']="Critical F: %s"%(hpars['F_crit']) aniso_parameters['anisotropy_F_crit']="%f"%float(hpars['F_crit']) aniso_parameters['anisotropy_n']=n_pos return(aniso_parameters) #====================================== # Main #====================================== aniso_logfile=open(WD+"/rmag_anisotropy.log",'w') aniso_logfile.write("------------------------\n") aniso_logfile.write( "-I- Start rmag anisrotropy script\n") aniso_logfile.write( "------------------------\n") Data=get_Data(meas_file) #try: # Data=get_Data(meas_file) #except: # aniso_logfile.write( "-E- Cant open measurement file %s\n" %meas_file) # print "-E- Cant open measurement file %s\n exiting" %meas_file # exit() aniso_logfile.write( "-I- Open measurement file %s\n" %meas_file) Data_anisotropy={} specimens=list(Data.keys()) specimens.sort() #----------------------------------- # Prepare rmag_anisotropy.txt file for writing #----------------------------------- rmag_anisotropy_file =open(WD+"/rmag_anisotropy.txt",'w') rmag_anisotropy_file.write("tab\trmag_anisotropy\n") rmag_results_file =open(WD+"/rmag_results.txt",'w') rmag_results_file.write("tab\trmag_results\n") rmag_anistropy_header=['er_specimen_name','er_sample_name','er_site_name','anisotropy_type','anisotropy_n','anisotropy_description','anisotropy_s1','anisotropy_s2','anisotropy_s3','anisotropy_s4','anisotropy_s5','anisotropy_s6','anisotropy_sigma','anisotropy_alt','magic_experiment_names','magic_method_codes'] String="" for i in range (len(rmag_anistropy_header)): String=String+rmag_anistropy_header[i]+'\t' rmag_anisotropy_file.write(String[:-1]+"\n") rmag_results_header=['er_specimen_names','er_sample_names','er_site_names','anisotropy_type','magic_method_codes','magic_experiment_names','result_description','anisotropy_t1','anisotropy_t2','anisotropy_t3','anisotropy_ftest','anisotropy_ftest12','anisotropy_ftest23',\ 'anisotropy_v1_dec','anisotropy_v1_inc','anisotropy_v2_dec','anisotropy_v2_inc','anisotropy_v3_dec','anisotropy_v3_inc'] String="" for i in range (len(rmag_results_header)): String=String+rmag_results_header[i]+'\t' rmag_results_file.write(String[:-1]+"\n") #----------------------------------- # Matrices definitions: # A design matrix # B dot(inv(dot(A.transpose(),A)),A.transpose()) # tmpH is used for sigma calculation (9,15 measurements only) # # Anisotropy tensor: # # |Mx| |s1 s4 s6| |Bx| # |My| = |s4 s2 s5| . |By| # |Mz| |s6 s5 s3| |Bz| # # A matrix (measurement matrix): # Each mesurement yields three lines in "A" matrix # # |Mi | |Bx 0 0 By 0 Bz| |s1| # |Mi+1| = |0 By 0 Bx Bz 0 | . 
|s2| # |Mi+2| |0 0 Bz 0 By Bx| |s3| # |s4| # |s5| # #----------------------------------- Matrices={} for n_pos in [6,9,15]: Matrices[n_pos]={} A=zeros((n_pos*3,6),'f') if n_pos==6: positions=[[0.,0.,1.],[90.,0.,1.],[0.,90.,1.],\ [180.,0.,1.],[270.,0.,1.],[0.,-90.,1.]] if n_pos==15: positions=[[315.,0.,1.],[225.,0.,1.],[180.,0.,1.],[135.,0.,1.],[45.,0.,1.],\ [90.,-45.,1.],[270.,-45.,1.],[270.,0.,1.],[270.,45.,1.],[90.,45.,1.],\ [180.,45.,1.],[180.,-45.,1.],[0.,-90.,1.],[0,-45.,1.],[0,45.,1.]] if n_pos==9: positions=[[315.,0.,1.],[225.,0.,1.],[180.,0.,1.],\ [90.,-45.,1.],[270.,-45.,1.],[270.,0.,1.],\ [180.,45.,1.],[180.,-45.,1.],[0.,-90.,1.]] tmpH=zeros((n_pos,3),'f') # define tmpH for i in range(len(positions)): CART=pmag.dir2cart(positions[i]) a=CART[0];b=CART[1];c=CART[2] A[3*i][0]=a A[3*i][3]=b A[3*i][5]=c A[3*i+1][1]=b A[3*i+1][3]=a A[3*i+1][4]=c A[3*i+2][2]=c A[3*i+2][4]=b A[3*i+2][5]=a tmpH[i][0]=CART[0] tmpH[i][1]=CART[1] tmpH[i][2]=CART[2] B=dot(inv(dot(A.transpose(),A)),A.transpose()) Matrices[n_pos]['A']=A Matrices[n_pos]['B']=B Matrices[n_pos]['tmpH']=tmpH for specimen in specimens: if 'atrmblock' in list(Data[specimen].keys()): #----------------------------------- # aTRM 6 positions #----------------------------------- aniso_logfile.write("-I- Start calculating ATRM tensor for specimen %s\n "%specimen) atrmblock=Data[specimen]['atrmblock'] if len(atrmblock)<6: aniso_logfile.write("-W- specimen %s has not enough measurementf for ATRM calculation\n"%specimen) continue B=Matrices[6]['B'] Reject_specimen = False # The zero field step is a "baseline" # Search the baseline in the ATRM measurement baseline="" Alteration_check="" Alteration_check_index="" baselines=[] # search for baseline in atrm blocks for rec in atrmblock: dec=float(rec['measurement_dec']) inc=float(rec['measurement_inc']) moment=float(rec['measurement_magn_moment']) # find the temperature of the atrm if float(rec['treatment_dc_field'])!=0 and float(rec['treatment_temp'])!=273: atrm_temperature=float(rec['treatment_temp']) # find baseline if float(rec['treatment_dc_field'])==0 and float(rec['treatment_temp'])!=273: baselines.append(array(pmag.dir2cart([dec,inc,moment]))) # Find alteration check #print rec['measurement_number'] if len(baselines)!=0: aniso_logfile.write( "-I- found ATRM baseline for specimen %s\n"%specimen) baselines=array(baselines) baseline=array([mean(baselines[:,0]),mean(baselines[:,1]),mean(baselines[:,2])]) else: baseline=zeros(3,'f') aniso_logfile.write( "-I- No aTRM baseline for specimen %s\n"%specimen) # sort measurements M=zeros([6,3],'f') for rec in atrmblock: dec=float(rec['measurement_dec']) inc=float(rec['measurement_inc']) moment=float(rec['measurement_magn_moment']) CART=array(pmag.dir2cart([dec,inc,moment]))-baseline if float(rec['treatment_dc_field'])==0: # Ignore zero field steps continue if "LT-PTRM-I" in rec['magic_method_codes'].split(":"): # alteration check Alteration_check=CART Alteration_check_dc_field_phi=float(rec['treatment_dc_field_phi']) Alteration_check_dc_field_theta=float(rec['treatment_dc_field_theta']) if Alteration_check_dc_field_phi==0 and Alteration_check_dc_field_theta==0 : Alteration_check_index=0 if Alteration_check_dc_field_phi==90 and Alteration_check_dc_field_theta==0 : Alteration_check_index=1 if Alteration_check_dc_field_phi==0 and Alteration_check_dc_field_theta==90 : Alteration_check_index=2 if Alteration_check_dc_field_phi==180 and Alteration_check_dc_field_theta==0 : Alteration_check_index=3 if Alteration_check_dc_field_phi==270 and 
Alteration_check_dc_field_theta==0 :
                        Alteration_check_index=4
                    if Alteration_check_dc_field_phi==0 and Alteration_check_dc_field_theta==-90 :
                        Alteration_check_index=5
                    aniso_logfile.write( "-I- found alteration check for specimen %s\n"%specimen)
                    continue

                treatment_dc_field_phi=float(rec['treatment_dc_field_phi'])
                treatment_dc_field_theta=float(rec['treatment_dc_field_theta'])
                treatment_dc_field=float(rec['treatment_dc_field'])

                # +x, M[0]
                if treatment_dc_field_phi==0 and treatment_dc_field_theta==0 :
                    M[0]=CART
                # +y, M[1]
                if treatment_dc_field_phi==90 and treatment_dc_field_theta==0 :
                    M[1]=CART
                # +z, M[2]
                if treatment_dc_field_phi==0 and treatment_dc_field_theta==90 :
                    M[2]=CART
                # -x, M[3]
                if treatment_dc_field_phi==180 and treatment_dc_field_theta==0 :
                    M[3]=CART
                # -y, M[4]
                if treatment_dc_field_phi==270 and treatment_dc_field_theta==0 :
                    M[4]=CART
                # -z, M[5]
                if treatment_dc_field_phi==0 and treatment_dc_field_theta==-90 :
                    M[5]=CART

            # check if at least one measurement is missing
            for i in range(len(M)):
                if M[i][0]==0 and M[i][1]==0 and M[i][2]==0:
                    aniso_logfile.write( "-E- ERROR: missing ATRM data for specimen %s\n"%(specimen))
                    Reject_specimen=True

            # alteration check
            anisotropy_alt=0
            if Alteration_check!="":
                for i in range(len(M)):
                    if Alteration_check_index==i:
                        M_1=sqrt(sum((array(M[i])**2)))
                        M_2=sqrt(sum(Alteration_check**2))
                        diff=abs(M_1-M_2)
                        diff_ratio=old_div(diff,mean([M_1,M_2]))
                        diff_ratio_perc=100*diff_ratio
                        if diff_ratio_perc > anisotropy_alt:
                            anisotropy_alt=diff_ratio_perc
            else:
                aniso_logfile.write( "-W- Warning: no alteration check for specimen %s\n"%specimen)

            # Check the maximum difference between antiparallel directions
            # (+x versus -x, +y versus -y, etc.); if it exceeds the current
            # value it replaces anisotropy_alt.
            for i in range(3):
                M_1=sqrt(sum(array(M[i])**2))
                M_2=sqrt(sum(array(M[i+3])**2))
                diff=abs(M_1-M_2)
                diff_ratio=old_div(diff,max(M_1,M_2))
                diff_ratio_perc=100*diff_ratio
                if diff_ratio_perc>anisotropy_alt:
                    anisotropy_alt=diff_ratio_perc

            if not Reject_specimen:
                # K vector (18 elements: M1[x], M1[y], M1[z], ...)
                K=zeros(18,'f')
                K[0],K[1],K[2]=M[0][0],M[0][1],M[0][2]
                K[3],K[4],K[5]=M[1][0],M[1][1],M[1][2]
                K[6],K[7],K[8]=M[2][0],M[2][1],M[2][2]
                K[9],K[10],K[11]=M[3][0],M[3][1],M[3][2]
                K[12],K[13],K[14]=M[4][0],M[4][1],M[4][2]
                K[15],K[16],K[17]=M[5][0],M[5][1],M[5][2]

                if specimen not in list(Data_anisotropy.keys()):
                    Data_anisotropy[specimen]={}
                aniso_parameters=calculate_aniso_parameters(B,K)
                Data_anisotropy[specimen]['ATRM']=aniso_parameters
                Data_anisotropy[specimen]['ATRM']['anisotropy_alt']="%.2f"%anisotropy_alt
                Data_anisotropy[specimen]['ATRM']['anisotropy_type']="ATRM"
                Data_anisotropy[specimen]['ATRM']['er_sample_name']=atrmblock[0]['er_sample_name']
                Data_anisotropy[specimen]['ATRM']['er_specimen_name']=specimen
                Data_anisotropy[specimen]['ATRM']['er_site_name']=atrmblock[0]['er_site_name']
                Data_anisotropy[specimen]['ATRM']['anisotropy_description']='Hext statistics adapted to ATRM'
                Data_anisotropy[specimen]['ATRM']['magic_experiment_names']=specimen+";ATRM"
                Data_anisotropy[specimen]['ATRM']['magic_method_codes']="LP-AN-TRM:AE-H"
                #Data_anisotropy[specimen]['ATRM']['rmag_anisotropy_name']=specimen

        if 'aarmblock' in list(Data[specimen].keys()):

            #-----------------------------------
            # AARM - 6, 9 or 15 positions
            #-----------------------------------

            aniso_logfile.write( "-I- Start calculating AARM tensors for specimen %s\n"%specimen)

            aarmblock=Data[specimen]['aarmblock']
            if len(aarmblock)<12:
                aniso_logfile.write( "-W- WARNING: not enough AARM measurements for specimen %s\n"%specimen)
                continue
            elif len(aarmblock)==12:
                n_pos=6
                B=Matrices[6]['B']
                M=zeros([6,3],'f')
            elif len(aarmblock)==18:
                n_pos=9
                B=Matrices[9]['B']
                M=zeros([9,3],'f')
            # 15 positions
            elif len(aarmblock)==30:
                n_pos=15
                B=Matrices[15]['B']
                M=zeros([15,3],'f')
            else:
                aniso_logfile.write( "-E- ERROR: number of measurements in AARM block is incorrect for specimen %s\n"%specimen)
                continue

            Reject_specimen = False

            for i in range(n_pos):
                for rec in aarmblock:
                    if float(rec['measurement_number'])==i*2+1:
                        dec=float(rec['measurement_dec'])
                        inc=float(rec['measurement_inc'])
                        moment=float(rec['measurement_magn_moment'])
                        M_baseline=array(pmag.dir2cart([dec,inc,moment]))
                    if float(rec['measurement_number'])==i*2+2:
                        dec=float(rec['measurement_dec'])
                        inc=float(rec['measurement_inc'])
                        moment=float(rec['measurement_magn_moment'])
                        M_arm=array(pmag.dir2cart([dec,inc,moment]))
                        M[i]=M_arm-M_baseline

            K=zeros(3*n_pos,'f')
            for i in range(n_pos):
                K[i*3]=M[i][0]
                K[i*3+1]=M[i][1]
                K[i*3+2]=M[i][2]

            if specimen not in list(Data_anisotropy.keys()):
                Data_anisotropy[specimen]={}
            aniso_parameters=calculate_aniso_parameters(B,K)
            Data_anisotropy[specimen]['AARM']=aniso_parameters
            Data_anisotropy[specimen]['AARM']['anisotropy_alt']=""
            Data_anisotropy[specimen]['AARM']['anisotropy_type']="AARM"
            Data_anisotropy[specimen]['AARM']['er_sample_name']=aarmblock[0]['er_sample_name']
            Data_anisotropy[specimen]['AARM']['er_site_name']=aarmblock[0]['er_site_name']
            Data_anisotropy[specimen]['AARM']['er_specimen_name']=specimen
            Data_anisotropy[specimen]['AARM']['anisotropy_description']='Hext statistics adapted to AARM'
            Data_anisotropy[specimen]['AARM']['magic_experiment_names']=specimen+";AARM"
            Data_anisotropy[specimen]['AARM']['magic_method_codes']="LP-AN-ARM:AE-H"
            #Data_anisotropy[specimen]['AARM']['rmag_anisotropy_name']=specimen

    #-----------------------------------

    specimens=list(Data_anisotropy.keys())
    specimens.sort()

    # remove previous anisotropy data and replace it with the new one:
    s_list=list(Data.keys())
    for sp in s_list:
        if 'AniSpec' in list(Data[sp].keys()):
            del Data[sp]['AniSpec']

    for specimen in specimens:

        # if both AARM and ATRM exist, prefer the AARM !!
        if 'AARM' in list(Data_anisotropy[specimen].keys()):
            TYPES=['AARM']
        if 'ATRM' in list(Data_anisotropy[specimen].keys()):
            TYPES=['ATRM']
        if 'AARM' in list(Data_anisotropy[specimen].keys()) and 'ATRM' in list(Data_anisotropy[specimen].keys()):
            TYPES=['ATRM','AARM']
            aniso_logfile.write( "-W- WARNING: both AARM and ATRM data exist for specimen %s. Using AARM by default. If you prefer using one of them, delete the other!\n"%specimen)

        for TYPE in TYPES:
            String=""
            for i in range(len(rmag_anistropy_header)):
                try:
                    String=String+Data_anisotropy[specimen][TYPE][rmag_anistropy_header[i]]+'\t'
                except:
                    String=String+"%f"%(Data_anisotropy[specimen][TYPE][rmag_anistropy_header[i]])+'\t'
            rmag_anisotropy_file.write(String[:-1]+"\n")

            String=""
            Data_anisotropy[specimen][TYPE]['er_specimen_names']=Data_anisotropy[specimen][TYPE]['er_specimen_name']
            Data_anisotropy[specimen][TYPE]['er_sample_names']=Data_anisotropy[specimen][TYPE]['er_sample_name']
            Data_anisotropy[specimen][TYPE]['er_site_names']=Data_anisotropy[specimen][TYPE]['er_site_name']
            for i in range(len(rmag_results_header)):
                try:
                    String=String+Data_anisotropy[specimen][TYPE][rmag_results_header[i]]+'\t'
                except:
                    String=String+"%f"%(Data_anisotropy[specimen][TYPE][rmag_results_header[i]])+'\t'
            rmag_results_file.write(String[:-1]+"\n")

            if 'AniSpec' not in Data[specimen]:
                Data[specimen]['AniSpec']={}
            Data[specimen]['AniSpec'][TYPE]=Data_anisotropy[specimen][TYPE]

    aniso_logfile.write("------------------------\n")
    aniso_logfile.write("-I- remanence_aniso_magic script finished successfully\n")
    aniso_logfile.write("------------------------\n")

    rmag_anisotropy_file.close()

    print("Anisotropy tensor elements are saved in rmag_anisotropy.txt")
    print("Other anisotropy statistics are saved in rmag_results.txt")
    print("log file is in rmag_anisotropy.log")
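The tensor fit above reduces to one linear least-squares step. Below is a minimal, self-contained numpy sketch of that step (Hext, 1963), not the script's own code: build the design matrix A from the applied-field unit vectors, form B = (A^T A)^-1 A^T once per geometry, and recover the six tensor elements s = B.K from the stacked measurements. The 6-position geometry and the "measurement" values are illustrative assumptions.

import numpy as np

# +x, +y, +z, -x, -y, -z : the ATRM field directions used above
positions = np.array([[1, 0, 0], [0, 1, 0], [0, 0, 1],
                      [-1, 0, 0], [0, -1, 0], [0, 0, -1]], dtype=float)

n_pos = len(positions)
A = np.zeros((n_pos * 3, 6))
for i, (a, b, c) in enumerate(positions):
    # each measured vector (Mx, My, Mz) contributes three rows;
    # columns are the coefficients of s1..s6 in M = S.H
    A[3 * i]     = [a, 0, 0, b, 0, c]
    A[3 * i + 1] = [0, b, 0, a, c, 0]
    A[3 * i + 2] = [0, 0, c, 0, b, a]

B = np.linalg.inv(A.T @ A) @ A.T          # 6 x (3*n_pos) solver matrix

# fake data: a slightly anisotropic diagonal tensor
s_true = np.diag([1.1, 1.0, 0.9])
K = (positions @ s_true).reshape(-1)      # stacked Mx, My, Mz per position

s = B @ K
s = s / s[:3].sum()                       # normalize by the trace, as the script does
print(s)                                  # s1, s2, s3, s4, s5, s6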
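The anisotropy_alt statistic folds in two consistency checks: the LT-PTRM-I alteration check compares a repeated in-field step against the original acquisition (difference over the mean), and the antiparallel check compares the +x/-x, +y/-y, +z/-z pairs (difference over the max). A short sketch with hypothetical vectors:

import numpy as np

def percent_difference(v1, v2, denominator="mean"):
    # 100 * |m1 - m2| over the mean (alteration check) or max (antiparallel check)
    m1, m2 = np.linalg.norm(v1), np.linalg.norm(v2)
    denom = (m1 + m2) / 2 if denominator == "mean" else max(m1, m2)
    return 100 * abs(m1 - m2) / denom

first_px  = np.array([1.00, 0.02, 0.01])   # original +x acquisition (hypothetical)
repeat_px = np.array([0.96, 0.02, 0.01])   # repeated +x alteration check (hypothetical)
print(percent_difference(first_px, repeat_px, "mean"))          # ~4 %
print(percent_difference(first_px, -1.02 * first_px, "max"))    # ~2 %, antiparallel style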
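For the AARM bookkeeping, the SIO protocol assumed above pairs each position i as measurement 2*i+1 (zero-field baseline) and 2*i+2 (in-field step); the ARM for that position is their difference in cartesian coordinates. A sketch with hypothetical steps, where dir2cart is a local stand-in for pmag.dir2cart (x north, y east, z down):

import numpy as np

def dir2cart(dec, inc, moment=1.0):
    # declination/inclination in degrees -> cartesian vector scaled by moment
    d, i = np.radians(dec), np.radians(inc)
    return moment * np.array([np.cos(i) * np.cos(d),
                              np.cos(i) * np.sin(d),
                              np.sin(i)])

baseline = dir2cart(12.0, 45.0, 2.0e-8)   # hypothetical zero-field step
in_field = dir2cart(10.0, 40.0, 5.0e-8)   # hypothetical in-field step
arm = in_field - baseline                 # goes into row i of M, then into K
print(arm)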
def function[main, parameter[]]: constant[ NAME remanence_aniso_magic.py DESCRIPTION This program is similar to aarm_magic.py and atrm_magic.py with minor modifications. Converts magic measurement file with ATRM/AARM data to best-fit tensor (6 elements plus sigma) following Hext (1963), and calculates F-test statistics. Comments: - infield steps are marked with method codes LT-T-I:LP-AN-TRM; LT-AF-I:LP-AN-ARM - zerofield steps are marked with method codes LT-T-Z:LP-AN-TRM; LT-AF-Z:LP-AN-ARM - alteration check is marked with method codes LT-PTRM-I:LP-AN-TRM please notice; - ATRM: The program uses treatment_dc_field_phi/treatment_dc_field_theta columns to infer the direction of the applied field (this is a change from atrm_magic.py) - ATRM: zerofield (baseline) magnetization is subtructed from all infield measurements - AARM: The program uses measurement number (running number) to to infer the direction of the applied field assuming the SIO protocol for 6,9,15 measurements scheme. See cookbook for diagram and details. - AARM: zerofield (baseline) are assumed to be before any infield, and the baseline is subtructed from the subsequent infield magnetization. SYNTAX remanence_aniso_magic.py [-h] [command line options] OPTIONS -h prints help message and quits -f FILE: specify input file, default is magic_measurements.txt INPUT magic measurement file with ATRM and/or AARM data. if both types of measurements exist then the program calculates both. OUTPUT rmag_anisotropy.log -I- information -W- Warning -E- Error rmag_anistropy.txt: This file contains in addition to some some magic information the following: - anistropy tensor s1 to s6 normalized by the trace: |Mx| |s1 s4 s6| |Bx| |My| = |s4 s2 s5| . |By| |Mz| |s6 s5 s3| |Bz| - anisotropy_sigma (Hext, 1963) - anisotropy_alt (altertion check for ATRM in units of %): 100* [abs(M_first-Mlast)/max(M_first,M_last)] - rmag_results.txt: This file contains in addition to some magic information the follow(ing: - anisotropy_t1,anisotropy_t2,anisotropy_t3 : eigenvalues - anisotropy_v*_dec,anisotropy_v*_inc: declination/inclination of the eigenvectors - anisotropy_ftest,anisotropy_ftest12,anisotropy_ftest13 - (the crtical F for 95% confidence level of anistropy is given in result_description column). ] variable[meas_file] assign[=] constant[magic_measurements.txt] variable[args] assign[=] name[sys].argv variable[dir_path] assign[=] constant[.] 
if compare[constant[-WD] in name[args]] begin[:] variable[ind] assign[=] call[name[args].index, parameter[constant[-WD]]] variable[dir_path] assign[=] call[name[args]][binary_operation[name[ind] + constant[1]]] if compare[constant[-h] in name[args]] begin[:] call[name[print], parameter[name[main].__doc__]] call[name[sys].exit, parameter[]] if compare[constant[-f] in name[args]] begin[:] variable[ind] assign[=] call[name[args].index, parameter[constant[-f]]] variable[meas_file] assign[=] call[name[sys].argv][binary_operation[name[ind] + constant[1]]] variable[WD] assign[=] name[dir_path] def function[get_Data, parameter[magic_file]]: variable[Data] assign[=] dictionary[[], []] <ast.Try object at 0x7da1b0454700> for taget[name[rec]] in starred[name[meas_data]] begin[:] variable[s] assign[=] call[name[rec]][constant[er_specimen_name]] variable[method_codes] assign[=] call[call[name[rec]][constant[magic_method_codes]].strip, parameter[constant[ ]]] call[name[method_codes].replace, parameter[constant[ ], constant[]]] variable[methods] assign[=] call[name[method_codes].split, parameter[constant[:]]] if compare[constant[LP-AN-TRM] in name[methods]] begin[:] if compare[name[s] <ast.NotIn object at 0x7da2590d7190> call[name[list], parameter[call[name[Data].keys, parameter[]]]]] begin[:] call[name[Data]][name[s]] assign[=] dictionary[[], []] if compare[constant[atrmblock] <ast.NotIn object at 0x7da2590d7190> call[name[list], parameter[call[call[name[Data]][name[s]].keys, parameter[]]]]] begin[:] call[call[name[Data]][name[s]]][constant[atrmblock]] assign[=] list[[]] call[call[call[name[Data]][name[s]]][constant[atrmblock]].append, parameter[name[rec]]] if compare[constant[LP-AN-ARM] in name[methods]] begin[:] if compare[name[s] <ast.NotIn object at 0x7da2590d7190> call[name[list], parameter[call[name[Data].keys, parameter[]]]]] begin[:] call[name[Data]][name[s]] assign[=] dictionary[[], []] if compare[constant[aarmblock] <ast.NotIn object at 0x7da2590d7190> call[name[list], parameter[call[call[name[Data]][name[s]].keys, parameter[]]]]] begin[:] call[call[name[Data]][name[s]]][constant[aarmblock]] assign[=] list[[]] call[call[call[name[Data]][name[s]]][constant[aarmblock]].append, parameter[name[rec]]] return[name[Data]] def function[calculate_aniso_parameters, parameter[B, K]]: variable[aniso_parameters] assign[=] dictionary[[], []] variable[S_bs] assign[=] call[name[dot], parameter[name[B], name[K]]] variable[trace] assign[=] binary_operation[binary_operation[call[name[S_bs]][constant[0]] + call[name[S_bs]][constant[1]]] + call[name[S_bs]][constant[2]]] variable[S_bs] assign[=] call[name[old_div], parameter[name[S_bs], name[trace]]] <ast.Tuple object at 0x7da1b050a680> assign[=] tuple[[<ast.Subscript object at 0x7da1b050a4a0>, <ast.Subscript object at 0x7da1b050a410>, <ast.Subscript object at 0x7da1b050a380>, <ast.Subscript object at 0x7da1b050a2f0>, <ast.Subscript object at 0x7da1b050a260>, <ast.Subscript object at 0x7da1b050a1d0>]] variable[s_matrix] assign[=] list[[<ast.List object at 0x7da1b050a0b0>, <ast.List object at 0x7da1b0509ff0>, <ast.List object at 0x7da1b0509f30>]] <ast.Tuple object at 0x7da1b0509e40> assign[=] call[name[eig], parameter[name[s_matrix]]] variable[t] assign[=] call[name[list], parameter[name[t]]] variable[t1] assign[=] call[name[max], parameter[name[t]]] variable[ix_1] assign[=] call[name[t].index, parameter[name[t1]]] variable[t3] assign[=] call[name[min], parameter[name[t]]] variable[ix_3] assign[=] call[name[t].index, parameter[name[t3]]] for taget[name[tt]] in 
starred[call[name[range], parameter[constant[3]]]] begin[:] if <ast.BoolOp object at 0x7da1b05096f0> begin[:] variable[t2] assign[=] call[name[t]][name[tt]] variable[ix_2] assign[=] call[name[t].index, parameter[name[t2]]] variable[v1] assign[=] list[[<ast.Subscript object at 0x7da1b0522260>, <ast.Subscript object at 0x7da1b0522170>, <ast.Subscript object at 0x7da1b0522080>]] variable[v2] assign[=] list[[<ast.Subscript object at 0x7da1b0521f00>, <ast.Subscript object at 0x7da1b0521e10>, <ast.Subscript object at 0x7da1b0521d20>]] variable[v3] assign[=] list[[<ast.Subscript object at 0x7da1b0521ba0>, <ast.Subscript object at 0x7da1b0521ab0>, <ast.Subscript object at 0x7da1b05219c0>]] variable[DIR_v1] assign[=] call[name[pmag].cart2dir, parameter[name[v1]]] variable[DIR_v2] assign[=] call[name[pmag].cart2dir, parameter[name[v2]]] variable[DIR_v3] assign[=] call[name[pmag].cart2dir, parameter[name[v3]]] call[name[aniso_parameters]][constant[anisotropy_s1]] assign[=] binary_operation[constant[%f] <ast.Mod object at 0x7da2590d6920> name[s1]] call[name[aniso_parameters]][constant[anisotropy_s2]] assign[=] binary_operation[constant[%f] <ast.Mod object at 0x7da2590d6920> name[s2]] call[name[aniso_parameters]][constant[anisotropy_s3]] assign[=] binary_operation[constant[%f] <ast.Mod object at 0x7da2590d6920> name[s3]] call[name[aniso_parameters]][constant[anisotropy_s4]] assign[=] binary_operation[constant[%f] <ast.Mod object at 0x7da2590d6920> name[s4]] call[name[aniso_parameters]][constant[anisotropy_s5]] assign[=] binary_operation[constant[%f] <ast.Mod object at 0x7da2590d6920> name[s5]] call[name[aniso_parameters]][constant[anisotropy_s6]] assign[=] binary_operation[constant[%f] <ast.Mod object at 0x7da2590d6920> name[s6]] call[name[aniso_parameters]][constant[anisotropy_degree]] assign[=] binary_operation[constant[%f] <ast.Mod object at 0x7da2590d6920> call[name[old_div], parameter[name[t1], name[t3]]]] call[name[aniso_parameters]][constant[anisotropy_t1]] assign[=] binary_operation[constant[%f] <ast.Mod object at 0x7da2590d6920> name[t1]] call[name[aniso_parameters]][constant[anisotropy_t2]] assign[=] binary_operation[constant[%f] <ast.Mod object at 0x7da2590d6920> name[t2]] call[name[aniso_parameters]][constant[anisotropy_t3]] assign[=] binary_operation[constant[%f] <ast.Mod object at 0x7da2590d6920> name[t3]] call[name[aniso_parameters]][constant[anisotropy_v1_dec]] assign[=] binary_operation[constant[%.1f] <ast.Mod object at 0x7da2590d6920> call[name[DIR_v1]][constant[0]]] call[name[aniso_parameters]][constant[anisotropy_v1_inc]] assign[=] binary_operation[constant[%.1f] <ast.Mod object at 0x7da2590d6920> call[name[DIR_v1]][constant[1]]] call[name[aniso_parameters]][constant[anisotropy_v2_dec]] assign[=] binary_operation[constant[%.1f] <ast.Mod object at 0x7da2590d6920> call[name[DIR_v2]][constant[0]]] call[name[aniso_parameters]][constant[anisotropy_v2_inc]] assign[=] binary_operation[constant[%.1f] <ast.Mod object at 0x7da2590d6920> call[name[DIR_v2]][constant[1]]] call[name[aniso_parameters]][constant[anisotropy_v3_dec]] assign[=] binary_operation[constant[%.1f] <ast.Mod object at 0x7da2590d6920> call[name[DIR_v3]][constant[0]]] call[name[aniso_parameters]][constant[anisotropy_v3_inc]] assign[=] binary_operation[constant[%.1f] <ast.Mod object at 0x7da2590d6920> call[name[DIR_v3]][constant[1]]] if <ast.BoolOp object at 0x7da20c76dab0> begin[:] variable[n_pos] assign[=] call[name[old_div], parameter[call[name[len], parameter[name[K]]], constant[3]]] variable[tmpH] assign[=] 
call[call[name[Matrices]][name[n_pos]]][constant[tmpH]] variable[a] assign[=] name[s_matrix] variable[S] assign[=] constant[0.0] variable[comp] assign[=] call[name[zeros], parameter[binary_operation[name[n_pos] * constant[3]], constant[f]]] for taget[name[i]] in starred[call[name[range], parameter[name[n_pos]]]] begin[:] for taget[name[j]] in starred[call[name[range], parameter[constant[3]]]] begin[:] variable[index] assign[=] binary_operation[binary_operation[name[i] * constant[3]] + name[j]] variable[compare] assign[=] binary_operation[binary_operation[binary_operation[call[call[name[a]][name[j]]][constant[0]] * call[call[name[tmpH]][name[i]]][constant[0]]] + binary_operation[call[call[name[a]][name[j]]][constant[1]] * call[call[name[tmpH]][name[i]]][constant[1]]]] + binary_operation[call[call[name[a]][name[j]]][constant[2]] * call[call[name[tmpH]][name[i]]][constant[2]]]] call[name[comp]][name[index]] assign[=] name[compare] for taget[name[i]] in starred[call[name[range], parameter[binary_operation[name[n_pos] * constant[3]]]]] begin[:] variable[d] assign[=] binary_operation[call[name[old_div], parameter[call[name[K]][name[i]], name[trace]]] - call[name[comp]][name[i]]] <ast.AugAssign object at 0x7da20c76f5b0> variable[nf] assign[=] call[name[float], parameter[binary_operation[binary_operation[name[n_pos] * constant[3]] - constant[6]]]] if compare[name[S] greater[>] constant[0]] begin[:] variable[sigma] assign[=] call[name[math].sqrt, parameter[call[name[old_div], parameter[name[S], name[nf]]]]] variable[hpars] assign[=] call[name[pmag].dohext, parameter[name[nf], name[sigma], list[[<ast.Name object at 0x7da20c76c4c0>, <ast.Name object at 0x7da20c76cee0>, <ast.Name object at 0x7da20c76cd00>, <ast.Name object at 0x7da20c76ec20>, <ast.Name object at 0x7da20c76e110>, <ast.Name object at 0x7da20c76d210>]]]] call[name[aniso_parameters]][constant[anisotropy_sigma]] assign[=] binary_operation[constant[%f] <ast.Mod object at 0x7da2590d6920> name[sigma]] call[name[aniso_parameters]][constant[anisotropy_ftest]] assign[=] binary_operation[constant[%f] <ast.Mod object at 0x7da2590d6920> call[name[hpars]][constant[F]]] call[name[aniso_parameters]][constant[anisotropy_ftest12]] assign[=] binary_operation[constant[%f] <ast.Mod object at 0x7da2590d6920> call[name[hpars]][constant[F12]]] call[name[aniso_parameters]][constant[anisotropy_ftest23]] assign[=] binary_operation[constant[%f] <ast.Mod object at 0x7da2590d6920> call[name[hpars]][constant[F23]]] call[name[aniso_parameters]][constant[result_description]] assign[=] binary_operation[constant[Critical F: %s] <ast.Mod object at 0x7da2590d6920> call[name[hpars]][constant[F_crit]]] call[name[aniso_parameters]][constant[anisotropy_F_crit]] assign[=] binary_operation[constant[%f] <ast.Mod object at 0x7da2590d6920> call[name[float], parameter[call[name[hpars]][constant[F_crit]]]]] call[name[aniso_parameters]][constant[anisotropy_n]] assign[=] name[n_pos] return[name[aniso_parameters]] variable[aniso_logfile] assign[=] call[name[open], parameter[binary_operation[name[WD] + constant[/rmag_anisotropy.log]], constant[w]]] call[name[aniso_logfile].write, parameter[constant[------------------------ ]]] call[name[aniso_logfile].write, parameter[constant[-I- Start rmag anisrotropy script ]]] call[name[aniso_logfile].write, parameter[constant[------------------------ ]]] variable[Data] assign[=] call[name[get_Data], parameter[name[meas_file]]] call[name[aniso_logfile].write, parameter[binary_operation[constant[-I- Open measurement file %s ] <ast.Mod object at 
0x7da2590d6920> name[meas_file]]]] variable[Data_anisotropy] assign[=] dictionary[[], []] variable[specimens] assign[=] call[name[list], parameter[call[name[Data].keys, parameter[]]]] call[name[specimens].sort, parameter[]] variable[rmag_anisotropy_file] assign[=] call[name[open], parameter[binary_operation[name[WD] + constant[/rmag_anisotropy.txt]], constant[w]]] call[name[rmag_anisotropy_file].write, parameter[constant[tab rmag_anisotropy ]]] variable[rmag_results_file] assign[=] call[name[open], parameter[binary_operation[name[WD] + constant[/rmag_results.txt]], constant[w]]] call[name[rmag_results_file].write, parameter[constant[tab rmag_results ]]] variable[rmag_anistropy_header] assign[=] list[[<ast.Constant object at 0x7da1b05bf970>, <ast.Constant object at 0x7da1b05bf940>, <ast.Constant object at 0x7da1b05bf910>, <ast.Constant object at 0x7da1b05bfa60>, <ast.Constant object at 0x7da1b05be890>, <ast.Constant object at 0x7da1b05be8c0>, <ast.Constant object at 0x7da1b05be830>, <ast.Constant object at 0x7da1b05be860>, <ast.Constant object at 0x7da1b05be8f0>, <ast.Constant object at 0x7da1b05be800>, <ast.Constant object at 0x7da1b05be920>, <ast.Constant object at 0x7da1b05bf640>, <ast.Constant object at 0x7da1b05bf5e0>, <ast.Constant object at 0x7da1b05bf550>, <ast.Constant object at 0x7da1b05bf580>, <ast.Constant object at 0x7da1b05bf5b0>]] variable[String] assign[=] constant[] for taget[name[i]] in starred[call[name[range], parameter[call[name[len], parameter[name[rmag_anistropy_header]]]]]] begin[:] variable[String] assign[=] binary_operation[binary_operation[name[String] + call[name[rmag_anistropy_header]][name[i]]] + constant[ ]] call[name[rmag_anisotropy_file].write, parameter[binary_operation[call[name[String]][<ast.Slice object at 0x7da1b05bee60>] + constant[ ]]]] variable[rmag_results_header] assign[=] list[[<ast.Constant object at 0x7da1b05bf130>, <ast.Constant object at 0x7da1b05beda0>, <ast.Constant object at 0x7da1b05bf160>, <ast.Constant object at 0x7da1b05bf310>, <ast.Constant object at 0x7da1b05bf280>, <ast.Constant object at 0x7da1b05bf2b0>, <ast.Constant object at 0x7da1b05bf1c0>, <ast.Constant object at 0x7da1b05bf190>, <ast.Constant object at 0x7da1b05bf1f0>, <ast.Constant object at 0x7da1b05bf220>, <ast.Constant object at 0x7da1b05bf250>, <ast.Constant object at 0x7da1b05bf2e0>, <ast.Constant object at 0x7da1b05bf340>, <ast.Constant object at 0x7da1b05bf430>, <ast.Constant object at 0x7da1b05bf4f0>, <ast.Constant object at 0x7da1b05bf460>, <ast.Constant object at 0x7da1b05bf490>, <ast.Constant object at 0x7da1b05bf4c0>, <ast.Constant object at 0x7da1b05bf520>]] variable[String] assign[=] constant[] for taget[name[i]] in starred[call[name[range], parameter[call[name[len], parameter[name[rmag_results_header]]]]]] begin[:] variable[String] assign[=] binary_operation[binary_operation[name[String] + call[name[rmag_results_header]][name[i]]] + constant[ ]] call[name[rmag_results_file].write, parameter[binary_operation[call[name[String]][<ast.Slice object at 0x7da1b05bfd00>] + constant[ ]]]] variable[Matrices] assign[=] dictionary[[], []] for taget[name[n_pos]] in starred[list[[<ast.Constant object at 0x7da1b05be2c0>, <ast.Constant object at 0x7da1b05be290>, <ast.Constant object at 0x7da1b05be260>]]] begin[:] call[name[Matrices]][name[n_pos]] assign[=] dictionary[[], []] variable[A] assign[=] call[name[zeros], parameter[tuple[[<ast.BinOp object at 0x7da1b0336710>, <ast.Constant object at 0x7da1b03340d0>]], constant[f]]] if compare[name[n_pos] equal[==] constant[6]] 
begin[:] variable[positions] assign[=] list[[<ast.List object at 0x7da1b0334e50>, <ast.List object at 0x7da1b0336320>, <ast.List object at 0x7da1b03346d0>, <ast.List object at 0x7da1b03366e0>, <ast.List object at 0x7da1b03348e0>, <ast.List object at 0x7da1b0336bc0>]] if compare[name[n_pos] equal[==] constant[15]] begin[:] variable[positions] assign[=] list[[<ast.List object at 0x7da1b0335b70>, <ast.List object at 0x7da1b03345b0>, <ast.List object at 0x7da1b0334100>, <ast.List object at 0x7da1b0335720>, <ast.List object at 0x7da1b03360e0>, <ast.List object at 0x7da1b0337790>, <ast.List object at 0x7da1b0337e50>, <ast.List object at 0x7da1b0335d80>, <ast.List object at 0x7da1b0334ac0>, <ast.List object at 0x7da1b0336440>, <ast.List object at 0x7da1b0335510>, <ast.List object at 0x7da1b0336a10>, <ast.List object at 0x7da1b0334a90>, <ast.List object at 0x7da18dc9a680>, <ast.List object at 0x7da18dc9a080>]] if compare[name[n_pos] equal[==] constant[9]] begin[:] variable[positions] assign[=] list[[<ast.List object at 0x7da18dc98700>, <ast.List object at 0x7da18dc9a110>, <ast.List object at 0x7da18dc99e70>, <ast.List object at 0x7da18dc98a90>, <ast.List object at 0x7da1b05cb760>, <ast.List object at 0x7da1b05ca770>, <ast.List object at 0x7da1b05cad40>, <ast.List object at 0x7da1b05ca2c0>, <ast.List object at 0x7da1b05c9f90>]] variable[tmpH] assign[=] call[name[zeros], parameter[tuple[[<ast.Name object at 0x7da1b05cabc0>, <ast.Constant object at 0x7da1b05cae60>]], constant[f]]] for taget[name[i]] in starred[call[name[range], parameter[call[name[len], parameter[name[positions]]]]]] begin[:] variable[CART] assign[=] call[name[pmag].dir2cart, parameter[call[name[positions]][name[i]]]] variable[a] assign[=] call[name[CART]][constant[0]] variable[b] assign[=] call[name[CART]][constant[1]] variable[c] assign[=] call[name[CART]][constant[2]] call[call[name[A]][binary_operation[constant[3] * name[i]]]][constant[0]] assign[=] name[a] call[call[name[A]][binary_operation[constant[3] * name[i]]]][constant[3]] assign[=] name[b] call[call[name[A]][binary_operation[constant[3] * name[i]]]][constant[5]] assign[=] name[c] call[call[name[A]][binary_operation[binary_operation[constant[3] * name[i]] + constant[1]]]][constant[1]] assign[=] name[b] call[call[name[A]][binary_operation[binary_operation[constant[3] * name[i]] + constant[1]]]][constant[3]] assign[=] name[a] call[call[name[A]][binary_operation[binary_operation[constant[3] * name[i]] + constant[1]]]][constant[4]] assign[=] name[c] call[call[name[A]][binary_operation[binary_operation[constant[3] * name[i]] + constant[2]]]][constant[2]] assign[=] name[c] call[call[name[A]][binary_operation[binary_operation[constant[3] * name[i]] + constant[2]]]][constant[4]] assign[=] name[b] call[call[name[A]][binary_operation[binary_operation[constant[3] * name[i]] + constant[2]]]][constant[5]] assign[=] name[a] call[call[name[tmpH]][name[i]]][constant[0]] assign[=] call[name[CART]][constant[0]] call[call[name[tmpH]][name[i]]][constant[1]] assign[=] call[name[CART]][constant[1]] call[call[name[tmpH]][name[i]]][constant[2]] assign[=] call[name[CART]][constant[2]] variable[B] assign[=] call[name[dot], parameter[call[name[inv], parameter[call[name[dot], parameter[call[name[A].transpose, parameter[]], name[A]]]]], call[name[A].transpose, parameter[]]]] call[call[name[Matrices]][name[n_pos]]][constant[A]] assign[=] name[A] call[call[name[Matrices]][name[n_pos]]][constant[B]] assign[=] name[B] call[call[name[Matrices]][name[n_pos]]][constant[tmpH]] assign[=] name[tmpH] for 
taget[name[specimen]] in starred[name[specimens]] begin[:] if compare[constant[atrmblock] in call[name[list], parameter[call[call[name[Data]][name[specimen]].keys, parameter[]]]]] begin[:] call[name[aniso_logfile].write, parameter[binary_operation[constant[-I- Start calculating ATRM tensor for specimen %s ] <ast.Mod object at 0x7da2590d6920> name[specimen]]]] variable[atrmblock] assign[=] call[call[name[Data]][name[specimen]]][constant[atrmblock]] if compare[call[name[len], parameter[name[atrmblock]]] less[<] constant[6]] begin[:] call[name[aniso_logfile].write, parameter[binary_operation[constant[-W- specimen %s has not enough measurementf for ATRM calculation ] <ast.Mod object at 0x7da2590d6920> name[specimen]]]] continue variable[B] assign[=] call[call[name[Matrices]][constant[6]]][constant[B]] variable[Reject_specimen] assign[=] constant[False] variable[baseline] assign[=] constant[] variable[Alteration_check] assign[=] constant[] variable[Alteration_check_index] assign[=] constant[] variable[baselines] assign[=] list[[]] for taget[name[rec]] in starred[name[atrmblock]] begin[:] variable[dec] assign[=] call[name[float], parameter[call[name[rec]][constant[measurement_dec]]]] variable[inc] assign[=] call[name[float], parameter[call[name[rec]][constant[measurement_inc]]]] variable[moment] assign[=] call[name[float], parameter[call[name[rec]][constant[measurement_magn_moment]]]] if <ast.BoolOp object at 0x7da1b05b4220> begin[:] variable[atrm_temperature] assign[=] call[name[float], parameter[call[name[rec]][constant[treatment_temp]]]] if <ast.BoolOp object at 0x7da1b05b4760> begin[:] call[name[baselines].append, parameter[call[name[array], parameter[call[name[pmag].dir2cart, parameter[list[[<ast.Name object at 0x7da1b05b5030>, <ast.Name object at 0x7da1b05b4b80>, <ast.Name object at 0x7da1b05b4b50>]]]]]]]] if compare[call[name[len], parameter[name[baselines]]] not_equal[!=] constant[0]] begin[:] call[name[aniso_logfile].write, parameter[binary_operation[constant[-I- found ATRM baseline for specimen %s ] <ast.Mod object at 0x7da2590d6920> name[specimen]]]] variable[baselines] assign[=] call[name[array], parameter[name[baselines]]] variable[baseline] assign[=] call[name[array], parameter[list[[<ast.Call object at 0x7da1b05b5c60>, <ast.Call object at 0x7da1b05b5b10>, <ast.Call object at 0x7da1b05b5a80>]]]] variable[M] assign[=] call[name[zeros], parameter[list[[<ast.Constant object at 0x7da1b05e1840>, <ast.Constant object at 0x7da1b05e1900>]], constant[f]]] for taget[name[rec]] in starred[name[atrmblock]] begin[:] variable[dec] assign[=] call[name[float], parameter[call[name[rec]][constant[measurement_dec]]]] variable[inc] assign[=] call[name[float], parameter[call[name[rec]][constant[measurement_inc]]]] variable[moment] assign[=] call[name[float], parameter[call[name[rec]][constant[measurement_magn_moment]]]] variable[CART] assign[=] binary_operation[call[name[array], parameter[call[name[pmag].dir2cart, parameter[list[[<ast.Name object at 0x7da1b05e2e30>, <ast.Name object at 0x7da1b05e2e00>, <ast.Name object at 0x7da1b05e2dd0>]]]]]] - name[baseline]] if compare[call[name[float], parameter[call[name[rec]][constant[treatment_dc_field]]]] equal[==] constant[0]] begin[:] continue if compare[constant[LT-PTRM-I] in call[call[name[rec]][constant[magic_method_codes]].split, parameter[constant[:]]]] begin[:] variable[Alteration_check] assign[=] name[CART] variable[Alteration_check_dc_field_phi] assign[=] call[name[float], parameter[call[name[rec]][constant[treatment_dc_field_phi]]]] 
variable[Alteration_check_dc_field_theta] assign[=] call[name[float], parameter[call[name[rec]][constant[treatment_dc_field_theta]]]] if <ast.BoolOp object at 0x7da1b05e21d0> begin[:] variable[Alteration_check_index] assign[=] constant[0] if <ast.BoolOp object at 0x7da1b05e2050> begin[:] variable[Alteration_check_index] assign[=] constant[1] if <ast.BoolOp object at 0x7da1b05e24a0> begin[:] variable[Alteration_check_index] assign[=] constant[2] if <ast.BoolOp object at 0x7da1b05e1e10> begin[:] variable[Alteration_check_index] assign[=] constant[3] if <ast.BoolOp object at 0x7da1b05e28f0> begin[:] variable[Alteration_check_index] assign[=] constant[4] if <ast.BoolOp object at 0x7da1b01e7f40> begin[:] variable[Alteration_check_index] assign[=] constant[5] call[name[aniso_logfile].write, parameter[binary_operation[constant[-I- found alteration check for specimen %s ] <ast.Mod object at 0x7da2590d6920> name[specimen]]]] continue variable[treatment_dc_field_phi] assign[=] call[name[float], parameter[call[name[rec]][constant[treatment_dc_field_phi]]]] variable[treatment_dc_field_theta] assign[=] call[name[float], parameter[call[name[rec]][constant[treatment_dc_field_theta]]]] variable[treatment_dc_field] assign[=] call[name[float], parameter[call[name[rec]][constant[treatment_dc_field]]]] if <ast.BoolOp object at 0x7da1b01e6c20> begin[:] call[name[M]][constant[0]] assign[=] name[CART] if <ast.BoolOp object at 0x7da1b01e5cc0> begin[:] call[name[M]][constant[1]] assign[=] name[CART] if <ast.BoolOp object at 0x7da1b01e6290> begin[:] call[name[M]][constant[2]] assign[=] name[CART] if <ast.BoolOp object at 0x7da1b01e4f70> begin[:] call[name[M]][constant[3]] assign[=] name[CART] if <ast.BoolOp object at 0x7da1b01e6d70> begin[:] call[name[M]][constant[4]] assign[=] name[CART] if <ast.BoolOp object at 0x7da1b01e7d60> begin[:] call[name[M]][constant[5]] assign[=] name[CART] for taget[name[i]] in starred[call[name[range], parameter[call[name[len], parameter[name[M]]]]]] begin[:] if <ast.BoolOp object at 0x7da1b01f9a20> begin[:] call[name[aniso_logfile].write, parameter[binary_operation[constant[-E- ERROR: missing atrm data for specimen %s ] <ast.Mod object at 0x7da2590d6920> name[specimen]]]] variable[Reject_specimen] assign[=] constant[True] variable[anisotropy_alt] assign[=] constant[0] if compare[name[Alteration_check] not_equal[!=] constant[]] begin[:] for taget[name[i]] in starred[call[name[range], parameter[call[name[len], parameter[name[M]]]]]] begin[:] if compare[name[Alteration_check_index] equal[==] name[i]] begin[:] variable[M_1] assign[=] call[name[sqrt], parameter[call[name[sum], parameter[binary_operation[call[name[array], parameter[call[name[M]][name[i]]]] ** constant[2]]]]]] variable[M_2] assign[=] call[name[sqrt], parameter[call[name[sum], parameter[binary_operation[name[Alteration_check] ** constant[2]]]]]] variable[diff] assign[=] call[name[abs], parameter[binary_operation[name[M_1] - name[M_2]]]] variable[diff_ratio] assign[=] call[name[old_div], parameter[name[diff], call[name[mean], parameter[list[[<ast.Name object at 0x7da1b047cc10>, <ast.Name object at 0x7da1b047ea10>]]]]]] variable[diff_ratio_perc] assign[=] binary_operation[constant[100] * name[diff_ratio]] if compare[name[diff_ratio_perc] greater[>] name[anisotropy_alt]] begin[:] variable[anisotropy_alt] assign[=] name[diff_ratio_perc] for taget[name[i]] in starred[call[name[range], parameter[constant[3]]]] begin[:] variable[M_1] assign[=] call[name[sqrt], parameter[call[name[sum], parameter[binary_operation[call[name[array], 
parameter[call[name[M]][name[i]]]] ** constant[2]]]]]] variable[M_2] assign[=] call[name[sqrt], parameter[call[name[sum], parameter[binary_operation[call[name[array], parameter[call[name[M]][binary_operation[name[i] + constant[3]]]]] ** constant[2]]]]]] variable[diff] assign[=] call[name[abs], parameter[binary_operation[name[M_1] - name[M_2]]]] variable[diff_ratio] assign[=] call[name[old_div], parameter[name[diff], call[name[max], parameter[name[M_1], name[M_2]]]]] variable[diff_ratio_perc] assign[=] binary_operation[constant[100] * name[diff_ratio]] if compare[name[diff_ratio_perc] greater[>] name[anisotropy_alt]] begin[:] variable[anisotropy_alt] assign[=] name[diff_ratio_perc] if <ast.UnaryOp object at 0x7da1b047c0a0> begin[:] variable[K] assign[=] call[name[zeros], parameter[constant[18], constant[f]]] <ast.Tuple object at 0x7da1b047cd30> assign[=] tuple[[<ast.Subscript object at 0x7da1b047f820>, <ast.Subscript object at 0x7da1b047d4b0>, <ast.Subscript object at 0x7da1b047f250>]] <ast.Tuple object at 0x7da1b05ff790> assign[=] tuple[[<ast.Subscript object at 0x7da1b0574670>, <ast.Subscript object at 0x7da1b0575000>, <ast.Subscript object at 0x7da1b0574f10>]] <ast.Tuple object at 0x7da1b0574eb0> assign[=] tuple[[<ast.Subscript object at 0x7da1b05741f0>, <ast.Subscript object at 0x7da1b04e3af0>, <ast.Subscript object at 0x7da1b04e0850>]] <ast.Tuple object at 0x7da1b04e0fd0> assign[=] tuple[[<ast.Subscript object at 0x7da1b04e0400>, <ast.Subscript object at 0x7da1b04e17b0>, <ast.Subscript object at 0x7da1b04e03d0>]] <ast.Tuple object at 0x7da1b04e0c40> assign[=] tuple[[<ast.Subscript object at 0x7da1b04e0b50>, <ast.Subscript object at 0x7da1b04e05e0>, <ast.Subscript object at 0x7da1b04e3460>]] <ast.Tuple object at 0x7da1b04e3790> assign[=] tuple[[<ast.Subscript object at 0x7da1b04e22f0>, <ast.Subscript object at 0x7da1b04e1b40>, <ast.Subscript object at 0x7da1b04e1900>]] if compare[name[specimen] <ast.NotIn object at 0x7da2590d7190> call[name[list], parameter[call[name[Data_anisotropy].keys, parameter[]]]]] begin[:] call[name[Data_anisotropy]][name[specimen]] assign[=] dictionary[[], []] variable[aniso_parameters] assign[=] call[name[calculate_aniso_parameters], parameter[name[B], name[K]]] call[call[name[Data_anisotropy]][name[specimen]]][constant[ATRM]] assign[=] name[aniso_parameters] call[call[call[name[Data_anisotropy]][name[specimen]]][constant[ATRM]]][constant[anisotropy_alt]] assign[=] binary_operation[constant[%.2f] <ast.Mod object at 0x7da2590d6920> name[anisotropy_alt]] call[call[call[name[Data_anisotropy]][name[specimen]]][constant[ATRM]]][constant[anisotropy_type]] assign[=] constant[ATRM] call[call[call[name[Data_anisotropy]][name[specimen]]][constant[ATRM]]][constant[er_sample_name]] assign[=] call[call[name[atrmblock]][constant[0]]][constant[er_sample_name]] call[call[call[name[Data_anisotropy]][name[specimen]]][constant[ATRM]]][constant[er_specimen_name]] assign[=] name[specimen] call[call[call[name[Data_anisotropy]][name[specimen]]][constant[ATRM]]][constant[er_site_name]] assign[=] call[call[name[atrmblock]][constant[0]]][constant[er_site_name]] call[call[call[name[Data_anisotropy]][name[specimen]]][constant[ATRM]]][constant[anisotropy_description]] assign[=] constant[Hext statistics adapted to ATRM] call[call[call[name[Data_anisotropy]][name[specimen]]][constant[ATRM]]][constant[magic_experiment_names]] assign[=] binary_operation[name[specimen] + constant[;ATRM]] call[call[call[name[Data_anisotropy]][name[specimen]]][constant[ATRM]]][constant[magic_method_codes]] 
assign[=] constant[LP-AN-TRM:AE-H] if compare[constant[aarmblock] in call[name[list], parameter[call[call[name[Data]][name[specimen]].keys, parameter[]]]]] begin[:] call[name[aniso_logfile].write, parameter[binary_operation[constant[-I- Start calculating AARM tensors specimen %s ] <ast.Mod object at 0x7da2590d6920> name[specimen]]]] variable[aarmblock] assign[=] call[call[name[Data]][name[specimen]]][constant[aarmblock]] if compare[call[name[len], parameter[name[aarmblock]]] less[<] constant[12]] begin[:] call[name[aniso_logfile].write, parameter[binary_operation[constant[-W- WARNING: not enough aarm measurement for specimen %s ] <ast.Mod object at 0x7da2590d6920> name[specimen]]]] continue variable[Reject_specimen] assign[=] constant[False] for taget[name[i]] in starred[call[name[range], parameter[name[n_pos]]]] begin[:] for taget[name[rec]] in starred[name[aarmblock]] begin[:] if compare[call[name[float], parameter[call[name[rec]][constant[measurement_number]]]] equal[==] binary_operation[binary_operation[name[i] * constant[2]] + constant[1]]] begin[:] variable[dec] assign[=] call[name[float], parameter[call[name[rec]][constant[measurement_dec]]]] variable[inc] assign[=] call[name[float], parameter[call[name[rec]][constant[measurement_inc]]]] variable[moment] assign[=] call[name[float], parameter[call[name[rec]][constant[measurement_magn_moment]]]] variable[M_baseline] assign[=] call[name[array], parameter[call[name[pmag].dir2cart, parameter[list[[<ast.Name object at 0x7da1b0405f00>, <ast.Name object at 0x7da1b0405ed0>, <ast.Name object at 0x7da1b0405ea0>]]]]]] if compare[call[name[float], parameter[call[name[rec]][constant[measurement_number]]]] equal[==] binary_operation[binary_operation[name[i] * constant[2]] + constant[2]]] begin[:] variable[dec] assign[=] call[name[float], parameter[call[name[rec]][constant[measurement_dec]]]] variable[inc] assign[=] call[name[float], parameter[call[name[rec]][constant[measurement_inc]]]] variable[moment] assign[=] call[name[float], parameter[call[name[rec]][constant[measurement_magn_moment]]]] variable[M_arm] assign[=] call[name[array], parameter[call[name[pmag].dir2cart, parameter[list[[<ast.Name object at 0x7da1b04056c0>, <ast.Name object at 0x7da1b0405690>, <ast.Name object at 0x7da1b0405660>]]]]]] call[name[M]][name[i]] assign[=] binary_operation[name[M_arm] - name[M_baseline]] variable[K] assign[=] call[name[zeros], parameter[binary_operation[constant[3] * name[n_pos]], constant[f]]] for taget[name[i]] in starred[call[name[range], parameter[name[n_pos]]]] begin[:] call[name[K]][binary_operation[name[i] * constant[3]]] assign[=] call[call[name[M]][name[i]]][constant[0]] call[name[K]][binary_operation[binary_operation[name[i] * constant[3]] + constant[1]]] assign[=] call[call[name[M]][name[i]]][constant[1]] call[name[K]][binary_operation[binary_operation[name[i] * constant[3]] + constant[2]]] assign[=] call[call[name[M]][name[i]]][constant[2]] if compare[name[specimen] <ast.NotIn object at 0x7da2590d7190> call[name[list], parameter[call[name[Data_anisotropy].keys, parameter[]]]]] begin[:] call[name[Data_anisotropy]][name[specimen]] assign[=] dictionary[[], []] variable[aniso_parameters] assign[=] call[name[calculate_aniso_parameters], parameter[name[B], name[K]]] call[call[name[Data_anisotropy]][name[specimen]]][constant[AARM]] assign[=] name[aniso_parameters] call[call[call[name[Data_anisotropy]][name[specimen]]][constant[AARM]]][constant[anisotropy_alt]] assign[=] constant[] 
call[call[call[name[Data_anisotropy]][name[specimen]]][constant[AARM]]][constant[anisotropy_type]] assign[=] constant[AARM] call[call[call[name[Data_anisotropy]][name[specimen]]][constant[AARM]]][constant[er_sample_name]] assign[=] call[call[name[aarmblock]][constant[0]]][constant[er_sample_name]] call[call[call[name[Data_anisotropy]][name[specimen]]][constant[AARM]]][constant[er_site_name]] assign[=] call[call[name[aarmblock]][constant[0]]][constant[er_site_name]] call[call[call[name[Data_anisotropy]][name[specimen]]][constant[AARM]]][constant[er_specimen_name]] assign[=] name[specimen] call[call[call[name[Data_anisotropy]][name[specimen]]][constant[AARM]]][constant[anisotropy_description]] assign[=] constant[Hext statistics adapted to AARM] call[call[call[name[Data_anisotropy]][name[specimen]]][constant[AARM]]][constant[magic_experiment_names]] assign[=] binary_operation[name[specimen] + constant[;AARM]] call[call[call[name[Data_anisotropy]][name[specimen]]][constant[AARM]]][constant[magic_method_codes]] assign[=] constant[LP-AN-ARM:AE-H] variable[specimens] assign[=] call[name[list], parameter[call[name[Data_anisotropy].keys, parameter[]]]] name[specimens].sort variable[s_list] assign[=] call[name[list], parameter[call[name[Data].keys, parameter[]]]] for taget[name[sp]] in starred[name[s_list]] begin[:] if compare[constant[AniSpec] in call[name[list], parameter[call[call[name[Data]][name[sp]].keys, parameter[]]]]] begin[:] <ast.Delete object at 0x7da1b04fded0> for taget[name[specimen]] in starred[name[specimens]] begin[:] if compare[constant[AARM] in call[name[list], parameter[call[call[name[Data_anisotropy]][name[specimen]].keys, parameter[]]]]] begin[:] variable[TYPES] assign[=] list[[<ast.Constant object at 0x7da1b04fcbe0>]] if compare[constant[ATRM] in call[name[list], parameter[call[call[name[Data_anisotropy]][name[specimen]].keys, parameter[]]]]] begin[:] variable[TYPES] assign[=] list[[<ast.Constant object at 0x7da1b04fcfd0>]] if <ast.BoolOp object at 0x7da1b04fe8c0> begin[:] variable[TYPES] assign[=] list[[<ast.Constant object at 0x7da1b04fc460>, <ast.Constant object at 0x7da1b04fc7c0>]] call[name[aniso_logfile].write, parameter[binary_operation[constant[-W- WARNING: both aarm and atrm data exist for specimen %s. using AARM by default. If you prefer using one of them, delete the other! 
] <ast.Mod object at 0x7da2590d6920> name[specimen]]]] for taget[name[TYPE]] in starred[name[TYPES]] begin[:] variable[String] assign[=] constant[] for taget[name[i]] in starred[call[name[range], parameter[call[name[len], parameter[name[rmag_anistropy_header]]]]]] begin[:] <ast.Try object at 0x7da1b04fff10> call[name[rmag_anisotropy_file].write, parameter[binary_operation[call[name[String]][<ast.Slice object at 0x7da1b04fed10>] + constant[ ]]]] variable[String] assign[=] constant[] call[call[call[name[Data_anisotropy]][name[specimen]]][name[TYPE]]][constant[er_specimen_names]] assign[=] call[call[call[name[Data_anisotropy]][name[specimen]]][name[TYPE]]][constant[er_specimen_name]] call[call[call[name[Data_anisotropy]][name[specimen]]][name[TYPE]]][constant[er_sample_names]] assign[=] call[call[call[name[Data_anisotropy]][name[specimen]]][name[TYPE]]][constant[er_sample_name]] call[call[call[name[Data_anisotropy]][name[specimen]]][name[TYPE]]][constant[er_site_names]] assign[=] call[call[call[name[Data_anisotropy]][name[specimen]]][name[TYPE]]][constant[er_site_name]] for taget[name[i]] in starred[call[name[range], parameter[call[name[len], parameter[name[rmag_results_header]]]]]] begin[:] <ast.Try object at 0x7da1b04ff910> call[name[rmag_results_file].write, parameter[binary_operation[call[name[String]][<ast.Slice object at 0x7da1b04fd750>] + constant[ ]]]] if compare[constant[AniSpec] <ast.NotIn object at 0x7da2590d7190> call[name[Data]][name[specimen]]] begin[:] call[call[name[Data]][name[specimen]]][constant[AniSpec]] assign[=] dictionary[[], []] call[call[call[name[Data]][name[specimen]]][constant[AniSpec]]][name[TYPE]] assign[=] call[call[name[Data_anisotropy]][name[specimen]]][name[TYPE]] call[name[aniso_logfile].write, parameter[constant[------------------------ ]]] call[name[aniso_logfile].write, parameter[constant[-I- remanence_aniso_magic script finished sucsessfuly ]]] call[name[aniso_logfile].write, parameter[constant[------------------------ ]]] call[name[rmag_anisotropy_file].close, parameter[]] call[name[print], parameter[constant[Anisotropy tensors elements are saved in rmag_anistropy.txt]]] call[name[print], parameter[constant[Other anisotropy statistics are saved in rmag_results.txt]]] call[name[print], parameter[constant[log file is in rmag_anisotropy.log]]]
keyword[def] identifier[main] (): literal[string] identifier[meas_file] = literal[string] identifier[args] = identifier[sys] . identifier[argv] identifier[dir_path] = literal[string] keyword[if] literal[string] keyword[in] identifier[args] : identifier[ind] = identifier[args] . identifier[index] ( literal[string] ) identifier[dir_path] = identifier[args] [ identifier[ind] + literal[int] ] keyword[if] literal[string] keyword[in] identifier[args] : identifier[print] ( identifier[main] . identifier[__doc__] ) identifier[sys] . identifier[exit] () keyword[if] literal[string] keyword[in] identifier[args] : identifier[ind] = identifier[args] . identifier[index] ( literal[string] ) identifier[meas_file] = identifier[sys] . identifier[argv] [ identifier[ind] + literal[int] ] keyword[else] : identifier[meas_file] = identifier[dir_path] + literal[string] + identifier[meas_file] identifier[WD] = identifier[dir_path] keyword[def] identifier[get_Data] ( identifier[magic_file] ): identifier[Data] ={} keyword[try] : identifier[meas_data] , identifier[file_type] = identifier[pmag] . identifier[magic_read] ( identifier[magic_file] ) keyword[except] : identifier[print] ( literal[string] ) keyword[return] identifier[Data] keyword[for] identifier[rec] keyword[in] identifier[meas_data] : identifier[s] = identifier[rec] [ literal[string] ] identifier[method_codes] = identifier[rec] [ literal[string] ]. identifier[strip] ( literal[string] ) identifier[method_codes] . identifier[replace] ( literal[string] , literal[string] ) identifier[methods] = identifier[method_codes] . identifier[split] ( literal[string] ) keyword[if] literal[string] keyword[in] identifier[methods] : keyword[if] identifier[s] keyword[not] keyword[in] identifier[list] ( identifier[Data] . identifier[keys] ()): identifier[Data] [ identifier[s] ]={} keyword[if] literal[string] keyword[not] keyword[in] identifier[list] ( identifier[Data] [ identifier[s] ]. identifier[keys] ()): identifier[Data] [ identifier[s] ][ literal[string] ]=[] identifier[Data] [ identifier[s] ][ literal[string] ]. identifier[append] ( identifier[rec] ) keyword[if] literal[string] keyword[in] identifier[methods] : keyword[if] identifier[s] keyword[not] keyword[in] identifier[list] ( identifier[Data] . identifier[keys] ()): identifier[Data] [ identifier[s] ]={} keyword[if] literal[string] keyword[not] keyword[in] identifier[list] ( identifier[Data] [ identifier[s] ]. identifier[keys] ()): identifier[Data] [ identifier[s] ][ literal[string] ]=[] identifier[Data] [ identifier[s] ][ literal[string] ]. 
identifier[append] ( identifier[rec] ) keyword[return] ( identifier[Data] ) keyword[def] identifier[calculate_aniso_parameters] ( identifier[B] , identifier[K] ): identifier[aniso_parameters] ={} identifier[S_bs] = identifier[dot] ( identifier[B] , identifier[K] ) identifier[trace] = identifier[S_bs] [ literal[int] ]+ identifier[S_bs] [ literal[int] ]+ identifier[S_bs] [ literal[int] ] identifier[S_bs] = identifier[old_div] ( identifier[S_bs] , identifier[trace] ) identifier[s1] , identifier[s2] , identifier[s3] , identifier[s4] , identifier[s5] , identifier[s6] = identifier[S_bs] [ literal[int] ], identifier[S_bs] [ literal[int] ], identifier[S_bs] [ literal[int] ], identifier[S_bs] [ literal[int] ], identifier[S_bs] [ literal[int] ], identifier[S_bs] [ literal[int] ] identifier[s_matrix] =[[ identifier[s1] , identifier[s4] , identifier[s6] ],[ identifier[s4] , identifier[s2] , identifier[s5] ],[ identifier[s6] , identifier[s5] , identifier[s3] ]] identifier[t] , identifier[evectors] = identifier[eig] ( identifier[s_matrix] ) identifier[t] = identifier[list] ( identifier[t] ) identifier[t1] = identifier[max] ( identifier[t] ) identifier[ix_1] = identifier[t] . identifier[index] ( identifier[t1] ) identifier[t3] = identifier[min] ( identifier[t] ) identifier[ix_3] = identifier[t] . identifier[index] ( identifier[t3] ) keyword[for] identifier[tt] keyword[in] identifier[range] ( literal[int] ): keyword[if] identifier[t] [ identifier[tt] ]!= identifier[t1] keyword[and] identifier[t] [ identifier[tt] ]!= identifier[t3] : identifier[t2] = identifier[t] [ identifier[tt] ] identifier[ix_2] = identifier[t] . identifier[index] ( identifier[t2] ) identifier[v1] =[ identifier[evectors] [ literal[int] ][ identifier[ix_1] ], identifier[evectors] [ literal[int] ][ identifier[ix_1] ], identifier[evectors] [ literal[int] ][ identifier[ix_1] ]] identifier[v2] =[ identifier[evectors] [ literal[int] ][ identifier[ix_2] ], identifier[evectors] [ literal[int] ][ identifier[ix_2] ], identifier[evectors] [ literal[int] ][ identifier[ix_2] ]] identifier[v3] =[ identifier[evectors] [ literal[int] ][ identifier[ix_3] ], identifier[evectors] [ literal[int] ][ identifier[ix_3] ], identifier[evectors] [ literal[int] ][ identifier[ix_3] ]] identifier[DIR_v1] = identifier[pmag] . identifier[cart2dir] ( identifier[v1] ) identifier[DIR_v2] = identifier[pmag] . identifier[cart2dir] ( identifier[v2] ) identifier[DIR_v3] = identifier[pmag] . 
identifier[cart2dir] ( identifier[v3] ) identifier[aniso_parameters] [ literal[string] ]= literal[string] % identifier[s1] identifier[aniso_parameters] [ literal[string] ]= literal[string] % identifier[s2] identifier[aniso_parameters] [ literal[string] ]= literal[string] % identifier[s3] identifier[aniso_parameters] [ literal[string] ]= literal[string] % identifier[s4] identifier[aniso_parameters] [ literal[string] ]= literal[string] % identifier[s5] identifier[aniso_parameters] [ literal[string] ]= literal[string] % identifier[s6] identifier[aniso_parameters] [ literal[string] ]= literal[string] %( identifier[old_div] ( identifier[t1] , identifier[t3] )) identifier[aniso_parameters] [ literal[string] ]= literal[string] % identifier[t1] identifier[aniso_parameters] [ literal[string] ]= literal[string] % identifier[t2] identifier[aniso_parameters] [ literal[string] ]= literal[string] % identifier[t3] identifier[aniso_parameters] [ literal[string] ]= literal[string] % identifier[DIR_v1] [ literal[int] ] identifier[aniso_parameters] [ literal[string] ]= literal[string] % identifier[DIR_v1] [ literal[int] ] identifier[aniso_parameters] [ literal[string] ]= literal[string] % identifier[DIR_v2] [ literal[int] ] identifier[aniso_parameters] [ literal[string] ]= literal[string] % identifier[DIR_v2] [ literal[int] ] identifier[aniso_parameters] [ literal[string] ]= literal[string] % identifier[DIR_v3] [ literal[int] ] identifier[aniso_parameters] [ literal[string] ]= literal[string] % identifier[DIR_v3] [ literal[int] ] keyword[if] identifier[old_div] ( identifier[len] ( identifier[K] ), literal[int] )== literal[int] keyword[or] identifier[old_div] ( identifier[len] ( identifier[K] ), literal[int] )== literal[int] keyword[or] identifier[old_div] ( identifier[len] ( identifier[K] ), literal[int] )== literal[int] : identifier[n_pos] = identifier[old_div] ( identifier[len] ( identifier[K] ), literal[int] ) identifier[tmpH] = identifier[Matrices] [ identifier[n_pos] ][ literal[string] ] identifier[a] = identifier[s_matrix] identifier[S] = literal[int] identifier[comp] = identifier[zeros] (( identifier[n_pos] * literal[int] ), literal[string] ) keyword[for] identifier[i] keyword[in] identifier[range] ( identifier[n_pos] ): keyword[for] identifier[j] keyword[in] identifier[range] ( literal[int] ): identifier[index] = identifier[i] * literal[int] + identifier[j] identifier[compare] = identifier[a] [ identifier[j] ][ literal[int] ]* identifier[tmpH] [ identifier[i] ][ literal[int] ]+ identifier[a] [ identifier[j] ][ literal[int] ]* identifier[tmpH] [ identifier[i] ][ literal[int] ]+ identifier[a] [ identifier[j] ][ literal[int] ]* identifier[tmpH] [ identifier[i] ][ literal[int] ] identifier[comp] [ identifier[index] ]= identifier[compare] keyword[for] identifier[i] keyword[in] identifier[range] ( identifier[n_pos] * literal[int] ): identifier[d] = identifier[old_div] ( identifier[K] [ identifier[i] ], identifier[trace] )- identifier[comp] [ identifier[i] ] identifier[S] += identifier[d] * identifier[d] identifier[nf] = identifier[float] ( identifier[n_pos] * literal[int] - literal[int] ) keyword[if] identifier[S] > literal[int] : identifier[sigma] = identifier[math] . identifier[sqrt] ( identifier[old_div] ( identifier[S] , identifier[nf] )) identifier[hpars] = identifier[pmag] . 
identifier[dohext] ( identifier[nf] , identifier[sigma] ,[ identifier[s1] , identifier[s2] , identifier[s3] , identifier[s4] , identifier[s5] , identifier[s6] ]) identifier[aniso_parameters] [ literal[string] ]= literal[string] % identifier[sigma] identifier[aniso_parameters] [ literal[string] ]= literal[string] % identifier[hpars] [ literal[string] ] identifier[aniso_parameters] [ literal[string] ]= literal[string] % identifier[hpars] [ literal[string] ] identifier[aniso_parameters] [ literal[string] ]= literal[string] % identifier[hpars] [ literal[string] ] identifier[aniso_parameters] [ literal[string] ]= literal[string] %( identifier[hpars] [ literal[string] ]) identifier[aniso_parameters] [ literal[string] ]= literal[string] % identifier[float] ( identifier[hpars] [ literal[string] ]) identifier[aniso_parameters] [ literal[string] ]= identifier[n_pos] keyword[return] ( identifier[aniso_parameters] ) identifier[aniso_logfile] = identifier[open] ( identifier[WD] + literal[string] , literal[string] ) identifier[aniso_logfile] . identifier[write] ( literal[string] ) identifier[aniso_logfile] . identifier[write] ( literal[string] ) identifier[aniso_logfile] . identifier[write] ( literal[string] ) identifier[Data] = identifier[get_Data] ( identifier[meas_file] ) identifier[aniso_logfile] . identifier[write] ( literal[string] % identifier[meas_file] ) identifier[Data_anisotropy] ={} identifier[specimens] = identifier[list] ( identifier[Data] . identifier[keys] ()) identifier[specimens] . identifier[sort] () identifier[rmag_anisotropy_file] = identifier[open] ( identifier[WD] + literal[string] , literal[string] ) identifier[rmag_anisotropy_file] . identifier[write] ( literal[string] ) identifier[rmag_results_file] = identifier[open] ( identifier[WD] + literal[string] , literal[string] ) identifier[rmag_results_file] . identifier[write] ( literal[string] ) identifier[rmag_anistropy_header] =[ literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] ] identifier[String] = literal[string] keyword[for] identifier[i] keyword[in] identifier[range] ( identifier[len] ( identifier[rmag_anistropy_header] )): identifier[String] = identifier[String] + identifier[rmag_anistropy_header] [ identifier[i] ]+ literal[string] identifier[rmag_anisotropy_file] . identifier[write] ( identifier[String] [:- literal[int] ]+ literal[string] ) identifier[rmag_results_header] =[ literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] , literal[string] ] identifier[String] = literal[string] keyword[for] identifier[i] keyword[in] identifier[range] ( identifier[len] ( identifier[rmag_results_header] )): identifier[String] = identifier[String] + identifier[rmag_results_header] [ identifier[i] ]+ literal[string] identifier[rmag_results_file] . 
identifier[write] ( identifier[String] [:- literal[int] ]+ literal[string] ) identifier[Matrices] ={} keyword[for] identifier[n_pos] keyword[in] [ literal[int] , literal[int] , literal[int] ]: identifier[Matrices] [ identifier[n_pos] ]={} identifier[A] = identifier[zeros] (( identifier[n_pos] * literal[int] , literal[int] ), literal[string] ) keyword[if] identifier[n_pos] == literal[int] : identifier[positions] =[[ literal[int] , literal[int] , literal[int] ],[ literal[int] , literal[int] , literal[int] ],[ literal[int] , literal[int] , literal[int] ],[ literal[int] , literal[int] , literal[int] ],[ literal[int] , literal[int] , literal[int] ],[ literal[int] ,- literal[int] , literal[int] ]] keyword[if] identifier[n_pos] == literal[int] : identifier[positions] =[[ literal[int] , literal[int] , literal[int] ],[ literal[int] , literal[int] , literal[int] ],[ literal[int] , literal[int] , literal[int] ],[ literal[int] , literal[int] , literal[int] ],[ literal[int] , literal[int] , literal[int] ],[ literal[int] ,- literal[int] , literal[int] ],[ literal[int] ,- literal[int] , literal[int] ],[ literal[int] , literal[int] , literal[int] ],[ literal[int] , literal[int] , literal[int] ],[ literal[int] , literal[int] , literal[int] ],[ literal[int] , literal[int] , literal[int] ],[ literal[int] ,- literal[int] , literal[int] ],[ literal[int] ,- literal[int] , literal[int] ],[ literal[int] ,- literal[int] , literal[int] ],[ literal[int] , literal[int] , literal[int] ]] keyword[if] identifier[n_pos] == literal[int] : identifier[positions] =[[ literal[int] , literal[int] , literal[int] ],[ literal[int] , literal[int] , literal[int] ],[ literal[int] , literal[int] , literal[int] ],[ literal[int] ,- literal[int] , literal[int] ],[ literal[int] ,- literal[int] , literal[int] ],[ literal[int] , literal[int] , literal[int] ],[ literal[int] , literal[int] , literal[int] ],[ literal[int] ,- literal[int] , literal[int] ],[ literal[int] ,- literal[int] , literal[int] ]] identifier[tmpH] = identifier[zeros] (( identifier[n_pos] , literal[int] ), literal[string] ) keyword[for] identifier[i] keyword[in] identifier[range] ( identifier[len] ( identifier[positions] )): identifier[CART] = identifier[pmag] . 
identifier[dir2cart] ( identifier[positions] [ identifier[i] ]) identifier[a] = identifier[CART] [ literal[int] ]; identifier[b] = identifier[CART] [ literal[int] ]; identifier[c] = identifier[CART] [ literal[int] ] identifier[A] [ literal[int] * identifier[i] ][ literal[int] ]= identifier[a] identifier[A] [ literal[int] * identifier[i] ][ literal[int] ]= identifier[b] identifier[A] [ literal[int] * identifier[i] ][ literal[int] ]= identifier[c] identifier[A] [ literal[int] * identifier[i] + literal[int] ][ literal[int] ]= identifier[b] identifier[A] [ literal[int] * identifier[i] + literal[int] ][ literal[int] ]= identifier[a] identifier[A] [ literal[int] * identifier[i] + literal[int] ][ literal[int] ]= identifier[c] identifier[A] [ literal[int] * identifier[i] + literal[int] ][ literal[int] ]= identifier[c] identifier[A] [ literal[int] * identifier[i] + literal[int] ][ literal[int] ]= identifier[b] identifier[A] [ literal[int] * identifier[i] + literal[int] ][ literal[int] ]= identifier[a] identifier[tmpH] [ identifier[i] ][ literal[int] ]= identifier[CART] [ literal[int] ] identifier[tmpH] [ identifier[i] ][ literal[int] ]= identifier[CART] [ literal[int] ] identifier[tmpH] [ identifier[i] ][ literal[int] ]= identifier[CART] [ literal[int] ] identifier[B] = identifier[dot] ( identifier[inv] ( identifier[dot] ( identifier[A] . identifier[transpose] (), identifier[A] )), identifier[A] . identifier[transpose] ()) identifier[Matrices] [ identifier[n_pos] ][ literal[string] ]= identifier[A] identifier[Matrices] [ identifier[n_pos] ][ literal[string] ]= identifier[B] identifier[Matrices] [ identifier[n_pos] ][ literal[string] ]= identifier[tmpH] keyword[for] identifier[specimen] keyword[in] identifier[specimens] : keyword[if] literal[string] keyword[in] identifier[list] ( identifier[Data] [ identifier[specimen] ]. identifier[keys] ()): identifier[aniso_logfile] . identifier[write] ( literal[string] % identifier[specimen] ) identifier[atrmblock] = identifier[Data] [ identifier[specimen] ][ literal[string] ] keyword[if] identifier[len] ( identifier[atrmblock] )< literal[int] : identifier[aniso_logfile] . identifier[write] ( literal[string] % identifier[specimen] ) keyword[continue] identifier[B] = identifier[Matrices] [ literal[int] ][ literal[string] ] identifier[Reject_specimen] = keyword[False] identifier[baseline] = literal[string] identifier[Alteration_check] = literal[string] identifier[Alteration_check_index] = literal[string] identifier[baselines] =[] keyword[for] identifier[rec] keyword[in] identifier[atrmblock] : identifier[dec] = identifier[float] ( identifier[rec] [ literal[string] ]) identifier[inc] = identifier[float] ( identifier[rec] [ literal[string] ]) identifier[moment] = identifier[float] ( identifier[rec] [ literal[string] ]) keyword[if] identifier[float] ( identifier[rec] [ literal[string] ])!= literal[int] keyword[and] identifier[float] ( identifier[rec] [ literal[string] ])!= literal[int] : identifier[atrm_temperature] = identifier[float] ( identifier[rec] [ literal[string] ]) keyword[if] identifier[float] ( identifier[rec] [ literal[string] ])== literal[int] keyword[and] identifier[float] ( identifier[rec] [ literal[string] ])!= literal[int] : identifier[baselines] . identifier[append] ( identifier[array] ( identifier[pmag] . identifier[dir2cart] ([ identifier[dec] , identifier[inc] , identifier[moment] ]))) keyword[if] identifier[len] ( identifier[baselines] )!= literal[int] : identifier[aniso_logfile] . 
identifier[write] ( literal[string] % identifier[specimen] ) identifier[baselines] = identifier[array] ( identifier[baselines] ) identifier[baseline] = identifier[array] ([ identifier[mean] ( identifier[baselines] [:, literal[int] ]), identifier[mean] ( identifier[baselines] [:, literal[int] ]), identifier[mean] ( identifier[baselines] [:, literal[int] ])]) keyword[else] : identifier[baseline] = identifier[zeros] ( literal[int] , literal[string] ) identifier[aniso_logfile] . identifier[write] ( literal[string] % identifier[specimen] ) identifier[M] = identifier[zeros] ([ literal[int] , literal[int] ], literal[string] ) keyword[for] identifier[rec] keyword[in] identifier[atrmblock] : identifier[dec] = identifier[float] ( identifier[rec] [ literal[string] ]) identifier[inc] = identifier[float] ( identifier[rec] [ literal[string] ]) identifier[moment] = identifier[float] ( identifier[rec] [ literal[string] ]) identifier[CART] = identifier[array] ( identifier[pmag] . identifier[dir2cart] ([ identifier[dec] , identifier[inc] , identifier[moment] ]))- identifier[baseline] keyword[if] identifier[float] ( identifier[rec] [ literal[string] ])== literal[int] : keyword[continue] keyword[if] literal[string] keyword[in] identifier[rec] [ literal[string] ]. identifier[split] ( literal[string] ): identifier[Alteration_check] = identifier[CART] identifier[Alteration_check_dc_field_phi] = identifier[float] ( identifier[rec] [ literal[string] ]) identifier[Alteration_check_dc_field_theta] = identifier[float] ( identifier[rec] [ literal[string] ]) keyword[if] identifier[Alteration_check_dc_field_phi] == literal[int] keyword[and] identifier[Alteration_check_dc_field_theta] == literal[int] : identifier[Alteration_check_index] = literal[int] keyword[if] identifier[Alteration_check_dc_field_phi] == literal[int] keyword[and] identifier[Alteration_check_dc_field_theta] == literal[int] : identifier[Alteration_check_index] = literal[int] keyword[if] identifier[Alteration_check_dc_field_phi] == literal[int] keyword[and] identifier[Alteration_check_dc_field_theta] == literal[int] : identifier[Alteration_check_index] = literal[int] keyword[if] identifier[Alteration_check_dc_field_phi] == literal[int] keyword[and] identifier[Alteration_check_dc_field_theta] == literal[int] : identifier[Alteration_check_index] = literal[int] keyword[if] identifier[Alteration_check_dc_field_phi] == literal[int] keyword[and] identifier[Alteration_check_dc_field_theta] == literal[int] : identifier[Alteration_check_index] = literal[int] keyword[if] identifier[Alteration_check_dc_field_phi] == literal[int] keyword[and] identifier[Alteration_check_dc_field_theta] ==- literal[int] : identifier[Alteration_check_index] = literal[int] identifier[aniso_logfile] . 
identifier[write] ( literal[string] % identifier[specimen] ) keyword[continue] identifier[treatment_dc_field_phi] = identifier[float] ( identifier[rec] [ literal[string] ]) identifier[treatment_dc_field_theta] = identifier[float] ( identifier[rec] [ literal[string] ]) identifier[treatment_dc_field] = identifier[float] ( identifier[rec] [ literal[string] ]) keyword[if] identifier[treatment_dc_field_phi] == literal[int] keyword[and] identifier[treatment_dc_field_theta] == literal[int] : identifier[M] [ literal[int] ]= identifier[CART] keyword[if] identifier[treatment_dc_field_phi] == literal[int] keyword[and] identifier[treatment_dc_field_theta] == literal[int] : identifier[M] [ literal[int] ]= identifier[CART] keyword[if] identifier[treatment_dc_field_phi] == literal[int] keyword[and] identifier[treatment_dc_field_theta] == literal[int] : identifier[M] [ literal[int] ]= identifier[CART] keyword[if] identifier[treatment_dc_field_phi] == literal[int] keyword[and] identifier[treatment_dc_field_theta] == literal[int] : identifier[M] [ literal[int] ]= identifier[CART] keyword[if] identifier[treatment_dc_field_phi] == literal[int] keyword[and] identifier[treatment_dc_field_theta] == literal[int] : identifier[M] [ literal[int] ]= identifier[CART] keyword[if] identifier[treatment_dc_field_phi] == literal[int] keyword[and] identifier[treatment_dc_field_theta] ==- literal[int] : identifier[M] [ literal[int] ]= identifier[CART] keyword[for] identifier[i] keyword[in] identifier[range] ( identifier[len] ( identifier[M] )): keyword[if] identifier[M] [ identifier[i] ][ literal[int] ]== literal[int] keyword[and] identifier[M] [ identifier[i] ][ literal[int] ]== literal[int] keyword[and] identifier[M] [ identifier[i] ][ literal[int] ]== literal[int] : identifier[aniso_logfile] . identifier[write] ( literal[string] %( identifier[specimen] )) identifier[Reject_specimen] = keyword[True] identifier[anisotropy_alt] = literal[int] keyword[if] identifier[Alteration_check] != literal[string] : keyword[for] identifier[i] keyword[in] identifier[range] ( identifier[len] ( identifier[M] )): keyword[if] identifier[Alteration_check_index] == identifier[i] : identifier[M_1] = identifier[sqrt] ( identifier[sum] (( identifier[array] ( identifier[M] [ identifier[i] ])** literal[int] ))) identifier[M_2] = identifier[sqrt] ( identifier[sum] ( identifier[Alteration_check] ** literal[int] )) identifier[diff] = identifier[abs] ( identifier[M_1] - identifier[M_2] ) identifier[diff_ratio] = identifier[old_div] ( identifier[diff] , identifier[mean] ([ identifier[M_1] , identifier[M_2] ])) identifier[diff_ratio_perc] = literal[int] * identifier[diff_ratio] keyword[if] identifier[diff_ratio_perc] > identifier[anisotropy_alt] : identifier[anisotropy_alt] = identifier[diff_ratio_perc] keyword[else] : identifier[aniso_logfile] . 
identifier[write] ( literal[string] % identifier[specimen] ) keyword[for] identifier[i] keyword[in] identifier[range] ( literal[int] ): identifier[M_1] = identifier[sqrt] ( identifier[sum] ( identifier[array] ( identifier[M] [ identifier[i] ])** literal[int] )) identifier[M_2] = identifier[sqrt] ( identifier[sum] ( identifier[array] ( identifier[M] [ identifier[i] + literal[int] ])** literal[int] )) identifier[diff] = identifier[abs] ( identifier[M_1] - identifier[M_2] ) identifier[diff_ratio] = identifier[old_div] ( identifier[diff] , identifier[max] ( identifier[M_1] , identifier[M_2] )) identifier[diff_ratio_perc] = literal[int] * identifier[diff_ratio] keyword[if] identifier[diff_ratio_perc] > identifier[anisotropy_alt] : identifier[anisotropy_alt] = identifier[diff_ratio_perc] keyword[if] keyword[not] identifier[Reject_specimen] : identifier[K] = identifier[zeros] ( literal[int] , literal[string] ) identifier[K] [ literal[int] ], identifier[K] [ literal[int] ], identifier[K] [ literal[int] ]= identifier[M] [ literal[int] ][ literal[int] ], identifier[M] [ literal[int] ][ literal[int] ], identifier[M] [ literal[int] ][ literal[int] ] identifier[K] [ literal[int] ], identifier[K] [ literal[int] ], identifier[K] [ literal[int] ]= identifier[M] [ literal[int] ][ literal[int] ], identifier[M] [ literal[int] ][ literal[int] ], identifier[M] [ literal[int] ][ literal[int] ] identifier[K] [ literal[int] ], identifier[K] [ literal[int] ], identifier[K] [ literal[int] ]= identifier[M] [ literal[int] ][ literal[int] ], identifier[M] [ literal[int] ][ literal[int] ], identifier[M] [ literal[int] ][ literal[int] ] identifier[K] [ literal[int] ], identifier[K] [ literal[int] ], identifier[K] [ literal[int] ]= identifier[M] [ literal[int] ][ literal[int] ], identifier[M] [ literal[int] ][ literal[int] ], identifier[M] [ literal[int] ][ literal[int] ] identifier[K] [ literal[int] ], identifier[K] [ literal[int] ], identifier[K] [ literal[int] ]= identifier[M] [ literal[int] ][ literal[int] ], identifier[M] [ literal[int] ][ literal[int] ], identifier[M] [ literal[int] ][ literal[int] ] identifier[K] [ literal[int] ], identifier[K] [ literal[int] ], identifier[K] [ literal[int] ]= identifier[M] [ literal[int] ][ literal[int] ], identifier[M] [ literal[int] ][ literal[int] ], identifier[M] [ literal[int] ][ literal[int] ] keyword[if] identifier[specimen] keyword[not] keyword[in] identifier[list] ( identifier[Data_anisotropy] . 
identifier[keys] ()): identifier[Data_anisotropy] [ identifier[specimen] ]={} identifier[aniso_parameters] = identifier[calculate_aniso_parameters] ( identifier[B] , identifier[K] ) identifier[Data_anisotropy] [ identifier[specimen] ][ literal[string] ]= identifier[aniso_parameters] identifier[Data_anisotropy] [ identifier[specimen] ][ literal[string] ][ literal[string] ]= literal[string] % identifier[anisotropy_alt] identifier[Data_anisotropy] [ identifier[specimen] ][ literal[string] ][ literal[string] ]= literal[string] identifier[Data_anisotropy] [ identifier[specimen] ][ literal[string] ][ literal[string] ]= identifier[atrmblock] [ literal[int] ][ literal[string] ] identifier[Data_anisotropy] [ identifier[specimen] ][ literal[string] ][ literal[string] ]= identifier[specimen] identifier[Data_anisotropy] [ identifier[specimen] ][ literal[string] ][ literal[string] ]= identifier[atrmblock] [ literal[int] ][ literal[string] ] identifier[Data_anisotropy] [ identifier[specimen] ][ literal[string] ][ literal[string] ]= literal[string] identifier[Data_anisotropy] [ identifier[specimen] ][ literal[string] ][ literal[string] ]= identifier[specimen] + literal[string] identifier[Data_anisotropy] [ identifier[specimen] ][ literal[string] ][ literal[string] ]= literal[string] keyword[if] literal[string] keyword[in] identifier[list] ( identifier[Data] [ identifier[specimen] ]. identifier[keys] ()): identifier[aniso_logfile] . identifier[write] ( literal[string] % identifier[specimen] ) identifier[aarmblock] = identifier[Data] [ identifier[specimen] ][ literal[string] ] keyword[if] identifier[len] ( identifier[aarmblock] )< literal[int] : identifier[aniso_logfile] . identifier[write] ( literal[string] % identifier[specimen] ) keyword[continue] keyword[elif] identifier[len] ( identifier[aarmblock] )== literal[int] : identifier[n_pos] = literal[int] identifier[B] = identifier[Matrices] [ literal[int] ][ literal[string] ] identifier[M] = identifier[zeros] ([ literal[int] , literal[int] ], literal[string] ) keyword[elif] identifier[len] ( identifier[aarmblock] )== literal[int] : identifier[n_pos] = literal[int] identifier[B] = identifier[Matrices] [ literal[int] ][ literal[string] ] identifier[M] = identifier[zeros] ([ literal[int] , literal[int] ], literal[string] ) keyword[elif] identifier[len] ( identifier[aarmblock] )== literal[int] : identifier[n_pos] = literal[int] identifier[B] = identifier[Matrices] [ literal[int] ][ literal[string] ] identifier[M] = identifier[zeros] ([ literal[int] , literal[int] ], literal[string] ) keyword[else] : identifier[aniso_logfile] . identifier[write] ( literal[string] % identifier[specimen] ) keyword[continue] identifier[Reject_specimen] = keyword[False] keyword[for] identifier[i] keyword[in] identifier[range] ( identifier[n_pos] ): keyword[for] identifier[rec] keyword[in] identifier[aarmblock] : keyword[if] identifier[float] ( identifier[rec] [ literal[string] ])== identifier[i] * literal[int] + literal[int] : identifier[dec] = identifier[float] ( identifier[rec] [ literal[string] ]) identifier[inc] = identifier[float] ( identifier[rec] [ literal[string] ]) identifier[moment] = identifier[float] ( identifier[rec] [ literal[string] ]) identifier[M_baseline] = identifier[array] ( identifier[pmag] . 
identifier[dir2cart] ([ identifier[dec] , identifier[inc] , identifier[moment] ])) keyword[if] identifier[float] ( identifier[rec] [ literal[string] ])== identifier[i] * literal[int] + literal[int] : identifier[dec] = identifier[float] ( identifier[rec] [ literal[string] ]) identifier[inc] = identifier[float] ( identifier[rec] [ literal[string] ]) identifier[moment] = identifier[float] ( identifier[rec] [ literal[string] ]) identifier[M_arm] = identifier[array] ( identifier[pmag] . identifier[dir2cart] ([ identifier[dec] , identifier[inc] , identifier[moment] ])) identifier[M] [ identifier[i] ]= identifier[M_arm] - identifier[M_baseline] identifier[K] = identifier[zeros] ( literal[int] * identifier[n_pos] , literal[string] ) keyword[for] identifier[i] keyword[in] identifier[range] ( identifier[n_pos] ): identifier[K] [ identifier[i] * literal[int] ]= identifier[M] [ identifier[i] ][ literal[int] ] identifier[K] [ identifier[i] * literal[int] + literal[int] ]= identifier[M] [ identifier[i] ][ literal[int] ] identifier[K] [ identifier[i] * literal[int] + literal[int] ]= identifier[M] [ identifier[i] ][ literal[int] ] keyword[if] identifier[specimen] keyword[not] keyword[in] identifier[list] ( identifier[Data_anisotropy] . identifier[keys] ()): identifier[Data_anisotropy] [ identifier[specimen] ]={} identifier[aniso_parameters] = identifier[calculate_aniso_parameters] ( identifier[B] , identifier[K] ) identifier[Data_anisotropy] [ identifier[specimen] ][ literal[string] ]= identifier[aniso_parameters] identifier[Data_anisotropy] [ identifier[specimen] ][ literal[string] ][ literal[string] ]= literal[string] identifier[Data_anisotropy] [ identifier[specimen] ][ literal[string] ][ literal[string] ]= literal[string] identifier[Data_anisotropy] [ identifier[specimen] ][ literal[string] ][ literal[string] ]= identifier[aarmblock] [ literal[int] ][ literal[string] ] identifier[Data_anisotropy] [ identifier[specimen] ][ literal[string] ][ literal[string] ]= identifier[aarmblock] [ literal[int] ][ literal[string] ] identifier[Data_anisotropy] [ identifier[specimen] ][ literal[string] ][ literal[string] ]= identifier[specimen] identifier[Data_anisotropy] [ identifier[specimen] ][ literal[string] ][ literal[string] ]= literal[string] identifier[Data_anisotropy] [ identifier[specimen] ][ literal[string] ][ literal[string] ]= identifier[specimen] + literal[string] identifier[Data_anisotropy] [ identifier[specimen] ][ literal[string] ][ literal[string] ]= literal[string] identifier[specimens] = identifier[list] ( identifier[Data_anisotropy] . identifier[keys] ()) identifier[specimens] . identifier[sort] identifier[s_list] = identifier[list] ( identifier[Data] . identifier[keys] ()) keyword[for] identifier[sp] keyword[in] identifier[s_list] : keyword[if] literal[string] keyword[in] identifier[list] ( identifier[Data] [ identifier[sp] ]. identifier[keys] ()): keyword[del] identifier[Data] [ identifier[sp] ][ literal[string] ] keyword[for] identifier[specimen] keyword[in] identifier[specimens] : keyword[if] literal[string] keyword[in] identifier[list] ( identifier[Data_anisotropy] [ identifier[specimen] ]. identifier[keys] ()): identifier[TYPES] =[ literal[string] ] keyword[if] literal[string] keyword[in] identifier[list] ( identifier[Data_anisotropy] [ identifier[specimen] ]. identifier[keys] ()): identifier[TYPES] =[ literal[string] ] keyword[if] literal[string] keyword[in] identifier[list] ( identifier[Data_anisotropy] [ identifier[specimen] ]. 
identifier[keys] ()) keyword[and] literal[string] keyword[in] identifier[list] ( identifier[Data_anisotropy] [ identifier[specimen] ]. identifier[keys] ()): identifier[TYPES] =[ literal[string] , literal[string] ] identifier[aniso_logfile] . identifier[write] ( literal[string] % identifier[specimen] ) keyword[for] identifier[TYPE] keyword[in] identifier[TYPES] : identifier[String] = literal[string] keyword[for] identifier[i] keyword[in] identifier[range] ( identifier[len] ( identifier[rmag_anistropy_header] )): keyword[try] : identifier[String] = identifier[String] + identifier[Data_anisotropy] [ identifier[specimen] ][ identifier[TYPE] ][ identifier[rmag_anistropy_header] [ identifier[i] ]]+ literal[string] keyword[except] : identifier[String] = identifier[String] + literal[string] %( identifier[Data_anisotropy] [ identifier[specimen] ][ identifier[TYPE] ][ identifier[rmag_anistropy_header] [ identifier[i] ]])+ literal[string] identifier[rmag_anisotropy_file] . identifier[write] ( identifier[String] [:- literal[int] ]+ literal[string] ) identifier[String] = literal[string] identifier[Data_anisotropy] [ identifier[specimen] ][ identifier[TYPE] ][ literal[string] ]= identifier[Data_anisotropy] [ identifier[specimen] ][ identifier[TYPE] ][ literal[string] ] identifier[Data_anisotropy] [ identifier[specimen] ][ identifier[TYPE] ][ literal[string] ]= identifier[Data_anisotropy] [ identifier[specimen] ][ identifier[TYPE] ][ literal[string] ] identifier[Data_anisotropy] [ identifier[specimen] ][ identifier[TYPE] ][ literal[string] ]= identifier[Data_anisotropy] [ identifier[specimen] ][ identifier[TYPE] ][ literal[string] ] keyword[for] identifier[i] keyword[in] identifier[range] ( identifier[len] ( identifier[rmag_results_header] )): keyword[try] : identifier[String] = identifier[String] + identifier[Data_anisotropy] [ identifier[specimen] ][ identifier[TYPE] ][ identifier[rmag_results_header] [ identifier[i] ]]+ literal[string] keyword[except] : identifier[String] = identifier[String] + literal[string] %( identifier[Data_anisotropy] [ identifier[specimen] ][ identifier[TYPE] ][ identifier[rmag_results_header] [ identifier[i] ]])+ literal[string] identifier[rmag_results_file] . identifier[write] ( identifier[String] [:- literal[int] ]+ literal[string] ) keyword[if] literal[string] keyword[not] keyword[in] identifier[Data] [ identifier[specimen] ]: identifier[Data] [ identifier[specimen] ][ literal[string] ]={} identifier[Data] [ identifier[specimen] ][ literal[string] ][ identifier[TYPE] ]= identifier[Data_anisotropy] [ identifier[specimen] ][ identifier[TYPE] ] identifier[aniso_logfile] . identifier[write] ( literal[string] ) identifier[aniso_logfile] . identifier[write] ( literal[string] ) identifier[aniso_logfile] . identifier[write] ( literal[string] ) identifier[rmag_anisotropy_file] . identifier[close] () identifier[print] ( literal[string] ) identifier[print] ( literal[string] ) identifier[print] ( literal[string] )
def main():
    """
    NAME
        remanence_aniso_magic.py

    DESCRIPTION
        This program is similar to aarm_magic.py and atrm_magic.py with minor modifications.

        Converts a magic measurement file with ATRM/AARM data to a best-fit tensor
        (6 elements plus sigma) following Hext (1963), and calculates F-test statistics.

        Comments:
        - infield steps are marked with method codes LT-T-I:LP-AN-TRM; LT-AF-I:LP-AN-ARM
        - zerofield steps are marked with method codes LT-T-Z:LP-AN-TRM; LT-AF-Z:LP-AN-ARM
        - alteration check is marked with method codes LT-PTRM-I:LP-AN-TRM

        Please notice:
        - ATRM: the program uses the treatment_dc_field_phi/treatment_dc_field_theta columns
          to infer the direction of the applied field (this is a change from atrm_magic.py).
        - ATRM: the zerofield (baseline) magnetization is subtracted from all infield measurements.
        - AARM: the program uses the measurement number (running number) to infer the direction
          of the applied field, assuming the SIO protocol for the 6, 9, or 15 measurement scheme.
          See the cookbook for a diagram and details.
        - AARM: zerofield (baseline) measurements are assumed to come before any infield
          measurement, and the baseline is subtracted from the subsequent infield magnetization.

    SYNTAX
        remanence_aniso_magic.py [-h] [command line options]

    OPTIONS
        -h prints help message and quits
        -f FILE: specify input file, default is magic_measurements.txt

    INPUT
        magic measurement file with ATRM and/or AARM data.
        If both types of measurements exist then the program calculates both.

    OUTPUT
        rmag_anisotropy.log
            -I- information
            -W- Warning
            -E- Error

        rmag_anisotropy.txt: contains, in addition to some magic information, the following:
        - anisotropy tensor s1 to s6 normalized by the trace:

            |Mx|   |s1 s4 s6|   |Bx|
            |My| = |s4 s2 s5| . |By|
            |Mz|   |s6 s5 s3|   |Bz|

        - anisotropy_sigma (Hext, 1963)
        - anisotropy_alt (alteration check for ATRM in units of %):
          100 * [abs(M_first - M_last) / max(M_first, M_last)]

        rmag_results.txt: contains, in addition to some magic information, the following:
        - anisotropy_t1, anisotropy_t2, anisotropy_t3: eigenvalues
        - anisotropy_v*_dec, anisotropy_v*_inc: declination/inclination of the eigenvectors
        - anisotropy_ftest, anisotropy_ftest12, anisotropy_ftest23
          (the critical F for the 95% confidence level of anisotropy is given in the
          result_description column).
    """
    #==================================================================================
    meas_file = 'magic_measurements.txt'
    args = sys.argv
    dir_path = '.'
    #
    # get name of file from command line
    #
    if '-WD' in args:
        ind = args.index('-WD')
        dir_path = args[ind + 1]  # depends on [control=['if'], data=['args']]
    if '-h' in args:
        print(main.__doc__)
        sys.exit()  # depends on [control=['if'], data=[]]
    if '-f' in args:
        ind = args.index('-f')
        meas_file = sys.argv[ind + 1]  # depends on [control=['if'], data=['args']]
    else:
        meas_file = dir_path + '/' + meas_file
    WD = dir_path

    #======================================
    # functions
    #======================================

    def get_Data(magic_file):
        #------------------------------------------------
        # Read magic measurement file and sort to blocks
        #------------------------------------------------
        Data = {}
        try:
            (meas_data, file_type) = pmag.magic_read(magic_file)  # depends on [control=['try'], data=[]]
        except:
            print("-E- ERROR: Can't read magic_measurement.txt file. File is corrupted.")
            return Data  # depends on [control=['except'], data=[]]
        # get list of unique specimen names
        #sids=pmag.get_specs(meas_data) # samples ID's
        for rec in meas_data:
            s = rec['er_specimen_name']
            method_codes = rec['magic_method_codes'].strip('\n')
            method_codes = method_codes.replace(' ', '')
            methods = method_codes.split(':')
            if 'LP-AN-TRM' in methods:
                if s not in list(Data.keys()):
                    Data[s] = {}  # depends on [control=['if'], data=['s']]
                if 'atrmblock' not in list(Data[s].keys()):
                    Data[s]['atrmblock'] = []  # depends on [control=['if'], data=[]]
                Data[s]['atrmblock'].append(rec)  # depends on [control=['if'], data=[]]
            if 'LP-AN-ARM' in methods:
                if s not in list(Data.keys()):
                    Data[s] = {}  # depends on [control=['if'], data=['s']]
                if 'aarmblock' not in list(Data[s].keys()):
                    Data[s]['aarmblock'] = []  # depends on [control=['if'], data=[]]
                Data[s]['aarmblock'].append(rec)  # depends on [control=['if'], data=[]]
            # depends on [control=['for'], data=['rec']]
        return Data

    #======================================
    # better to put this one in pmagpy
    #======================================

    def calculate_aniso_parameters(B, K):
        aniso_parameters = {}
        S_bs = dot(B, K)
        # normalize by trace
        trace = S_bs[0] + S_bs[1] + S_bs[2]
        S_bs = old_div(S_bs, trace)
        (s1, s2, s3, s4, s5, s6) = (S_bs[0], S_bs[1], S_bs[2], S_bs[3], S_bs[4], S_bs[5])
        s_matrix = [[s1, s4, s6], [s4, s2, s5], [s6, s5, s3]]
        # calculate eigen vector,
        (t, evectors) = eig(s_matrix)
        # sort vectors
        t = list(t)
        t1 = max(t)
        ix_1 = t.index(t1)
        t3 = min(t)
        ix_3 = t.index(t3)
        for tt in range(3):
            if t[tt] != t1 and t[tt] != t3:
                t2 = t[tt]
                ix_2 = t.index(t2)  # depends on [control=['if'], data=[]]
            # depends on [control=['for'], data=['tt']]
        v1 = [evectors[0][ix_1], evectors[1][ix_1], evectors[2][ix_1]]
        v2 = [evectors[0][ix_2], evectors[1][ix_2], evectors[2][ix_2]]
        v3 = [evectors[0][ix_3], evectors[1][ix_3], evectors[2][ix_3]]
        DIR_v1 = pmag.cart2dir(v1)
        DIR_v2 = pmag.cart2dir(v2)
        DIR_v3 = pmag.cart2dir(v3)
        aniso_parameters['anisotropy_s1'] = '%f' % s1
        aniso_parameters['anisotropy_s2'] = '%f' % s2
        aniso_parameters['anisotropy_s3'] = '%f' % s3
        aniso_parameters['anisotropy_s4'] = '%f' % s4
        aniso_parameters['anisotropy_s5'] = '%f' % s5
        aniso_parameters['anisotropy_s6'] = '%f' % s6
        aniso_parameters['anisotropy_degree'] = '%f' % old_div(t1, t3)
        aniso_parameters['anisotropy_t1'] = '%f' % t1
        aniso_parameters['anisotropy_t2'] = '%f' % t2
        aniso_parameters['anisotropy_t3'] = '%f' % t3
        aniso_parameters['anisotropy_v1_dec'] = '%.1f' % DIR_v1[0]
        aniso_parameters['anisotropy_v1_inc'] = '%.1f' % DIR_v1[1]
        aniso_parameters['anisotropy_v2_dec'] = '%.1f' % DIR_v2[0]
        aniso_parameters['anisotropy_v2_inc'] = '%.1f' % DIR_v2[1]
        aniso_parameters['anisotropy_v3_dec'] = '%.1f' % DIR_v3[0]
        aniso_parameters['anisotropy_v3_inc'] = '%.1f' % DIR_v3[1]
        # modified from pmagpy:
        if old_div(len(K), 3) == 9 or old_div(len(K), 3) == 6 or old_div(len(K), 3) == 15:
            n_pos = old_div(len(K), 3)
            tmpH = Matrices[n_pos]['tmpH']
            a = s_matrix
            S = 0.0
            comp = zeros(n_pos * 3, 'f')
            for i in range(n_pos):
                for j in range(3):
                    index = i * 3 + j
                    compare = a[j][0] * tmpH[i][0] + a[j][1] * tmpH[i][1] + a[j][2] * tmpH[i][2]
                    comp[index] = compare  # depends on [control=['for'], data=['j']]
                # depends on [control=['for'], data=['i']]
            for i in range(n_pos * 3):
                d = old_div(K[i], trace) - comp[i]  # del values
                S += d * d  # depends on [control=['for'], data=['i']]
            nf = float(n_pos * 3 - 6)  # number of degrees of freedom
            sigma = 0.0  # avoid a NameError in the degenerate case S == 0
            if S > 0:
                sigma = math.sqrt(old_div(S, nf))  # depends on [control=['if'], data=['S']]
            hpars = pmag.dohext(nf, sigma, [s1, s2, s3, s4, s5, s6])
            aniso_parameters['anisotropy_sigma'] = '%f' % sigma
            aniso_parameters['anisotropy_ftest'] = '%f' % hpars['F']
            aniso_parameters['anisotropy_ftest12'] = '%f' % hpars['F12']
            aniso_parameters['anisotropy_ftest23'] = '%f' % hpars['F23']
            aniso_parameters['result_description'] = 'Critical F: %s' % hpars['F_crit']
            aniso_parameters['anisotropy_F_crit'] = '%f' % float(hpars['F_crit'])
            aniso_parameters['anisotropy_n'] = n_pos  # depends on [control=['if'], data=[]]
        return aniso_parameters

    #======================================
    # Main
    #======================================

    aniso_logfile = open(WD + '/rmag_anisotropy.log', 'w')
    aniso_logfile.write('------------------------\n')
    aniso_logfile.write('-I- Start rmag anisotropy script\n')
    aniso_logfile.write('------------------------\n')

    Data = get_Data(meas_file)
    #try:
    #    Data=get_Data(meas_file)
    #except:
    #    aniso_logfile.write( "-E- Cant open measurement file %s\n" %meas_file)
    #    print "-E- Cant open measurement file %s\n exiting" %meas_file
    #    exit()
    aniso_logfile.write('-I- Open measurement file %s\n' % meas_file)

    Data_anisotropy = {}
    specimens = list(Data.keys())
    specimens.sort()

    #-----------------------------------
    # Prepare rmag_anisotropy.txt file for writing
    #-----------------------------------

    rmag_anisotropy_file = open(WD + '/rmag_anisotropy.txt', 'w')
    rmag_anisotropy_file.write('tab\trmag_anisotropy\n')
    rmag_results_file = open(WD + '/rmag_results.txt', 'w')
    rmag_results_file.write('tab\trmag_results\n')

    rmag_anistropy_header = ['er_specimen_name', 'er_sample_name', 'er_site_name',
                             'anisotropy_type', 'anisotropy_n', 'anisotropy_description',
                             'anisotropy_s1', 'anisotropy_s2', 'anisotropy_s3',
                             'anisotropy_s4', 'anisotropy_s5', 'anisotropy_s6',
                             'anisotropy_sigma', 'anisotropy_alt',
                             'magic_experiment_names', 'magic_method_codes']
    String = ''
    for i in range(len(rmag_anistropy_header)):
        String = String + rmag_anistropy_header[i] + '\t'  # depends on [control=['for'], data=['i']]
    rmag_anisotropy_file.write(String[:-1] + '\n')

    rmag_results_header = ['er_specimen_names', 'er_sample_names', 'er_site_names',
                           'anisotropy_type', 'magic_method_codes', 'magic_experiment_names',
                           'result_description', 'anisotropy_t1', 'anisotropy_t2',
                           'anisotropy_t3', 'anisotropy_ftest', 'anisotropy_ftest12',
                           'anisotropy_ftest23', 'anisotropy_v1_dec', 'anisotropy_v1_inc',
                           'anisotropy_v2_dec', 'anisotropy_v2_inc', 'anisotropy_v3_dec',
                           'anisotropy_v3_inc']
    String = ''
    for i in range(len(rmag_results_header)):
        String = String + rmag_results_header[i] + '\t'  # depends on [control=['for'], data=['i']]
    rmag_results_file.write(String[:-1] + '\n')

    #-----------------------------------
    # Matrices definitions:
    # A design matrix
    # B dot(inv(dot(A.transpose(),A)),A.transpose())
    # tmpH is used for sigma calculation (9,15 measurements only)
    #
    # Anisotropy tensor:
    #
    # |Mx|   |s1 s4 s6|   |Bx|
    # |My| = |s4 s2 s5| . |By|
    # |Mz|   |s6 s5 s3|   |Bz|
    #
    # A matrix (measurement matrix):
    # Each measurement yields three lines in the "A" matrix
    #
    # |Mi  |   |Bx 0  0  By 0  Bz|   |s1|
    # |Mi+1| = |0  By 0  Bx Bz 0 | . |s2|
    # |Mi+2|   |0  0  Bz 0  By Bx|   |s3|
    #                                |s4|
    #                                |s5|
    #                                |s6|
    #-----------------------------------

    Matrices = {}
    for n_pos in [6, 9, 15]:
        Matrices[n_pos] = {}
        A = zeros((n_pos * 3, 6), 'f')
        if n_pos == 6:
            positions = [[0.0, 0.0, 1.0], [90.0, 0.0, 1.0], [0.0, 90.0, 1.0],
                         [180.0, 0.0, 1.0], [270.0, 0.0, 1.0], [0.0, -90.0, 1.0]]  # depends on [control=['if'], data=[]]
        if n_pos == 15:
            positions = [[315.0, 0.0, 1.0], [225.0, 0.0, 1.0], [180.0, 0.0, 1.0],
                         [135.0, 0.0, 1.0], [45.0, 0.0, 1.0], [90.0, -45.0, 1.0],
                         [270.0, -45.0, 1.0], [270.0, 0.0, 1.0], [270.0, 45.0, 1.0],
                         [90.0, 45.0, 1.0], [180.0, 45.0, 1.0], [180.0, -45.0, 1.0],
                         [0.0, -90.0, 1.0], [0, -45.0, 1.0], [0, 45.0, 1.0]]  # depends on [control=['if'], data=[]]
        if n_pos == 9:
            positions = [[315.0, 0.0, 1.0], [225.0, 0.0, 1.0], [180.0, 0.0, 1.0],
                         [90.0, -45.0, 1.0], [270.0, -45.0, 1.0], [270.0, 0.0, 1.0],
                         [180.0, 45.0, 1.0], [180.0, -45.0, 1.0], [0.0, -90.0, 1.0]]  # depends on [control=['if'], data=[]]
        tmpH = zeros((n_pos, 3), 'f')  # define tmpH
        for i in range(len(positions)):
            CART = pmag.dir2cart(positions[i])
            a = CART[0]
            b = CART[1]
            c = CART[2]
            A[3 * i][0] = a
            A[3 * i][3] = b
            A[3 * i][5] = c
            A[3 * i + 1][1] = b
            A[3 * i + 1][3] = a
            A[3 * i + 1][4] = c
            A[3 * i + 2][2] = c
            A[3 * i + 2][4] = b
            A[3 * i + 2][5] = a
            tmpH[i][0] = CART[0]
            tmpH[i][1] = CART[1]
            tmpH[i][2] = CART[2]  # depends on [control=['for'], data=['i']]
        B = dot(inv(dot(A.transpose(), A)), A.transpose())
        Matrices[n_pos]['A'] = A
        Matrices[n_pos]['B'] = B
        Matrices[n_pos]['tmpH'] = tmpH  # depends on [control=['for'], data=['n_pos']]

    for specimen in specimens:
        if 'atrmblock' in list(Data[specimen].keys()):
            #-----------------------------------
            # aTRM 6 positions
            #-----------------------------------
            aniso_logfile.write('-I- Start calculating ATRM tensor for specimen %s\n' % specimen)
            atrmblock = Data[specimen]['atrmblock']
            if len(atrmblock) < 6:
                aniso_logfile.write('-W- specimen %s does not have enough measurements for ATRM calculation\n' % specimen)
                continue  # depends on [control=['if'], data=[]]
            B = Matrices[6]['B']
            Reject_specimen = False
            # The zero field step is a "baseline"
            # Search the baseline in the ATRM measurement
            baseline = ''
            Alteration_check = ''
            Alteration_check_index = ''
            baselines = []
            # search for baseline in atrm blocks
            for rec in atrmblock:
                dec = float(rec['measurement_dec'])
                inc = float(rec['measurement_inc'])
                moment = float(rec['measurement_magn_moment'])
                # find the temperature of the atrm
                if float(rec['treatment_dc_field']) != 0 and float(rec['treatment_temp']) != 273:
                    atrm_temperature = float(rec['treatment_temp'])  # depends on [control=['if'], data=[]]
                # find baseline
                if float(rec['treatment_dc_field']) == 0 and float(rec['treatment_temp']) != 273:
                    baselines.append(array(pmag.dir2cart([dec, inc, moment])))  # depends on [control=['if'], data=[]]
                # depends on [control=['for'], data=['rec']]
            # Find alteration check
            #print rec['measurement_number']
            if len(baselines) != 0:
                aniso_logfile.write('-I- found ATRM baseline for specimen %s\n' % specimen)
                baselines = array(baselines)
                baseline = array([mean(baselines[:, 0]), mean(baselines[:, 1]), mean(baselines[:, 2])])  # depends on [control=['if'], data=[]]
            else:
                baseline = zeros(3, 'f')
                aniso_logfile.write('-I- No aTRM baseline for specimen %s\n' % specimen)
            # sort measurements
            M = zeros([6, 3], 'f')
            for rec in atrmblock:
                dec = float(rec['measurement_dec'])
                inc = float(rec['measurement_inc'])
                moment = float(rec['measurement_magn_moment'])
                CART = array(pmag.dir2cart([dec, inc, moment])) - baseline
                if float(rec['treatment_dc_field']) == 0:
                    # Ignore zero field steps
                    continue  # depends on [control=['if'], data=[]]
                if 'LT-PTRM-I' in rec['magic_method_codes'].split(':'):
                    # alteration check
                    Alteration_check = CART
                    Alteration_check_dc_field_phi = float(rec['treatment_dc_field_phi'])
                    Alteration_check_dc_field_theta = float(rec['treatment_dc_field_theta'])
                    if Alteration_check_dc_field_phi == 0 and Alteration_check_dc_field_theta == 0:
                        Alteration_check_index = 0  # depends on [control=['if'], data=[]]
                    if Alteration_check_dc_field_phi == 90 and Alteration_check_dc_field_theta == 0:
                        Alteration_check_index = 1  # depends on [control=['if'], data=[]]
                    if Alteration_check_dc_field_phi == 0 and Alteration_check_dc_field_theta == 90:
                        Alteration_check_index = 2  # depends on [control=['if'], data=[]]
                    if Alteration_check_dc_field_phi == 180 and Alteration_check_dc_field_theta == 0:
                        Alteration_check_index = 3  # depends on [control=['if'], data=[]]
                    if Alteration_check_dc_field_phi == 270 and Alteration_check_dc_field_theta == 0:
                        Alteration_check_index = 4  # depends on [control=['if'], data=[]]
                    if Alteration_check_dc_field_phi == 0 and Alteration_check_dc_field_theta == -90:
                        Alteration_check_index = 5  # depends on [control=['if'], data=[]]
                    aniso_logfile.write('-I- found alteration check for specimen %s\n' % specimen)
                    continue  # depends on [control=['if'], data=[]]
                treatment_dc_field_phi = float(rec['treatment_dc_field_phi'])
                treatment_dc_field_theta = float(rec['treatment_dc_field_theta'])
                treatment_dc_field = float(rec['treatment_dc_field'])
                #+x, M[0]
                if treatment_dc_field_phi == 0 and treatment_dc_field_theta == 0:
                    M[0] = CART  # depends on [control=['if'], data=[]]
                #+Y, M[1]
                if treatment_dc_field_phi == 90 and treatment_dc_field_theta == 0:
                    M[1] = CART  # depends on [control=['if'], data=[]]
                #+Z, M[2]
                if treatment_dc_field_phi == 0 and treatment_dc_field_theta == 90:
                    M[2] = CART  # depends on [control=['if'], data=[]]
                #-x, M[3]
                if treatment_dc_field_phi == 180 and treatment_dc_field_theta == 0:
                    M[3] = CART  # depends on [control=['if'], data=[]]
                #-Y, M[4]
                if treatment_dc_field_phi == 270 and treatment_dc_field_theta == 0:
                    M[4] = CART  # depends on [control=['if'], data=[]]
                #-Z, M[5]
                if treatment_dc_field_phi == 0 and treatment_dc_field_theta == -90:
                    M[5] = CART  # depends on [control=['if'], data=[]]
                # depends on [control=['for'], data=['rec']]
            # check if at least one measurement is missing
            for i in range(len(M)):
                if M[i][0] == 0 and M[i][1] == 0 and (M[i][2] == 0):
                    aniso_logfile.write('-E- ERROR: missing atrm data for specimen %s\n' % specimen)
                    Reject_specimen = True  # depends on [control=['if'], data=[]]
                # depends on [control=['for'], data=['i']]
            # alteration check
            anisotropy_alt = 0
            if Alteration_check != '':
                for i in range(len(M)):
                    if Alteration_check_index == i:
                        M_1 = sqrt(sum(array(M[i]) ** 2))
                        M_2 = sqrt(sum(Alteration_check ** 2))
                        diff = abs(M_1 - M_2)
                        diff_ratio = old_div(diff, mean([M_1, M_2]))
                        diff_ratio_perc = 100 * diff_ratio
                        if diff_ratio_perc > anisotropy_alt:
                            anisotropy_alt = diff_ratio_perc  # depends on [control=['if'], data=['diff_ratio_perc', 'anisotropy_alt']]
                        # depends on [control=['if'], data=['i']]
                    # depends on [control=['for'], data=['i']]
                # depends on [control=['if'], data=['Alteration_check']]
            else:
                aniso_logfile.write('-W- Warning: no alteration check for specimen %s\n' % specimen)
            # Check for the maximum difference in antiparallel directions,
            # i.e. +x versus -x, +y versus -y, etc.
            # If the difference between the two measurements is more than maximum_diff,
            # the specimen is rejected.
            for i in range(3):
                M_1 = sqrt(sum(array(M[i]) ** 2))
                M_2 = sqrt(sum(array(M[i + 3]) ** 2))
                diff = abs(M_1 - M_2)
                diff_ratio = old_div(diff, max(M_1, M_2))
                diff_ratio_perc = 100 * diff_ratio
                if diff_ratio_perc > anisotropy_alt:
                    anisotropy_alt = diff_ratio_perc  # depends on [control=['if'], data=['diff_ratio_perc', 'anisotropy_alt']]
                # depends on [control=['for'], data=['i']]
            if not Reject_specimen:
                # K vector (18 elements, M1[x], M1[y], M1[z], ... etc.)
                K = zeros(18, 'f')
                (K[0], K[1], K[2]) = (M[0][0], M[0][1], M[0][2])
                (K[3], K[4], K[5]) = (M[1][0], M[1][1], M[1][2])
                (K[6], K[7], K[8]) = (M[2][0], M[2][1], M[2][2])
                (K[9], K[10], K[11]) = (M[3][0], M[3][1], M[3][2])
                (K[12], K[13], K[14]) = (M[4][0], M[4][1], M[4][2])
                (K[15], K[16], K[17]) = (M[5][0], M[5][1], M[5][2])
                if specimen not in list(Data_anisotropy.keys()):
                    Data_anisotropy[specimen] = {}  # depends on [control=['if'], data=['specimen']]
                aniso_parameters = calculate_aniso_parameters(B, K)
                Data_anisotropy[specimen]['ATRM'] = aniso_parameters
                Data_anisotropy[specimen]['ATRM']['anisotropy_alt'] = '%.2f' % anisotropy_alt
                Data_anisotropy[specimen]['ATRM']['anisotropy_type'] = 'ATRM'
                Data_anisotropy[specimen]['ATRM']['er_sample_name'] = atrmblock[0]['er_sample_name']
                Data_anisotropy[specimen]['ATRM']['er_specimen_name'] = specimen
                Data_anisotropy[specimen]['ATRM']['er_site_name'] = atrmblock[0]['er_site_name']
                Data_anisotropy[specimen]['ATRM']['anisotropy_description'] = 'Hext statistics adapted to ATRM'
                Data_anisotropy[specimen]['ATRM']['magic_experiment_names'] = specimen + ';ATRM'
                Data_anisotropy[specimen]['ATRM']['magic_method_codes'] = 'LP-AN-TRM:AE-H'  # depends on [control=['if'], data=[]]
            # depends on [control=['if'], data=[]]
            #Data_anisotropy[specimen]['ATRM']['rmag_anisotropy_name']=specimen
        if 'aarmblock' in list(Data[specimen].keys()):
            #-----------------------------------
            # AARM - 6, 9 or 15 positions
            #-----------------------------------
            aniso_logfile.write('-I- Start calculating AARM tensor for specimen %s\n' % specimen)
            aarmblock = Data[specimen]['aarmblock']
            if len(aarmblock) < 12:
                aniso_logfile.write('-W- WARNING: not enough aarm measurements for specimen %s\n' % specimen)
                continue  # depends on [control=['if'], data=[]]
            elif len(aarmblock) == 12:
                n_pos = 6
                B = Matrices[6]['B']
                M = zeros([6, 3], 'f')  # depends on [control=['if'], data=[]]
            elif len(aarmblock) == 18:
                n_pos = 9
                B = Matrices[9]['B']
                M = zeros([9, 3], 'f')  # depends on [control=['if'], data=[]]
            # 15 positions
            elif len(aarmblock) == 30:
                n_pos = 15
                B = Matrices[15]['B']
                M = zeros([15, 3], 'f')  # depends on [control=['if'], data=[]]
            else:
                aniso_logfile.write('-E- ERROR: number of measurements in aarm block is incorrect for specimen %s\n' % specimen)
                continue
            Reject_specimen = False
            for i in range(n_pos):
                for rec in aarmblock:
                    if float(rec['measurement_number']) == i * 2 + 1:
                        dec = float(rec['measurement_dec'])
                        inc = float(rec['measurement_inc'])
                        moment = float(rec['measurement_magn_moment'])
                        M_baseline = array(pmag.dir2cart([dec, inc, moment]))  # depends on [control=['if'], data=[]]
                    if float(rec['measurement_number']) == i * 2 + 2:
                        dec = float(rec['measurement_dec'])
                        inc = float(rec['measurement_inc'])
                        moment = float(rec['measurement_magn_moment'])
                        M_arm = array(pmag.dir2cart([dec, inc, moment]))  # depends on [control=['if'], data=[]]
                    # depends on [control=['for'], data=['rec']]
                M[i] = M_arm - M_baseline  # depends on [control=['for'], data=['i']]
            K = zeros(3 * n_pos, 'f')
            for i in range(n_pos):
                K[i * 3] = M[i][0]
                K[i * 3 + 1] = M[i][1]
                K[i * 3 + 2] = M[i][2]  # depends on [control=['for'], data=['i']]
            if specimen not in list(Data_anisotropy.keys()):
                Data_anisotropy[specimen] = {}  # depends on [control=['if'], data=['specimen']]
            aniso_parameters = calculate_aniso_parameters(B, K)
            Data_anisotropy[specimen]['AARM'] = aniso_parameters
            Data_anisotropy[specimen]['AARM']['anisotropy_alt'] = ''
            Data_anisotropy[specimen]['AARM']['anisotropy_type'] = 'AARM'
            Data_anisotropy[specimen]['AARM']['er_sample_name'] = aarmblock[0]['er_sample_name']
            Data_anisotropy[specimen]['AARM']['er_site_name'] = aarmblock[0]['er_site_name']
            Data_anisotropy[specimen]['AARM']['er_specimen_name'] = specimen
            Data_anisotropy[specimen]['AARM']['anisotropy_description'] = 'Hext statistics adapted to AARM'
            Data_anisotropy[specimen]['AARM']['magic_experiment_names'] = specimen + ';AARM'
            Data_anisotropy[specimen]['AARM']['magic_method_codes'] = 'LP-AN-ARM:AE-H'  # depends on [control=['if'], data=[]]
        # depends on [control=['for'], data=['specimen']]
        #Data_anisotropy[specimen]['AARM']['rmag_anisotropy_name']=specimen

    #-----------------------------------
    specimens = list(Data_anisotropy.keys())
    specimens.sort()
    # remove previous anisotropy data, and replace with the new one:
    s_list = list(Data.keys())
    for sp in s_list:
        if 'AniSpec' in list(Data[sp].keys()):
            del Data[sp]['AniSpec']  # depends on [control=['if'], data=[]]
        # depends on [control=['for'], data=['sp']]
    for specimen in specimens:
        # if both AARM and ATRM exist, prefer the AARM !!
        if 'AARM' in list(Data_anisotropy[specimen].keys()):
            TYPES = ['AARM']  # depends on [control=['if'], data=[]]
        if 'ATRM' in list(Data_anisotropy[specimen].keys()):
            TYPES = ['ATRM']  # depends on [control=['if'], data=[]]
        if 'AARM' in list(Data_anisotropy[specimen].keys()) and 'ATRM' in list(Data_anisotropy[specimen].keys()):
            TYPES = ['ATRM', 'AARM']
            aniso_logfile.write('-W- WARNING: both aarm and atrm data exist for specimen %s. using AARM by default. If you prefer using one of them, delete the other!\n' % specimen)  # depends on [control=['if'], data=[]]
        for TYPE in TYPES:
            String = ''
            for i in range(len(rmag_anistropy_header)):
                try:
                    String = String + Data_anisotropy[specimen][TYPE][rmag_anistropy_header[i]] + '\t'  # depends on [control=['try'], data=[]]
                except:
                    String = String + '%f' % (Data_anisotropy[specimen][TYPE][rmag_anistropy_header[i]]) + '\t'  # depends on [control=['except'], data=[]]
                # depends on [control=['for'], data=['i']]
            rmag_anisotropy_file.write(String[:-1] + '\n')
            String = ''
            Data_anisotropy[specimen][TYPE]['er_specimen_names'] = Data_anisotropy[specimen][TYPE]['er_specimen_name']
            Data_anisotropy[specimen][TYPE]['er_sample_names'] = Data_anisotropy[specimen][TYPE]['er_sample_name']
            Data_anisotropy[specimen][TYPE]['er_site_names'] = Data_anisotropy[specimen][TYPE]['er_site_name']
            for i in range(len(rmag_results_header)):
                try:
                    String = String + Data_anisotropy[specimen][TYPE][rmag_results_header[i]] + '\t'  # depends on [control=['try'], data=[]]
                except:
                    String = String + '%f' % (Data_anisotropy[specimen][TYPE][rmag_results_header[i]]) + '\t'  # depends on [control=['except'], data=[]]
                # depends on [control=['for'], data=['i']]
            rmag_results_file.write(String[:-1] + '\n')
            if 'AniSpec' not in Data[specimen]:
                Data[specimen]['AniSpec'] = {}  # depends on [control=['if'], data=[]]
            Data[specimen]['AniSpec'][TYPE] = Data_anisotropy[specimen][TYPE]  # depends on [control=['for'], data=['TYPE']]
        # depends on [control=['for'], data=['specimen']]
    aniso_logfile.write('------------------------\n')
    aniso_logfile.write('-I- remanence_aniso_magic script finished successfully\n')
    aniso_logfile.write('------------------------\n')
    rmag_anisotropy_file.close()
    print('Anisotropy tensor elements are saved in rmag_anisotropy.txt')
    print('Other anisotropy statistics are saved in rmag_results.txt')
    print('log file is in rmag_anisotropy.log')
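Aside: the tensor fit above is ordinary least squares via the pseudo-inverse B = inv(A^T A) A^T, with s = B . K. A minimal, self-contained sketch of that step (synthetic positions and moments, not values from the dataset row):

import numpy as np

def design_matrix(positions):
    # positions: unit field vectors, one row [Bx, By, Bz] per measurement
    A = np.zeros((len(positions) * 3, 6))
    for i, (bx, by, bz) in enumerate(positions):
        A[3 * i]     = [bx, 0, 0, by, 0, bz]
        A[3 * i + 1] = [0, by, 0, bx, bz, 0]
        A[3 * i + 2] = [0, 0, bz, 0, by, bx]
    return A

positions = np.array([[1, 0, 0], [0, 1, 0], [0, 0, 1],
                      [-1, 0, 0], [0, -1, 0], [0, 0, -1]], dtype=float)
A = design_matrix(positions)
B = np.linalg.inv(A.T @ A) @ A.T                       # same role as Matrices[n_pos]['B']
K = A @ np.array([1.0, 0.9, 1.1, 0.02, 0.01, 0.03])    # synthetic moment vector
s = B @ K                                              # recovers s1..s6 exactly here
print(np.round(s, 3))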
def _init_forms(self):
    """
        Init forms for Add and Edit
    """
    super(BaseCRUDView, self)._init_forms()
    conv = GeneralModelConverter(self.datamodel)
    if not self.add_form:
        self.add_form = conv.create_form(
            self.label_columns,
            self.add_columns,
            self.description_columns,
            self.validators_columns,
            self.add_form_extra_fields,
            self.add_form_query_rel_fields,
        )
    if not self.edit_form:
        self.edit_form = conv.create_form(
            self.label_columns,
            self.edit_columns,
            self.description_columns,
            self.validators_columns,
            self.edit_form_extra_fields,
            self.edit_form_query_rel_fields,
        )
def function[_init_forms, parameter[self]]: constant[ Init forms for Add and Edit ] call[call[name[super], parameter[name[BaseCRUDView], name[self]]]._init_forms, parameter[]] variable[conv] assign[=] call[name[GeneralModelConverter], parameter[name[self].datamodel]] if <ast.UnaryOp object at 0x7da20e956920> begin[:] name[self].add_form assign[=] call[name[conv].create_form, parameter[name[self].label_columns, name[self].add_columns, name[self].description_columns, name[self].validators_columns, name[self].add_form_extra_fields, name[self].add_form_query_rel_fields]] if <ast.UnaryOp object at 0x7da18eb55480> begin[:] name[self].edit_form assign[=] call[name[conv].create_form, parameter[name[self].label_columns, name[self].edit_columns, name[self].description_columns, name[self].validators_columns, name[self].edit_form_extra_fields, name[self].edit_form_query_rel_fields]]
keyword[def] identifier[_init_forms] ( identifier[self] ): literal[string] identifier[super] ( identifier[BaseCRUDView] , identifier[self] ). identifier[_init_forms] () identifier[conv] = identifier[GeneralModelConverter] ( identifier[self] . identifier[datamodel] ) keyword[if] keyword[not] identifier[self] . identifier[add_form] : identifier[self] . identifier[add_form] = identifier[conv] . identifier[create_form] ( identifier[self] . identifier[label_columns] , identifier[self] . identifier[add_columns] , identifier[self] . identifier[description_columns] , identifier[self] . identifier[validators_columns] , identifier[self] . identifier[add_form_extra_fields] , identifier[self] . identifier[add_form_query_rel_fields] , ) keyword[if] keyword[not] identifier[self] . identifier[edit_form] : identifier[self] . identifier[edit_form] = identifier[conv] . identifier[create_form] ( identifier[self] . identifier[label_columns] , identifier[self] . identifier[edit_columns] , identifier[self] . identifier[description_columns] , identifier[self] . identifier[validators_columns] , identifier[self] . identifier[edit_form_extra_fields] , identifier[self] . identifier[edit_form_query_rel_fields] , )
def _init_forms(self):
    """ Init forms for Add and Edit """
    super(BaseCRUDView, self)._init_forms()
    conv = GeneralModelConverter(self.datamodel)
    if not self.add_form:
        self.add_form = conv.create_form(self.label_columns, self.add_columns, self.description_columns, self.validators_columns, self.add_form_extra_fields, self.add_form_query_rel_fields)  # depends on [control=['if'], data=[]]
    if not self.edit_form:
        self.edit_form = conv.create_form(self.label_columns, self.edit_columns, self.description_columns, self.validators_columns, self.edit_form_extra_fields, self.edit_form_query_rel_fields)  # depends on [control=['if'], data=[]]
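The method only generates a default form when the subclass has not supplied one. A generic sketch of that lazy-default idiom (class and method names here are hypothetical stand-ins, not the Flask-AppBuilder API):

class BaseView:
    add_form = None  # subclasses may assign a hand-written form here

    def _init_forms(self):
        # build a generated default only when nothing was supplied
        if not self.add_form:
            self.add_form = self._generate_default_form()

    def _generate_default_form(self):
        return "generated-form"  # stand-in for the model-converter call

class CustomView(BaseView):
    add_form = "hand-written-form"

v = CustomView(); v._init_forms(); print(v.add_form)  # hand-written form kept
w = BaseView(); w._init_forms(); print(w.add_form)    # default generated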
def post(self):
    """Create a new template"""
    self.reqparse.add_argument('templateName', type=str, required=True)
    self.reqparse.add_argument('template', type=str, required=True)
    args = self.reqparse.parse_args()

    template = db.Template.find_one(template_name=args['templateName'])
    if template:
        return self.make_response('Template already exists, update the existing template instead', HTTP.CONFLICT)

    template = Template()
    template.template_name = args['templateName']
    template.template = args['template']

    db.session.add(template)
    db.session.commit()
    auditlog(event='template.create', actor=session['user'].username, data=args)

    return self.make_response('Template {} has been created'.format(template.template_name), HTTP.CREATED)
def function[post, parameter[self]]: constant[Create a new template] call[name[self].reqparse.add_argument, parameter[constant[templateName]]] call[name[self].reqparse.add_argument, parameter[constant[template]]] variable[args] assign[=] call[name[self].reqparse.parse_args, parameter[]] variable[template] assign[=] call[name[db].Template.find_one, parameter[]] if name[template] begin[:] return[call[name[self].make_response, parameter[constant[Template already exists, update the existing template instead], name[HTTP].CONFLICT]]] variable[template] assign[=] call[name[Template], parameter[]] name[template].template_name assign[=] call[name[args]][constant[templateName]] name[template].template assign[=] call[name[args]][constant[template]] call[name[db].session.add, parameter[name[template]]] call[name[db].session.commit, parameter[]] call[name[auditlog], parameter[]] return[call[name[self].make_response, parameter[call[constant[Template {} has been created].format, parameter[name[template].template_name]], name[HTTP].CREATED]]]
keyword[def] identifier[post] ( identifier[self] ): literal[string] identifier[self] . identifier[reqparse] . identifier[add_argument] ( literal[string] , identifier[type] = identifier[str] , identifier[required] = keyword[True] ) identifier[self] . identifier[reqparse] . identifier[add_argument] ( literal[string] , identifier[type] = identifier[str] , identifier[required] = keyword[True] ) identifier[args] = identifier[self] . identifier[reqparse] . identifier[parse_args] () identifier[template] = identifier[db] . identifier[Template] . identifier[find_one] ( identifier[template_name] = identifier[args] [ literal[string] ]) keyword[if] identifier[template] : keyword[return] identifier[self] . identifier[make_response] ( literal[string] , identifier[HTTP] . identifier[CONFLICT] ) identifier[template] = identifier[Template] () identifier[template] . identifier[template_name] = identifier[args] [ literal[string] ] identifier[template] . identifier[template] = identifier[args] [ literal[string] ] identifier[db] . identifier[session] . identifier[add] ( identifier[template] ) identifier[db] . identifier[session] . identifier[commit] () identifier[auditlog] ( identifier[event] = literal[string] , identifier[actor] = identifier[session] [ literal[string] ]. identifier[username] , identifier[data] = identifier[args] ) keyword[return] identifier[self] . identifier[make_response] ( literal[string] . identifier[format] ( identifier[template] . identifier[template_name] ), identifier[HTTP] . identifier[CREATED] )
def post(self):
    """Create a new template"""
    self.reqparse.add_argument('templateName', type=str, required=True)
    self.reqparse.add_argument('template', type=str, required=True)
    args = self.reqparse.parse_args()
    template = db.Template.find_one(template_name=args['templateName'])
    if template:
        return self.make_response('Template already exists, update the existing template instead', HTTP.CONFLICT)  # depends on [control=['if'], data=[]]
    template = Template()
    template.template_name = args['templateName']
    template.template = args['template']
    db.session.add(template)
    db.session.commit()
    auditlog(event='template.create', actor=session['user'].username, data=args)
    return self.make_response('Template {} has been created'.format(template.template_name), HTTP.CREATED)
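The handler's core is a check-then-create pattern: return 409 CONFLICT for an existing name, 201 CREATED otherwise. A stripped-down, framework-free sketch (the set below stands in for the database lookup):

existing = {'default'}  # stand-in for db.Template.find_one(...)

def create_template(name):
    if name in existing:
        return ('Template already exists, update the existing template instead', 409)
    existing.add(name)
    return ('Template {} has been created'.format(name), 201)

print(create_template('default'))  # -> (..., 409)
print(create_template('custom'))   # -> (..., 201)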
def metadata_lint(old, new, locations):
    """Run the linter over the new metadata, comparing to the old."""
    # ensure we don't modify the metadata
    old = old.copy()
    new = new.copy()
    # remove version info
    old.pop('$version', None)
    new.pop('$version', None)

    for old_group_name in old:
        if old_group_name not in new:
            yield LintError('', 'api group removed', api_name=old_group_name)

    for group_name, new_group in new.items():
        old_group = old.get(group_name, {'apis': {}})
        for name, api in new_group['apis'].items():
            old_api = old_group['apis'].get(name, {})
            api_locations = locations[name]
            for message in lint_api(name, old_api, api, api_locations):
                message.api_name = name
                if message.location is None:
                    message.location = api_locations['api']
                yield message
def function[metadata_lint, parameter[old, new, locations]]: constant[Run the linter over the new metadata, comparing to the old.] variable[old] assign[=] call[name[old].copy, parameter[]] variable[new] assign[=] call[name[new].copy, parameter[]] call[name[old].pop, parameter[constant[$version], constant[None]]] call[name[new].pop, parameter[constant[$version], constant[None]]] for taget[name[old_group_name]] in starred[name[old]] begin[:] if compare[name[old_group_name] <ast.NotIn object at 0x7da2590d7190> name[new]] begin[:] <ast.Yield object at 0x7da1b11e9f60> for taget[tuple[[<ast.Name object at 0x7da1b11a8580>, <ast.Name object at 0x7da1b11a8520>]]] in starred[call[name[new].items, parameter[]]] begin[:] variable[old_group] assign[=] call[name[old].get, parameter[name[group_name], dictionary[[<ast.Constant object at 0x7da1b11a81c0>], [<ast.Dict object at 0x7da1b11a8100>]]]] for taget[tuple[[<ast.Name object at 0x7da1b11a8040>, <ast.Name object at 0x7da1b11a80d0>]]] in starred[call[call[name[new_group]][constant[apis]].items, parameter[]]] begin[:] variable[old_api] assign[=] call[call[name[old_group]][constant[apis]].get, parameter[name[name], dictionary[[], []]]] variable[api_locations] assign[=] call[name[locations]][name[name]] for taget[name[message]] in starred[call[name[lint_api], parameter[name[name], name[old_api], name[api], name[api_locations]]]] begin[:] name[message].api_name assign[=] name[name] if compare[name[message].location is constant[None]] begin[:] name[message].location assign[=] call[name[api_locations]][constant[api]] <ast.Yield object at 0x7da1b26af070>
keyword[def] identifier[metadata_lint] ( identifier[old] , identifier[new] , identifier[locations] ): literal[string] identifier[old] = identifier[old] . identifier[copy] () identifier[new] = identifier[new] . identifier[copy] () identifier[old] . identifier[pop] ( literal[string] , keyword[None] ) identifier[new] . identifier[pop] ( literal[string] , keyword[None] ) keyword[for] identifier[old_group_name] keyword[in] identifier[old] : keyword[if] identifier[old_group_name] keyword[not] keyword[in] identifier[new] : keyword[yield] identifier[LintError] ( literal[string] , literal[string] , identifier[api_name] = identifier[old_group_name] ) keyword[for] identifier[group_name] , identifier[new_group] keyword[in] identifier[new] . identifier[items] (): identifier[old_group] = identifier[old] . identifier[get] ( identifier[group_name] ,{ literal[string] :{}}) keyword[for] identifier[name] , identifier[api] keyword[in] identifier[new_group] [ literal[string] ]. identifier[items] (): identifier[old_api] = identifier[old_group] [ literal[string] ]. identifier[get] ( identifier[name] ,{}) identifier[api_locations] = identifier[locations] [ identifier[name] ] keyword[for] identifier[message] keyword[in] identifier[lint_api] ( identifier[name] , identifier[old_api] , identifier[api] , identifier[api_locations] ): identifier[message] . identifier[api_name] = identifier[name] keyword[if] identifier[message] . identifier[location] keyword[is] keyword[None] : identifier[message] . identifier[location] = identifier[api_locations] [ literal[string] ] keyword[yield] identifier[message]
def metadata_lint(old, new, locations):
    """Run the linter over the new metadata, comparing to the old."""
    # ensure we don't modify the metadata
    old = old.copy()
    new = new.copy()
    # remove version info
    old.pop('$version', None)
    new.pop('$version', None)
    for old_group_name in old:
        if old_group_name not in new:
            yield LintError('', 'api group removed', api_name=old_group_name)  # depends on [control=['if'], data=['old_group_name']]
        # depends on [control=['for'], data=['old_group_name']]
    for (group_name, new_group) in new.items():
        old_group = old.get(group_name, {'apis': {}})
        for (name, api) in new_group['apis'].items():
            old_api = old_group['apis'].get(name, {})
            api_locations = locations[name]
            for message in lint_api(name, old_api, api, api_locations):
                message.api_name = name
                if message.location is None:
                    message.location = api_locations['api']  # depends on [control=['if'], data=[]]
                yield message  # depends on [control=['for'], data=['message']]
            # depends on [control=['for'], data=[]]
        # depends on [control=['for'], data=[]]
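A usage sketch, assuming the metadata_lint above is in scope. LintError and lint_api are not shown in this row, so minimal stand-ins are defined purely for illustration:

class LintError:
    def __init__(self, location, msg, api_name=None):
        self.location, self.msg, self.api_name = location, msg, api_name

def lint_api(name, old_api, api, api_locations):
    return iter(())  # pretend every api passes

old = {'$version': 1, 'group_a': {'apis': {'ping': {}}}}
new = {'$version': 2, 'group_b': {'apis': {'ping': {}}}}
locations = {'ping': {'api': 'service.py:10'}}

for err in metadata_lint(old, new, locations):
    print(err.msg, err.api_name)  # -> api group removed group_a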
def hostloc(self):
    """return host:port"""
    hostloc = self.hostname
    if self.port:
        hostloc = '{hostloc}:{port}'.format(hostloc=hostloc, port=self.port)
    return hostloc
def function[hostloc, parameter[self]]: constant[return host:port] variable[hostloc] assign[=] name[self].hostname if name[self].port begin[:] variable[hostloc] assign[=] call[constant[{hostloc}:{port}].format, parameter[]] return[name[hostloc]]
keyword[def] identifier[hostloc] ( identifier[self] ): literal[string] identifier[hostloc] = identifier[self] . identifier[hostname] keyword[if] identifier[self] . identifier[port] : identifier[hostloc] = literal[string] . identifier[format] ( identifier[hostloc] = identifier[hostloc] , identifier[port] = identifier[self] . identifier[port] ) keyword[return] identifier[hostloc]
def hostloc(self):
    """return host:port"""
    hostloc = self.hostname
    if self.port:
        hostloc = '{hostloc}:{port}'.format(hostloc=hostloc, port=self.port)  # depends on [control=['if'], data=[]]
    return hostloc
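A stand-alone illustration of the same logic: the ':port' suffix is appended only when a port is set (the small wrapper class is hypothetical, not the original host object):

class Loc:
    def __init__(self, hostname, port=None):
        self.hostname, self.port = hostname, port

    @property
    def hostloc(self):
        hostloc = self.hostname
        if self.port:
            hostloc = '{hostloc}:{port}'.format(hostloc=hostloc, port=self.port)
        return hostloc

print(Loc('example.com').hostloc)        # example.com
print(Loc('example.com', 8080).hostloc)  # example.com:8080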
def user_identity_delete(self, user_id, id, **kwargs):
    "https://developer.zendesk.com/rest_api/docs/core/user_identities#delete-identity"
    api_path = "/api/v2/users/{user_id}/identities/{id}.json"
    api_path = api_path.format(user_id=user_id, id=id)
    return self.call(api_path, method="DELETE", **kwargs)
def function[user_identity_delete, parameter[self, user_id, id]]: constant[https://developer.zendesk.com/rest_api/docs/core/user_identities#delete-identity] variable[api_path] assign[=] constant[/api/v2/users/{user_id}/identities/{id}.json] variable[api_path] assign[=] call[name[api_path].format, parameter[]] return[call[name[self].call, parameter[name[api_path]]]]
keyword[def] identifier[user_identity_delete] ( identifier[self] , identifier[user_id] , identifier[id] ,** identifier[kwargs] ): literal[string] identifier[api_path] = literal[string] identifier[api_path] = identifier[api_path] . identifier[format] ( identifier[user_id] = identifier[user_id] , identifier[id] = identifier[id] ) keyword[return] identifier[self] . identifier[call] ( identifier[api_path] , identifier[method] = literal[string] ,** identifier[kwargs] )
def user_identity_delete(self, user_id, id, **kwargs):
    """https://developer.zendesk.com/rest_api/docs/core/user_identities#delete-identity"""
    api_path = '/api/v2/users/{user_id}/identities/{id}.json'
    api_path = api_path.format(user_id=user_id, id=id)
    return self.call(api_path, method='DELETE', **kwargs)
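Only the endpoint-templating step, in isolation (illustrative IDs; the HTTP call itself is left out):

api_path = "/api/v2/users/{user_id}/identities/{id}.json"
print(api_path.format(user_id=35436, id=77))
# -> /api/v2/users/35436/identities/77.json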
def get_(*keyname):
    '''
    Get metadata

    keyname : string
        name of key

    .. note::

        If no keynames are specified, we get all (public) properties

    CLI Example:

    .. code-block:: bash

        salt '*' mdata.get salt:role
        salt '*' mdata.get user-script salt:role
    '''
    mdata = _check_mdata_get()
    ret = {}

    if not keyname:
        keyname = list_()

    for k in keyname:
        if mdata:
            cmd = '{0} {1}'.format(mdata, k)
            res = __salt__['cmd.run_all'](cmd, ignore_retcode=True)
            ret[k] = res['stdout'] if res['retcode'] == 0 else ''
        else:
            ret[k] = ''

    return ret
def function[get_, parameter[]]: constant[ Get metadata keyname : string name of key .. note:: If no keynames are specified, we get all (public) properties CLI Example: .. code-block:: bash salt '*' mdata.get salt:role salt '*' mdata.get user-script salt:role ] variable[mdata] assign[=] call[name[_check_mdata_get], parameter[]] variable[ret] assign[=] dictionary[[], []] if <ast.UnaryOp object at 0x7da1b21f3400> begin[:] variable[keyname] assign[=] call[name[list_], parameter[]] for taget[name[k]] in starred[name[keyname]] begin[:] if name[mdata] begin[:] variable[cmd] assign[=] call[constant[{0} {1}].format, parameter[name[mdata], name[k]]] variable[res] assign[=] call[call[name[__salt__]][constant[cmd.run_all]], parameter[name[cmd]]] call[name[ret]][name[k]] assign[=] <ast.IfExp object at 0x7da1b21f00a0> return[name[ret]]
keyword[def] identifier[get_] (* identifier[keyname] ): literal[string] identifier[mdata] = identifier[_check_mdata_get] () identifier[ret] ={} keyword[if] keyword[not] identifier[keyname] : identifier[keyname] = identifier[list_] () keyword[for] identifier[k] keyword[in] identifier[keyname] : keyword[if] identifier[mdata] : identifier[cmd] = literal[string] . identifier[format] ( identifier[mdata] , identifier[k] ) identifier[res] = identifier[__salt__] [ literal[string] ]( identifier[cmd] , identifier[ignore_retcode] = keyword[True] ) identifier[ret] [ identifier[k] ]= identifier[res] [ literal[string] ] keyword[if] identifier[res] [ literal[string] ]== literal[int] keyword[else] literal[string] keyword[else] : identifier[ret] [ identifier[k] ]= literal[string] keyword[return] identifier[ret]
def get_(*keyname): """ Get metadata keyname : string name of key .. note:: If no keynames are specified, we get all (public) properties CLI Example: .. code-block:: bash salt '*' mdata.get salt:role salt '*' mdata.get user-script salt:role """ mdata = _check_mdata_get() ret = {} if not keyname: keyname = list_() # depends on [control=['if'], data=[]] for k in keyname: if mdata: cmd = '{0} {1}'.format(mdata, k) res = __salt__['cmd.run_all'](cmd, ignore_retcode=True) ret[k] = res['stdout'] if res['retcode'] == 0 else '' # depends on [control=['if'], data=[]] else: ret[k] = '' # depends on [control=['for'], data=['k']] return ret
def out_of_service(self, args): """ Set the Out_Of_Service property so the Present_Value of an I/O may be written. :param args: String with <addr> <type> <inst> <prop> <value> [ <indx> ] [ <priority> ] """ if not self._started: raise ApplicationNotStarted("BACnet stack not running - use startApp()") # with self.this_application._lock: if use lock...won't be able to call read... args = args.split() addr, obj_type, obj_inst = args[:3] try: self.write("{} {} {} outOfService True".format(addr, obj_type, obj_inst)) except NoResponseFromController: pass
def function[out_of_service, parameter[self, args]]: constant[ Set the Out_Of_Service property so the Present_Value of an I/O may be written. :param args: String with <addr> <type> <inst> <prop> <value> [ <indx> ] [ <priority> ] ] if <ast.UnaryOp object at 0x7da1b0650dc0> begin[:] <ast.Raise object at 0x7da1b0653c40> variable[args] assign[=] call[name[args].split, parameter[]] <ast.Tuple object at 0x7da1b0653760> assign[=] call[name[args]][<ast.Slice object at 0x7da1b0651510>] <ast.Try object at 0x7da1b0652f80>
keyword[def] identifier[out_of_service] ( identifier[self] , identifier[args] ): literal[string] keyword[if] keyword[not] identifier[self] . identifier[_started] : keyword[raise] identifier[ApplicationNotStarted] ( literal[string] ) identifier[args] = identifier[args] . identifier[split] () identifier[addr] , identifier[obj_type] , identifier[obj_inst] = identifier[args] [: literal[int] ] keyword[try] : identifier[self] . identifier[write] ( literal[string] . identifier[format] ( identifier[addr] , identifier[obj_type] , identifier[obj_inst] )) keyword[except] identifier[NoResponseFromController] : keyword[pass]
def out_of_service(self, args): """ Set the Out_Of_Service property so the Present_Value of an I/O may be written. :param args: String with <addr> <type> <inst> <prop> <value> [ <indx> ] [ <priority> ] """ if not self._started: raise ApplicationNotStarted('BACnet stack not running - use startApp()') # depends on [control=['if'], data=[]] # with self.this_application._lock: if use lock...won't be able to call read... args = args.split() (addr, obj_type, obj_inst) = args[:3] try: self.write('{} {} {} outOfService True'.format(addr, obj_type, obj_inst)) # depends on [control=['try'], data=[]] except NoResponseFromController: pass # depends on [control=['except'], data=[]]
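A sketch of the write line the method assembles, shown standalone with a made-up device address, object type, and instance (the running BACnet application itself is not reproduced):

args = '2:5 analogValue 1'.split()  # hypothetical '<addr> <type> <inst>' arguments
addr, obj_type, obj_inst = args[:3]
print('{} {} {} outOfService True'.format(addr, obj_type, obj_inst))
# 2:5 analogValue 1 outOfService True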
def translate(conic, vector): """ Translates a conic by a vector """ # Translation matrix T = N.identity(len(conic)) T[:-1,-1] = -vector return conic.transform(T)
def function[translate, parameter[conic, vector]]: constant[ Translates a conic by a vector ] variable[T] assign[=] call[name[N].identity, parameter[call[name[len], parameter[name[conic]]]]] call[name[T]][tuple[[<ast.Slice object at 0x7da1b1973f70>, <ast.UnaryOp object at 0x7da1b1973250>]]] assign[=] <ast.UnaryOp object at 0x7da1b1971ae0> return[call[name[conic].transform, parameter[name[T]]]]
keyword[def] identifier[translate] ( identifier[conic] , identifier[vector] ): literal[string] identifier[T] = identifier[N] . identifier[identity] ( identifier[len] ( identifier[conic] )) identifier[T] [:- literal[int] ,- literal[int] ]=- identifier[vector] keyword[return] identifier[conic] . identifier[transform] ( identifier[T] )
def translate(conic, vector): """ Translates a conic by a vector """ # Translation matrix T = N.identity(len(conic)) T[:-1, -1] = -vector return conic.transform(T)
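A self-contained numpy check of the construction, assuming a 3x3 homogeneous conic matrix and that Conic.transform(T) applies T.T @ C @ T (the Conic class lives elsewhere in this library, so a raw array stands in for it):

import numpy as N

vector = N.array([2.0, 3.0])   # translate by (2, 3)
C = N.diag([1.0, 1.0, -1.0])   # unit circle x**2 + y**2 - 1 = 0 as a conic matrix
T = N.identity(3)
T[:-1, -1] = -vector           # same construction as in translate()
translated = T.T @ C @ T       # assumed effect of conic.transform(T)
# quadratic form x**2 + y**2 - 4x - 6y + 12 = 0, i.e. the unit circle centered at (2, 3)
print(translated)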
def update_floatingip(floatingip_id, port=None, profile=None): ''' Updates a floatingIP CLI Example: .. code-block:: bash salt '*' neutron.update_floatingip network-name port-name :param floatingip_id: ID of floatingIP :param port: ID or name of port, to associate floatingip to `None` or do not specify to disassociate the floatingip (Optional) :param profile: Profile to build on (Optional) :return: Value of updated floating IP information ''' conn = _auth(profile) return conn.update_floatingip(floatingip_id, port)
def function[update_floatingip, parameter[floatingip_id, port, profile]]: constant[ Updates a floatingIP CLI Example: .. code-block:: bash salt '*' neutron.update_floatingip network-name port-name :param floatingip_id: ID of floatingIP :param port: ID or name of port, to associate floatingip to `None` or do not specify to disassociate the floatingip (Optional) :param profile: Profile to build on (Optional) :return: Value of updated floating IP information ] variable[conn] assign[=] call[name[_auth], parameter[name[profile]]] return[call[name[conn].update_floatingip, parameter[name[floatingip_id], name[port]]]]
keyword[def] identifier[update_floatingip] ( identifier[floatingip_id] , identifier[port] = keyword[None] , identifier[profile] = keyword[None] ): literal[string] identifier[conn] = identifier[_auth] ( identifier[profile] ) keyword[return] identifier[conn] . identifier[update_floatingip] ( identifier[floatingip_id] , identifier[port] )
def update_floatingip(floatingip_id, port=None, profile=None): """ Updates a floatingIP CLI Example: .. code-block:: bash salt '*' neutron.update_floatingip network-name port-name :param floatingip_id: ID of floatingIP :param port: ID or name of port, to associate floatingip to `None` or do not specify to disassociate the floatingip (Optional) :param profile: Profile to build on (Optional) :return: Value of updated floating IP information """ conn = _auth(profile) return conn.update_floatingip(floatingip_id, port)
async def trace_back_to_tree(chain):
    """Trace the chain back to the tree.

    task.metadata.source: "https://hg.mozilla.org/projects/date//file/a80373508881bfbff67a2a49297c328ff8052572/taskcluster/ci/build"

    task.payload.env.GECKO_HEAD_REPOSITORY "https://hg.mozilla.org/projects/date/"

    Args:
        chain (ChainOfTrust): the chain we're operating on

    Raises:
        CoTError: on error.

    """
    errors = []
    repos = {}
    restricted_privs = None
    rules = {}
    for my_key, config_key in {
        'scopes': 'cot_restricted_scopes',
        'trees': 'cot_restricted_trees'
    }.items():
        rules[my_key] = chain.context.config[config_key]

    # a repo_path of None means we have no restricted privs.
    # a string repo_path may mean we have higher privs
    for obj in [chain] + chain.links:
        source_url = get_source_url(obj)
        repo_path = match_url_regex(
            chain.context.config['valid_vcs_rules'], source_url, match_url_path_callback
        )
        repos[obj] = repo_path
    # check for restricted scopes.
    my_repo = repos[chain]
    for scope in chain.task['scopes']:
        if scope in rules['scopes']:
            log.info("Found privileged scope {}".format(scope))
            restricted_privs = True
            level = rules['scopes'][scope]
            if my_repo not in rules['trees'][level]:
                errors.append("{} {}: repo {} not allowlisted for scope {}!".format(
                    chain.name, chain.task_id, my_repo, scope
                ))
    # verify all tasks w/ same decision_task_id have the same source repo.
    if len(set(repos.values())) > 1:
        for obj, repo in repos.items():
            if obj.decision_task_id == chain.decision_task_id:
                if repo != my_repo:
                    errors.append("{} {} repo {} doesn't match my repo {}!".format(
                        obj.name, obj.task_id, repo, my_repo
                    ))
            # if we have restricted privs, the non-sibling tasks must at least be in
            # a known repo.
            # (Not currently requiring that all tasks have the same privilege level,
            # in case a docker-image build is run on mozilla-central and that image
            # is used for a release-priv task, for example.)
            elif restricted_privs and repo is None:
                errors.append("{} {} has no privileged repo on a restricted privilege scope!".format(
                    obj.name, obj.task_id
                ))
    # Disallow restricted privs on is_try_or_pull_request. This may be a redundant check.
    if restricted_privs and await chain.is_try_or_pull_request():
        errors.append(
            "{} {} has restricted privilege scope, and is_try_or_pull_request()!".format(
                chain.name, chain.task_id
            )
        )
    raise_on_errors(errors)
<ast.AsyncFunctionDef object at 0x7da1b0e9c4f0>
keyword[async] keyword[def] identifier[trace_back_to_tree] ( identifier[chain] ): literal[string] identifier[errors] =[] identifier[repos] ={} identifier[restricted_privs] = keyword[None] identifier[rules] ={} keyword[for] identifier[my_key] , identifier[config_key] keyword[in] { literal[string] : literal[string] , literal[string] : literal[string] }. identifier[items] (): identifier[rules] [ identifier[my_key] ]= identifier[chain] . identifier[context] . identifier[config] [ identifier[config_key] ] keyword[for] identifier[obj] keyword[in] [ identifier[chain] ]+ identifier[chain] . identifier[links] : identifier[source_url] = identifier[get_source_url] ( identifier[obj] ) identifier[repo_path] = identifier[match_url_regex] ( identifier[chain] . identifier[context] . identifier[config] [ literal[string] ], identifier[source_url] , identifier[match_url_path_callback] ) identifier[repos] [ identifier[obj] ]= identifier[repo_path] identifier[my_repo] = identifier[repos] [ identifier[chain] ] keyword[for] identifier[scope] keyword[in] identifier[chain] . identifier[task] [ literal[string] ]: keyword[if] identifier[scope] keyword[in] identifier[rules] [ literal[string] ]: identifier[log] . identifier[info] ( literal[string] . identifier[format] ( identifier[scope] )) identifier[restricted_privs] = keyword[True] identifier[level] = identifier[rules] [ literal[string] ][ identifier[scope] ] keyword[if] identifier[my_repo] keyword[not] keyword[in] identifier[rules] [ literal[string] ][ identifier[level] ]: identifier[errors] . identifier[append] ( literal[string] . identifier[format] ( identifier[chain] . identifier[name] , identifier[chain] . identifier[task_id] , identifier[my_repo] , identifier[scope] )) keyword[if] identifier[len] ( identifier[set] ( identifier[repos] . identifier[values] ()))> literal[int] : keyword[for] identifier[obj] , identifier[repo] keyword[in] identifier[repos] . identifier[items] (): keyword[if] identifier[obj] . identifier[decision_task_id] == identifier[chain] . identifier[decision_task_id] : keyword[if] identifier[repo] != identifier[my_repo] : identifier[errors] . identifier[append] ( literal[string] . identifier[format] ( identifier[obj] . identifier[name] , identifier[obj] . identifier[task_id] , identifier[repo] , identifier[my_repo] )) keyword[elif] identifier[restricted_privs] keyword[and] identifier[repo] keyword[is] keyword[None] : identifier[errors] . identifier[append] ( literal[string] . identifier[format] ( identifier[obj] . identifier[name] , identifier[obj] . identifier[task_id] )) keyword[if] identifier[restricted_privs] keyword[and] keyword[await] identifier[chain] . identifier[is_try_or_pull_request] (): identifier[errors] . identifier[append] ( literal[string] . identifier[format] ( identifier[chain] . identifier[name] , identifier[chain] . identifier[task_id] ) ) identifier[raise_on_errors] ( identifier[errors] )
async def trace_back_to_tree(chain):
    """Trace the chain back to the tree.

    task.metadata.source: "https://hg.mozilla.org/projects/date//file/a80373508881bfbff67a2a49297c328ff8052572/taskcluster/ci/build"

    task.payload.env.GECKO_HEAD_REPOSITORY "https://hg.mozilla.org/projects/date/"

    Args:
        chain (ChainOfTrust): the chain we're operating on

    Raises:
        CoTError: on error.

    """
    errors = []
    repos = {}
    restricted_privs = None
    rules = {}
    for (my_key, config_key) in {'scopes': 'cot_restricted_scopes', 'trees': 'cot_restricted_trees'}.items():
        rules[my_key] = chain.context.config[config_key] # depends on [control=['for'], data=[]]
    # a repo_path of None means we have no restricted privs.
    # a string repo_path may mean we have higher privs
    for obj in [chain] + chain.links:
        source_url = get_source_url(obj)
        repo_path = match_url_regex(chain.context.config['valid_vcs_rules'], source_url, match_url_path_callback)
        repos[obj] = repo_path # depends on [control=['for'], data=['obj']]
    # check for restricted scopes.
    my_repo = repos[chain]
    for scope in chain.task['scopes']:
        if scope in rules['scopes']:
            log.info('Found privileged scope {}'.format(scope))
            restricted_privs = True
            level = rules['scopes'][scope]
            if my_repo not in rules['trees'][level]:
                errors.append('{} {}: repo {} not allowlisted for scope {}!'.format(chain.name, chain.task_id, my_repo, scope)) # depends on [control=['if'], data=['my_repo']] # depends on [control=['if'], data=['scope']] # depends on [control=['for'], data=['scope']]
    # verify all tasks w/ same decision_task_id have the same source repo.
    if len(set(repos.values())) > 1:
        for (obj, repo) in repos.items():
            if obj.decision_task_id == chain.decision_task_id:
                if repo != my_repo:
                    errors.append("{} {} repo {} doesn't match my repo {}!".format(obj.name, obj.task_id, repo, my_repo)) # depends on [control=['if'], data=['repo', 'my_repo']] # depends on [control=['if'], data=[]]
            # if we have restricted privs, the non-sibling tasks must at least be in
            # a known repo.
            # (Not currently requiring that all tasks have the same privilege level,
            # in case a docker-image build is run on mozilla-central and that image
            # is used for a release-priv task, for example.)
            elif restricted_privs and repo is None:
                errors.append('{} {} has no privileged repo on a restricted privilege scope!'.format(obj.name, obj.task_id)) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=[]] # depends on [control=['if'], data=[]]
    # Disallow restricted privs on is_try_or_pull_request. This may be a redundant check.
    if restricted_privs and await chain.is_try_or_pull_request():
        errors.append('{} {} has restricted privilege scope, and is_try_or_pull_request()!'.format(chain.name, chain.task_id)) # depends on [control=['if'], data=[]]
    raise_on_errors(errors)
def ghmean(nums): """Return geometric-harmonic mean. Iterates between geometric & harmonic means until they converge to a single value (rounded to 12 digits). Cf. https://en.wikipedia.org/wiki/Geometric-harmonic_mean Parameters ---------- nums : list A series of numbers Returns ------- float The geometric-harmonic mean of nums Examples -------- >>> ghmean([1, 2, 3, 4]) 2.058868154613003 >>> ghmean([1, 2]) 1.3728805006183502 >>> ghmean([0, 5, 1000]) 0.0 >>> ghmean([0, 0]) 0.0 >>> ghmean([0, 0, 5]) nan """ m_g = gmean(nums) m_h = hmean(nums) if math.isnan(m_g) or math.isnan(m_h): return float('nan') while round(m_h, 12) != round(m_g, 12): m_g, m_h = (m_g * m_h) ** (1 / 2), (2 * m_g * m_h) / (m_g + m_h) return m_g
def function[ghmean, parameter[nums]]: constant[Return geometric-harmonic mean. Iterates between geometric & harmonic means until they converge to a single value (rounded to 12 digits). Cf. https://en.wikipedia.org/wiki/Geometric-harmonic_mean Parameters ---------- nums : list A series of numbers Returns ------- float The geometric-harmonic mean of nums Examples -------- >>> ghmean([1, 2, 3, 4]) 2.058868154613003 >>> ghmean([1, 2]) 1.3728805006183502 >>> ghmean([0, 5, 1000]) 0.0 >>> ghmean([0, 0]) 0.0 >>> ghmean([0, 0, 5]) nan ] variable[m_g] assign[=] call[name[gmean], parameter[name[nums]]] variable[m_h] assign[=] call[name[hmean], parameter[name[nums]]] if <ast.BoolOp object at 0x7da1b0108070> begin[:] return[call[name[float], parameter[constant[nan]]]] while compare[call[name[round], parameter[name[m_h], constant[12]]] not_equal[!=] call[name[round], parameter[name[m_g], constant[12]]]] begin[:] <ast.Tuple object at 0x7da1b010a680> assign[=] tuple[[<ast.BinOp object at 0x7da1b010a830>, <ast.BinOp object at 0x7da1b0109ed0>]] return[name[m_g]]
keyword[def] identifier[ghmean] ( identifier[nums] ): literal[string] identifier[m_g] = identifier[gmean] ( identifier[nums] ) identifier[m_h] = identifier[hmean] ( identifier[nums] ) keyword[if] identifier[math] . identifier[isnan] ( identifier[m_g] ) keyword[or] identifier[math] . identifier[isnan] ( identifier[m_h] ): keyword[return] identifier[float] ( literal[string] ) keyword[while] identifier[round] ( identifier[m_h] , literal[int] )!= identifier[round] ( identifier[m_g] , literal[int] ): identifier[m_g] , identifier[m_h] =( identifier[m_g] * identifier[m_h] )**( literal[int] / literal[int] ),( literal[int] * identifier[m_g] * identifier[m_h] )/( identifier[m_g] + identifier[m_h] ) keyword[return] identifier[m_g]
def ghmean(nums): """Return geometric-harmonic mean. Iterates between geometric & harmonic means until they converge to a single value (rounded to 12 digits). Cf. https://en.wikipedia.org/wiki/Geometric-harmonic_mean Parameters ---------- nums : list A series of numbers Returns ------- float The geometric-harmonic mean of nums Examples -------- >>> ghmean([1, 2, 3, 4]) 2.058868154613003 >>> ghmean([1, 2]) 1.3728805006183502 >>> ghmean([0, 5, 1000]) 0.0 >>> ghmean([0, 0]) 0.0 >>> ghmean([0, 0, 5]) nan """ m_g = gmean(nums) m_h = hmean(nums) if math.isnan(m_g) or math.isnan(m_h): return float('nan') # depends on [control=['if'], data=[]] while round(m_h, 12) != round(m_g, 12): (m_g, m_h) = ((m_g * m_h) ** (1 / 2), 2 * m_g * m_h / (m_g + m_h)) # depends on [control=['while'], data=[]] return m_g
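The iteration above is self-contained apart from the library's gmean/hmean helpers; a two-input sketch of the same loop (math.sqrt standing in for gmean, the closed-form harmonic mean standing in for hmean) reproduces the docstring example:

import math

def ghmean2(a, b):
    # seed with the plain geometric and harmonic means of the two inputs,
    # then iterate until both agree to 12 digits, as in ghmean() above
    g, h = math.sqrt(a * b), 2 * a * b / (a + b)
    while round(g, 12) != round(h, 12):
        g, h = math.sqrt(g * h), 2 * g * h / (g + h)
    return g

print(ghmean2(1, 2))  # ~1.3728805006183502, cf. ghmean([1, 2]) in the docstring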
def handle_request(self, environ, start_response): """Handle an HTTP request from the client. This is the entry point of the Engine.IO application, using the same interface as a WSGI application. For the typical usage, this function is invoked by the :class:`Middleware` instance, but it can be invoked directly when the middleware is not used. :param environ: The WSGI environment. :param start_response: The WSGI ``start_response`` function. This function returns the HTTP response body to deliver to the client as a byte sequence. """ method = environ['REQUEST_METHOD'] query = urllib.parse.parse_qs(environ.get('QUERY_STRING', '')) if 'j' in query: self.logger.warning('JSONP requests are not supported') r = self._bad_request() else: sid = query['sid'][0] if 'sid' in query else None b64 = False if 'b64' in query: if query['b64'][0] == "1" or query['b64'][0].lower() == "true": b64 = True if method == 'GET': if sid is None: transport = query.get('transport', ['polling'])[0] if transport != 'polling' and transport != 'websocket': self.logger.warning('Invalid transport %s', transport) r = self._bad_request() else: r = self._handle_connect(environ, start_response, transport, b64) else: if sid not in self.sockets: self.logger.warning('Invalid session %s', sid) r = self._bad_request() else: socket = self._get_socket(sid) try: packets = socket.handle_get_request( environ, start_response) if isinstance(packets, list): r = self._ok(packets, b64=b64) else: r = packets except exceptions.EngineIOError: if sid in self.sockets: # pragma: no cover self.disconnect(sid) r = self._bad_request() if sid in self.sockets and self.sockets[sid].closed: del self.sockets[sid] elif method == 'POST': if sid is None or sid not in self.sockets: self.logger.warning('Invalid session %s', sid) r = self._bad_request() else: socket = self._get_socket(sid) try: socket.handle_post_request(environ) r = self._ok() except exceptions.EngineIOError: if sid in self.sockets: # pragma: no cover self.disconnect(sid) r = self._bad_request() except: # pragma: no cover # for any other unexpected errors, we log the error # and keep going self.logger.exception('post request handler error') r = self._ok() elif method == 'OPTIONS': r = self._ok() else: self.logger.warning('Method %s not supported', method) r = self._method_not_found() if not isinstance(r, dict): return r or [] if self.http_compression and \ len(r['response']) >= self.compression_threshold: encodings = [e.split(';')[0].strip() for e in environ.get('HTTP_ACCEPT_ENCODING', '').split(',')] for encoding in encodings: if encoding in self.compression_methods: r['response'] = \ getattr(self, '_' + encoding)(r['response']) r['headers'] += [('Content-Encoding', encoding)] break cors_headers = self._cors_headers(environ) start_response(r['status'], r['headers'] + cors_headers) return [r['response']]
def function[handle_request, parameter[self, environ, start_response]]: constant[Handle an HTTP request from the client. This is the entry point of the Engine.IO application, using the same interface as a WSGI application. For the typical usage, this function is invoked by the :class:`Middleware` instance, but it can be invoked directly when the middleware is not used. :param environ: The WSGI environment. :param start_response: The WSGI ``start_response`` function. This function returns the HTTP response body to deliver to the client as a byte sequence. ] variable[method] assign[=] call[name[environ]][constant[REQUEST_METHOD]] variable[query] assign[=] call[name[urllib].parse.parse_qs, parameter[call[name[environ].get, parameter[constant[QUERY_STRING], constant[]]]]] if compare[constant[j] in name[query]] begin[:] call[name[self].logger.warning, parameter[constant[JSONP requests are not supported]]] variable[r] assign[=] call[name[self]._bad_request, parameter[]] if <ast.UnaryOp object at 0x7da1b088be50> begin[:] return[<ast.BoolOp object at 0x7da1b0889510>] if <ast.BoolOp object at 0x7da1b088bf70> begin[:] variable[encodings] assign[=] <ast.ListComp object at 0x7da1b088bcd0> for taget[name[encoding]] in starred[name[encodings]] begin[:] if compare[name[encoding] in name[self].compression_methods] begin[:] call[name[r]][constant[response]] assign[=] call[call[name[getattr], parameter[name[self], binary_operation[constant[_] + name[encoding]]]], parameter[call[name[r]][constant[response]]]] <ast.AugAssign object at 0x7da1b0889690> break variable[cors_headers] assign[=] call[name[self]._cors_headers, parameter[name[environ]]] call[name[start_response], parameter[call[name[r]][constant[status]], binary_operation[call[name[r]][constant[headers]] + name[cors_headers]]]] return[list[[<ast.Subscript object at 0x7da204344d90>]]]
keyword[def] identifier[handle_request] ( identifier[self] , identifier[environ] , identifier[start_response] ): literal[string] identifier[method] = identifier[environ] [ literal[string] ] identifier[query] = identifier[urllib] . identifier[parse] . identifier[parse_qs] ( identifier[environ] . identifier[get] ( literal[string] , literal[string] )) keyword[if] literal[string] keyword[in] identifier[query] : identifier[self] . identifier[logger] . identifier[warning] ( literal[string] ) identifier[r] = identifier[self] . identifier[_bad_request] () keyword[else] : identifier[sid] = identifier[query] [ literal[string] ][ literal[int] ] keyword[if] literal[string] keyword[in] identifier[query] keyword[else] keyword[None] identifier[b64] = keyword[False] keyword[if] literal[string] keyword[in] identifier[query] : keyword[if] identifier[query] [ literal[string] ][ literal[int] ]== literal[string] keyword[or] identifier[query] [ literal[string] ][ literal[int] ]. identifier[lower] ()== literal[string] : identifier[b64] = keyword[True] keyword[if] identifier[method] == literal[string] : keyword[if] identifier[sid] keyword[is] keyword[None] : identifier[transport] = identifier[query] . identifier[get] ( literal[string] ,[ literal[string] ])[ literal[int] ] keyword[if] identifier[transport] != literal[string] keyword[and] identifier[transport] != literal[string] : identifier[self] . identifier[logger] . identifier[warning] ( literal[string] , identifier[transport] ) identifier[r] = identifier[self] . identifier[_bad_request] () keyword[else] : identifier[r] = identifier[self] . identifier[_handle_connect] ( identifier[environ] , identifier[start_response] , identifier[transport] , identifier[b64] ) keyword[else] : keyword[if] identifier[sid] keyword[not] keyword[in] identifier[self] . identifier[sockets] : identifier[self] . identifier[logger] . identifier[warning] ( literal[string] , identifier[sid] ) identifier[r] = identifier[self] . identifier[_bad_request] () keyword[else] : identifier[socket] = identifier[self] . identifier[_get_socket] ( identifier[sid] ) keyword[try] : identifier[packets] = identifier[socket] . identifier[handle_get_request] ( identifier[environ] , identifier[start_response] ) keyword[if] identifier[isinstance] ( identifier[packets] , identifier[list] ): identifier[r] = identifier[self] . identifier[_ok] ( identifier[packets] , identifier[b64] = identifier[b64] ) keyword[else] : identifier[r] = identifier[packets] keyword[except] identifier[exceptions] . identifier[EngineIOError] : keyword[if] identifier[sid] keyword[in] identifier[self] . identifier[sockets] : identifier[self] . identifier[disconnect] ( identifier[sid] ) identifier[r] = identifier[self] . identifier[_bad_request] () keyword[if] identifier[sid] keyword[in] identifier[self] . identifier[sockets] keyword[and] identifier[self] . identifier[sockets] [ identifier[sid] ]. identifier[closed] : keyword[del] identifier[self] . identifier[sockets] [ identifier[sid] ] keyword[elif] identifier[method] == literal[string] : keyword[if] identifier[sid] keyword[is] keyword[None] keyword[or] identifier[sid] keyword[not] keyword[in] identifier[self] . identifier[sockets] : identifier[self] . identifier[logger] . identifier[warning] ( literal[string] , identifier[sid] ) identifier[r] = identifier[self] . identifier[_bad_request] () keyword[else] : identifier[socket] = identifier[self] . identifier[_get_socket] ( identifier[sid] ) keyword[try] : identifier[socket] . identifier[handle_post_request] ( identifier[environ] ) identifier[r] = identifier[self] . identifier[_ok] () keyword[except] identifier[exceptions] . identifier[EngineIOError] : keyword[if] identifier[sid] keyword[in] identifier[self] . identifier[sockets] : identifier[self] . identifier[disconnect] ( identifier[sid] ) identifier[r] = identifier[self] . identifier[_bad_request] () keyword[except] : identifier[self] . identifier[logger] . identifier[exception] ( literal[string] ) identifier[r] = identifier[self] . identifier[_ok] () keyword[elif] identifier[method] == literal[string] : identifier[r] = identifier[self] . identifier[_ok] () keyword[else] : identifier[self] . identifier[logger] . identifier[warning] ( literal[string] , identifier[method] ) identifier[r] = identifier[self] . identifier[_method_not_found] () keyword[if] keyword[not] identifier[isinstance] ( identifier[r] , identifier[dict] ): keyword[return] identifier[r] keyword[or] [] keyword[if] identifier[self] . identifier[http_compression] keyword[and] identifier[len] ( identifier[r] [ literal[string] ])>= identifier[self] . identifier[compression_threshold] : identifier[encodings] =[ identifier[e] . identifier[split] ( literal[string] )[ literal[int] ]. identifier[strip] () keyword[for] identifier[e] keyword[in] identifier[environ] . identifier[get] ( literal[string] , literal[string] ). identifier[split] ( literal[string] )] keyword[for] identifier[encoding] keyword[in] identifier[encodings] : keyword[if] identifier[encoding] keyword[in] identifier[self] . identifier[compression_methods] : identifier[r] [ literal[string] ]= identifier[getattr] ( identifier[self] , literal[string] + identifier[encoding] )( identifier[r] [ literal[string] ]) identifier[r] [ literal[string] ]+=[( literal[string] , identifier[encoding] )] keyword[break] identifier[cors_headers] = identifier[self] . identifier[_cors_headers] ( identifier[environ] ) identifier[start_response] ( identifier[r] [ literal[string] ], identifier[r] [ literal[string] ]+ identifier[cors_headers] ) keyword[return] [ identifier[r] [ literal[string] ]]
def handle_request(self, environ, start_response): """Handle an HTTP request from the client. This is the entry point of the Engine.IO application, using the same interface as a WSGI application. For the typical usage, this function is invoked by the :class:`Middleware` instance, but it can be invoked directly when the middleware is not used. :param environ: The WSGI environment. :param start_response: The WSGI ``start_response`` function. This function returns the HTTP response body to deliver to the client as a byte sequence. """ method = environ['REQUEST_METHOD'] query = urllib.parse.parse_qs(environ.get('QUERY_STRING', '')) if 'j' in query: self.logger.warning('JSONP requests are not supported') r = self._bad_request() # depends on [control=['if'], data=[]] else: sid = query['sid'][0] if 'sid' in query else None b64 = False if 'b64' in query: if query['b64'][0] == '1' or query['b64'][0].lower() == 'true': b64 = True # depends on [control=['if'], data=[]] # depends on [control=['if'], data=['query']] if method == 'GET': if sid is None: transport = query.get('transport', ['polling'])[0] if transport != 'polling' and transport != 'websocket': self.logger.warning('Invalid transport %s', transport) r = self._bad_request() # depends on [control=['if'], data=[]] else: r = self._handle_connect(environ, start_response, transport, b64) # depends on [control=['if'], data=[]] elif sid not in self.sockets: self.logger.warning('Invalid session %s', sid) r = self._bad_request() # depends on [control=['if'], data=['sid']] else: socket = self._get_socket(sid) try: packets = socket.handle_get_request(environ, start_response) if isinstance(packets, list): r = self._ok(packets, b64=b64) # depends on [control=['if'], data=[]] else: r = packets # depends on [control=['try'], data=[]] except exceptions.EngineIOError: if sid in self.sockets: # pragma: no cover self.disconnect(sid) # depends on [control=['if'], data=['sid']] r = self._bad_request() # depends on [control=['except'], data=[]] if sid in self.sockets and self.sockets[sid].closed: del self.sockets[sid] # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] elif method == 'POST': if sid is None or sid not in self.sockets: self.logger.warning('Invalid session %s', sid) r = self._bad_request() # depends on [control=['if'], data=[]] else: socket = self._get_socket(sid) try: socket.handle_post_request(environ) r = self._ok() # depends on [control=['try'], data=[]] except exceptions.EngineIOError: if sid in self.sockets: # pragma: no cover self.disconnect(sid) # depends on [control=['if'], data=['sid']] r = self._bad_request() # depends on [control=['except'], data=[]] except: # pragma: no cover # for any other unexpected errors, we log the error # and keep going self.logger.exception('post request handler error') r = self._ok() # depends on [control=['except'], data=[]] # depends on [control=['if'], data=[]] elif method == 'OPTIONS': r = self._ok() # depends on [control=['if'], data=[]] else: self.logger.warning('Method %s not supported', method) r = self._method_not_found() if not isinstance(r, dict): return r or [] # depends on [control=['if'], data=[]] if self.http_compression and len(r['response']) >= self.compression_threshold: encodings = [e.split(';')[0].strip() for e in environ.get('HTTP_ACCEPT_ENCODING', '').split(',')] for encoding in encodings: if encoding in self.compression_methods: r['response'] = getattr(self, '_' + encoding)(r['response']) r['headers'] += [('Content-Encoding', encoding)] break # depends on [control=['if'], data=['encoding']] # depends on [control=['for'], data=['encoding']] # depends on [control=['if'], data=[]] cors_headers = self._cors_headers(environ) start_response(r['status'], r['headers'] + cors_headers) return [r['response']]
def _interp_limit(invalid, fw_limit, bw_limit): """ Get indexers of values that won't be filled because they exceed the limits. Parameters ---------- invalid : boolean ndarray fw_limit : int or None forward limit to index bw_limit : int or None backward limit to index Returns ------- set of indexers Notes ----- This is equivalent to the more readable, but slower .. code-block:: python def _interp_limit(invalid, fw_limit, bw_limit): for x in np.where(invalid)[0]: if invalid[max(0, x - fw_limit):x + bw_limit + 1].all(): yield x """ # handle forward first; the backward direction is the same except # 1. operate on the reversed array # 2. subtract the returned indices from N - 1 N = len(invalid) f_idx = set() b_idx = set() def inner(invalid, limit): limit = min(limit, N) windowed = _rolling_window(invalid, limit + 1).all(1) idx = (set(np.where(windowed)[0] + limit) | set(np.where((~invalid[:limit + 1]).cumsum() == 0)[0])) return idx if fw_limit is not None: if fw_limit == 0: f_idx = set(np.where(invalid)[0]) else: f_idx = inner(invalid, fw_limit) if bw_limit is not None: if bw_limit == 0: # then we don't even need to care about backwards # just use forwards return f_idx else: b_idx = list(inner(invalid[::-1], bw_limit)) b_idx = set(N - 1 - np.asarray(b_idx)) if fw_limit == 0: return b_idx return f_idx & b_idx
def function[_interp_limit, parameter[invalid, fw_limit, bw_limit]]: constant[ Get indexers of values that won't be filled because they exceed the limits. Parameters ---------- invalid : boolean ndarray fw_limit : int or None forward limit to index bw_limit : int or None backward limit to index Returns ------- set of indexers Notes ----- This is equivalent to the more readable, but slower .. code-block:: python def _interp_limit(invalid, fw_limit, bw_limit): for x in np.where(invalid)[0]: if invalid[max(0, x - fw_limit):x + bw_limit + 1].all(): yield x ] variable[N] assign[=] call[name[len], parameter[name[invalid]]] variable[f_idx] assign[=] call[name[set], parameter[]] variable[b_idx] assign[=] call[name[set], parameter[]] def function[inner, parameter[invalid, limit]]: variable[limit] assign[=] call[name[min], parameter[name[limit], name[N]]] variable[windowed] assign[=] call[call[name[_rolling_window], parameter[name[invalid], binary_operation[name[limit] + constant[1]]]].all, parameter[constant[1]]] variable[idx] assign[=] binary_operation[call[name[set], parameter[binary_operation[call[call[name[np].where, parameter[name[windowed]]]][constant[0]] + name[limit]]]] <ast.BitOr object at 0x7da2590d6aa0> call[name[set], parameter[call[call[name[np].where, parameter[compare[call[<ast.UnaryOp object at 0x7da18dc9b5e0>.cumsum, parameter[]] equal[==] constant[0]]]]][constant[0]]]]] return[name[idx]] if compare[name[fw_limit] is_not constant[None]] begin[:] if compare[name[fw_limit] equal[==] constant[0]] begin[:] variable[f_idx] assign[=] call[name[set], parameter[call[call[name[np].where, parameter[name[invalid]]]][constant[0]]]] if compare[name[bw_limit] is_not constant[None]] begin[:] if compare[name[bw_limit] equal[==] constant[0]] begin[:] return[name[f_idx]] return[binary_operation[name[f_idx] <ast.BitAnd object at 0x7da2590d6b60> name[b_idx]]]
keyword[def] identifier[_interp_limit] ( identifier[invalid] , identifier[fw_limit] , identifier[bw_limit] ): literal[string] identifier[N] = identifier[len] ( identifier[invalid] ) identifier[f_idx] = identifier[set] () identifier[b_idx] = identifier[set] () keyword[def] identifier[inner] ( identifier[invalid] , identifier[limit] ): identifier[limit] = identifier[min] ( identifier[limit] , identifier[N] ) identifier[windowed] = identifier[_rolling_window] ( identifier[invalid] , identifier[limit] + literal[int] ). identifier[all] ( literal[int] ) identifier[idx] =( identifier[set] ( identifier[np] . identifier[where] ( identifier[windowed] )[ literal[int] ]+ identifier[limit] )| identifier[set] ( identifier[np] . identifier[where] ((~ identifier[invalid] [: identifier[limit] + literal[int] ]). identifier[cumsum] ()== literal[int] )[ literal[int] ])) keyword[return] identifier[idx] keyword[if] identifier[fw_limit] keyword[is] keyword[not] keyword[None] : keyword[if] identifier[fw_limit] == literal[int] : identifier[f_idx] = identifier[set] ( identifier[np] . identifier[where] ( identifier[invalid] )[ literal[int] ]) keyword[else] : identifier[f_idx] = identifier[inner] ( identifier[invalid] , identifier[fw_limit] ) keyword[if] identifier[bw_limit] keyword[is] keyword[not] keyword[None] : keyword[if] identifier[bw_limit] == literal[int] : keyword[return] identifier[f_idx] keyword[else] : identifier[b_idx] = identifier[list] ( identifier[inner] ( identifier[invalid] [::- literal[int] ], identifier[bw_limit] )) identifier[b_idx] = identifier[set] ( identifier[N] - literal[int] - identifier[np] . identifier[asarray] ( identifier[b_idx] )) keyword[if] identifier[fw_limit] == literal[int] : keyword[return] identifier[b_idx] keyword[return] identifier[f_idx] & identifier[b_idx]
def _interp_limit(invalid, fw_limit, bw_limit): """ Get indexers of values that won't be filled because they exceed the limits. Parameters ---------- invalid : boolean ndarray fw_limit : int or None forward limit to index bw_limit : int or None backward limit to index Returns ------- set of indexers Notes ----- This is equivalent to the more readable, but slower .. code-block:: python def _interp_limit(invalid, fw_limit, bw_limit): for x in np.where(invalid)[0]: if invalid[max(0, x - fw_limit):x + bw_limit + 1].all(): yield x """ # handle forward first; the backward direction is the same except # 1. operate on the reversed array # 2. subtract the returned indices from N - 1 N = len(invalid) f_idx = set() b_idx = set() def inner(invalid, limit): limit = min(limit, N) windowed = _rolling_window(invalid, limit + 1).all(1) idx = set(np.where(windowed)[0] + limit) | set(np.where((~invalid[:limit + 1]).cumsum() == 0)[0]) return idx if fw_limit is not None: if fw_limit == 0: f_idx = set(np.where(invalid)[0]) # depends on [control=['if'], data=[]] else: f_idx = inner(invalid, fw_limit) # depends on [control=['if'], data=['fw_limit']] if bw_limit is not None: if bw_limit == 0: # then we don't even need to care about backwards # just use forwards return f_idx # depends on [control=['if'], data=[]] else: b_idx = list(inner(invalid[::-1], bw_limit)) b_idx = set(N - 1 - np.asarray(b_idx)) if fw_limit == 0: return b_idx # depends on [control=['if'], data=[]] # depends on [control=['if'], data=['bw_limit']] return f_idx & b_idx
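The docstring already supplies the readable-but-slow equivalent; running it on a small mask shows what the returned set contains (index 2 sits in the middle of a three-long run, so it exceeds a forward and backward limit of 1 and will not be filled):

import numpy as np

def interp_limit_naive(invalid, fw_limit, bw_limit):
    # the equivalent generator quoted in the docstring above
    for x in np.where(invalid)[0]:
        if invalid[max(0, x - fw_limit):x + bw_limit + 1].all():
            yield x

invalid = np.array([False, True, True, True, False, True, False])
print([int(x) for x in interp_limit_naive(invalid, 1, 1)])  # [2]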
def addError(self, test, err, capt=None): """Add error output to Xunit report. """ exc_type, exc_val, tb = err tb = ''.join(traceback.format_exception( exc_type, exc_val if isinstance(exc_val, exc_type) else exc_type(exc_val), tb )) name = id_split(test.id()) group = self.report_data[name[0]] if issubclass(err[0], SkipTest): type = 'skipped' self.stats['skipped'] += 1 group.stats['skipped'] += 1 else: type = 'error' self.stats['errors'] += 1 group.stats['errors'] += 1 group.tests.append({ 'name': name[-1], 'failed': True, 'type': type, 'errtype': nice_classname(err[0]), 'message': exc_message(err), 'tb': tb, })
def function[addError, parameter[self, test, err, capt]]: constant[Add error output to Xunit report. ] <ast.Tuple object at 0x7da18c4cf550> assign[=] name[err] variable[tb] assign[=] call[constant[].join, parameter[call[name[traceback].format_exception, parameter[name[exc_type], <ast.IfExp object at 0x7da18c4ccac0>, name[tb]]]]] variable[name] assign[=] call[name[id_split], parameter[call[name[test].id, parameter[]]]] variable[group] assign[=] call[name[self].report_data][call[name[name]][constant[0]]] if call[name[issubclass], parameter[call[name[err]][constant[0]], name[SkipTest]]] begin[:] variable[type] assign[=] constant[skipped] <ast.AugAssign object at 0x7da18c4ce740> <ast.AugAssign object at 0x7da18c4ce950> call[name[group].tests.append, parameter[dictionary[[<ast.Constant object at 0x7da18dc99e70>, <ast.Constant object at 0x7da18dc99cc0>, <ast.Constant object at 0x7da18dc98430>, <ast.Constant object at 0x7da18dc9bac0>, <ast.Constant object at 0x7da18dc993c0>, <ast.Constant object at 0x7da18dc9ac50>], [<ast.Subscript object at 0x7da18dc98700>, <ast.Constant object at 0x7da18dc98fd0>, <ast.Name object at 0x7da18dc9ab00>, <ast.Call object at 0x7da18dc9aaa0>, <ast.Call object at 0x7da18dc9b0a0>, <ast.Name object at 0x7da18dc9bc40>]]]]
keyword[def] identifier[addError] ( identifier[self] , identifier[test] , identifier[err] , identifier[capt] = keyword[None] ): literal[string] identifier[exc_type] , identifier[exc_val] , identifier[tb] = identifier[err] identifier[tb] = literal[string] . identifier[join] ( identifier[traceback] . identifier[format_exception] ( identifier[exc_type] , identifier[exc_val] keyword[if] identifier[isinstance] ( identifier[exc_val] , identifier[exc_type] ) keyword[else] identifier[exc_type] ( identifier[exc_val] ), identifier[tb] )) identifier[name] = identifier[id_split] ( identifier[test] . identifier[id] ()) identifier[group] = identifier[self] . identifier[report_data] [ identifier[name] [ literal[int] ]] keyword[if] identifier[issubclass] ( identifier[err] [ literal[int] ], identifier[SkipTest] ): identifier[type] = literal[string] identifier[self] . identifier[stats] [ literal[string] ]+= literal[int] identifier[group] . identifier[stats] [ literal[string] ]+= literal[int] keyword[else] : identifier[type] = literal[string] identifier[self] . identifier[stats] [ literal[string] ]+= literal[int] identifier[group] . identifier[stats] [ literal[string] ]+= literal[int] identifier[group] . identifier[tests] . identifier[append] ({ literal[string] : identifier[name] [- literal[int] ], literal[string] : keyword[True] , literal[string] : identifier[type] , literal[string] : identifier[nice_classname] ( identifier[err] [ literal[int] ]), literal[string] : identifier[exc_message] ( identifier[err] ), literal[string] : identifier[tb] , })
def addError(self, test, err, capt=None): """Add error output to Xunit report. """ (exc_type, exc_val, tb) = err tb = ''.join(traceback.format_exception(exc_type, exc_val if isinstance(exc_val, exc_type) else exc_type(exc_val), tb)) name = id_split(test.id()) group = self.report_data[name[0]] if issubclass(err[0], SkipTest): type = 'skipped' self.stats['skipped'] += 1 group.stats['skipped'] += 1 # depends on [control=['if'], data=[]] else: type = 'error' self.stats['errors'] += 1 group.stats['errors'] += 1 group.tests.append({'name': name[-1], 'failed': True, 'type': type, 'errtype': nice_classname(err[0]), 'message': exc_message(err), 'tb': tb})
def read_dates_by_service_ids( path: str ) -> Dict[FrozenSet[str], FrozenSet[datetime.date]]: """Find dates with identical service""" feed = load_raw_feed(path) return _dates_by_service_ids(feed)
def function[read_dates_by_service_ids, parameter[path]]: constant[Find dates with identical service] variable[feed] assign[=] call[name[load_raw_feed], parameter[name[path]]] return[call[name[_dates_by_service_ids], parameter[name[feed]]]]
keyword[def] identifier[read_dates_by_service_ids] ( identifier[path] : identifier[str] )-> identifier[Dict] [ identifier[FrozenSet] [ identifier[str] ], identifier[FrozenSet] [ identifier[datetime] . identifier[date] ]]: literal[string] identifier[feed] = identifier[load_raw_feed] ( identifier[path] ) keyword[return] identifier[_dates_by_service_ids] ( identifier[feed] )
def read_dates_by_service_ids(path: str) -> Dict[FrozenSet[str], FrozenSet[datetime.date]]: """Find dates with identical service""" feed = load_raw_feed(path) return _dates_by_service_ids(feed)
def get(name): """ Returns a matcher instance by class or alias name. Arguments: name (str): matcher class name or alias. Returns: matcher: found matcher instance, otherwise ``None``. """ for matcher in matchers: if matcher.__name__ == name or getattr(matcher, 'name', None) == name: return matcher
def function[get, parameter[name]]: constant[ Returns a matcher instance by class or alias name. Arguments: name (str): matcher class name or alias. Returns: matcher: found matcher instance, otherwise ``None``. ] for taget[name[matcher]] in starred[name[matchers]] begin[:] if <ast.BoolOp object at 0x7da1b02410f0> begin[:] return[name[matcher]]
keyword[def] identifier[get] ( identifier[name] ): literal[string] keyword[for] identifier[matcher] keyword[in] identifier[matchers] : keyword[if] identifier[matcher] . identifier[__name__] == identifier[name] keyword[or] identifier[getattr] ( identifier[matcher] , literal[string] , keyword[None] )== identifier[name] : keyword[return] identifier[matcher]
def get(name): """ Returns a matcher instance by class or alias name. Arguments: name (str): matcher class name or alias. Returns: matcher: found matcher instance, otherwise ``None``. """ for matcher in matchers: if matcher.__name__ == name or getattr(matcher, 'name', None) == name: return matcher # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['matcher']]
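A minimal sketch of the lookup, with a hypothetical one-entry registry standing in for the module-level matchers list:

class EqualMatcher:
    name = 'equal'  # alias looked up via getattr(matcher, 'name', None)

matchers = [EqualMatcher]  # stand-in for the module-level registry

def get(name):
    for matcher in matchers:
        if matcher.__name__ == name or getattr(matcher, 'name', None) == name:
            return matcher

print(get('EqualMatcher') is EqualMatcher)  # True, matched by class name
print(get('equal') is EqualMatcher)         # True, matched by alias
print(get('missing'))                       # None, falls through the loop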
def prepare_sql(sql, add_semicolon=True, invalid_starts=('--', '/*', '*/', ';')): """Wrapper method for PrepareSQL class.""" return PrepareSQL(sql, add_semicolon, invalid_starts).prepared
def function[prepare_sql, parameter[sql, add_semicolon, invalid_starts]]: constant[Wrapper method for PrepareSQL class.] return[call[name[PrepareSQL], parameter[name[sql], name[add_semicolon], name[invalid_starts]]].prepared]
keyword[def] identifier[prepare_sql] ( identifier[sql] , identifier[add_semicolon] = keyword[True] , identifier[invalid_starts] =( literal[string] , literal[string] , literal[string] , literal[string] )): literal[string] keyword[return] identifier[PrepareSQL] ( identifier[sql] , identifier[add_semicolon] , identifier[invalid_starts] ). identifier[prepared]
def prepare_sql(sql, add_semicolon=True, invalid_starts=('--', '/*', '*/', ';')): """Wrapper method for PrepareSQL class.""" return PrepareSQL(sql, add_semicolon, invalid_starts).prepared
def _Close(self): """Closes the file system object. Raises: IOError: if the close failed. """ self._zip_file.close() self._zip_file = None self._file_object.close() self._file_object = None
def function[_Close, parameter[self]]: constant[Closes the file system object. Raises: IOError: if the close failed. ] call[name[self]._zip_file.close, parameter[]] name[self]._zip_file assign[=] constant[None] call[name[self]._file_object.close, parameter[]] name[self]._file_object assign[=] constant[None]
keyword[def] identifier[_Close] ( identifier[self] ): literal[string] identifier[self] . identifier[_zip_file] . identifier[close] () identifier[self] . identifier[_zip_file] = keyword[None] identifier[self] . identifier[_file_object] . identifier[close] () identifier[self] . identifier[_file_object] = keyword[None]
def _Close(self): """Closes the file system object. Raises: IOError: if the close failed. """ self._zip_file.close() self._zip_file = None self._file_object.close() self._file_object = None
def to_funset(self, index, name="clamped"): """ Converts the clamping to a set of `gringo.Fun`_ object instances Parameters ---------- index : int An external identifier to associate several clampings together in ASP name : str A function name for the clamping Returns ------- set The set of `gringo.Fun`_ object instances .. _gringo.Fun: http://potassco.sourceforge.net/gringo.html#Fun """ fs = set() for var, sign in self: fs.add(gringo.Fun(name, [index, var, sign])) return fs
def function[to_funset, parameter[self, index, name]]: constant[ Converts the clamping to a set of `gringo.Fun`_ object instances Parameters ---------- index : int An external identifier to associate several clampings together in ASP name : str A function name for the clamping Returns ------- set The set of `gringo.Fun`_ object instances .. _gringo.Fun: http://potassco.sourceforge.net/gringo.html#Fun ] variable[fs] assign[=] call[name[set], parameter[]] for taget[tuple[[<ast.Name object at 0x7da204963340>, <ast.Name object at 0x7da204961d20>]]] in starred[name[self]] begin[:] call[name[fs].add, parameter[call[name[gringo].Fun, parameter[name[name], list[[<ast.Name object at 0x7da2049609a0>, <ast.Name object at 0x7da204961db0>, <ast.Name object at 0x7da2049620e0>]]]]]] return[name[fs]]
keyword[def] identifier[to_funset] ( identifier[self] , identifier[index] , identifier[name] = literal[string] ): literal[string] identifier[fs] = identifier[set] () keyword[for] identifier[var] , identifier[sign] keyword[in] identifier[self] : identifier[fs] . identifier[add] ( identifier[gringo] . identifier[Fun] ( identifier[name] ,[ identifier[index] , identifier[var] , identifier[sign] ])) keyword[return] identifier[fs]
def to_funset(self, index, name='clamped'): """ Converts the clamping to a set of `gringo.Fun`_ object instances Parameters ---------- index : int An external identifier to associate several clampings together in ASP name : str A function name for the clamping Returns ------- set The set of `gringo.Fun`_ object instances .. _gringo.Fun: http://potassco.sourceforge.net/gringo.html#Fun """ fs = set() for (var, sign) in self: fs.add(gringo.Fun(name, [index, var, sign])) # depends on [control=['for'], data=[]] return fs
def validate_variable_name(self, name):
        """
        Validate variable name.

        Arguments:
            name (string): Property name.

        Returns:
            bool: ``True`` if variable name is valid.
        """
        if not name:
            raise SerializerError("Variable name is empty".format(name))

        if name[0] not in PROPERTY_ALLOWED_START:
            msg = "Variable name '{}' must start with a letter"
            raise SerializerError(msg.format(name))

        for item in name:
            if item not in PROPERTY_ALLOWED_CHARS:
                msg = ("Invalid variable name '{}': it must only contain "
                       "letters, numbers and the '_' character")
                raise SerializerError(msg.format(name))

        return True
def function[validate_variable_name, parameter[self, name]]: constant[ Validate variable name. Arguments: name (string): Property name. Returns: bool: ``True`` if variable name is valid. ] if <ast.UnaryOp object at 0x7da204566350> begin[:] <ast.Raise object at 0x7da204564190> if compare[call[name[name]][constant[0]] <ast.NotIn object at 0x7da2590d7190> name[PROPERTY_ALLOWED_START]] begin[:] variable[msg] assign[=] constant[Variable name '{}' must start with a letter] <ast.Raise object at 0x7da204565120> for taget[name[item]] in starred[name[name]] begin[:] if compare[name[item] <ast.NotIn object at 0x7da2590d7190> name[PROPERTY_ALLOWED_CHARS]] begin[:] variable[msg] assign[=] constant[Invalid variable name '{}': it must only contain letters, numbers and the '_' character] <ast.Raise object at 0x7da20c6e74f0> return[constant[True]]
keyword[def] identifier[validate_variable_name] ( identifier[self] , identifier[name] ): literal[string] keyword[if] keyword[not] identifier[name] : keyword[raise] identifier[SerializerError] ( literal[string] . identifier[format] ( identifier[name] )) keyword[if] identifier[name] [ literal[int] ] keyword[not] keyword[in] identifier[PROPERTY_ALLOWED_START] : identifier[msg] = literal[string] keyword[raise] identifier[SerializerError] ( identifier[msg] . identifier[format] ( identifier[name] )) keyword[for] identifier[item] keyword[in] identifier[name] : keyword[if] identifier[item] keyword[not] keyword[in] identifier[PROPERTY_ALLOWED_CHARS] : identifier[msg] =( literal[string] literal[string] ) keyword[raise] identifier[SerializerError] ( identifier[msg] . identifier[format] ( identifier[name] )) keyword[return] keyword[True]
def validate_variable_name(self, name): """ Validate variable name. Arguments: name (string): Property name. Returns: bool: ``True`` if variable name is valid. """ if not name: raise SerializerError('Variable name is empty'.format(name)) # depends on [control=['if'], data=[]] if name[0] not in PROPERTY_ALLOWED_START: msg = "Variable name '{}' must start with a letter" raise SerializerError(msg.format(name)) # depends on [control=['if'], data=[]] for item in name: if item not in PROPERTY_ALLOWED_CHARS: msg = "Invalid variable name '{}': it must only contain letters, numbers and the '_' character" raise SerializerError(msg.format(name)) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['item']] return True
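A runnable sketch of the same checks, with hypothetical stand-ins for the module constants and the SerializerError class (their real definitions live elsewhere in the package):

import string

PROPERTY_ALLOWED_START = string.ascii_letters                         # assumed definition
PROPERTY_ALLOWED_CHARS = string.ascii_letters + string.digits + '_'  # assumed definition

class SerializerError(Exception):
    pass

def validate_variable_name(name):
    if not name:
        raise SerializerError("Variable name is empty")
    if name[0] not in PROPERTY_ALLOWED_START:
        raise SerializerError("Variable name '{}' must start with a letter".format(name))
    for item in name:
        if item not in PROPERTY_ALLOWED_CHARS:
            raise SerializerError("Invalid variable name '{}'".format(name))
    return True

print(validate_variable_name('foo_2'))  # True
try:
    validate_variable_name('2foo')
except SerializerError as exc:
    print(exc)  # Variable name '2foo' must start with a letter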
def compute_bg(self, which_data="phase", fit_offset="mean", fit_profile="tilt", border_m=0, border_perc=0, border_px=0, from_mask=None, ret_mask=False): """Compute background correction Parameters ---------- which_data: str or list of str From which type of data to remove the background information. The list contains either "amplitude", "phase", or both. fit_profile: str The type of background profile to fit: - "offset": offset only - "poly2o": 2D 2nd order polynomial with mixed terms - "tilt": 2D linear tilt with offset (default) fit_offset: str The method for computing the profile offset - "fit": offset as fitting parameter - "gauss": center of a gaussian fit - "mean": simple average - "mode": mode (see `qpimage.bg_estimate.mode`) border_m: float Assume that a frame of `border_m` meters around the image is background. The value is converted to pixels and rounded. border_perc: float Assume that a frame of `border_perc` percent around the image is background. The value is converted to pixels and rounded. If the aspect ratio of the image is not one, then the average of the data's shape is used to compute the percentage in pixels. border_px: float Assume that a frame of `border_px` pixels around the image is background. from_mask: boolean np.ndarray or None Use a boolean array to define the background area. The boolean mask must have the same shape as the input data. `True` elements are used for background estimation. ret_mask: bool Return the boolean mask used to compute the background. Notes ----- The `border_*` values are translated to pixel values and the largest pixel border is used to generate a mask image for background computation. If any of the `border_*` arguments are non-zero and `from_mask` is given, the intersection of the two is used, i.e. the positions where both, the frame mask and `from_mask`, are `True`. See Also -------- qpimage.bg_estimate.estimate """ which_data = QPImage._conv_which_data(which_data) # check validity if not ("amplitude" in which_data or "phase" in which_data): msg = "`which_data` must contain 'phase' or 'amplitude'!" raise ValueError(msg) # get border in px border_list = [] if border_m: if border_m < 0: raise ValueError("`border_m` must be greater than zero!") border_list.append(border_m / self.meta["pixel size"]) if border_perc: if border_perc < 0 or border_perc > 50: raise ValueError("`border_perc` must be in interval [0, 50]!") size = np.average(self.shape) border_list.append(size * border_perc / 100) if border_px: border_list.append(border_px) # get maximum border size if border_list: border_px = np.int(np.round(np.max(border_list))) elif from_mask is None: raise ValueError("Neither `from_mask` nor `border_*` given!") elif np.all(from_mask == 0): raise ValueError("`from_mask` must not be all-zero!") # Get affected image data imdat_list = [] if "amplitude" in which_data: imdat_list.append(self._amp) if "phase" in which_data: imdat_list.append(self._pha) # Perform correction for imdat in imdat_list: mask = imdat.estimate_bg(fit_offset=fit_offset, fit_profile=fit_profile, border_px=border_px, from_mask=from_mask, ret_mask=ret_mask) return mask
def function[compute_bg, parameter[self, which_data, fit_offset, fit_profile, border_m, border_perc, border_px, from_mask, ret_mask]]: constant[Compute background correction Parameters ---------- which_data: str or list of str From which type of data to remove the background information. The list contains either "amplitude", "phase", or both. fit_profile: str The type of background profile to fit: - "offset": offset only - "poly2o": 2D 2nd order polynomial with mixed terms - "tilt": 2D linear tilt with offset (default) fit_offset: str The method for computing the profile offset - "fit": offset as fitting parameter - "gauss": center of a gaussian fit - "mean": simple average - "mode": mode (see `qpimage.bg_estimate.mode`) border_m: float Assume that a frame of `border_m` meters around the image is background. The value is converted to pixels and rounded. border_perc: float Assume that a frame of `border_perc` percent around the image is background. The value is converted to pixels and rounded. If the aspect ratio of the image is not one, then the average of the data's shape is used to compute the percentage in pixels. border_px: float Assume that a frame of `border_px` pixels around the image is background. from_mask: boolean np.ndarray or None Use a boolean array to define the background area. The boolean mask must have the same shape as the input data. `True` elements are used for background estimation. ret_mask: bool Return the boolean mask used to compute the background. Notes ----- The `border_*` values are translated to pixel values and the largest pixel border is used to generate a mask image for background computation. If any of the `border_*` arguments are non-zero and `from_mask` is given, the intersection of the two is used, i.e. the positions where both, the frame mask and `from_mask`, are `True`. See Also -------- qpimage.bg_estimate.estimate ] variable[which_data] assign[=] call[name[QPImage]._conv_which_data, parameter[name[which_data]]] if <ast.UnaryOp object at 0x7da1b1196bc0> begin[:] variable[msg] assign[=] constant[`which_data` must contain 'phase' or 'amplitude'!] <ast.Raise object at 0x7da1b1196050> variable[border_list] assign[=] list[[]] if name[border_m] begin[:] if compare[name[border_m] less[<] constant[0]] begin[:] <ast.Raise object at 0x7da1b1196fb0> call[name[border_list].append, parameter[binary_operation[name[border_m] / call[name[self].meta][constant[pixel size]]]]] if name[border_perc] begin[:] if <ast.BoolOp object at 0x7da1b1197f10> begin[:] <ast.Raise object at 0x7da1b11960e0> variable[size] assign[=] call[name[np].average, parameter[name[self].shape]] call[name[border_list].append, parameter[binary_operation[binary_operation[name[size] * name[border_perc]] / constant[100]]]] if name[border_px] begin[:] call[name[border_list].append, parameter[name[border_px]]] if name[border_list] begin[:] variable[border_px] assign[=] call[name[np].int, parameter[call[name[np].round, parameter[call[name[np].max, parameter[name[border_list]]]]]]] variable[imdat_list] assign[=] list[[]] if compare[constant[amplitude] in name[which_data]] begin[:] call[name[imdat_list].append, parameter[name[self]._amp]] if compare[constant[phase] in name[which_data]] begin[:] call[name[imdat_list].append, parameter[name[self]._pha]] for taget[name[imdat]] in starred[name[imdat_list]] begin[:] variable[mask] assign[=] call[name[imdat].estimate_bg, parameter[]] return[name[mask]]
keyword[def] identifier[compute_bg] ( identifier[self] , identifier[which_data] = literal[string] , identifier[fit_offset] = literal[string] , identifier[fit_profile] = literal[string] , identifier[border_m] = literal[int] , identifier[border_perc] = literal[int] , identifier[border_px] = literal[int] , identifier[from_mask] = keyword[None] , identifier[ret_mask] = keyword[False] ): literal[string] identifier[which_data] = identifier[QPImage] . identifier[_conv_which_data] ( identifier[which_data] ) keyword[if] keyword[not] ( literal[string] keyword[in] identifier[which_data] keyword[or] literal[string] keyword[in] identifier[which_data] ): identifier[msg] = literal[string] keyword[raise] identifier[ValueError] ( identifier[msg] ) identifier[border_list] =[] keyword[if] identifier[border_m] : keyword[if] identifier[border_m] < literal[int] : keyword[raise] identifier[ValueError] ( literal[string] ) identifier[border_list] . identifier[append] ( identifier[border_m] / identifier[self] . identifier[meta] [ literal[string] ]) keyword[if] identifier[border_perc] : keyword[if] identifier[border_perc] < literal[int] keyword[or] identifier[border_perc] > literal[int] : keyword[raise] identifier[ValueError] ( literal[string] ) identifier[size] = identifier[np] . identifier[average] ( identifier[self] . identifier[shape] ) identifier[border_list] . identifier[append] ( identifier[size] * identifier[border_perc] / literal[int] ) keyword[if] identifier[border_px] : identifier[border_list] . identifier[append] ( identifier[border_px] ) keyword[if] identifier[border_list] : identifier[border_px] = identifier[np] . identifier[int] ( identifier[np] . identifier[round] ( identifier[np] . identifier[max] ( identifier[border_list] ))) keyword[elif] identifier[from_mask] keyword[is] keyword[None] : keyword[raise] identifier[ValueError] ( literal[string] ) keyword[elif] identifier[np] . identifier[all] ( identifier[from_mask] == literal[int] ): keyword[raise] identifier[ValueError] ( literal[string] ) identifier[imdat_list] =[] keyword[if] literal[string] keyword[in] identifier[which_data] : identifier[imdat_list] . identifier[append] ( identifier[self] . identifier[_amp] ) keyword[if] literal[string] keyword[in] identifier[which_data] : identifier[imdat_list] . identifier[append] ( identifier[self] . identifier[_pha] ) keyword[for] identifier[imdat] keyword[in] identifier[imdat_list] : identifier[mask] = identifier[imdat] . identifier[estimate_bg] ( identifier[fit_offset] = identifier[fit_offset] , identifier[fit_profile] = identifier[fit_profile] , identifier[border_px] = identifier[border_px] , identifier[from_mask] = identifier[from_mask] , identifier[ret_mask] = identifier[ret_mask] ) keyword[return] identifier[mask]
def compute_bg(self, which_data='phase', fit_offset='mean', fit_profile='tilt', border_m=0, border_perc=0, border_px=0, from_mask=None, ret_mask=False): """Compute background correction Parameters ---------- which_data: str or list of str From which type of data to remove the background information. The list contains either "amplitude", "phase", or both. fit_profile: str The type of background profile to fit: - "offset": offset only - "poly2o": 2D 2nd order polynomial with mixed terms - "tilt": 2D linear tilt with offset (default) fit_offset: str The method for computing the profile offset - "fit": offset as fitting parameter - "gauss": center of a gaussian fit - "mean": simple average - "mode": mode (see `qpimage.bg_estimate.mode`) border_m: float Assume that a frame of `border_m` meters around the image is background. The value is converted to pixels and rounded. border_perc: float Assume that a frame of `border_perc` percent around the image is background. The value is converted to pixels and rounded. If the aspect ratio of the image is not one, then the average of the data's shape is used to compute the percentage in pixels. border_px: float Assume that a frame of `border_px` pixels around the image is background. from_mask: boolean np.ndarray or None Use a boolean array to define the background area. The boolean mask must have the same shape as the input data. `True` elements are used for background estimation. ret_mask: bool Return the boolean mask used to compute the background. Notes ----- The `border_*` values are translated to pixel values and the largest pixel border is used to generate a mask image for background computation. If any of the `border_*` arguments are non-zero and `from_mask` is given, the intersection of the two is used, i.e. the positions where both, the frame mask and `from_mask`, are `True`. See Also -------- qpimage.bg_estimate.estimate """ which_data = QPImage._conv_which_data(which_data) # check validity if not ('amplitude' in which_data or 'phase' in which_data): msg = "`which_data` must contain 'phase' or 'amplitude'!" 
raise ValueError(msg) # depends on [control=['if'], data=[]] # get border in px border_list = [] if border_m: if border_m < 0: raise ValueError('`border_m` must be greater than zero!') # depends on [control=['if'], data=[]] border_list.append(border_m / self.meta['pixel size']) # depends on [control=['if'], data=[]] if border_perc: if border_perc < 0 or border_perc > 50: raise ValueError('`border_perc` must be in interval [0, 50]!') # depends on [control=['if'], data=[]] size = np.average(self.shape) border_list.append(size * border_perc / 100) # depends on [control=['if'], data=[]] if border_px: border_list.append(border_px) # depends on [control=['if'], data=[]] # get maximum border size if border_list: border_px = np.int(np.round(np.max(border_list))) # depends on [control=['if'], data=[]] elif from_mask is None: raise ValueError('Neither `from_mask` nor `border_*` given!') # depends on [control=['if'], data=[]] elif np.all(from_mask == 0): raise ValueError('`from_mask` must not be all-zero!') # depends on [control=['if'], data=[]] # Get affected image data imdat_list = [] if 'amplitude' in which_data: imdat_list.append(self._amp) # depends on [control=['if'], data=[]] if 'phase' in which_data: imdat_list.append(self._pha) # depends on [control=['if'], data=[]] # Perform correction for imdat in imdat_list: mask = imdat.estimate_bg(fit_offset=fit_offset, fit_profile=fit_profile, border_px=border_px, from_mask=from_mask, ret_mask=ret_mask) # depends on [control=['for'], data=['imdat']] return mask
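A minimal usage sketch for compute_bg, assuming the surrounding class is qpimage.QPImage; the synthetic phase data and the 5 px border are illustrative:

import numpy as np
import qpimage

# synthetic phase image whose "background" is a linear tilt
x, y = np.mgrid[0:40, 0:40]
phase = 0.01 * x + 0.005 * y

qpi = qpimage.QPImage(data=phase, which_data="phase")
# fit a 2D tilt to a 5 px frame around the image and subtract it
mask = qpi.compute_bg(which_data="phase",
                      fit_offset="fit",
                      fit_profile="tilt",
                      border_px=5,
                      ret_mask=True)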
async def states(self, country: str) -> list: """Return a list of supported states in a country.""" data = await self._request( 'get', 'states', params={'country': country}) return [d['state'] for d in data['data']]
<ast.AsyncFunctionDef object at 0x7da18dc98d00>
keyword[async] keyword[def] identifier[states] ( identifier[self] , identifier[country] : identifier[str] )-> identifier[list] : literal[string] identifier[data] = keyword[await] identifier[self] . identifier[_request] ( literal[string] , literal[string] , identifier[params] ={ literal[string] : identifier[country] }) keyword[return] [ identifier[d] [ literal[string] ] keyword[for] identifier[d] keyword[in] identifier[data] [ literal[string] ]]
async def states(self, country: str) -> list: """Return a list of supported states in a country.""" data = await self._request('get', 'states', params={'country': country}) return [d['state'] for d in data['data']]
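The coroutine above only wraps a GET request; a hedged usage sketch, where the Client class and its construction are assumptions modeled on the _request helper it calls:

import asyncio

async def main():
    client = Client()  # hypothetical: any object exposing the states() coroutine
    states = await client.states("USA")
    print(states)  # e.g. ['California', 'Colorado', ...]

asyncio.run(main())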
def AddKeywordsForName(self, name, keywords): """Associates keywords with name. Records that keywords are associated with name. Args: name: A name which should be associated with some keywords. keywords: A collection of keywords to associate with name. """ data_store.DB.IndexAddKeywordsForName(self.urn, name, keywords)
def function[AddKeywordsForName, parameter[self, name, keywords]]: constant[Associates keywords with name. Records that keywords are associated with name. Args: name: A name which should be associated with some keywords. keywords: A collection of keywords to associate with name. ] call[name[data_store].DB.IndexAddKeywordsForName, parameter[name[self].urn, name[name], name[keywords]]]
keyword[def] identifier[AddKeywordsForName] ( identifier[self] , identifier[name] , identifier[keywords] ): literal[string] identifier[data_store] . identifier[DB] . identifier[IndexAddKeywordsForName] ( identifier[self] . identifier[urn] , identifier[name] , identifier[keywords] )
def AddKeywordsForName(self, name, keywords): """Associates keywords with name. Records that keywords are associated with name. Args: name: A name which should be associated with some keywords. keywords: A collection of keywords to associate with name. """ data_store.DB.IndexAddKeywordsForName(self.urn, name, keywords)
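A hedged call sketch; the index instance and the GRR-style client identifier are purely illustrative:

# assumed: `index` is an instance of the class defining AddKeywordsForName
index.AddKeywordsForName("C.1000000000000001",
                         ["windows", "fleet-a", "accounting"])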
def get_radii(self): """ Get the inner and outer radii of the thread. :return: (<inner radius>, <outer radius>) :rtype: :class:`tuple` .. note:: Ideally this method is overridden in inheriting classes to mathematically determine the radii. Default action is to generate the profile, then use the bounding box to determine min & max radii. However this method is prone to small numeric error. """ bb = self.profile.val().BoundingBox() return (bb.xmin, bb.xmax)
def function[get_radii, parameter[self]]: constant[ Get the inner and outer radii of the thread. :return: (<inner radius>, <outer radius>) :rtype: :class:`tuple` .. note:: Ideally this method is overridden in inheriting classes to mathematically determine the radii. Default action is to generate the profile, then use the bounding box to determine min & max radii. However this method is prone to small numeric error. ] variable[bb] assign[=] call[call[name[self].profile.val, parameter[]].BoundingBox, parameter[]] return[tuple[[<ast.Attribute object at 0x7da204961720>, <ast.Attribute object at 0x7da2049631f0>]]]
keyword[def] identifier[get_radii] ( identifier[self] ): literal[string] identifier[bb] = identifier[self] . identifier[profile] . identifier[val] (). identifier[BoundingBox] () keyword[return] ( identifier[bb] . identifier[xmin] , identifier[bb] . identifier[xmax] )
def get_radii(self): """ Get the inner and outer radii of the thread. :return: (<inner radius>, <outer radius>) :rtype: :class:`tuple` .. note:: Ideally this method is overridden in inheriting classes to mathematically determine the radii. Default action is to generate the profile, then use the bounding box to determine min & max radii. However this method is prone to small numeric error. """ bb = self.profile.val().BoundingBox() return (bb.xmin, bb.xmax)
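Following the docstring's note, a sketch of a subclass that computes the radii analytically instead of through the bounding box; the base-class name and the ISO-style thread formulas are assumptions:

class MetricThread(Thread):  # `Thread` stands in for the class defining get_radii
    diameter = 6.0  # nominal outer diameter
    pitch = 1.0     # thread pitch

    def get_radii(self):
        # analytic radii avoid the small numeric error of the bounding box
        h = 0.866025 * self.pitch  # triangle height for a 60 degree profile
        outer = self.diameter / 2
        inner = outer - 5 * h / 8
        return (inner, outer)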
async def _string_data(self, data): """ This is a private message handler method. It is the message handler for String data messages that will be printed to the console. :param data: message :returns: None - message is sent to console """ reply = '' data = data[1:-1] for x in data: reply_data = x if reply_data: reply += chr(reply_data) if self.log_output: logging.info(reply) else: print(reply)
<ast.AsyncFunctionDef object at 0x7da207f03e20>
keyword[async] keyword[def] identifier[_string_data] ( identifier[self] , identifier[data] ): literal[string] identifier[reply] = literal[string] identifier[data] = identifier[data] [ literal[int] :- literal[int] ] keyword[for] identifier[x] keyword[in] identifier[data] : identifier[reply_data] = identifier[x] keyword[if] identifier[reply_data] : identifier[reply] += identifier[chr] ( identifier[reply_data] ) keyword[if] identifier[self] . identifier[log_output] : identifier[logging] . identifier[info] ( identifier[reply] ) keyword[else] : identifier[print] ( identifier[reply] )
async def _string_data(self, data): """ This is a private message handler method. It is the message handler for String data messages that will be printed to the console. :param data: message :returns: None - message is sent to console """ reply = '' data = data[1:-1] for x in data: reply_data = x if reply_data: reply += chr(reply_data) # depends on [control=['if'], data=[]] # depends on [control=['for'], data=['x']] if self.log_output: logging.info(reply) # depends on [control=['if'], data=[]] else: print(reply)
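The handler strips the first and last framing bytes and maps the remaining integers to characters; a synchronous sketch of that decoding step, with hypothetical framing bytes:

def decode_string_data(data):
    # mirror the handler: drop the command byte and the end byte,
    # then convert the non-zero integer values to characters
    return "".join(chr(b) for b in data[1:-1] if b)

message = [0x71, 72, 101, 108, 108, 111, 0xF7]  # framing around "Hello"
print(decode_string_data(message))  # -> Hello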
def download(self): ''' Download all waypoints from the vehicle. The download is asynchronous. Use :py:func:`wait_ready()` to block your thread until the download is complete. ''' self.wait_ready() self._vehicle._ready_attrs.remove('commands') self._vehicle._wp_loaded = False self._vehicle._master.waypoint_request_list_send()
def function[download, parameter[self]]: constant[ Download all waypoints from the vehicle. The download is asynchronous. Use :py:func:`wait_ready()` to block your thread until the download is complete. ] call[name[self].wait_ready, parameter[]] call[name[self]._vehicle._ready_attrs.remove, parameter[constant[commands]]] name[self]._vehicle._wp_loaded assign[=] constant[False] call[name[self]._vehicle._master.waypoint_request_list_send, parameter[]]
keyword[def] identifier[download] ( identifier[self] ): literal[string] identifier[self] . identifier[wait_ready] () identifier[self] . identifier[_vehicle] . identifier[_ready_attrs] . identifier[remove] ( literal[string] ) identifier[self] . identifier[_vehicle] . identifier[_wp_loaded] = keyword[False] identifier[self] . identifier[_vehicle] . identifier[_master] . identifier[waypoint_request_list_send] ()
def download(self): """ Download all waypoints from the vehicle. The download is asynchronous. Use :py:func:`wait_ready()` to block your thread until the download is complete. """ self.wait_ready() self._vehicle._ready_attrs.remove('commands') self._vehicle._wp_loaded = False self._vehicle._master.waypoint_request_list_send()
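A usage sketch in the DroneKit style this method comes from; the connection string is an assumption:

from dronekit import connect

vehicle = connect("udp:127.0.0.1:14550", wait_ready=True)
cmds = vehicle.commands
cmds.download()    # asynchronous request to the autopilot
cmds.wait_ready()  # block until the waypoint list has arrived
print("Mission has %d commands" % cmds.count)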
def get_page_number(self, index):
        """
        Given an index, return page label as specified by catalog['PageLabels']['Nums']

        In a PDF, page labels are stored as a list of pairs, like
        [starting_index, label_format, starting_index, label_format ...]

        For example:
        [0, {'S': 'D', 'St': 151}, 4, {'S':'R', 'P':'Foo'}]

        So we have to first find the correct label_format based on
        the closest starting_index lower than the requested index, then
        use the label_format to convert the index to a page label.

        Label format meaning:
            /S = [
                    D Decimal arabic numerals
                    R Uppercase roman numerals
                    r Lowercase roman numerals
                    A Uppercase letters (A to Z for the first 26 pages,
                      AA to ZZ for the next 26, and so on)
                    a Lowercase letters (a to z for the first 26 pages,
                      aa to zz for the next 26, and so on)
                ] (if no /S, just use prefix ...)
            /P = text string label
            /St = integer start value
        """
        # get and cache page ranges
        if not hasattr(self, 'page_range_pairs'):
            try:
                page_ranges = resolve1(self.catalog['PageLabels'])['Nums']
                assert len(page_ranges) > 1 and len(page_ranges) % 2 == 0
                self.page_range_pairs = list(
                    reversed(list(zip(page_ranges[::2], page_ranges[1::2]))))
            except:
                self.page_range_pairs = []

        if not self.page_range_pairs:
            return ""

        # find page range containing index
        for starting_index, label_format in self.page_range_pairs:
            if starting_index <= index:
                break  # we found correct label_format
        label_format = resolve1(label_format)

        page_label = ""

        # handle numeric part of label
        if 'S' in label_format:
            # first find number for this page ...
            page_label = index - starting_index
            if 'St' in label_format:  # alternate start value
                page_label += label_format['St']
            else:
                page_label += 1

            # ... then convert to correct format
            num_type = label_format['S'].name

            # roman (upper or lower)
            if num_type.lower() == 'r':
                import roman
                page_label = roman.toRoman(page_label)
                if num_type == 'r':
                    page_label = page_label.lower()

            # letters
            elif num_type.lower() == 'a':
                # a to z for the first 26 pages, aa to zz for the next 26,
                # and so on; page_label is 1-based, so shift before dividing
                letter = chr((page_label - 1) % 26 + 65)
                # integer division is required: str * float raises TypeError
                letter *= (page_label - 1) // 26 + 1
                if num_type == 'a':
                    letter = letter.lower()
                page_label = letter

            # decimal arabic
            else:  # if num_type == 'D':
                page_label = obj_to_string(page_label)

        # handle string prefix
        if 'P' in label_format:
            page_label = smart_unicode_decode(label_format['P']) + page_label

        return page_label
def function[get_page_number, parameter[self, index]]: constant[ Given an index, return page label as specified by catalog['PageLabels']['Nums'] In a PDF, page labels are stored as a list of pairs, like [starting_index, label_format, starting_index, label_format ...] For example: [0, {'S': 'D', 'St': 151}, 4, {'S':'R', 'P':'Foo'}] So we have to first find the correct label_format based on the closest starting_index lower than the requested index, then use the label_format to convert the index to a page label. Label format meaning: /S = [ D Decimal arabic numerals R Uppercase roman numerals r Lowercase roman numerals A Uppercase letters (A to Z for the first 26 pages, AA to ZZ for the next 26, and so on) a Lowercase letters (a to z for the first 26 pages, aa to zz for the next 26, and so on) ] (if no /S, just use prefix ...) /P = text string label /St = integer start value ] if <ast.UnaryOp object at 0x7da18eb55810> begin[:] <ast.Try object at 0x7da18eb56b60> if <ast.UnaryOp object at 0x7da2044c37f0> begin[:] return[constant[]] for taget[tuple[[<ast.Name object at 0x7da2044c2b30>, <ast.Name object at 0x7da2044c0c70>]]] in starred[name[self].page_range_pairs] begin[:] if compare[name[starting_index] less_or_equal[<=] name[index]] begin[:] break variable[label_format] assign[=] call[name[resolve1], parameter[name[label_format]]] variable[page_label] assign[=] constant[] if compare[constant[S] in name[label_format]] begin[:] variable[page_label] assign[=] binary_operation[name[index] - name[starting_index]] if compare[constant[St] in name[label_format]] begin[:] <ast.AugAssign object at 0x7da2044c1b70> variable[num_type] assign[=] call[name[label_format]][constant[S]].name if compare[call[name[num_type].lower, parameter[]] equal[==] constant[r]] begin[:] import module[roman] variable[page_label] assign[=] call[name[roman].toRoman, parameter[name[page_label]]] if compare[name[num_type] equal[==] constant[r]] begin[:] variable[page_label] assign[=] call[name[page_label].lower, parameter[]] if compare[constant[P] in name[label_format]] begin[:] variable[page_label] assign[=] binary_operation[call[name[smart_unicode_decode], parameter[call[name[label_format]][constant[P]]]] + name[page_label]] return[name[page_label]]
keyword[def] identifier[get_page_number] ( identifier[self] , identifier[index] ): literal[string] keyword[if] keyword[not] identifier[hasattr] ( identifier[self] , literal[string] ): keyword[try] : identifier[page_ranges] = identifier[resolve1] ( identifier[self] . identifier[catalog] [ literal[string] ])[ literal[string] ] keyword[assert] identifier[len] ( identifier[page_ranges] )> literal[int] keyword[and] identifier[len] ( identifier[page_ranges] )% literal[int] == literal[int] identifier[self] . identifier[page_range_pairs] = identifier[list] ( identifier[reversed] ( identifier[list] ( identifier[zip] ( identifier[page_ranges] [:: literal[int] ], identifier[page_ranges] [ literal[int] :: literal[int] ])))) keyword[except] : identifier[self] . identifier[page_range_pairs] =[] keyword[if] keyword[not] identifier[self] . identifier[page_range_pairs] : keyword[return] literal[string] keyword[for] identifier[starting_index] , identifier[label_format] keyword[in] identifier[self] . identifier[page_range_pairs] : keyword[if] identifier[starting_index] <= identifier[index] : keyword[break] identifier[label_format] = identifier[resolve1] ( identifier[label_format] ) identifier[page_label] = literal[string] keyword[if] literal[string] keyword[in] identifier[label_format] : identifier[page_label] = identifier[index] - identifier[starting_index] keyword[if] literal[string] keyword[in] identifier[label_format] : identifier[page_label] += identifier[label_format] [ literal[string] ] keyword[else] : identifier[page_label] += literal[int] identifier[num_type] = identifier[label_format] [ literal[string] ]. identifier[name] keyword[if] identifier[num_type] . identifier[lower] ()== literal[string] : keyword[import] identifier[roman] identifier[page_label] = identifier[roman] . identifier[toRoman] ( identifier[page_label] ) keyword[if] identifier[num_type] == literal[string] : identifier[page_label] = identifier[page_label] . identifier[lower] () keyword[elif] identifier[num_type] . identifier[lower] ()== literal[string] : identifier[letter] = identifier[chr] ( identifier[page_label] % literal[int] + literal[int] ) identifier[letter] *= identifier[page_label] / literal[int] + literal[int] keyword[if] identifier[num_type] == literal[string] : identifier[letter] = identifier[letter] . identifier[lower] () identifier[page_label] = identifier[letter] keyword[else] : identifier[page_label] = identifier[obj_to_string] ( identifier[page_label] ) keyword[if] literal[string] keyword[in] identifier[label_format] : identifier[page_label] = identifier[smart_unicode_decode] ( identifier[label_format] [ literal[string] ])+ identifier[page_label] keyword[return] identifier[page_label]
def get_page_number(self, index): """ Given an index, return page label as specified by catalog['PageLabels']['Nums'] In a PDF, page labels are stored as a list of pairs, like [starting_index, label_format, starting_index, label_format ...] For example: [0, {'S': 'D', 'St': 151}, 4, {'S':'R', 'P':'Foo'}] So we have to first find the correct label_format based on the closest starting_index lower than the requested index, then use the label_format to convert the index to a page label. Label format meaning: /S = [ D Decimal arabic numerals R Uppercase roman numerals r Lowercase roman numerals A Uppercase letters (A to Z for the first 26 pages, AA to ZZ for the next 26, and so on) a Lowercase letters (a to z for the first 26 pages, aa to zz for the next 26, and so on) ] (if no /S, just use prefix ...) /P = text string label /St = integer start value """ # get and cache page ranges if not hasattr(self, 'page_range_pairs'): try: page_ranges = resolve1(self.catalog['PageLabels'])['Nums'] assert len(page_ranges) > 1 and len(page_ranges) % 2 == 0 self.page_range_pairs = list(reversed(list(zip(page_ranges[::2], page_ranges[1::2])))) # depends on [control=['try'], data=[]] except: self.page_range_pairs = [] # depends on [control=['except'], data=[]] # depends on [control=['if'], data=[]] if not self.page_range_pairs: return '' # depends on [control=['if'], data=[]] # find page range containing index for (starting_index, label_format) in self.page_range_pairs: if starting_index <= index: break # we found correct label_format # depends on [control=['if'], data=[]] # depends on [control=['for'], data=[]] label_format = resolve1(label_format) page_label = '' # handle numeric part of label if 'S' in label_format: # first find number for this page ... page_label = index - starting_index if 'St' in label_format: # alternate start value page_label += label_format['St'] # depends on [control=['if'], data=['label_format']] else: page_label += 1 # ... then convert to correct format num_type = label_format['S'].name # roman (upper or lower) if num_type.lower() == 'r': import roman page_label = roman.toRoman(page_label) if num_type == 'r': page_label = page_label.lower() # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] # letters elif num_type.lower() == 'a': # a to z for the first 26 pages, aa to zz for the next 26, and # so on letter = chr(page_label % 26 + 65) letter *= page_label / 26 + 1 if num_type == 'a': letter = letter.lower() # depends on [control=['if'], data=[]] page_label = letter # depends on [control=['if'], data=[]] else: # decimal arabic # if num_type == 'D': page_label = obj_to_string(page_label) # depends on [control=['if'], data=['label_format']] # handle string prefix if 'P' in label_format: page_label = smart_unicode_decode(label_format['P']) + page_label # depends on [control=['if'], data=['label_format']] return page_label
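To make the start-index/format pairs concrete, a small worked example that mirrors the numeric logic above without the pdfminer plumbing; the label tree is hypothetical:

import roman

def label_for(index, pairs):
    # pairs is [(start, fmt), ...] sorted descending by start, as in the method
    for start, fmt in pairs:
        if start <= index:
            break
    n = index - start + fmt.get("St", 1)
    if fmt.get("S") == "r":
        return fmt.get("P", "") + roman.toRoman(n).lower()
    return fmt.get("P", "") + str(n)

# front matter in lowercase roman numerals, body restarting at 1 in decimal
pairs = [(4, {"S": "D", "St": 1}), (0, {"S": "r", "St": 1})]
print(label_for(2, pairs))  # -> iii
print(label_for(6, pairs))  # -> 3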
def go_online(self, comment=None): """ Executes a Go-Online operation on the specified node typically done when the node has already been forced offline via :func:`go_offline` :param str comment: (optional) comment to audit :raises NodeCommandFailed: online not available :return: None """ self.make_request( NodeCommandFailed, method='update', resource='go_online', params={'comment': comment})
def function[go_online, parameter[self, comment]]: constant[ Executes a Go-Online operation on the specified node typically done when the node has already been forced offline via :func:`go_offline` :param str comment: (optional) comment to audit :raises NodeCommandFailed: online not available :return: None ] call[name[self].make_request, parameter[name[NodeCommandFailed]]]
keyword[def] identifier[go_online] ( identifier[self] , identifier[comment] = keyword[None] ): literal[string] identifier[self] . identifier[make_request] ( identifier[NodeCommandFailed] , identifier[method] = literal[string] , identifier[resource] = literal[string] , identifier[params] ={ literal[string] : identifier[comment] })
def go_online(self, comment=None): """ Executes a Go-Online operation on the specified node typically done when the node has already been forced offline via :func:`go_offline` :param str comment: (optional) comment to audit :raises NodeCommandFailed: online not available :return: None """ self.make_request(NodeCommandFailed, method='update', resource='go_online', params={'comment': comment})
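A hedged call sketch pairing this with the go_offline operation the docstring mentions; the node object is assumed to be an SMC engine node:

# assumed: `node` was previously forced offline for maintenance
node.go_offline(comment="maintenance window")
# ... perform maintenance ...
node.go_online(comment="maintenance complete")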
def make(self):
        """ Evaluate the command, and write it to a file. """
        rendered = self.command.eval()  # avoid shadowing the builtin eval()
        with open(self.filename, 'w') as f:
            f.write(rendered)
def function[make, parameter[self]]: constant[ Evaluate the command, and write it to a file. ] variable[eval] assign[=] call[name[self].command.eval, parameter[]] with call[name[open], parameter[name[self].filename, constant[w]]] begin[:] call[name[f].write, parameter[name[eval]]]
keyword[def] identifier[make] ( identifier[self] ): literal[string] identifier[eval] = identifier[self] . identifier[command] . identifier[eval] () keyword[with] identifier[open] ( identifier[self] . identifier[filename] , literal[string] ) keyword[as] identifier[f] : identifier[f] . identifier[write] ( identifier[eval] )
def make(self): """ Evaluate the command, and write it to a file. """ eval = self.command.eval() with open(self.filename, 'w') as f: f.write(eval) # depends on [control=['with'], data=['f']]
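A minimal sketch of the collaboration the method assumes: any object whose eval() returns a string works as self.command. All names here are hypothetical:

class EchoCommand:
    def __init__(self, text):
        self.text = text

    def eval(self):
        # render the shell command this target should run
        return "echo %s\n" % self.text

class Target:
    def __init__(self, command, filename):
        self.command = command
        self.filename = filename

    def make(self):
        rendered = self.command.eval()
        with open(self.filename, 'w') as f:
            f.write(rendered)

Target(EchoCommand("hello"), "run.sh").make()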
def event(self, name, **kwargs): """Add Event data to Batch object. Args: name (str): The name for this Group. date_added (str, kwargs): The date timestamp the Indicator was created. event_date (str, kwargs): The event datetime expression for this Group. status (str, kwargs): The status for this Group. xid (str, kwargs): The external id for this Group. Returns: obj: An instance of Event. """ group_obj = Event(name, **kwargs) return self._group(group_obj)
def function[event, parameter[self, name]]: constant[Add Event data to Batch object. Args: name (str): The name for this Group. date_added (str, kwargs): The date timestamp the Indicator was created. event_date (str, kwargs): The event datetime expression for this Group. status (str, kwargs): The status for this Group. xid (str, kwargs): The external id for this Group. Returns: obj: An instance of Event. ] variable[group_obj] assign[=] call[name[Event], parameter[name[name]]] return[call[name[self]._group, parameter[name[group_obj]]]]
keyword[def] identifier[event] ( identifier[self] , identifier[name] ,** identifier[kwargs] ): literal[string] identifier[group_obj] = identifier[Event] ( identifier[name] ,** identifier[kwargs] ) keyword[return] identifier[self] . identifier[_group] ( identifier[group_obj] )
def event(self, name, **kwargs): """Add Event data to Batch object. Args: name (str): The name for this Group. date_added (str, kwargs): The date timestamp the Indicator was created. event_date (str, kwargs): The event datetime expression for this Group. status (str, kwargs): The status for this Group. xid (str, kwargs): The external id for this Group. Returns: obj: An instance of Event. """ group_obj = Event(name, **kwargs) return self._group(group_obj)
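A hedged usage sketch; `batch` is assumed to be an already-constructed Batch instance exposing the event() method above:

event = batch.event("Suspicious Login Burst",
                    event_date="2019-09-04T00:00:00Z",
                    status="Needs Review",
                    xid="example-event-0001")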
def save(self, force_insert=False, force_update=False, commit=True):
        """
        Override save so that, when commit is True, the related User is
        created or updated alongside this instance.
        """
        usuario = super().save(commit=False)
        if commit:
            user = None
            if self.instance.pk is not None:
                user = usuario.user
            else:
                user = User()
            user.username = self.cleaned_data['username']
            user.set_password(self.cleaned_data['password'])
            user.email = self.cleaned_data['email']
            user.save()
            usuario.user = user
            usuario.save()
        return usuario
def function[save, parameter[self, force_insert, force_update, commit]]: constant[ Se sobreescribe el método save para crear o modificar al User en caso que el parámetro commit sea True. ] variable[usuario] assign[=] call[call[name[super], parameter[]].save, parameter[]] if name[commit] begin[:] variable[user] assign[=] constant[None] if compare[name[self].instance.pk is_not constant[None]] begin[:] variable[user] assign[=] name[usuario].user name[user].username assign[=] call[name[self].cleaned_data][constant[username]] call[name[user].set_password, parameter[call[name[self].cleaned_data][constant[password]]]] name[user].email assign[=] call[name[self].cleaned_data][constant[email]] call[name[user].save, parameter[]] name[usuario].user assign[=] name[user] call[name[usuario].save, parameter[]] return[name[usuario]]
keyword[def] identifier[save] ( identifier[self] , identifier[force_insert] = keyword[False] , identifier[force_update] = keyword[False] , identifier[commit] = keyword[True] ): literal[string] identifier[usuario] = identifier[super] (). identifier[save] ( identifier[commit] = keyword[False] ) keyword[if] identifier[commit] : identifier[user] = keyword[None] keyword[if] identifier[self] . identifier[instance] . identifier[pk] keyword[is] keyword[not] keyword[None] : identifier[user] = identifier[usuario] . identifier[user] keyword[else] : identifier[user] = identifier[User] () identifier[user] . identifier[username] = identifier[self] . identifier[cleaned_data] [ literal[string] ] identifier[user] . identifier[set_password] ( identifier[self] . identifier[cleaned_data] [ literal[string] ]) identifier[user] . identifier[email] = identifier[self] . identifier[cleaned_data] [ literal[string] ] identifier[user] . identifier[save] () identifier[usuario] . identifier[user] = identifier[user] identifier[usuario] . identifier[save] () keyword[return] identifier[usuario]
def save(self, force_insert=False, force_update=False, commit=True): """ Se sobreescribe el método save para crear o modificar al User en caso que el parámetro commit sea True. """ usuario = super().save(commit=False) if commit: user = None if self.instance.pk is not None: user = usuario.user # depends on [control=['if'], data=[]] else: user = User() user.username = self.cleaned_data['username'] user.set_password(self.cleaned_data['password']) user.email = self.cleaned_data['email'] user.save() usuario.user = user usuario.save() # depends on [control=['if'], data=[]] return usuario
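Typical Django usage of such a form, sketched with a hypothetical form class name and field values:

form = UsuarioForm(data={  # UsuarioForm: assumed name of the enclosing ModelForm
    "username": "jdoe",
    "password": "s3cret-pass",
    "email": "jdoe@example.com",
})
if form.is_valid():
    usuario = form.save()  # creates or updates the backing User as well
    assert usuario.user.check_password("s3cret-pass")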
def node_equal(node1, node2):
    '''node_equal

    High-level api: Evaluate whether two nodes are equal.

    Parameters
    ----------

    node1 : `Element`
        A node in a model tree.

    node2 : `Element`
        A node in another model tree.

    Returns
    -------

    bool
        True if node1 and node2 are equal.
    '''

    # two nodes are equal iff each is "less than or equal to" the other
    return (ModelDiff.node_less(node1, node2) and
            ModelDiff.node_less(node2, node1))
def function[node_equal, parameter[node1, node2]]: constant[node_equal High-level api: Evaluate whether two nodes are equal. Parameters ---------- node1 : `Element` A node in a model tree. node2 : `Element` A node in another model tree. Returns ------- bool True if node1 and node2 are equal. ] if <ast.BoolOp object at 0x7da1b25291e0> begin[:] return[constant[True]]
keyword[def] identifier[node_equal] ( identifier[node1] , identifier[node2] ): literal[string] keyword[if] identifier[ModelDiff] . identifier[node_less] ( identifier[node1] , identifier[node2] ) keyword[and] identifier[ModelDiff] . identifier[node_less] ( identifier[node2] , identifier[node1] ): keyword[return] keyword[True] keyword[else] : keyword[return] keyword[False]
def node_equal(node1, node2): """node_equal High-level api: Evaluate whether two nodes are equal. Parameters ---------- node1 : `Element` A node in a model tree. node2 : `Element` A node in another model tree. Returns ------- bool True if node1 and node2 are equal. """ if ModelDiff.node_less(node1, node2) and ModelDiff.node_less(node2, node1): return True # depends on [control=['if'], data=[]] else: return False
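The definition amounts to antisymmetry of a partial order: two nodes are equal when each is "less than or equal to" the other. A sketch of the same idea with plain sets standing in for XML nodes:

def leq(a, b):
    # stand-in for ModelDiff.node_less: a is covered by b
    return a <= b

def equal(a, b):
    return leq(a, b) and leq(b, a)

print(equal({1, 2}, {2, 1}))  # True: each covers the other
print(equal({1}, {1, 2}))     # False: only one direction holds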
def stop(self): """Send a TX_DELETE message to cancel this task. This will delete the entry for the transmission of the CAN-message with the specified can_id CAN identifier. The message length for the command TX_DELETE is {[bcm_msg_head]} (only the header). """ log.debug("Stopping periodic task") stopframe = build_bcm_tx_delete_header(self.can_id_with_flags, self.flags) send_bcm(self.bcm_socket, stopframe)
def function[stop, parameter[self]]: constant[Send a TX_DELETE message to cancel this task. This will delete the entry for the transmission of the CAN-message with the specified can_id CAN identifier. The message length for the command TX_DELETE is {[bcm_msg_head]} (only the header). ] call[name[log].debug, parameter[constant[Stopping periodic task]]] variable[stopframe] assign[=] call[name[build_bcm_tx_delete_header], parameter[name[self].can_id_with_flags, name[self].flags]] call[name[send_bcm], parameter[name[self].bcm_socket, name[stopframe]]]
keyword[def] identifier[stop] ( identifier[self] ): literal[string] identifier[log] . identifier[debug] ( literal[string] ) identifier[stopframe] = identifier[build_bcm_tx_delete_header] ( identifier[self] . identifier[can_id_with_flags] , identifier[self] . identifier[flags] ) identifier[send_bcm] ( identifier[self] . identifier[bcm_socket] , identifier[stopframe] )
def stop(self): """Send a TX_DELETE message to cancel this task. This will delete the entry for the transmission of the CAN-message with the specified can_id CAN identifier. The message length for the command TX_DELETE is {[bcm_msg_head]} (only the header). """ log.debug('Stopping periodic task') stopframe = build_bcm_tx_delete_header(self.can_id_with_flags, self.flags) send_bcm(self.bcm_socket, stopframe)
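With python-can, cyclic tasks like this one are normally obtained from send_periodic(); a usage sketch on the virtual interface:

import time
import can

bus = can.Bus(interface="virtual")
msg = can.Message(arbitration_id=0x123, data=[0, 1, 2, 3], is_extended_id=False)
task = bus.send_periodic(msg, period=0.1)  # returns a cyclic send task
time.sleep(1.0)
task.stop()  # on BCM-backed interfaces this issues TX_DELETE, as above
bus.shutdown()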
def GET_AUTH(self, courseid, scoreboardid):  # pylint: disable=arguments-differ
        """ GET request """
        course = self.course_factory.get_course(courseid)
        scoreboards = course.get_descriptor().get('scoreboard', [])

        try:
            scoreboardid = int(scoreboardid)
            scoreboard_name = scoreboards[scoreboardid]["name"]
            scoreboard_content = scoreboards[scoreboardid]["content"]
            scoreboard_reverse = bool(scoreboards[scoreboardid].get('reverse', False))
        except:
            raise web.notfound()

        # Convert scoreboard_content
        if isinstance(scoreboard_content, str):
            scoreboard_content = OrderedDict([(scoreboard_content, 1)])
        if isinstance(scoreboard_content, list):
            scoreboard_content = OrderedDict([(entry, 1) for entry in scoreboard_content])
        if not isinstance(scoreboard_content, OrderedDict):
            scoreboard_content = OrderedDict(iter(scoreboard_content.items()))

        # Get task names
        task_names = {}
        for taskid in scoreboard_content:
            try:
                task_names[taskid] = course.get_task(taskid).get_name(self.user_manager.session_language())
            except:
                raise web.notfound("Unknown task id " + taskid)

        # Get all submissions
        results = self.database.submissions.find({
            "courseid": courseid,
            "taskid": {"$in": list(scoreboard_content.keys())},
            "custom.score": {"$exists": True},
            "result": "success"
        }, ["taskid", "username", "custom.score"])

        # Get best results per users(/group)
        result_per_user = {}
        users = set()
        for submission in results:
            # Be sure we have a list
            if not isinstance(submission["username"], list):
                submission["username"] = [submission["username"]]
            submission["username"] = tuple(submission["username"])

            if submission["username"] not in result_per_user:
                result_per_user[submission["username"]] = {}

            if submission["taskid"] not in result_per_user[submission["username"]]:
                result_per_user[submission["username"]][submission["taskid"]] = submission["custom"]["score"]
            else:
                # keep best score
                current_score = result_per_user[submission["username"]][submission["taskid"]]
                new_score = submission["custom"]["score"]
                task_reversed = scoreboard_reverse != (scoreboard_content[submission["taskid"]] < 0)
                if task_reversed and current_score > new_score:
                    result_per_user[submission["username"]][submission["taskid"]] = new_score
                elif not task_reversed and current_score < new_score:
                    result_per_user[submission["username"]][submission["taskid"]] = new_score

            for user in submission["username"]:
                users.add(user)

        # Get user names
        users_realname = {}
        for username, userinfo in self.user_manager.get_users_info(list(users)).items():
            users_realname[username] = userinfo[0] if userinfo else username

        # Compute overall result per user, and sort them
        overall_result_per_user = {}
        for key, val in result_per_user.items():
            total = 0
            solved = 0
            for taskid, coef in scoreboard_content.items():
                if taskid in val:
                    total += val[taskid] * coef
                    solved += 1
            overall_result_per_user[key] = {"total": total, "solved": solved}
        sorted_users = list(overall_result_per_user.keys())
        sorted_users = sorted(sorted_users, key=sort_func(overall_result_per_user, scoreboard_reverse))

        # Compute table
        table = []

        # Header
        if len(scoreboard_content) == 1:
            header = ["", "Student(s)", "Score"]
            emphasized_columns = [2]
        else:
            header = ["", "Student(s)", "Solved", "Total score"] + [task_names[taskid] for taskid in list(scoreboard_content.keys())]
            emphasized_columns = [2, 3]

        # Lines
        old_score = ()
        rank = 0
        for user in sorted_users:
            # Increment rank if needed, and display it
            line = []
            if old_score != (overall_result_per_user[user]["solved"], overall_result_per_user[user]["total"]):
                rank += 1
                old_score = (overall_result_per_user[user]["solved"],
                             overall_result_per_user[user]["total"])
                line.append(rank)
            else:
                line.append("")

            # Users
            line.append(",".join(sorted([users_realname[u] for u in user])))

            if len(scoreboard_content) == 1:
                line.append(overall_result_per_user[user]["total"])
            else:
                line.append(overall_result_per_user[user]["solved"])
                line.append(overall_result_per_user[user]["total"])
                for taskid in scoreboard_content:
                    line.append(result_per_user[user].get(taskid, ""))
            table.append(line)

        renderer = self.template_helper.get_custom_renderer('frontend/plugins/scoreboard')
        return renderer.scoreboard(course, scoreboardid, scoreboard_name, header, table, emphasized_columns)
def function[GET_AUTH, parameter[self, courseid, scoreboardid]]: constant[ GET request ] variable[course] assign[=] call[name[self].course_factory.get_course, parameter[name[courseid]]] variable[scoreboards] assign[=] call[call[name[course].get_descriptor, parameter[]].get, parameter[constant[scoreboard], list[[]]]] <ast.Try object at 0x7da2046230a0> if call[name[isinstance], parameter[name[scoreboard_content], name[str]]] begin[:] variable[scoreboard_content] assign[=] call[name[OrderedDict], parameter[list[[<ast.Tuple object at 0x7da2046203d0>]]]] if call[name[isinstance], parameter[name[scoreboard_content], name[list]]] begin[:] variable[scoreboard_content] assign[=] call[name[OrderedDict], parameter[<ast.ListComp object at 0x7da204620250>]] if <ast.UnaryOp object at 0x7da2046207c0> begin[:] variable[scoreboard_content] assign[=] call[name[OrderedDict], parameter[call[name[iter], parameter[call[name[scoreboard_content].items, parameter[]]]]]] variable[task_names] assign[=] dictionary[[], []] for taget[name[taskid]] in starred[name[scoreboard_content]] begin[:] <ast.Try object at 0x7da204622230> variable[results] assign[=] call[name[self].database.submissions.find, parameter[dictionary[[<ast.Constant object at 0x7da204620df0>, <ast.Constant object at 0x7da204621e40>, <ast.Constant object at 0x7da204623e80>, <ast.Constant object at 0x7da2046238b0>], [<ast.Name object at 0x7da204623d30>, <ast.Dict object at 0x7da2046234c0>, <ast.Dict object at 0x7da204623580>, <ast.Constant object at 0x7da2046219c0>]], list[[<ast.Constant object at 0x7da204622cb0>, <ast.Constant object at 0x7da204620f70>, <ast.Constant object at 0x7da2046222c0>]]]] variable[result_per_user] assign[=] dictionary[[], []] variable[users] assign[=] call[name[set], parameter[]] for taget[name[submission]] in starred[name[results]] begin[:] if <ast.UnaryOp object at 0x7da204621030> begin[:] call[name[submission]][constant[username]] assign[=] list[[<ast.Subscript object at 0x7da204621ff0>]] call[name[submission]][constant[username]] assign[=] call[name[tuple], parameter[call[name[submission]][constant[username]]]] if compare[call[name[submission]][constant[username]] <ast.NotIn object at 0x7da2590d7190> name[result_per_user]] begin[:] call[name[result_per_user]][call[name[submission]][constant[username]]] assign[=] dictionary[[], []] if compare[call[name[submission]][constant[taskid]] <ast.NotIn object at 0x7da2590d7190> call[name[result_per_user]][call[name[submission]][constant[username]]]] begin[:] call[call[name[result_per_user]][call[name[submission]][constant[username]]]][call[name[submission]][constant[taskid]]] assign[=] call[call[name[submission]][constant[custom]]][constant[score]] for taget[name[user]] in starred[call[name[submission]][constant[username]]] begin[:] call[name[users].add, parameter[name[user]]] variable[users_realname] assign[=] dictionary[[], []] for taget[tuple[[<ast.Name object at 0x7da18f09f310>, <ast.Name object at 0x7da18f09faf0>]]] in starred[call[call[name[self].user_manager.get_users_info, parameter[call[name[list], parameter[name[users]]]]].items, parameter[]]] begin[:] call[name[users_realname]][name[username]] assign[=] <ast.IfExp object at 0x7da18f09c190> variable[overall_result_per_user] assign[=] dictionary[[], []] for taget[tuple[[<ast.Name object at 0x7da18f09f790>, <ast.Name object at 0x7da18f09d1b0>]]] in starred[call[name[result_per_user].items, parameter[]]] begin[:] variable[total] assign[=] constant[0] variable[solved] assign[=] constant[0] for taget[tuple[[<ast.Name object at 
0x7da18f09e5f0>, <ast.Name object at 0x7da18f09cbb0>]]] in starred[call[name[scoreboard_content].items, parameter[]]] begin[:] if compare[name[taskid] in name[val]] begin[:] <ast.AugAssign object at 0x7da18f09cd90> <ast.AugAssign object at 0x7da18f09f220> call[name[overall_result_per_user]][name[key]] assign[=] dictionary[[<ast.Constant object at 0x7da18f09c1f0>, <ast.Constant object at 0x7da18f09cf40>], [<ast.Name object at 0x7da18f09e470>, <ast.Name object at 0x7da18f09f910>]] variable[sorted_users] assign[=] call[name[list], parameter[call[name[overall_result_per_user].keys, parameter[]]]] variable[sorted_users] assign[=] call[name[sorted], parameter[name[sorted_users]]] variable[table] assign[=] list[[]] if compare[call[name[len], parameter[name[scoreboard_content]]] equal[==] constant[1]] begin[:] variable[header] assign[=] list[[<ast.Constant object at 0x7da18f09e4d0>, <ast.Constant object at 0x7da18f09e680>, <ast.Constant object at 0x7da18f09df30>]] variable[emphasized_columns] assign[=] list[[<ast.Constant object at 0x7da18f09f6d0>]] variable[old_score] assign[=] tuple[[]] variable[rank] assign[=] constant[0] for taget[name[user]] in starred[name[sorted_users]] begin[:] variable[line] assign[=] list[[]] if compare[name[old_score] not_equal[!=] tuple[[<ast.Subscript object at 0x7da18f09e3b0>, <ast.Subscript object at 0x7da18f09e530>]]] begin[:] <ast.AugAssign object at 0x7da18f09de70> variable[old_score] assign[=] tuple[[<ast.Subscript object at 0x7da18f09c430>, <ast.Subscript object at 0x7da18f09e320>]] call[name[line].append, parameter[name[rank]]] call[name[line].append, parameter[call[constant[,].join, parameter[call[name[sorted], parameter[<ast.ListComp object at 0x7da20c990460>]]]]]] if compare[call[name[len], parameter[name[scoreboard_content]]] equal[==] constant[1]] begin[:] call[name[line].append, parameter[call[call[name[overall_result_per_user]][name[user]]][constant[total]]]] call[name[table].append, parameter[name[line]]] variable[renderer] assign[=] call[name[self].template_helper.get_custom_renderer, parameter[constant[frontend/plugins/scoreboard]]] return[call[name[renderer].scoreboard, parameter[name[course], name[scoreboardid], name[scoreboard_name], name[header], name[table], name[emphasized_columns]]]]
keyword[def] identifier[GET_AUTH] ( identifier[self] , identifier[courseid] , identifier[scoreboardid] ): literal[string] identifier[course] = identifier[self] . identifier[course_factory] . identifier[get_course] ( identifier[courseid] ) identifier[scoreboards] = identifier[course] . identifier[get_descriptor] (). identifier[get] ( literal[string] ,[]) keyword[try] : identifier[scoreboardid] = identifier[int] ( identifier[scoreboardid] ) identifier[scoreboard_name] = identifier[scoreboards] [ identifier[scoreboardid] ][ literal[string] ] identifier[scoreboard_content] = identifier[scoreboards] [ identifier[scoreboardid] ][ literal[string] ] identifier[scoreboard_reverse] = identifier[bool] ( identifier[scoreboards] [ identifier[scoreboardid] ]. identifier[get] ( literal[string] , keyword[False] )) keyword[except] : keyword[raise] identifier[web] . identifier[notfound] () keyword[if] identifier[isinstance] ( identifier[scoreboard_content] , identifier[str] ): identifier[scoreboard_content] = identifier[OrderedDict] ([( identifier[scoreboard_content] , literal[int] )]) keyword[if] identifier[isinstance] ( identifier[scoreboard_content] , identifier[list] ): identifier[scoreboard_content] = identifier[OrderedDict] ([( identifier[entry] , literal[int] ) keyword[for] identifier[entry] keyword[in] identifier[scoreboard_content] ]) keyword[if] keyword[not] identifier[isinstance] ( identifier[scoreboard_content] , identifier[OrderedDict] ): identifier[scoreboard_content] = identifier[OrderedDict] ( identifier[iter] ( identifier[scoreboard_content] . identifier[items] ())) identifier[task_names] ={} keyword[for] identifier[taskid] keyword[in] identifier[scoreboard_content] : keyword[try] : identifier[task_names] [ identifier[taskid] ]= identifier[course] . identifier[get_task] ( identifier[taskid] ). identifier[get_name] ( identifier[self] . identifier[user_manager] . identifier[session_language] ()) keyword[except] : keyword[raise] identifier[web] . identifier[notfound] ( literal[string] + identifier[taskid] ) identifier[results] = identifier[self] . identifier[database] . identifier[submissions] . identifier[find] ({ literal[string] : identifier[courseid] , literal[string] :{ literal[string] : identifier[list] ( identifier[scoreboard_content] . 
identifier[keys] ())}, literal[string] :{ literal[string] : keyword[True] }, literal[string] : literal[string] },[ literal[string] , literal[string] , literal[string] ]) identifier[result_per_user] ={} identifier[users] = identifier[set] () keyword[for] identifier[submission] keyword[in] identifier[results] : keyword[if] keyword[not] identifier[isinstance] ( identifier[submission] [ literal[string] ], identifier[list] ): identifier[submission] [ literal[string] ]=[ identifier[submission] [ literal[string] ]] identifier[submission] [ literal[string] ]= identifier[tuple] ( identifier[submission] [ literal[string] ]) keyword[if] identifier[submission] [ literal[string] ] keyword[not] keyword[in] identifier[result_per_user] : identifier[result_per_user] [ identifier[submission] [ literal[string] ]]={} keyword[if] identifier[submission] [ literal[string] ] keyword[not] keyword[in] identifier[result_per_user] [ identifier[submission] [ literal[string] ]]: identifier[result_per_user] [ identifier[submission] [ literal[string] ]][ identifier[submission] [ literal[string] ]]= identifier[submission] [ literal[string] ][ literal[string] ] keyword[else] : identifier[current_score] = identifier[result_per_user] [ identifier[submission] [ literal[string] ]][ identifier[submission] [ literal[string] ]] identifier[new_score] = identifier[submission] [ literal[string] ][ literal[string] ] identifier[task_reversed] = identifier[scoreboard_reverse] !=( identifier[scoreboard_content] [ identifier[submission] [ literal[string] ]]< literal[int] ) keyword[if] identifier[task_reversed] keyword[and] identifier[current_score] > identifier[new_score] : identifier[result_per_user] [ identifier[submission] [ literal[string] ]][ identifier[submission] [ literal[string] ]]= identifier[new_score] keyword[elif] keyword[not] identifier[task_reversed] keyword[and] identifier[current_score] < identifier[new_score] : identifier[result_per_user] [ identifier[submission] [ literal[string] ]][ identifier[submission] [ literal[string] ]]= identifier[new_score] keyword[for] identifier[user] keyword[in] identifier[submission] [ literal[string] ]: identifier[users] . identifier[add] ( identifier[user] ) identifier[users_realname] ={} keyword[for] identifier[username] , identifier[userinfo] keyword[in] identifier[self] . identifier[user_manager] . identifier[get_users_info] ( identifier[list] ( identifier[users] )). identifier[items] (): identifier[users_realname] [ identifier[username] ]= identifier[userinfo] [ literal[int] ] keyword[if] identifier[userinfo] keyword[else] identifier[username] identifier[overall_result_per_user] ={} keyword[for] identifier[key] , identifier[val] keyword[in] identifier[result_per_user] . identifier[items] (): identifier[total] = literal[int] identifier[solved] = literal[int] keyword[for] identifier[taskid] , identifier[coef] keyword[in] identifier[scoreboard_content] . identifier[items] (): keyword[if] identifier[taskid] keyword[in] identifier[val] : identifier[total] += identifier[val] [ identifier[taskid] ]* identifier[coef] identifier[solved] += literal[int] identifier[overall_result_per_user] [ identifier[key] ]={ literal[string] : identifier[total] , literal[string] : identifier[solved] } identifier[sorted_users] = identifier[list] ( identifier[overall_result_per_user] . 
identifier[keys] ()) identifier[sorted_users] = identifier[sorted] ( identifier[sorted_users] , identifier[key] = identifier[sort_func] ( identifier[overall_result_per_user] , identifier[scoreboard_reverse] )) identifier[table] =[] keyword[if] identifier[len] ( identifier[scoreboard_content] )== literal[int] : identifier[header] =[ literal[string] , literal[string] , literal[string] ] identifier[emphasized_columns] =[ literal[int] ] keyword[else] : identifier[header] =[ literal[string] , literal[string] , literal[string] , literal[string] ]+[ identifier[task_names] [ identifier[taskid] ] keyword[for] identifier[taskid] keyword[in] identifier[list] ( identifier[scoreboard_content] . identifier[keys] ())] identifier[emphasized_columns] =[ literal[int] , literal[int] ] identifier[old_score] =() identifier[rank] = literal[int] keyword[for] identifier[user] keyword[in] identifier[sorted_users] : identifier[line] =[] keyword[if] identifier[old_score] !=( identifier[overall_result_per_user] [ identifier[user] ][ literal[string] ], identifier[overall_result_per_user] [ identifier[user] ][ literal[string] ]): identifier[rank] += literal[int] identifier[old_score] =( identifier[overall_result_per_user] [ identifier[user] ][ literal[string] ], identifier[overall_result_per_user] [ identifier[user] ][ literal[string] ]) identifier[line] . identifier[append] ( identifier[rank] ) keyword[else] : identifier[line] . identifier[append] ( literal[string] ) identifier[line] . identifier[append] ( literal[string] . identifier[join] ( identifier[sorted] ([ identifier[users_realname] [ identifier[u] ] keyword[for] identifier[u] keyword[in] identifier[user] ]))) keyword[if] identifier[len] ( identifier[scoreboard_content] )== literal[int] : identifier[line] . identifier[append] ( identifier[overall_result_per_user] [ identifier[user] ][ literal[string] ]) keyword[else] : identifier[line] . identifier[append] ( identifier[overall_result_per_user] [ identifier[user] ][ literal[string] ]) identifier[line] . identifier[append] ( identifier[overall_result_per_user] [ identifier[user] ][ literal[string] ]) keyword[for] identifier[taskid] keyword[in] identifier[scoreboard_content] : identifier[line] . identifier[append] ( identifier[result_per_user] [ identifier[user] ]. identifier[get] ( identifier[taskid] , literal[string] )) identifier[table] . identifier[append] ( identifier[line] ) identifier[renderer] = identifier[self] . identifier[template_helper] . identifier[get_custom_renderer] ( literal[string] ) keyword[return] identifier[renderer] . identifier[scoreboard] ( identifier[course] , identifier[scoreboardid] , identifier[scoreboard_name] , identifier[header] , identifier[table] , identifier[emphasized_columns] )
def GET_AUTH(self, courseid, scoreboardid): # pylint: disable=arguments-differ ' GET request ' course = self.course_factory.get_course(courseid) scoreboards = course.get_descriptor().get('scoreboard', []) try: scoreboardid = int(scoreboardid) scoreboard_name = scoreboards[scoreboardid]['name'] scoreboard_content = scoreboards[scoreboardid]['content'] scoreboard_reverse = bool(scoreboards[scoreboardid].get('reverse', False)) # depends on [control=['try'], data=[]] except: raise web.notfound() # depends on [control=['except'], data=[]] # Convert scoreboard_content if isinstance(scoreboard_content, str): scoreboard_content = OrderedDict([(scoreboard_content, 1)]) # depends on [control=['if'], data=[]] if isinstance(scoreboard_content, list): scoreboard_content = OrderedDict([(entry, 1) for entry in scoreboard_content]) # depends on [control=['if'], data=[]] if not isinstance(scoreboard_content, OrderedDict): scoreboard_content = OrderedDict(iter(scoreboard_content.items())) # depends on [control=['if'], data=[]] # Get task names task_names = {} for taskid in scoreboard_content: try: task_names[taskid] = course.get_task(taskid).get_name(self.user_manager.session_language()) # depends on [control=['try'], data=[]] except: raise web.notfound('Unknown task id ' + taskid) # depends on [control=['except'], data=[]] # depends on [control=['for'], data=['taskid']] # Get all submissions results = self.database.submissions.find({'courseid': courseid, 'taskid': {'$in': list(scoreboard_content.keys())}, 'custom.score': {'$exists': True}, 'result': 'success'}, ['taskid', 'username', 'custom.score']) # Get best results per users(/group) result_per_user = {} users = set() for submission in results: # Be sure we have a list if not isinstance(submission['username'], list): submission['username'] = [submission['username']] # depends on [control=['if'], data=[]] submission['username'] = tuple(submission['username']) if submission['username'] not in result_per_user: result_per_user[submission['username']] = {} # depends on [control=['if'], data=['result_per_user']] if submission['taskid'] not in result_per_user[submission['username']]: result_per_user[submission['username']][submission['taskid']] = submission['custom']['score'] # depends on [control=['if'], data=[]] else: # keep best score current_score = result_per_user[submission['username']][submission['taskid']] new_score = submission['custom']['score'] task_reversed = scoreboard_reverse != (scoreboard_content[submission['taskid']] < 0) if task_reversed and current_score > new_score: result_per_user[submission['username']][submission['taskid']] = new_score # depends on [control=['if'], data=[]] elif not task_reversed and current_score < new_score: result_per_user[submission['username']][submission['taskid']] = new_score # depends on [control=['if'], data=[]] for user in submission['username']: users.add(user) # depends on [control=['for'], data=['user']] # depends on [control=['for'], data=['submission']] # Get user names users_realname = {} for (username, userinfo) in self.user_manager.get_users_info(list(users)).items(): users_realname[username] = userinfo[0] if userinfo else username # depends on [control=['for'], data=[]] # Compute overall result per user, and sort them overall_result_per_user = {} for (key, val) in result_per_user.items(): total = 0 solved = 0 for (taskid, coef) in scoreboard_content.items(): if taskid in val: total += val[taskid] * coef solved += 1 # depends on [control=['if'], data=['taskid', 'val']] # depends on [control=['for'], 
data=[]] overall_result_per_user[key] = {'total': total, 'solved': solved} # depends on [control=['for'], data=[]] sorted_users = list(overall_result_per_user.keys()) sorted_users = sorted(sorted_users, key=sort_func(overall_result_per_user, scoreboard_reverse)) # Compute table table = [] # Header if len(scoreboard_content) == 1: header = ['', 'Student(s)', 'Score'] emphasized_columns = [2] # depends on [control=['if'], data=[]] else: header = ['', 'Student(s)', 'Solved', 'Total score'] + [task_names[taskid] for taskid in list(scoreboard_content.keys())] emphasized_columns = [2, 3] # Lines old_score = () rank = 0 for user in sorted_users: # Increment rank if needed, and display it line = [] if old_score != (overall_result_per_user[user]['solved'], overall_result_per_user[user]['total']): rank += 1 old_score = (overall_result_per_user[user]['solved'], overall_result_per_user[user]['total']) line.append(rank) # depends on [control=['if'], data=['old_score']] else: line.append('') # Users line.append(','.join(sorted([users_realname[u] for u in user]))) if len(scoreboard_content) == 1: line.append(overall_result_per_user[user]['total']) # depends on [control=['if'], data=[]] else: line.append(overall_result_per_user[user]['solved']) line.append(overall_result_per_user[user]['total']) for taskid in scoreboard_content: line.append(result_per_user[user].get(taskid, '')) # depends on [control=['for'], data=['taskid']] table.append(line) # depends on [control=['for'], data=['user']] renderer = self.template_helper.get_custom_renderer('frontend/plugins/scoreboard') return renderer.scoreboard(course, scoreboardid, scoreboard_name, header, table, emphasized_columns)
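The ranking relies on an external sort_func helper that is not shown here; a hedged sketch of one plausible key factory consistent with how it is called above (best entry first):

def sort_func(results, reverse):
    # most tasks solved first, then best total score; a reversed
    # scoreboard treats lower totals as better
    def key(user):
        entry = results[user]
        total = entry["total"] if reverse else -entry["total"]
        return (-entry["solved"], total)
    return key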
def add_traces(self): """Add traces based on self.data.""" y_distance = self.parent.value('y_distance') self.chan = [] self.chan_pos = [] self.chan_scale = [] row = 0 for one_grp in self.parent.channels.groups: for one_chan in one_grp['chan_to_plot']: # channel name chan_name = one_chan + ' (' + one_grp['name'] + ')' # trace dat = (self.data(trial=0, chan=chan_name) * self.parent.value('y_scale')) dat *= -1 # flip data, upside down path = self.scene.addPath(Path(self.data.axis['time'][0], dat)) path.setPen(QPen(QColor(one_grp['color']), LINE_WIDTH)) # adjust position chan_pos = y_distance * row + y_distance / 2 path.setPos(0, chan_pos) row += 1 self.chan.append(chan_name) self.chan_scale.append(one_grp['scale']) self.chan_pos.append(chan_pos)
def function[add_traces, parameter[self]]: constant[Add traces based on self.data.] variable[y_distance] assign[=] call[name[self].parent.value, parameter[constant[y_distance]]] name[self].chan assign[=] list[[]] name[self].chan_pos assign[=] list[[]] name[self].chan_scale assign[=] list[[]] variable[row] assign[=] constant[0] for taget[name[one_grp]] in starred[name[self].parent.channels.groups] begin[:] for taget[name[one_chan]] in starred[call[name[one_grp]][constant[chan_to_plot]]] begin[:] variable[chan_name] assign[=] binary_operation[binary_operation[binary_operation[name[one_chan] + constant[ (]] + call[name[one_grp]][constant[name]]] + constant[)]] variable[dat] assign[=] binary_operation[call[name[self].data, parameter[]] * call[name[self].parent.value, parameter[constant[y_scale]]]] <ast.AugAssign object at 0x7da1b0e07430> variable[path] assign[=] call[name[self].scene.addPath, parameter[call[name[Path], parameter[call[call[name[self].data.axis][constant[time]]][constant[0]], name[dat]]]]] call[name[path].setPen, parameter[call[name[QPen], parameter[call[name[QColor], parameter[call[name[one_grp]][constant[color]]]], name[LINE_WIDTH]]]]] variable[chan_pos] assign[=] binary_operation[binary_operation[name[y_distance] * name[row]] + binary_operation[name[y_distance] / constant[2]]] call[name[path].setPos, parameter[constant[0], name[chan_pos]]] <ast.AugAssign object at 0x7da18f00e6b0> call[name[self].chan.append, parameter[name[chan_name]]] call[name[self].chan_scale.append, parameter[call[name[one_grp]][constant[scale]]]] call[name[self].chan_pos.append, parameter[name[chan_pos]]]
keyword[def] identifier[add_traces] ( identifier[self] ): literal[string] identifier[y_distance] = identifier[self] . identifier[parent] . identifier[value] ( literal[string] ) identifier[self] . identifier[chan] =[] identifier[self] . identifier[chan_pos] =[] identifier[self] . identifier[chan_scale] =[] identifier[row] = literal[int] keyword[for] identifier[one_grp] keyword[in] identifier[self] . identifier[parent] . identifier[channels] . identifier[groups] : keyword[for] identifier[one_chan] keyword[in] identifier[one_grp] [ literal[string] ]: identifier[chan_name] = identifier[one_chan] + literal[string] + identifier[one_grp] [ literal[string] ]+ literal[string] identifier[dat] =( identifier[self] . identifier[data] ( identifier[trial] = literal[int] , identifier[chan] = identifier[chan_name] )* identifier[self] . identifier[parent] . identifier[value] ( literal[string] )) identifier[dat] *=- literal[int] identifier[path] = identifier[self] . identifier[scene] . identifier[addPath] ( identifier[Path] ( identifier[self] . identifier[data] . identifier[axis] [ literal[string] ][ literal[int] ], identifier[dat] )) identifier[path] . identifier[setPen] ( identifier[QPen] ( identifier[QColor] ( identifier[one_grp] [ literal[string] ]), identifier[LINE_WIDTH] )) identifier[chan_pos] = identifier[y_distance] * identifier[row] + identifier[y_distance] / literal[int] identifier[path] . identifier[setPos] ( literal[int] , identifier[chan_pos] ) identifier[row] += literal[int] identifier[self] . identifier[chan] . identifier[append] ( identifier[chan_name] ) identifier[self] . identifier[chan_scale] . identifier[append] ( identifier[one_grp] [ literal[string] ]) identifier[self] . identifier[chan_pos] . identifier[append] ( identifier[chan_pos] )
def add_traces(self): """Add traces based on self.data.""" y_distance = self.parent.value('y_distance') self.chan = [] self.chan_pos = [] self.chan_scale = [] row = 0 for one_grp in self.parent.channels.groups: for one_chan in one_grp['chan_to_plot']: # channel name chan_name = one_chan + ' (' + one_grp['name'] + ')' # trace dat = self.data(trial=0, chan=chan_name) * self.parent.value('y_scale') dat *= -1 # flip data, upside down path = self.scene.addPath(Path(self.data.axis['time'][0], dat)) path.setPen(QPen(QColor(one_grp['color']), LINE_WIDTH)) # adjust position chan_pos = y_distance * row + y_distance / 2 path.setPos(0, chan_pos) row += 1 self.chan.append(chan_name) self.chan_scale.append(one_grp['scale']) self.chan_pos.append(chan_pos) # depends on [control=['for'], data=['one_chan']] # depends on [control=['for'], data=['one_grp']]
def SETNZ(cpu, dest): """ Sets byte if not zero. :param cpu: current CPU. :param dest: destination operand. """ dest.write(Operators.ITEBV(dest.size, cpu.ZF == False, 1, 0))
def function[SETNZ, parameter[cpu, dest]]: constant[ Sets byte if not zero. :param cpu: current CPU. :param dest: destination operand. ] call[name[dest].write, parameter[call[name[Operators].ITEBV, parameter[name[dest].size, compare[name[cpu].ZF equal[==] constant[False]], constant[1], constant[0]]]]]
keyword[def] identifier[SETNZ] ( identifier[cpu] , identifier[dest] ): literal[string] identifier[dest] . identifier[write] ( identifier[Operators] . identifier[ITEBV] ( identifier[dest] . identifier[size] , identifier[cpu] . identifier[ZF] == keyword[False] , literal[int] , literal[int] ))
def SETNZ(cpu, dest): """ Sets byte if not zero. :param cpu: current CPU. :param dest: destination operand. """ dest.write(Operators.ITEBV(dest.size, cpu.ZF == False, 1, 0))
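For intuition about the write above: ITEBV is an if-then-else over bitvectors. A concrete plain-Python stand-in (an illustration, not Manticore's symbolic implementation) behaves like:

def itebv_concrete(size, cond, iftrue, iffalse):
    # Pick one of two values and mask to the destination width
    mask = (1 << size) - 1
    return (iftrue if cond else iffalse) & mask

# SETNZ: the destination byte becomes 1 when ZF is clear, 0 when it is set
assert itebv_concrete(8, True, 1, 0) == 1    # ZF == False
assert itebv_concrete(8, False, 1, 0) == 0   # ZF == True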
def changelog(): # type: () -> str """ Print change log since last release. """ # Skip 'v' prefix versions = [x for x in git.tags() if versioning.is_valid(x[1:])] cmd = 'git log --format=%H' if versions: cmd += ' {}..HEAD'.format(versions[-1]) hashes = shell.run(cmd, capture=True).stdout.strip().splitlines() commits = [git.CommitDetails.get(h) for h in hashes] tags = conf.get('changelog.tags', [ {'header': 'Features', 'tag': 'feature'}, {'header': 'Changes', 'tag': 'change'}, {'header': 'Fixes', 'tag': 'fix'}, ]) results = OrderedDict(( (x['header'], []) for x in tags )) for commit in commits: commit_items = extract_changelog_items(commit.desc, tags) for header, items in commit_items.items(): results[header] += items lines = [ '<35>v{}<0>'.format(versioning.current()), '', ] for header, items in results.items(): if items: lines += [ '', '<32>{}<0>'.format(header), '<32>{}<0>'.format('-' * len(header)), '', ] for item_text in items: item_lines = textwrap.wrap(item_text, 77) lines += ['- {}'.format('\n '.join(item_lines))] lines += [''] return '\n'.join(lines)
def function[changelog, parameter[]]: constant[ Print change log since last release. ] variable[versions] assign[=] <ast.ListComp object at 0x7da1b1052a70> variable[cmd] assign[=] constant[git log --format=%H] if name[versions] begin[:] <ast.AugAssign object at 0x7da1b1052200> variable[hashes] assign[=] call[call[call[name[shell].run, parameter[name[cmd]]].stdout.strip, parameter[]].splitlines, parameter[]] variable[commits] assign[=] <ast.ListComp object at 0x7da1b1052cb0> variable[tags] assign[=] call[name[conf].get, parameter[constant[changelog.tags], list[[<ast.Dict object at 0x7da1b1051720>, <ast.Dict object at 0x7da1b1052530>, <ast.Dict object at 0x7da1b1050310>]]]] variable[results] assign[=] call[name[OrderedDict], parameter[<ast.GeneratorExp object at 0x7da1b1051e10>]] for taget[name[commit]] in starred[name[commits]] begin[:] variable[commit_items] assign[=] call[name[extract_changelog_items], parameter[name[commit].desc, name[tags]]] for taget[tuple[[<ast.Name object at 0x7da1b1050130>, <ast.Name object at 0x7da1b10508b0>]]] in starred[call[name[commit_items].items, parameter[]]] begin[:] <ast.AugAssign object at 0x7da1b10502e0> variable[lines] assign[=] list[[<ast.Call object at 0x7da1b1050850>, <ast.Constant object at 0x7da1b10508e0>]] for taget[tuple[[<ast.Name object at 0x7da1b1050b50>, <ast.Name object at 0x7da1b1052710>]]] in starred[call[name[results].items, parameter[]]] begin[:] if name[items] begin[:] <ast.AugAssign object at 0x7da1b1052c50> for taget[name[item_text]] in starred[name[items]] begin[:] variable[item_lines] assign[=] call[name[textwrap].wrap, parameter[name[item_text], constant[77]]] <ast.AugAssign object at 0x7da1b1051270> <ast.AugAssign object at 0x7da1b10d7160> return[call[constant[ ].join, parameter[name[lines]]]]
keyword[def] identifier[changelog] (): literal[string] identifier[versions] =[ identifier[x] keyword[for] identifier[x] keyword[in] identifier[git] . identifier[tags] () keyword[if] identifier[versioning] . identifier[is_valid] ( identifier[x] [ literal[int] :])] identifier[cmd] = literal[string] keyword[if] identifier[versions] : identifier[cmd] += literal[string] . identifier[format] ( identifier[versions] [- literal[int] ]) identifier[hashes] = identifier[shell] . identifier[run] ( identifier[cmd] , identifier[capture] = keyword[True] ). identifier[stdout] . identifier[strip] (). identifier[splitlines] () identifier[commits] =[ identifier[git] . identifier[CommitDetails] . identifier[get] ( identifier[h] ) keyword[for] identifier[h] keyword[in] identifier[hashes] ] identifier[tags] = identifier[conf] . identifier[get] ( literal[string] ,[ { literal[string] : literal[string] , literal[string] : literal[string] }, { literal[string] : literal[string] , literal[string] : literal[string] }, { literal[string] : literal[string] , literal[string] : literal[string] }, ]) identifier[results] = identifier[OrderedDict] (( ( identifier[x] [ literal[string] ],[]) keyword[for] identifier[x] keyword[in] identifier[tags] )) keyword[for] identifier[commit] keyword[in] identifier[commits] : identifier[commit_items] = identifier[extract_changelog_items] ( identifier[commit] . identifier[desc] , identifier[tags] ) keyword[for] identifier[header] , identifier[items] keyword[in] identifier[commit_items] . identifier[items] (): identifier[results] [ identifier[header] ]+= identifier[items] identifier[lines] =[ literal[string] . identifier[format] ( identifier[versioning] . identifier[current] ()), literal[string] , ] keyword[for] identifier[header] , identifier[items] keyword[in] identifier[results] . identifier[items] (): keyword[if] identifier[items] : identifier[lines] +=[ literal[string] , literal[string] . identifier[format] ( identifier[header] ), literal[string] . identifier[format] ( literal[string] * identifier[len] ( identifier[header] )), literal[string] , ] keyword[for] identifier[item_text] keyword[in] identifier[items] : identifier[item_lines] = identifier[textwrap] . identifier[wrap] ( identifier[item_text] , literal[int] ) identifier[lines] +=[ literal[string] . identifier[format] ( literal[string] . identifier[join] ( identifier[item_lines] ))] identifier[lines] +=[ literal[string] ] keyword[return] literal[string] . identifier[join] ( identifier[lines] )
def changelog(): # type: () -> str ' Print change log since last release. ' # Skip 'v' prefix versions = [x for x in git.tags() if versioning.is_valid(x[1:])] cmd = 'git log --format=%H' if versions: cmd += ' {}..HEAD'.format(versions[-1]) # depends on [control=['if'], data=[]] hashes = shell.run(cmd, capture=True).stdout.strip().splitlines() commits = [git.CommitDetails.get(h) for h in hashes] tags = conf.get('changelog.tags', [{'header': 'Features', 'tag': 'feature'}, {'header': 'Changes', 'tag': 'change'}, {'header': 'Fixes', 'tag': 'fix'}]) results = OrderedDict(((x['header'], []) for x in tags)) for commit in commits: commit_items = extract_changelog_items(commit.desc, tags) for (header, items) in commit_items.items(): results[header] += items # depends on [control=['for'], data=[]] # depends on [control=['for'], data=['commit']] lines = ['<35>v{}<0>'.format(versioning.current()), ''] for (header, items) in results.items(): if items: lines += ['', '<32>{}<0>'.format(header), '<32>{}<0>'.format('-' * len(header)), ''] for item_text in items: item_lines = textwrap.wrap(item_text, 77) lines += ['- {}'.format('\n '.join(item_lines))] # depends on [control=['for'], data=['item_text']] lines += [''] # depends on [control=['if'], data=[]] # depends on [control=['for'], data=[]] return '\n'.join(lines)
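The item formatting at the end leans on a small stdlib detail worth seeing in isolation: textwrap.wrap returns a list of lines, and joining them with '\n  ' produces a hanging indent under the leading '- ' bullet:

import textwrap

item_text = "Support incremental builds so repeated runs only rebuild changed files."
item_lines = textwrap.wrap(item_text, 40)
print('- {}'.format('\n  '.join(item_lines)))
# - Support incremental builds so repeated
#   runs only rebuild changed files.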
def extract_names(sender): """Tries to extract sender's names from `From:` header. It could extract not only the actual names but e.g. the name of the company, parts of email, etc. >>> extract_names('Sergey N. Obukhov <[email protected]>') ['Sergey', 'Obukhov', 'serobnic'] >>> extract_names('') [] """ sender = to_unicode(sender, precise=True) # Remove non-alphabetical characters sender = "".join([char if char.isalpha() else ' ' for char in sender]) # Remove too short words and words from "black" list i.e. # words like `ru`, `gmail`, `com`, `org`, etc. sender = [word for word in sender.split() if len(word) > 1 and not word in BAD_SENDER_NAMES] # Remove duplicates names = list(set(sender)) return names
def function[extract_names, parameter[sender]]: constant[Tries to extract sender's names from `From:` header. It could extract not only the actual names but e.g. the name of the company, parts of email, etc. >>> extract_names('Sergey N. Obukhov <[email protected]>') ['Sergey', 'Obukhov', 'serobnic'] >>> extract_names('') [] ] variable[sender] assign[=] call[name[to_unicode], parameter[name[sender]]] variable[sender] assign[=] call[constant[].join, parameter[<ast.ListComp object at 0x7da1b1e67f10>]] variable[sender] assign[=] <ast.ListComp object at 0x7da1b1e67c70> variable[names] assign[=] call[name[list], parameter[call[name[set], parameter[name[sender]]]]] return[name[names]]
keyword[def] identifier[extract_names] ( identifier[sender] ): literal[string] identifier[sender] = identifier[to_unicode] ( identifier[sender] , identifier[precise] = keyword[True] ) identifier[sender] = literal[string] . identifier[join] ([ identifier[char] keyword[if] identifier[char] . identifier[isalpha] () keyword[else] literal[string] keyword[for] identifier[char] keyword[in] identifier[sender] ]) identifier[sender] =[ identifier[word] keyword[for] identifier[word] keyword[in] identifier[sender] . identifier[split] () keyword[if] identifier[len] ( identifier[word] )> literal[int] keyword[and] keyword[not] identifier[word] keyword[in] identifier[BAD_SENDER_NAMES] ] identifier[names] = identifier[list] ( identifier[set] ( identifier[sender] )) keyword[return] identifier[names]
def extract_names(sender): """Tries to extract sender's names from `From:` header. It could extract not only the actual names but e.g. the name of the company, parts of email, etc. >>> extract_names('Sergey N. Obukhov <[email protected]>') ['Sergey', 'Obukhov', 'serobnic'] >>> extract_names('') [] """ sender = to_unicode(sender, precise=True) # Remove non-alphabetical characters sender = ''.join([char if char.isalpha() else ' ' for char in sender]) # Remove too short words and words from "black" list i.e. # words like `ru`, `gmail`, `com`, `org`, etc. sender = [word for word in sender.split() if len(word) > 1 and (not word in BAD_SENDER_NAMES)] # Remove duplicates names = list(set(sender)) return names
def process_selectors(self, index=0, flags=0): """ Process selectors. We do our own selectors as BeautifulSoup4 has some annoying quirks, and we don't really need to do nth selectors or siblings or descendants etc. """ return self.parse_selectors(self.selector_iter(self.pattern), index, flags)
def function[process_selectors, parameter[self, index, flags]]: constant[ Process selectors. We do our own selectors as BeautifulSoup4 has some annoying quirks, and we don't really need to do nth selectors or siblings or descendants etc. ] return[call[name[self].parse_selectors, parameter[call[name[self].selector_iter, parameter[name[self].pattern]], name[index], name[flags]]]]
keyword[def] identifier[process_selectors] ( identifier[self] , identifier[index] = literal[int] , identifier[flags] = literal[int] ): literal[string] keyword[return] identifier[self] . identifier[parse_selectors] ( identifier[self] . identifier[selector_iter] ( identifier[self] . identifier[pattern] ), identifier[index] , identifier[flags] )
def process_selectors(self, index=0, flags=0): """ Process selectors. We do our own selectors as BeautifulSoup4 has some annoying quirks, and we don't really need to do nth selectors or siblings or descendants etc. """ return self.parse_selectors(self.selector_iter(self.pattern), index, flags)
def read_dbf(dbf_path, index = None, cols = False, incl_index = False): """ Read a dbf file as a pandas.DataFrame, optionally selecting the index variable and which columns are to be loaded. __author__ = "Dani Arribas-Bel <[email protected]> " ... Arguments --------- dbf_path : str Path to the DBF file to be read index : str Name of the column to be used as the index of the DataFrame cols : list List with the names of the columns to be read into the DataFrame. Defaults to False, which reads the whole dbf incl_index : Boolean If True index is included in the DataFrame as a column too. Defaults to False Returns ------- df : DataFrame pandas.DataFrame object created """ db = ps.open(dbf_path) if cols: if incl_index: cols.append(index) vars_to_read = cols else: vars_to_read = db.header data = dict([(var, db.by_col(var)) for var in vars_to_read]) if index: index = db.by_col(index) db.close() return pd.DataFrame(data, index=index) else: db.close() return pd.DataFrame(data)
def function[read_dbf, parameter[dbf_path, index, cols, incl_index]]: constant[ Read a dbf file as a pandas.DataFrame, optionally selecting the index variable and which columns are to be loaded. __author__ = "Dani Arribas-Bel <[email protected]> " ... Arguments --------- dbf_path : str Path to the DBF file to be read index : str Name of the column to be used as the index of the DataFrame cols : list List with the names of the columns to be read into the DataFrame. Defaults to False, which reads the whole dbf incl_index : Boolean If True index is included in the DataFrame as a column too. Defaults to False Returns ------- df : DataFrame pandas.DataFrame object created ] variable[db] assign[=] call[name[ps].open, parameter[name[dbf_path]]] if name[cols] begin[:] if name[incl_index] begin[:] call[name[cols].append, parameter[name[index]]] variable[vars_to_read] assign[=] name[cols] variable[data] assign[=] call[name[dict], parameter[<ast.ListComp object at 0x7da18c4cf3d0>]] if name[index] begin[:] variable[index] assign[=] call[name[db].by_col, parameter[name[index]]] call[name[db].close, parameter[]] return[call[name[pd].DataFrame, parameter[name[data]]]]
keyword[def] identifier[read_dbf] ( identifier[dbf_path] , identifier[index] = keyword[None] , identifier[cols] = keyword[False] , identifier[incl_index] = keyword[False] ): literal[string] identifier[db] = identifier[ps] . identifier[open] ( identifier[dbf_path] ) keyword[if] identifier[cols] : keyword[if] identifier[incl_index] : identifier[cols] . identifier[append] ( identifier[index] ) identifier[vars_to_read] = identifier[cols] keyword[else] : identifier[vars_to_read] = identifier[db] . identifier[header] identifier[data] = identifier[dict] ([( identifier[var] , identifier[db] . identifier[by_col] ( identifier[var] )) keyword[for] identifier[var] keyword[in] identifier[vars_to_read] ]) keyword[if] identifier[index] : identifier[index] = identifier[db] . identifier[by_col] ( identifier[index] ) identifier[db] . identifier[close] () keyword[return] identifier[pd] . identifier[DataFrame] ( identifier[data] , identifier[index] = identifier[index] ) keyword[else] : identifier[db] . identifier[close] () keyword[return] identifier[pd] . identifier[DataFrame] ( identifier[data] )
def read_dbf(dbf_path, index=None, cols=False, incl_index=False): """ Read a dbf file as a pandas.DataFrame, optionally selecting the index variable and which columns are to be loaded. __author__ = "Dani Arribas-Bel <[email protected]> " ... Arguments --------- dbf_path : str Path to the DBF file to be read index : str Name of the column to be used as the index of the DataFrame cols : list List with the names of the columns to be read into the DataFrame. Defaults to False, which reads the whole dbf incl_index : Boolean If True index is included in the DataFrame as a column too. Defaults to False Returns ------- df : DataFrame pandas.DataFrame object created """ db = ps.open(dbf_path) if cols: if incl_index: cols.append(index) # depends on [control=['if'], data=[]] vars_to_read = cols # depends on [control=['if'], data=[]] else: vars_to_read = db.header data = dict([(var, db.by_col(var)) for var in vars_to_read]) if index: index = db.by_col(index) db.close() return pd.DataFrame(data, index=index) # depends on [control=['if'], data=[]] else: db.close() return pd.DataFrame(data)
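A usage sketch under assumed inputs (the file name and column names below are placeholders, not from the source):

# Read two attribute columns, indexed by a hypothetical 'GEOID' field
df = read_dbf('counties.dbf', index='GEOID', cols=['NAME', 'POP2010'])

# Read every column with the default integer index
df_all = read_dbf('counties.dbf')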
def is_zip_file(models): r''' Ensure that a path is a zip file by: - checking length is 1 - checking extension is '.zip' ''' ext = os.path.splitext(models[0])[1] return (len(models) == 1) and (ext == '.zip')
def function[is_zip_file, parameter[models]]: constant[ Ensure that a path is a zip file by: - checking length is 1 - checking extension is '.zip' ] variable[ext] assign[=] call[call[name[os].path.splitext, parameter[call[name[models]][constant[0]]]]][constant[1]] return[<ast.BoolOp object at 0x7da1b20fb700>]
keyword[def] identifier[is_zip_file] ( identifier[models] ): literal[string] identifier[ext] = identifier[os] . identifier[path] . identifier[splitext] ( identifier[models] [ literal[int] ])[ literal[int] ] keyword[return] ( identifier[len] ( identifier[models] )== literal[int] ) keyword[and] ( identifier[ext] == literal[string] )
def is_zip_file(models): """ Ensure that a path is a zip file by: - checking length is 1 - checking extension is '.zip' """ ext = os.path.splitext(models[0])[1] return len(models) == 1 and ext == '.zip'
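Both checks must pass, so exactly one path ending in '.zip' returns True:

assert is_zip_file(['model.zip'])             # one file with a .zip extension
assert not is_zip_file(['model.pb'])          # wrong extension
assert not is_zip_file(['a.zip', 'b.zip'])    # more than one path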
def trapz_loglog(y, x, axis=-1, intervals=False): """ Integrate along the given axis using the composite trapezoidal rule in loglog space. Integrate `y` (`x`) along given axis in loglog space. Parameters ---------- y : array_like Input array to integrate. x : array_like, optional Independent variable to integrate over. axis : int, optional Specify the axis. Returns ------- trapz : float Definite integral as approximated by trapezoidal rule in loglog space. """ try: y_unit = y.unit y = y.value except AttributeError: y_unit = 1.0 try: x_unit = x.unit x = x.value except AttributeError: x_unit = 1.0 y = np.asanyarray(y) x = np.asanyarray(x) slice1 = [slice(None)] * y.ndim slice2 = [slice(None)] * y.ndim slice1[axis] = slice(None, -1) slice2[axis] = slice(1, None) slice1 = tuple(slice1) slice2 = tuple(slice2) if x.ndim == 1: shape = [1] * y.ndim shape[axis] = x.shape[0] x = x.reshape(shape) with warnings.catch_warnings(): warnings.simplefilter("ignore") # Compute the power law indices in each integration bin b = np.log10(y[slice2] / y[slice1]) / np.log10(x[slice2] / x[slice1]) # if local powerlaw index is -1, use \int 1/x = log(x); otherwise use # normal powerlaw integration trapzs = np.where( np.abs(b + 1.0) > 1e-10, ( y[slice1] * (x[slice2] * (x[slice2] / x[slice1]) ** b - x[slice1]) ) / (b + 1), x[slice1] * y[slice1] * np.log(x[slice2] / x[slice1]), ) tozero = (y[slice1] == 0.0) + (y[slice2] == 0.0) + (x[slice1] == x[slice2]) trapzs[tozero] = 0.0 if intervals: return trapzs * x_unit * y_unit ret = np.add.reduce(trapzs, axis) * x_unit * y_unit return ret
def function[trapz_loglog, parameter[y, x, axis, intervals]]: constant[ Integrate along the given axis using the composite trapezoidal rule in loglog space. Integrate `y` (`x`) along given axis in loglog space. Parameters ---------- y : array_like Input array to integrate. x : array_like, optional Independent variable to integrate over. axis : int, optional Specify the axis. Returns ------- trapz : float Definite integral as approximated by trapezoidal rule in loglog space. ] <ast.Try object at 0x7da1b0cbb670> <ast.Try object at 0x7da1b0cbb220> variable[y] assign[=] call[name[np].asanyarray, parameter[name[y]]] variable[x] assign[=] call[name[np].asanyarray, parameter[name[x]]] variable[slice1] assign[=] binary_operation[list[[<ast.Call object at 0x7da1b0ca7610>]] * name[y].ndim] variable[slice2] assign[=] binary_operation[list[[<ast.Call object at 0x7da1b0ca4610>]] * name[y].ndim] call[name[slice1]][name[axis]] assign[=] call[name[slice], parameter[constant[None], <ast.UnaryOp object at 0x7da1b0ca78e0>]] call[name[slice2]][name[axis]] assign[=] call[name[slice], parameter[constant[1], constant[None]]] variable[slice1] assign[=] call[name[tuple], parameter[name[slice1]]] variable[slice2] assign[=] call[name[tuple], parameter[name[slice2]]] if compare[name[x].ndim equal[==] constant[1]] begin[:] variable[shape] assign[=] binary_operation[list[[<ast.Constant object at 0x7da1b0ca5090>]] * name[y].ndim] call[name[shape]][name[axis]] assign[=] call[name[x].shape][constant[0]] variable[x] assign[=] call[name[x].reshape, parameter[name[shape]]] with call[name[warnings].catch_warnings, parameter[]] begin[:] call[name[warnings].simplefilter, parameter[constant[ignore]]] variable[b] assign[=] binary_operation[call[name[np].log10, parameter[binary_operation[call[name[y]][name[slice2]] / call[name[y]][name[slice1]]]]] / call[name[np].log10, parameter[binary_operation[call[name[x]][name[slice2]] / call[name[x]][name[slice1]]]]]] variable[trapzs] assign[=] call[name[np].where, parameter[compare[call[name[np].abs, parameter[binary_operation[name[b] + constant[1.0]]]] greater[>] constant[1e-10]], binary_operation[binary_operation[call[name[y]][name[slice1]] * binary_operation[binary_operation[call[name[x]][name[slice2]] * binary_operation[binary_operation[call[name[x]][name[slice2]] / call[name[x]][name[slice1]]] ** name[b]]] - call[name[x]][name[slice1]]]] / binary_operation[name[b] + constant[1]]], binary_operation[binary_operation[call[name[x]][name[slice1]] * call[name[y]][name[slice1]]] * call[name[np].log, parameter[binary_operation[call[name[x]][name[slice2]] / call[name[x]][name[slice1]]]]]]]] variable[tozero] assign[=] binary_operation[binary_operation[compare[call[name[y]][name[slice1]] equal[==] constant[0.0]] + compare[call[name[y]][name[slice2]] equal[==] constant[0.0]]] + compare[call[name[x]][name[slice1]] equal[==] call[name[x]][name[slice2]]]] call[name[trapzs]][name[tozero]] assign[=] constant[0.0] if name[intervals] begin[:] return[binary_operation[binary_operation[name[trapzs] * name[x_unit]] * name[y_unit]]] variable[ret] assign[=] binary_operation[binary_operation[call[name[np].add.reduce, parameter[name[trapzs], name[axis]]] * name[x_unit]] * name[y_unit]] return[name[ret]]
keyword[def] identifier[trapz_loglog] ( identifier[y] , identifier[x] , identifier[axis] =- literal[int] , identifier[intervals] = keyword[False] ): literal[string] keyword[try] : identifier[y_unit] = identifier[y] . identifier[unit] identifier[y] = identifier[y] . identifier[value] keyword[except] identifier[AttributeError] : identifier[y_unit] = literal[int] keyword[try] : identifier[x_unit] = identifier[x] . identifier[unit] identifier[x] = identifier[x] . identifier[value] keyword[except] identifier[AttributeError] : identifier[x_unit] = literal[int] identifier[y] = identifier[np] . identifier[asanyarray] ( identifier[y] ) identifier[x] = identifier[np] . identifier[asanyarray] ( identifier[x] ) identifier[slice1] =[ identifier[slice] ( keyword[None] )]* identifier[y] . identifier[ndim] identifier[slice2] =[ identifier[slice] ( keyword[None] )]* identifier[y] . identifier[ndim] identifier[slice1] [ identifier[axis] ]= identifier[slice] ( keyword[None] ,- literal[int] ) identifier[slice2] [ identifier[axis] ]= identifier[slice] ( literal[int] , keyword[None] ) identifier[slice1] = identifier[tuple] ( identifier[slice1] ) identifier[slice2] = identifier[tuple] ( identifier[slice2] ) keyword[if] identifier[x] . identifier[ndim] == literal[int] : identifier[shape] =[ literal[int] ]* identifier[y] . identifier[ndim] identifier[shape] [ identifier[axis] ]= identifier[x] . identifier[shape] [ literal[int] ] identifier[x] = identifier[x] . identifier[reshape] ( identifier[shape] ) keyword[with] identifier[warnings] . identifier[catch_warnings] (): identifier[warnings] . identifier[simplefilter] ( literal[string] ) identifier[b] = identifier[np] . identifier[log10] ( identifier[y] [ identifier[slice2] ]/ identifier[y] [ identifier[slice1] ])/ identifier[np] . identifier[log10] ( identifier[x] [ identifier[slice2] ]/ identifier[x] [ identifier[slice1] ]) identifier[trapzs] = identifier[np] . identifier[where] ( identifier[np] . identifier[abs] ( identifier[b] + literal[int] )> literal[int] , ( identifier[y] [ identifier[slice1] ] *( identifier[x] [ identifier[slice2] ]*( identifier[x] [ identifier[slice2] ]/ identifier[x] [ identifier[slice1] ])** identifier[b] - identifier[x] [ identifier[slice1] ]) ) /( identifier[b] + literal[int] ), identifier[x] [ identifier[slice1] ]* identifier[y] [ identifier[slice1] ]* identifier[np] . identifier[log] ( identifier[x] [ identifier[slice2] ]/ identifier[x] [ identifier[slice1] ]), ) identifier[tozero] =( identifier[y] [ identifier[slice1] ]== literal[int] )+( identifier[y] [ identifier[slice2] ]== literal[int] )+( identifier[x] [ identifier[slice1] ]== identifier[x] [ identifier[slice2] ]) identifier[trapzs] [ identifier[tozero] ]= literal[int] keyword[if] identifier[intervals] : keyword[return] identifier[trapzs] * identifier[x_unit] * identifier[y_unit] identifier[ret] = identifier[np] . identifier[add] . identifier[reduce] ( identifier[trapzs] , identifier[axis] )* identifier[x_unit] * identifier[y_unit] keyword[return] identifier[ret]
def trapz_loglog(y, x, axis=-1, intervals=False): """ Integrate along the given axis using the composite trapezoidal rule in loglog space. Integrate `y` (`x`) along given axis in loglog space. Parameters ---------- y : array_like Input array to integrate. x : array_like, optional Independent variable to integrate over. axis : int, optional Specify the axis. Returns ------- trapz : float Definite integral as approximated by trapezoidal rule in loglog space. """ try: y_unit = y.unit y = y.value # depends on [control=['try'], data=[]] except AttributeError: y_unit = 1.0 # depends on [control=['except'], data=[]] try: x_unit = x.unit x = x.value # depends on [control=['try'], data=[]] except AttributeError: x_unit = 1.0 # depends on [control=['except'], data=[]] y = np.asanyarray(y) x = np.asanyarray(x) slice1 = [slice(None)] * y.ndim slice2 = [slice(None)] * y.ndim slice1[axis] = slice(None, -1) slice2[axis] = slice(1, None) slice1 = tuple(slice1) slice2 = tuple(slice2) if x.ndim == 1: shape = [1] * y.ndim shape[axis] = x.shape[0] x = x.reshape(shape) # depends on [control=['if'], data=[]] with warnings.catch_warnings(): warnings.simplefilter('ignore') # Compute the power law indices in each integration bin b = np.log10(y[slice2] / y[slice1]) / np.log10(x[slice2] / x[slice1]) # if local powerlaw index is -1, use \int 1/x = log(x); otherwise use # normal powerlaw integration trapzs = np.where(np.abs(b + 1.0) > 1e-10, y[slice1] * (x[slice2] * (x[slice2] / x[slice1]) ** b - x[slice1]) / (b + 1), x[slice1] * y[slice1] * np.log(x[slice2] / x[slice1])) # depends on [control=['with'], data=[]] tozero = (y[slice1] == 0.0) + (y[slice2] == 0.0) + (x[slice1] == x[slice2]) trapzs[tozero] = 0.0 if intervals: return trapzs * x_unit * y_unit # depends on [control=['if'], data=[]] ret = np.add.reduce(trapzs, axis) * x_unit * y_unit return ret
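Because every bin is integrated as an exact power law, the routine is exact for pure power-law data even on a coarse grid, where plain trapezoidal integration in linear space overestimates a convex integrand:

import numpy as np

x = np.logspace(0, 1, 8)        # 8 log-spaced points on [1, 10]
y = x ** -2.0                   # analytic integral over [1, 10] is 0.9
print(trapz_loglog(y, x))       # 0.9 to machine precision
print(np.trapz(y, x))           # visibly above 0.9 on this coarse grid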
def get_status(self, channel=Channel.CHANNEL_CH0): """ Returns the error status of a specific CAN channel. :param int channel: CAN channel, to be used (:data:`Channel.CHANNEL_CH0` or :data:`Channel.CHANNEL_CH1`). :return: Tuple with CAN and USB status (see structure :class:`Status`). :rtype: tuple(int, int) """ status = Status() UcanGetStatusEx(self._handle, channel, byref(status)) return status.can_status, status.usb_status
def function[get_status, parameter[self, channel]]: constant[ Returns the error status of a specific CAN channel. :param int channel: CAN channel, to be used (:data:`Channel.CHANNEL_CH0` or :data:`Channel.CHANNEL_CH1`). :return: Tuple with CAN and USB status (see structure :class:`Status`). :rtype: tuple(int, int) ] variable[status] assign[=] call[name[Status], parameter[]] call[name[UcanGetStatusEx], parameter[name[self]._handle, name[channel], call[name[byref], parameter[name[status]]]]] return[tuple[[<ast.Attribute object at 0x7da1b1be95a0>, <ast.Attribute object at 0x7da1b1c589d0>]]]
keyword[def] identifier[get_status] ( identifier[self] , identifier[channel] = identifier[Channel] . identifier[CHANNEL_CH0] ): literal[string] identifier[status] = identifier[Status] () identifier[UcanGetStatusEx] ( identifier[self] . identifier[_handle] , identifier[channel] , identifier[byref] ( identifier[status] )) keyword[return] identifier[status] . identifier[can_status] , identifier[status] . identifier[usb_status]
def get_status(self, channel=Channel.CHANNEL_CH0): """ Returns the error status of a specific CAN channel. :param int channel: CAN channel, to be used (:data:`Channel.CHANNEL_CH0` or :data:`Channel.CHANNEL_CH1`). :return: Tuple with CAN and USB status (see structure :class:`Status`). :rtype: tuple(int, int) """ status = Status() UcanGetStatusEx(self._handle, channel, byref(status)) return (status.can_status, status.usb_status)
def check_sim_out(self): '''check if we should send new servos to flightgear''' now = time.time() if now - self.last_sim_send_time < 0.02 or self.rc_channels_scaled is None: return self.last_sim_send_time = now servos = [] for ch in range(1,9): servos.append(self.scale_channel(ch, getattr(self.rc_channels_scaled, 'chan%u_scaled' % ch))) servos.extend([0,0,0, 0,0,0]) buf = struct.pack('<14H', *servos) try: self.sim_out.send(buf) except socket.error as e: if not e.errno in [ errno.ECONNREFUSED ]: raise return
def function[check_sim_out, parameter[self]]: constant[check if we should send new servos to flightgear] variable[now] assign[=] call[name[time].time, parameter[]] if <ast.BoolOp object at 0x7da1b16d31c0> begin[:] return[None] name[self].last_sim_send_time assign[=] name[now] variable[servos] assign[=] list[[]] for taget[name[ch]] in starred[call[name[range], parameter[constant[1], constant[9]]]] begin[:] call[name[servos].append, parameter[call[name[self].scale_channel, parameter[name[ch], call[name[getattr], parameter[name[self].rc_channels_scaled, binary_operation[constant[chan%u_scaled] <ast.Mod object at 0x7da2590d6920> name[ch]]]]]]]] call[name[servos].extend, parameter[list[[<ast.Constant object at 0x7da1b17213c0>, <ast.Constant object at 0x7da1b1722e00>, <ast.Constant object at 0x7da1b1723730>, <ast.Constant object at 0x7da1b1722e60>, <ast.Constant object at 0x7da1b1721300>, <ast.Constant object at 0x7da1b17221d0>]]]] variable[buf] assign[=] call[name[struct].pack, parameter[constant[<14H], <ast.Starred object at 0x7da1b17225f0>]] <ast.Try object at 0x7da1b1721a20>
keyword[def] identifier[check_sim_out] ( identifier[self] ): literal[string] identifier[now] = identifier[time] . identifier[time] () keyword[if] identifier[now] - identifier[self] . identifier[last_sim_send_time] < literal[int] keyword[or] identifier[self] . identifier[rc_channels_scaled] keyword[is] keyword[None] : keyword[return] identifier[self] . identifier[last_sim_send_time] = identifier[now] identifier[servos] =[] keyword[for] identifier[ch] keyword[in] identifier[range] ( literal[int] , literal[int] ): identifier[servos] . identifier[append] ( identifier[self] . identifier[scale_channel] ( identifier[ch] , identifier[getattr] ( identifier[self] . identifier[rc_channels_scaled] , literal[string] % identifier[ch] ))) identifier[servos] . identifier[extend] ([ literal[int] , literal[int] , literal[int] , literal[int] , literal[int] , literal[int] ]) identifier[buf] = identifier[struct] . identifier[pack] ( literal[string] ,* identifier[servos] ) keyword[try] : identifier[self] . identifier[sim_out] . identifier[send] ( identifier[buf] ) keyword[except] identifier[socket] . identifier[error] keyword[as] identifier[e] : keyword[if] keyword[not] identifier[e] . identifier[errno] keyword[in] [ identifier[errno] . identifier[ECONNREFUSED] ]: keyword[raise] keyword[return]
def check_sim_out(self): """check if we should send new servos to flightgear""" now = time.time() if now - self.last_sim_send_time < 0.02 or self.rc_channels_scaled is None: return # depends on [control=['if'], data=[]] self.last_sim_send_time = now servos = [] for ch in range(1, 9): servos.append(self.scale_channel(ch, getattr(self.rc_channels_scaled, 'chan%u_scaled' % ch))) # depends on [control=['for'], data=['ch']] servos.extend([0, 0, 0, 0, 0, 0]) buf = struct.pack('<14H', *servos) try: self.sim_out.send(buf) # depends on [control=['try'], data=[]] except socket.error as e: if not e.errno in [errno.ECONNREFUSED]: raise # depends on [control=['if'], data=[]] return # depends on [control=['except'], data=['e']]
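The packet format is simple to verify: fourteen little-endian unsigned 16-bit values — the eight scaled RC channels plus six zero pads — giving a fixed 28-byte buffer:

import struct

servos = [1500, 1500, 1000, 1500, 0, 0, 0, 0] + [0, 0, 0, 0, 0, 0]
buf = struct.pack('<14H', *servos)
assert len(buf) == 28   # 14 values * 2 bytes each, little-endian, no padding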
def _get_device(self): """ Get the device """ try: device = { "name": self._dev.name, "isReachable": self._dev.isReachable, "isTrusted": self._get_isTrusted(), } except Exception: return None return device
def function[_get_device, parameter[self]]: constant[ Get the device ] <ast.Try object at 0x7da18bc72290> return[name[device]]
keyword[def] identifier[_get_device] ( identifier[self] ): literal[string] keyword[try] : identifier[device] ={ literal[string] : identifier[self] . identifier[_dev] . identifier[name] , literal[string] : identifier[self] . identifier[_dev] . identifier[isReachable] , literal[string] : identifier[self] . identifier[_get_isTrusted] (), } keyword[except] identifier[Exception] : keyword[return] keyword[None] keyword[return] identifier[device]
def _get_device(self): """ Get the device """ try: device = {'name': self._dev.name, 'isReachable': self._dev.isReachable, 'isTrusted': self._get_isTrusted()} # depends on [control=['try'], data=[]] except Exception: return None # depends on [control=['except'], data=[]] return device
def notify(cls, user_or_email_, object_id=None, **filters): """Start notifying the given user or email address when this event occurs and meets the criteria given in ``filters``. Return the created (or the existing matching) Watch so you can call :meth:`~tidings.models.Watch.activate()` on it if you're so inclined. Implementations in subclasses may take different arguments; see the docstring of :meth:`is_notifying()`. Send an activation email if an anonymous watch is created and :data:`~django.conf.settings.TIDINGS_CONFIRM_ANONYMOUS_WATCHES` is ``True``. If the activation request fails, raise a ActivationRequestFailed exception. Calling :meth:`notify()` twice for an anonymous user will send the email each time. """ # A test-for-existence-then-create race condition exists here, but it # doesn't matter: de-duplication on fire() and deletion of all matches # on stop_notifying() nullify its effects. try: # Pick 1 if >1 are returned: watch = cls._watches_belonging_to_user( user_or_email_, object_id=object_id, **filters)[0:1].get() except Watch.DoesNotExist: create_kwargs = {} if cls.content_type: create_kwargs['content_type'] = \ ContentType.objects.get_for_model(cls.content_type) create_kwargs['email' if isinstance(user_or_email_, string_types) else 'user'] = user_or_email_ # Letters that can't be mistaken for other letters or numbers in # most fonts, in case people try to type these: distinguishable_letters = \ 'abcdefghjkmnpqrstuvwxyzABCDEFGHJKLMNPQRTUVWXYZ' secret = ''.join(random.choice(distinguishable_letters) for x in range(10)) # Registered users don't need to confirm, but anonymous users do. is_active = ('user' in create_kwargs or not settings.TIDINGS_CONFIRM_ANONYMOUS_WATCHES) if object_id: create_kwargs['object_id'] = object_id watch = Watch.objects.create( secret=secret, is_active=is_active, event_type=cls.event_type, **create_kwargs) for k, v in iteritems(filters): WatchFilter.objects.create(watch=watch, name=k, value=hash_to_unsigned(v)) # Send email for inactive watches. if not watch.is_active: email = watch.user.email if watch.user else watch.email message = cls._activation_email(watch, email) try: message.send() except SMTPException as e: watch.delete() raise ActivationRequestFailed(e.recipients) return watch
def function[notify, parameter[cls, user_or_email_, object_id]]: constant[Start notifying the given user or email address when this event occurs and meets the criteria given in ``filters``. Return the created (or the existing matching) Watch so you can call :meth:`~tidings.models.Watch.activate()` on it if you're so inclined. Implementations in subclasses may take different arguments; see the docstring of :meth:`is_notifying()`. Send an activation email if an anonymous watch is created and :data:`~django.conf.settings.TIDINGS_CONFIRM_ANONYMOUS_WATCHES` is ``True``. If the activation request fails, raise a ActivationRequestFailed exception. Calling :meth:`notify()` twice for an anonymous user will send the email each time. ] <ast.Try object at 0x7da20c6ab430> if <ast.UnaryOp object at 0x7da1b0569a50> begin[:] variable[email] assign[=] <ast.IfExp object at 0x7da1b0569b40> variable[message] assign[=] call[name[cls]._activation_email, parameter[name[watch], name[email]]] <ast.Try object at 0x7da1b0569990> return[name[watch]]
keyword[def] identifier[notify] ( identifier[cls] , identifier[user_or_email_] , identifier[object_id] = keyword[None] ,** identifier[filters] ): literal[string] keyword[try] : identifier[watch] = identifier[cls] . identifier[_watches_belonging_to_user] ( identifier[user_or_email_] , identifier[object_id] = identifier[object_id] , ** identifier[filters] )[ literal[int] : literal[int] ]. identifier[get] () keyword[except] identifier[Watch] . identifier[DoesNotExist] : identifier[create_kwargs] ={} keyword[if] identifier[cls] . identifier[content_type] : identifier[create_kwargs] [ literal[string] ]= identifier[ContentType] . identifier[objects] . identifier[get_for_model] ( identifier[cls] . identifier[content_type] ) identifier[create_kwargs] [ literal[string] keyword[if] identifier[isinstance] ( identifier[user_or_email_] , identifier[string_types] ) keyword[else] literal[string] ]= identifier[user_or_email_] identifier[distinguishable_letters] = literal[string] identifier[secret] = literal[string] . identifier[join] ( identifier[random] . identifier[choice] ( identifier[distinguishable_letters] ) keyword[for] identifier[x] keyword[in] identifier[range] ( literal[int] )) identifier[is_active] =( literal[string] keyword[in] identifier[create_kwargs] keyword[or] keyword[not] identifier[settings] . identifier[TIDINGS_CONFIRM_ANONYMOUS_WATCHES] ) keyword[if] identifier[object_id] : identifier[create_kwargs] [ literal[string] ]= identifier[object_id] identifier[watch] = identifier[Watch] . identifier[objects] . identifier[create] ( identifier[secret] = identifier[secret] , identifier[is_active] = identifier[is_active] , identifier[event_type] = identifier[cls] . identifier[event_type] , ** identifier[create_kwargs] ) keyword[for] identifier[k] , identifier[v] keyword[in] identifier[iteritems] ( identifier[filters] ): identifier[WatchFilter] . identifier[objects] . identifier[create] ( identifier[watch] = identifier[watch] , identifier[name] = identifier[k] , identifier[value] = identifier[hash_to_unsigned] ( identifier[v] )) keyword[if] keyword[not] identifier[watch] . identifier[is_active] : identifier[email] = identifier[watch] . identifier[user] . identifier[email] keyword[if] identifier[watch] . identifier[user] keyword[else] identifier[watch] . identifier[email] identifier[message] = identifier[cls] . identifier[_activation_email] ( identifier[watch] , identifier[email] ) keyword[try] : identifier[message] . identifier[send] () keyword[except] identifier[SMTPException] keyword[as] identifier[e] : identifier[watch] . identifier[delete] () keyword[raise] identifier[ActivationRequestFailed] ( identifier[e] . identifier[recipients] ) keyword[return] identifier[watch]
def notify(cls, user_or_email_, object_id=None, **filters): """Start notifying the given user or email address when this event occurs and meets the criteria given in ``filters``. Return the created (or the existing matching) Watch so you can call :meth:`~tidings.models.Watch.activate()` on it if you're so inclined. Implementations in subclasses may take different arguments; see the docstring of :meth:`is_notifying()`. Send an activation email if an anonymous watch is created and :data:`~django.conf.settings.TIDINGS_CONFIRM_ANONYMOUS_WATCHES` is ``True``. If the activation request fails, raise a ActivationRequestFailed exception. Calling :meth:`notify()` twice for an anonymous user will send the email each time. """ # A test-for-existence-then-create race condition exists here, but it # doesn't matter: de-duplication on fire() and deletion of all matches # on stop_notifying() nullify its effects. try: # Pick 1 if >1 are returned: watch = cls._watches_belonging_to_user(user_or_email_, object_id=object_id, **filters)[0:1].get() # depends on [control=['try'], data=[]] except Watch.DoesNotExist: create_kwargs = {} if cls.content_type: create_kwargs['content_type'] = ContentType.objects.get_for_model(cls.content_type) # depends on [control=['if'], data=[]] create_kwargs['email' if isinstance(user_or_email_, string_types) else 'user'] = user_or_email_ # Letters that can't be mistaken for other letters or numbers in # most fonts, in case people try to type these: distinguishable_letters = 'abcdefghjkmnpqrstuvwxyzABCDEFGHJKLMNPQRTUVWXYZ' secret = ''.join((random.choice(distinguishable_letters) for x in range(10))) # Registered users don't need to confirm, but anonymous users do. is_active = 'user' in create_kwargs or not settings.TIDINGS_CONFIRM_ANONYMOUS_WATCHES if object_id: create_kwargs['object_id'] = object_id # depends on [control=['if'], data=[]] watch = Watch.objects.create(secret=secret, is_active=is_active, event_type=cls.event_type, **create_kwargs) for (k, v) in iteritems(filters): WatchFilter.objects.create(watch=watch, name=k, value=hash_to_unsigned(v)) # depends on [control=['for'], data=[]] # depends on [control=['except'], data=[]] # Send email for inactive watches. if not watch.is_active: email = watch.user.email if watch.user else watch.email message = cls._activation_email(watch, email) try: message.send() # depends on [control=['try'], data=[]] except SMTPException as e: watch.delete() raise ActivationRequestFailed(e.recipients) # depends on [control=['except'], data=['e']] # depends on [control=['if'], data=[]] return watch
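A hedged usage sketch — the event subclass, address, and filter name here are illustrative placeholders, not taken from the source:

# Assuming an Event subclass defined elsewhere, e.g.:
#     class NewPostEvent(Event):
#         event_type = 'new post'
watch = NewPostEvent.notify('reader@example.org', locale='en-US')

# Anonymous watches start inactive until the emailed secret is confirmed
if not watch.is_active:
    print('confirmation mail sent, secret:', watch.secret)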
def set_cached_value(self, *args, **kwargs): """ Sets the cached value """ key = self.get_cache_key(*args, **kwargs) logger.debug(key) return namedtuple('Settable', ['to'])(lambda value: self.cache.set(key, value))
def function[set_cached_value, parameter[self]]: constant[ Sets the cached value ] variable[key] assign[=] call[name[self].get_cache_key, parameter[<ast.Starred object at 0x7da20c9905e0>]] call[name[logger].debug, parameter[name[key]]] return[call[call[name[namedtuple], parameter[constant[Settable], list[[<ast.Constant object at 0x7da20c991cf0>]]]], parameter[<ast.Lambda object at 0x7da20c992d70>]]]
keyword[def] identifier[set_cached_value] ( identifier[self] ,* identifier[args] ,** identifier[kwargs] ): literal[string] identifier[key] = identifier[self] . identifier[get_cache_key] (* identifier[args] ,** identifier[kwargs] ) identifier[logger] . identifier[debug] ( identifier[key] ) keyword[return] identifier[namedtuple] ( literal[string] ,[ literal[string] ])( keyword[lambda] identifier[value] : identifier[self] . identifier[cache] . identifier[set] ( identifier[key] , identifier[value] ))
def set_cached_value(self, *args, **kwargs): """ Sets the cached value """ key = self.get_cache_key(*args, **kwargs) logger.debug(key) return namedtuple('Settable', ['to'])(lambda value: self.cache.set(key, value))
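The namedtuple here is a small fluent-interface trick: the returned object exposes a single field named 'to' holding a closure, so the call site reads like a sentence. A self-contained sketch with a dict-backed stand-in cache:

from collections import namedtuple

class DictCache:
    # Minimal stand-in for the real cache backend
    def __init__(self):
        self.store = {}

    def set(self, key, value):
        self.store[key] = value

cache = DictCache()

def set_cached_value(key):
    # Same trick as above: a one-field namedtuple wrapping a closure
    return namedtuple('Settable', ['to'])(lambda value: cache.set(key, value))

set_cached_value('hits:42').to(7)   # reads as: set cached value 'hits:42' to 7
assert cache.store['hits:42'] == 7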
def _set_apply_exp_traffic_class_map_name(self, v, load=False): """ Setter method for apply_exp_traffic_class_map_name, mapped from YANG variable /qos_mpls/map_apply/apply_exp_traffic_class_map_name (container) If this variable is read-only (config: false) in the source YANG file, then _set_apply_exp_traffic_class_map_name is considered as a private method. Backends looking to populate this variable should do so via calling thisObj._set_apply_exp_traffic_class_map_name() directly. """ if hasattr(v, "_utype"): v = v._utype(v) try: t = YANGDynClass(v,base=apply_exp_traffic_class_map_name.apply_exp_traffic_class_map_name, is_container='container', presence=False, yang_name="apply-exp-traffic-class-map-name", rest_name="exp-traffic-class", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Apply exp traffic class map', u'cli-sequence-commands': None, u'alt-name': u'exp-traffic-class', u'cli-incomplete-no': None}}, namespace='urn:brocade.com:mgmt:brocade-apply-qos-mpls', defining_module='brocade-apply-qos-mpls', yang_type='container', is_config=True) except (TypeError, ValueError): raise ValueError({ 'error-string': """apply_exp_traffic_class_map_name must be of a type compatible with container""", 'defined-type': "container", 'generated-type': """YANGDynClass(base=apply_exp_traffic_class_map_name.apply_exp_traffic_class_map_name, is_container='container', presence=False, yang_name="apply-exp-traffic-class-map-name", rest_name="exp-traffic-class", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Apply exp traffic class map', u'cli-sequence-commands': None, u'alt-name': u'exp-traffic-class', u'cli-incomplete-no': None}}, namespace='urn:brocade.com:mgmt:brocade-apply-qos-mpls', defining_module='brocade-apply-qos-mpls', yang_type='container', is_config=True)""", }) self.__apply_exp_traffic_class_map_name = t if hasattr(self, '_set'): self._set()
def function[_set_apply_exp_traffic_class_map_name, parameter[self, v, load]]: constant[ Setter method for apply_exp_traffic_class_map_name, mapped from YANG variable /qos_mpls/map_apply/apply_exp_traffic_class_map_name (container) If this variable is read-only (config: false) in the source YANG file, then _set_apply_exp_traffic_class_map_name is considered as a private method. Backends looking to populate this variable should do so via calling thisObj._set_apply_exp_traffic_class_map_name() directly. ] if call[name[hasattr], parameter[name[v], constant[_utype]]] begin[:] variable[v] assign[=] call[name[v]._utype, parameter[name[v]]] <ast.Try object at 0x7da1b2537c40> name[self].__apply_exp_traffic_class_map_name assign[=] name[t] if call[name[hasattr], parameter[name[self], constant[_set]]] begin[:] call[name[self]._set, parameter[]]
keyword[def] identifier[_set_apply_exp_traffic_class_map_name] ( identifier[self] , identifier[v] , identifier[load] = keyword[False] ): literal[string] keyword[if] identifier[hasattr] ( identifier[v] , literal[string] ): identifier[v] = identifier[v] . identifier[_utype] ( identifier[v] ) keyword[try] : identifier[t] = identifier[YANGDynClass] ( identifier[v] , identifier[base] = identifier[apply_exp_traffic_class_map_name] . identifier[apply_exp_traffic_class_map_name] , identifier[is_container] = literal[string] , identifier[presence] = keyword[False] , identifier[yang_name] = literal[string] , identifier[rest_name] = literal[string] , identifier[parent] = identifier[self] , identifier[path_helper] = identifier[self] . identifier[_path_helper] , identifier[extmethods] = identifier[self] . identifier[_extmethods] , identifier[register_paths] = keyword[True] , identifier[extensions] ={ literal[string] :{ literal[string] : literal[string] , literal[string] : keyword[None] , literal[string] : literal[string] , literal[string] : keyword[None] }}, identifier[namespace] = literal[string] , identifier[defining_module] = literal[string] , identifier[yang_type] = literal[string] , identifier[is_config] = keyword[True] ) keyword[except] ( identifier[TypeError] , identifier[ValueError] ): keyword[raise] identifier[ValueError] ({ literal[string] : literal[string] , literal[string] : literal[string] , literal[string] : literal[string] , }) identifier[self] . identifier[__apply_exp_traffic_class_map_name] = identifier[t] keyword[if] identifier[hasattr] ( identifier[self] , literal[string] ): identifier[self] . identifier[_set] ()
def _set_apply_exp_traffic_class_map_name(self, v, load=False): """ Setter method for apply_exp_traffic_class_map_name, mapped from YANG variable /qos_mpls/map_apply/apply_exp_traffic_class_map_name (container) If this variable is read-only (config: false) in the source YANG file, then _set_apply_exp_traffic_class_map_name is considered as a private method. Backends looking to populate this variable should do so via calling thisObj._set_apply_exp_traffic_class_map_name() directly. """ if hasattr(v, '_utype'): v = v._utype(v) # depends on [control=['if'], data=[]] try: t = YANGDynClass(v, base=apply_exp_traffic_class_map_name.apply_exp_traffic_class_map_name, is_container='container', presence=False, yang_name='apply-exp-traffic-class-map-name', rest_name='exp-traffic-class', parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u'tailf-common': {u'info': u'Apply exp traffic class map', u'cli-sequence-commands': None, u'alt-name': u'exp-traffic-class', u'cli-incomplete-no': None}}, namespace='urn:brocade.com:mgmt:brocade-apply-qos-mpls', defining_module='brocade-apply-qos-mpls', yang_type='container', is_config=True) # depends on [control=['try'], data=[]] except (TypeError, ValueError): raise ValueError({'error-string': 'apply_exp_traffic_class_map_name must be of a type compatible with container', 'defined-type': 'container', 'generated-type': 'YANGDynClass(base=apply_exp_traffic_class_map_name.apply_exp_traffic_class_map_name, is_container=\'container\', presence=False, yang_name="apply-exp-traffic-class-map-name", rest_name="exp-traffic-class", parent=self, path_helper=self._path_helper, extmethods=self._extmethods, register_paths=True, extensions={u\'tailf-common\': {u\'info\': u\'Apply exp traffic class map\', u\'cli-sequence-commands\': None, u\'alt-name\': u\'exp-traffic-class\', u\'cli-incomplete-no\': None}}, namespace=\'urn:brocade.com:mgmt:brocade-apply-qos-mpls\', defining_module=\'brocade-apply-qos-mpls\', yang_type=\'container\', is_config=True)'}) # depends on [control=['except'], data=[]] self.__apply_exp_traffic_class_map_name = t if hasattr(self, '_set'): self._set() # depends on [control=['if'], data=[]]
def validate(self): """Validate the resource""" if not self._resource.get('permissions'): self.permissions = self.default_permissions try: # update _resource so have default values from the schema self._resource = self.schema(self._resource) except MultipleInvalid as e: errors = [format_error(err, self.resource_type) for err in e.errors] raise exceptions.ValidationError({'errors': errors}) yield self.check_service() yield self.check_unique()
def function[validate, parameter[self]]: constant[Validate the resource] if <ast.UnaryOp object at 0x7da1b1451660> begin[:] name[self].permissions assign[=] name[self].default_permissions <ast.Try object at 0x7da1b1451900> <ast.Yield object at 0x7da1b1452680> <ast.Yield object at 0x7da1b1452590>
keyword[def] identifier[validate] ( identifier[self] ): literal[string] keyword[if] keyword[not] identifier[self] . identifier[_resource] . identifier[get] ( literal[string] ): identifier[self] . identifier[permissions] = identifier[self] . identifier[default_permissions] keyword[try] : identifier[self] . identifier[_resource] = identifier[self] . identifier[schema] ( identifier[self] . identifier[_resource] ) keyword[except] identifier[MultipleInvalid] keyword[as] identifier[e] : identifier[errors] =[ identifier[format_error] ( identifier[err] , identifier[self] . identifier[resource_type] ) keyword[for] identifier[err] keyword[in] identifier[e] . identifier[errors] ] keyword[raise] identifier[exceptions] . identifier[ValidationError] ({ literal[string] : identifier[errors] }) keyword[yield] identifier[self] . identifier[check_service] () keyword[yield] identifier[self] . identifier[check_unique] ()
def validate(self): """Validate the resource""" if not self._resource.get('permissions'): self.permissions = self.default_permissions # depends on [control=['if'], data=[]] try: # update _resource so have default values from the schema self._resource = self.schema(self._resource) # depends on [control=['try'], data=[]] except MultipleInvalid as e: errors = [format_error(err, self.resource_type) for err in e.errors] raise exceptions.ValidationError({'errors': errors}) # depends on [control=['except'], data=['e']] yield self.check_service() yield self.check_unique()
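The schema call both validates and fills in defaults; with voluptuous (which the MultipleInvalid import suggests), the pattern is roughly:

from voluptuous import Schema, Required, MultipleInvalid

schema = Schema({Required('name'): str, 'permissions': dict})
try:
    resource = schema({'name': 'datastore'})   # returns the validated dict
except MultipleInvalid as e:
    print([str(err) for err in e.errors])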
def symmetry(self): """ Check whether a mesh has rotational symmetry. Returns ----------- symmetry: None No rotational symmetry 'radial' Symmetric around an axis 'spherical' Symmetric around a point """ symmetry, axis, section = inertia.radial_symmetry(self) self._cache['symmetry_axis'] = axis self._cache['symmetry_section'] = section return symmetry
def function[symmetry, parameter[self]]: constant[ Check whether a mesh has rotational symmetry. Returns ----------- symmetry: None No rotational symmetry 'radial' Symmetric around an axis 'spherical' Symmetric around a point ] <ast.Tuple object at 0x7da20c7c8760> assign[=] call[name[inertia].radial_symmetry, parameter[name[self]]] call[name[self]._cache][constant[symmetry_axis]] assign[=] name[axis] call[name[self]._cache][constant[symmetry_section]] assign[=] name[section] return[name[symmetry]]
keyword[def] identifier[symmetry] ( identifier[self] ): literal[string] identifier[symmetry] , identifier[axis] , identifier[section] = identifier[inertia] . identifier[radial_symmetry] ( identifier[self] ) identifier[self] . identifier[_cache] [ literal[string] ]= identifier[axis] identifier[self] . identifier[_cache] [ literal[string] ]= identifier[section] keyword[return] identifier[symmetry]
def symmetry(self): """ Check whether a mesh has rotational symmetry. Returns ----------- symmetry: None No rotational symmetry 'radial' Symmetric around an axis 'spherical' Symmetric around a point """ (symmetry, axis, section) = inertia.radial_symmetry(self) self._cache['symmetry_axis'] = axis self._cache['symmetry_section'] = section return symmetry
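The symmetry property above matches trimesh's Trimesh.symmetry (the inertia.radial_symmetry helper and the _cache attribute point to that library). Assuming that is the class in question, a minimal usage sketch:

import trimesh

# A cylinder is rotationally symmetric around its axis; an icosphere is
# symmetric around its center. The return values assume trimesh's inertia helper.
cylinder = trimesh.creation.cylinder(radius=1.0, height=2.0)
print(cylinder.symmetry)                       # 'radial'
print(trimesh.creation.icosphere().symmetry)   # 'spherical'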
def fit_size_distribution_component_models(self, model_names, model_objs, input_columns, output_columns):
        """
        Calculates 2 principal components for the hail size distribution from its shape and scale
        parameters. Separate machine learning models are fit to predict each component.

        Args:
            model_names: List of machine learning model names
            model_objs: List of machine learning model objects.
            input_columns: List of input variables
            output_columns: Output columns, should contain Shape and Scale.

        Returns:
            None. The fitted models are stored in self.size_distribution_models.
        """
        groups = np.unique(self.data["train"]["member"][self.group_col])
        weights = None
        for group in groups:
            print(group)
            group_data = self.data["train"]["combo"].loc[self.data["train"]["combo"][self.group_col] == group]
            group_data = group_data.dropna()
            group_data = group_data.loc[group_data[output_columns[-1]] > 0]
            if self.sector:
                lon_obj = group_data.loc[:, 'Centroid_Lon']
                lat_obj = group_data.loc[:, 'Centroid_Lat']
                conus_lat_lon_points = zip(lon_obj.values.ravel(), lat_obj.values.ravel())
                center_lon, center_lat = self.proj_dict["lon_0"], self.proj_dict["lat_0"]
                distances = np.array([np.sqrt((x - center_lon) ** 2 +
                                              (y - center_lat) ** 2) for (x, y) in conus_lat_lon_points])
                min_dist, max_minus_min = min(distances), max(distances) - min(distances)
                distance_0_1 = [1.0 - ((d - min_dist) / max_minus_min) for d in distances]
                weights = np.array(distance_0_1)
            self.size_distribution_models[group] = {"lognorm": {}}
            self.size_distribution_models[group]["lognorm"]["pca"] = PCA(n_components=len(output_columns))
            log_labels = np.log(group_data[output_columns].values)
            log_labels[:, np.where(output_columns == "Shape")[0]] *= -1
            log_means = log_labels.mean(axis=0)
            log_sds = log_labels.std(axis=0)
            log_norm_labels = (log_labels - log_means) / log_sds
            out_pc_labels = self.size_distribution_models[group]["lognorm"]["pca"].fit_transform(log_norm_labels)
            self.size_distribution_models[group]['lognorm']['mean'] = log_means
            self.size_distribution_models[group]['lognorm']['sd'] = log_sds
            for comp in range(len(output_columns)):
                self.size_distribution_models[group]["pc_{0:d}".format(comp)] = dict()
                for m, model_name in enumerate(model_names):
                    print(model_name, comp)
                    self.size_distribution_models[group][
                        "pc_{0:d}".format(comp)][model_name] = deepcopy(model_objs[m])
                    try:
                        self.size_distribution_models[group][
                            "pc_{0:d}".format(comp)][model_name].fit(group_data[input_columns],
                                                                     out_pc_labels[:, comp],
                                                                     sample_weight=weights)
                    except:
                        self.size_distribution_models[group][
                            "pc_{0:d}".format(comp)][model_name].fit(group_data[input_columns],
                                                                     out_pc_labels[:, comp])
        return
def function[fit_size_distribution_component_models, parameter[self, model_names, model_objs, input_columns, output_columns]]: constant[ This calculates 2 principal components for the hail size distribution between the shape and scale parameters. Separate machine learning models are fit to predict each component. Args: model_names: List of machine learning model names model_objs: List of machine learning model objects. input_columns: List of input variables output_columns: Output columns, should contain Shape and Scale. Returns: ] variable[groups] assign[=] call[name[np].unique, parameter[call[call[call[name[self].data][constant[train]]][constant[member]]][name[self].group_col]]] variable[weights] assign[=] constant[None] for taget[name[group]] in starred[name[groups]] begin[:] call[name[print], parameter[name[group]]] variable[group_data] assign[=] call[call[call[name[self].data][constant[train]]][constant[combo]].loc][compare[call[call[call[name[self].data][constant[train]]][constant[combo]]][name[self].group_col] equal[==] name[group]]] variable[group_data] assign[=] call[name[group_data].dropna, parameter[]] variable[group_data] assign[=] call[name[group_data].loc][compare[call[name[group_data]][call[name[output_columns]][<ast.UnaryOp object at 0x7da2045640d0>]] greater[>] constant[0]]] if name[self].sector begin[:] variable[lon_obj] assign[=] call[name[group_data].loc][tuple[[<ast.Slice object at 0x7da204565390>, <ast.Constant object at 0x7da204565000>]]] variable[lat_obj] assign[=] call[name[group_data].loc][tuple[[<ast.Slice object at 0x7da204566200>, <ast.Constant object at 0x7da2045661a0>]]] variable[conus_lat_lon_points] assign[=] call[name[zip], parameter[call[name[lon_obj].values.ravel, parameter[]], call[name[lat_obj].values.ravel, parameter[]]]] <ast.Tuple object at 0x7da204566050> assign[=] tuple[[<ast.Subscript object at 0x7da2045667a0>, <ast.Subscript object at 0x7da204567070>]] variable[distances] assign[=] call[name[np].array, parameter[<ast.ListComp object at 0x7da204564610>]] <ast.Tuple object at 0x7da204566470> assign[=] tuple[[<ast.Call object at 0x7da2045646d0>, <ast.BinOp object at 0x7da2045647c0>]] variable[distance_0_1] assign[=] <ast.ListComp object at 0x7da204567e50> variable[weights] assign[=] call[name[np].array, parameter[name[distance_0_1]]] call[name[self].size_distribution_models][name[group]] assign[=] dictionary[[<ast.Constant object at 0x7da204567df0>], [<ast.Dict object at 0x7da2045652d0>]] call[call[call[name[self].size_distribution_models][name[group]]][constant[lognorm]]][constant[pca]] assign[=] call[name[PCA], parameter[]] variable[log_labels] assign[=] call[name[np].log, parameter[call[name[group_data]][name[output_columns]].values]] <ast.AugAssign object at 0x7da2045667d0> variable[log_means] assign[=] call[name[log_labels].mean, parameter[]] variable[log_sds] assign[=] call[name[log_labels].std, parameter[]] variable[log_norm_labels] assign[=] binary_operation[binary_operation[name[log_labels] - name[log_means]] / name[log_sds]] variable[out_pc_labels] assign[=] call[call[call[call[name[self].size_distribution_models][name[group]]][constant[lognorm]]][constant[pca]].fit_transform, parameter[name[log_norm_labels]]] call[call[call[name[self].size_distribution_models][name[group]]][constant[lognorm]]][constant[mean]] assign[=] name[log_means] call[call[call[name[self].size_distribution_models][name[group]]][constant[lognorm]]][constant[sd]] assign[=] name[log_sds] for taget[name[comp]] in starred[call[name[range], parameter[call[name[len], 
parameter[name[output_columns]]]]]] begin[:] call[call[name[self].size_distribution_models][name[group]]][call[constant[pc_{0:d}].format, parameter[name[comp]]]] assign[=] call[name[dict], parameter[]] for taget[tuple[[<ast.Name object at 0x7da2045665f0>, <ast.Name object at 0x7da2045647f0>]]] in starred[call[name[enumerate], parameter[name[model_names]]]] begin[:] call[name[print], parameter[name[model_name], name[comp]]] call[call[call[name[self].size_distribution_models][name[group]]][call[constant[pc_{0:d}].format, parameter[name[comp]]]]][name[model_name]] assign[=] call[name[deepcopy], parameter[call[name[model_objs]][name[m]]]] <ast.Try object at 0x7da204566c80> return[None]
keyword[def] identifier[fit_size_distribution_component_models] ( identifier[self] , identifier[model_names] , identifier[model_objs] , identifier[input_columns] , identifier[output_columns] ): literal[string] identifier[groups] = identifier[np] . identifier[unique] ( identifier[self] . identifier[data] [ literal[string] ][ literal[string] ][ identifier[self] . identifier[group_col] ]) identifier[weights] = keyword[None] keyword[for] identifier[group] keyword[in] identifier[groups] : identifier[print] ( identifier[group] ) identifier[group_data] = identifier[self] . identifier[data] [ literal[string] ][ literal[string] ]. identifier[loc] [ identifier[self] . identifier[data] [ literal[string] ][ literal[string] ][ identifier[self] . identifier[group_col] ]== identifier[group] ] identifier[group_data] = identifier[group_data] . identifier[dropna] () identifier[group_data] = identifier[group_data] . identifier[loc] [ identifier[group_data] [ identifier[output_columns] [- literal[int] ]]> literal[int] ] keyword[if] identifier[self] . identifier[sector] : identifier[lon_obj] = identifier[group_data] . identifier[loc] [:, literal[string] ] identifier[lat_obj] = identifier[group_data] . identifier[loc] [:, literal[string] ] identifier[conus_lat_lon_points] = identifier[zip] ( identifier[lon_obj] . identifier[values] . identifier[ravel] (), identifier[lat_obj] . identifier[values] . identifier[ravel] ()) identifier[center_lon] , identifier[center_lat] = identifier[self] . identifier[proj_dict] [ literal[string] ], identifier[self] . identifier[proj_dict] [ literal[string] ] identifier[distances] = identifier[np] . identifier[array] ([ identifier[np] . identifier[sqrt] (( identifier[x] - identifier[center_lon] )** literal[int] +( identifier[y] - identifier[center_lat] )** literal[int] ) keyword[for] ( identifier[x] , identifier[y] ) keyword[in] identifier[conus_lat_lon_points] ]) identifier[min_dist] , identifier[max_minus_min] = identifier[min] ( identifier[distances] ), identifier[max] ( identifier[distances] )- identifier[min] ( identifier[distances] ) identifier[distance_0_1] =[ literal[int] -(( identifier[d] - identifier[min_dist] )/( identifier[max_minus_min] )) keyword[for] identifier[d] keyword[in] identifier[distances] ] identifier[weights] = identifier[np] . identifier[array] ( identifier[distance_0_1] ) identifier[self] . identifier[size_distribution_models] [ identifier[group] ]={ literal[string] :{}} identifier[self] . identifier[size_distribution_models] [ identifier[group] ][ literal[string] ][ literal[string] ]= identifier[PCA] ( identifier[n_components] = identifier[len] ( identifier[output_columns] )) identifier[log_labels] = identifier[np] . identifier[log] ( identifier[group_data] [ identifier[output_columns] ]. identifier[values] ) identifier[log_labels] [:, identifier[np] . identifier[where] ( identifier[output_columns] == literal[string] )[ literal[int] ]]*=- literal[int] identifier[log_means] = identifier[log_labels] . identifier[mean] ( identifier[axis] = literal[int] ) identifier[log_sds] = identifier[log_labels] . identifier[std] ( identifier[axis] = literal[int] ) identifier[log_norm_labels] =( identifier[log_labels] - identifier[log_means] )/ identifier[log_sds] identifier[out_pc_labels] = identifier[self] . identifier[size_distribution_models] [ identifier[group] ][ literal[string] ][ literal[string] ]. identifier[fit_transform] ( identifier[log_norm_labels] ) identifier[self] . 
identifier[size_distribution_models] [ identifier[group] ][ literal[string] ][ literal[string] ]= identifier[log_means] identifier[self] . identifier[size_distribution_models] [ identifier[group] ][ literal[string] ][ literal[string] ]= identifier[log_sds] keyword[for] identifier[comp] keyword[in] identifier[range] ( identifier[len] ( identifier[output_columns] )): identifier[self] . identifier[size_distribution_models] [ identifier[group] ][ literal[string] . identifier[format] ( identifier[comp] )]= identifier[dict] () keyword[for] identifier[m] , identifier[model_name] keyword[in] identifier[enumerate] ( identifier[model_names] ): identifier[print] ( identifier[model_name] , identifier[comp] ) identifier[self] . identifier[size_distribution_models] [ identifier[group] ][ literal[string] . identifier[format] ( identifier[comp] )][ identifier[model_name] ]= identifier[deepcopy] ( identifier[model_objs] [ identifier[m] ]) keyword[try] : identifier[self] . identifier[size_distribution_models] [ identifier[group] ][ literal[string] . identifier[format] ( identifier[comp] )][ identifier[model_name] ]. identifier[fit] ( identifier[group_data] [ identifier[input_columns] ], identifier[out_pc_labels] [:, identifier[comp] ], identifier[sample_weight] = identifier[weights] ) keyword[except] : identifier[self] . identifier[size_distribution_models] [ identifier[group] ][ literal[string] . identifier[format] ( identifier[comp] )][ identifier[model_name] ]. identifier[fit] ( identifier[group_data] [ identifier[input_columns] ], identifier[out_pc_labels] [:, identifier[comp] ]) keyword[return]
def fit_size_distribution_component_models(self, model_names, model_objs, input_columns, output_columns): """
        Calculates 2 principal components for the hail size distribution from its shape and scale
        parameters. Separate machine learning models are fit to predict each component.

        Args:
            model_names: List of machine learning model names
            model_objs: List of machine learning model objects.
            input_columns: List of input variables
            output_columns: Output columns, should contain Shape and Scale.

        Returns:
            None. The fitted models are stored in self.size_distribution_models.
        """ groups = np.unique(self.data['train']['member'][self.group_col]) weights = None for group in groups: print(group) group_data = self.data['train']['combo'].loc[self.data['train']['combo'][self.group_col] == group] group_data = group_data.dropna() group_data = group_data.loc[group_data[output_columns[-1]] > 0] if self.sector: lon_obj = group_data.loc[:, 'Centroid_Lon'] lat_obj = group_data.loc[:, 'Centroid_Lat'] conus_lat_lon_points = zip(lon_obj.values.ravel(), lat_obj.values.ravel()) (center_lon, center_lat) = (self.proj_dict['lon_0'], self.proj_dict['lat_0']) distances = np.array([np.sqrt((x - center_lon) ** 2 + (y - center_lat) ** 2) for (x, y) in conus_lat_lon_points]) (min_dist, max_minus_min) = (min(distances), max(distances) - min(distances)) distance_0_1 = [1.0 - (d - min_dist) / max_minus_min for d in distances] weights = np.array(distance_0_1) # depends on [control=['if'], data=[]] self.size_distribution_models[group] = {'lognorm': {}} self.size_distribution_models[group]['lognorm']['pca'] = PCA(n_components=len(output_columns)) log_labels = np.log(group_data[output_columns].values) log_labels[:, np.where(output_columns == 'Shape')[0]] *= -1 log_means = log_labels.mean(axis=0) log_sds = log_labels.std(axis=0) log_norm_labels = (log_labels - log_means) / log_sds out_pc_labels = self.size_distribution_models[group]['lognorm']['pca'].fit_transform(log_norm_labels) self.size_distribution_models[group]['lognorm']['mean'] = log_means self.size_distribution_models[group]['lognorm']['sd'] = log_sds for comp in range(len(output_columns)): self.size_distribution_models[group]['pc_{0:d}'.format(comp)] = dict() for (m, model_name) in enumerate(model_names): print(model_name, comp) self.size_distribution_models[group]['pc_{0:d}'.format(comp)][model_name] = deepcopy(model_objs[m]) try: self.size_distribution_models[group]['pc_{0:d}'.format(comp)][model_name].fit(group_data[input_columns], out_pc_labels[:, comp], sample_weight=weights) # depends on [control=['try'], data=[]] except: self.size_distribution_models[group]['pc_{0:d}'.format(comp)][model_name].fit(group_data[input_columns], out_pc_labels[:, comp]) # depends on [control=['except'], data=[]] # depends on [control=['for'], data=[]] # depends on [control=['for'], data=['comp']] # depends on [control=['for'], data=['group']] return
def data_url(content, mimetype=None):
    """
    Returns content encoded as base64 Data URI.

    :param content: bytes or str or Path
    :param mimetype: mimetype for the content
    :return: str object (consisting only of ASCII, though)

    .. seealso:: https://en.wikipedia.org/wiki/Data_URI_scheme
    """
    if isinstance(content, pathlib.Path):
        if not mimetype:
            mimetype = guess_type(content.name)[0]
        with content.open('rb') as fp:
            content = fp.read()
    else:
        if isinstance(content, text_type):
            content = content.encode('utf8')
    return "data:{0};base64,{1}".format(
        mimetype or 'application/octet-stream',
        b64encode(content).decode())
def function[data_url, parameter[content, mimetype]]: constant[ Returns content encoded as base64 Data URI. :param content: bytes or str or Path :param mimetype: mimetype for :return: str object (consisting only of ASCII, though) .. seealso:: https://en.wikipedia.org/wiki/Data_URI_scheme ] if call[name[isinstance], parameter[name[content], name[pathlib].Path]] begin[:] if <ast.UnaryOp object at 0x7da1b26ad330> begin[:] variable[mimetype] assign[=] call[call[name[guess_type], parameter[name[content].name]]][constant[0]] with call[name[content].open, parameter[constant[rb]]] begin[:] variable[content] assign[=] call[name[fp].read, parameter[]] return[call[constant[data:{0};base64,{1}].format, parameter[<ast.BoolOp object at 0x7da20c993010>, call[call[name[b64encode], parameter[name[content]]].decode, parameter[]]]]]
keyword[def] identifier[data_url] ( identifier[content] , identifier[mimetype] = keyword[None] ): literal[string] keyword[if] identifier[isinstance] ( identifier[content] , identifier[pathlib] . identifier[Path] ): keyword[if] keyword[not] identifier[mimetype] : identifier[mimetype] = identifier[guess_type] ( identifier[content] . identifier[name] )[ literal[int] ] keyword[with] identifier[content] . identifier[open] ( literal[string] ) keyword[as] identifier[fp] : identifier[content] = identifier[fp] . identifier[read] () keyword[else] : keyword[if] identifier[isinstance] ( identifier[content] , identifier[text_type] ): identifier[content] = identifier[content] . identifier[encode] ( literal[string] ) keyword[return] literal[string] . identifier[format] ( identifier[mimetype] keyword[or] literal[string] , identifier[b64encode] ( identifier[content] ). identifier[decode] ())
def data_url(content, mimetype=None): """
    Returns content encoded as base64 Data URI.

    :param content: bytes or str or Path
    :param mimetype: mimetype for the content
    :return: str object (consisting only of ASCII, though)

    .. seealso:: https://en.wikipedia.org/wiki/Data_URI_scheme
    """ if isinstance(content, pathlib.Path): if not mimetype: mimetype = guess_type(content.name)[0] # depends on [control=['if'], data=[]] with content.open('rb') as fp: content = fp.read() # depends on [control=['with'], data=['fp']] # depends on [control=['if'], data=[]] elif isinstance(content, text_type): content = content.encode('utf8') # depends on [control=['if'], data=[]] return 'data:{0};base64,{1}'.format(mimetype or 'application/octet-stream', b64encode(content).decode())
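A quick usage sketch for data_url, assuming the function above is in scope together with its helpers (pathlib, mimetypes.guess_type, base64.b64encode, and text_type bound to str):

# str input is encoded as UTF-8; an explicit mimetype overrides the default.
print(data_url("hi", mimetype="text/plain"))   # data:text/plain;base64,aGk=

# Path input reads the file bytes and guesses the mimetype from the name;
# logo.png is a hypothetical file.
import pathlib
print(data_url(pathlib.Path("logo.png")))      # data:image/png;base64,...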
def methods(self, *args, **kwds): """ Request info of callable remote methods. Arguments for :meth:`call` except for `name` can be applied to this function too. """ self.callmanager.methods(self, *args, **kwds)
def function[methods, parameter[self]]: constant[ Request info of callable remote methods. Arguments for :meth:`call` except for `name` can be applied to this function too. ] call[name[self].callmanager.methods, parameter[name[self], <ast.Starred object at 0x7da2044c2a40>]]
keyword[def] identifier[methods] ( identifier[self] ,* identifier[args] ,** identifier[kwds] ): literal[string] identifier[self] . identifier[callmanager] . identifier[methods] ( identifier[self] ,* identifier[args] ,** identifier[kwds] )
def methods(self, *args, **kwds): """ Request info of callable remote methods. Arguments for :meth:`call` except for `name` can be applied to this function too. """ self.callmanager.methods(self, *args, **kwds)
def write(self, filename):
        """Write a sparse matrix, along with row and column name files"""
        if file_exists(filename):
            return filename
        out_files = [filename, filename + ".rownames", filename + ".colnames"]
        with file_transaction(out_files) as tx_out_files:
            with open(tx_out_files[0], "wb") as out_handle:
                scipy.io.mmwrite(out_handle, scipy.sparse.csr_matrix(self.matrix))
            pd.Series(self.rownames).to_csv(tx_out_files[1], index=False)
            pd.Series(self.colnames).to_csv(tx_out_files[2], index=False)
        return filename
def function[write, parameter[self, filename]]: constant[read a sparse matrix, loading row and column name files] if call[name[file_exists], parameter[name[filename]]] begin[:] return[name[filename]] variable[out_files] assign[=] list[[<ast.Name object at 0x7da1b1896a10>, <ast.BinOp object at 0x7da1b18946a0>, <ast.BinOp object at 0x7da1b18961a0>]] with call[name[file_transaction], parameter[name[out_files]]] begin[:] with call[name[open], parameter[call[name[tx_out_files]][constant[0]], constant[wb]]] begin[:] call[name[scipy].io.mmwrite, parameter[name[out_handle], call[name[scipy].sparse.csr_matrix, parameter[name[self].matrix]]]] call[call[name[pd].Series, parameter[name[self].rownames]].to_csv, parameter[call[name[tx_out_files]][constant[1]]]] call[call[name[pd].Series, parameter[name[self].colnames]].to_csv, parameter[call[name[tx_out_files]][constant[2]]]] return[name[filename]]
keyword[def] identifier[write] ( identifier[self] , identifier[filename] ): literal[string] keyword[if] identifier[file_exists] ( identifier[filename] ): keyword[return] identifier[filename] identifier[out_files] =[ identifier[filename] , identifier[filename] + literal[string] , identifier[filename] + literal[string] ] keyword[with] identifier[file_transaction] ( identifier[out_files] ) keyword[as] identifier[tx_out_files] : keyword[with] identifier[open] ( identifier[tx_out_files] [ literal[int] ], literal[string] ) keyword[as] identifier[out_handle] : identifier[scipy] . identifier[io] . identifier[mmwrite] ( identifier[out_handle] , identifier[scipy] . identifier[sparse] . identifier[csr_matrix] ( identifier[self] . identifier[matrix] )) identifier[pd] . identifier[Series] ( identifier[self] . identifier[rownames] ). identifier[to_csv] ( identifier[tx_out_files] [ literal[int] ], identifier[index] = keyword[False] ) identifier[pd] . identifier[Series] ( identifier[self] . identifier[colnames] ). identifier[to_csv] ( identifier[tx_out_files] [ literal[int] ], identifier[index] = keyword[False] ) keyword[return] identifier[filename]
def write(self, filename): """Write a sparse matrix, along with row and column name files""" if file_exists(filename): return filename # depends on [control=['if'], data=[]] out_files = [filename, filename + '.rownames', filename + '.colnames'] with file_transaction(out_files) as tx_out_files: with open(tx_out_files[0], 'wb') as out_handle: scipy.io.mmwrite(out_handle, scipy.sparse.csr_matrix(self.matrix)) # depends on [control=['with'], data=['out_handle']] pd.Series(self.rownames).to_csv(tx_out_files[1], index=False) pd.Series(self.colnames).to_csv(tx_out_files[2], index=False) # depends on [control=['with'], data=['tx_out_files']] return filename
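The same three-file layout can be produced without the file_transaction helper; this standalone sketch (file names are hypothetical) mirrors what write() leaves on disk:

import numpy as np
import pandas as pd
import scipy.io
import scipy.sparse

matrix = scipy.sparse.csr_matrix(np.eye(3))
scipy.io.mmwrite("counts.mtx", matrix)  # Matrix Market sparse file
pd.Series(["gene_a", "gene_b", "gene_c"]).to_csv("counts.mtx.rownames", index=False)
pd.Series(["cell_1", "cell_2", "cell_3"]).to_csv("counts.mtx.colnames", index=False)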
def ls(self, glob_str): """ Return just the filenames that match `glob_str` inside the store directory. :param str glob_str: A glob string, i.e. 'state_*' :return: list of matched keys """ path = os.path.join(self.uri, glob_str) return [os.path.split(s)[1] for s in glob.glob(path)]
def function[ls, parameter[self, glob_str]]: constant[ Return just the filenames that match `glob_str` inside the store directory. :param str glob_str: A glob string, i.e. 'state_*' :return: list of matched keys ] variable[path] assign[=] call[name[os].path.join, parameter[name[self].uri, name[glob_str]]] return[<ast.ListComp object at 0x7da20e956b90>]
keyword[def] identifier[ls] ( identifier[self] , identifier[glob_str] ): literal[string] identifier[path] = identifier[os] . identifier[path] . identifier[join] ( identifier[self] . identifier[uri] , identifier[glob_str] ) keyword[return] [ identifier[os] . identifier[path] . identifier[split] ( identifier[s] )[ literal[int] ] keyword[for] identifier[s] keyword[in] identifier[glob] . identifier[glob] ( identifier[path] )]
def ls(self, glob_str): """ Return just the filenames that match `glob_str` inside the store directory. :param str glob_str: A glob string, i.e. 'state_*' :return: list of matched keys """ path = os.path.join(self.uri, glob_str) return [os.path.split(s)[1] for s in glob.glob(path)]
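Stripped of the class, ls() is just a glob plus a basename split; a standalone sketch with a hypothetical store directory:

import glob
import os

uri = "/tmp/store"  # hypothetical store root
keys = [os.path.split(s)[1] for s in glob.glob(os.path.join(uri, "state_*"))]
print(keys)  # e.g. ['state_0001', 'state_0002']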
def patch_namespaced_persistent_volume_claim(self, name, namespace, body, **kwargs): # noqa: E501 """patch_namespaced_persistent_volume_claim # noqa: E501 partially update the specified PersistentVolumeClaim # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True >>> thread = api.patch_namespaced_persistent_volume_claim(name, namespace, body, async_req=True) >>> result = thread.get() :param async_req bool :param str name: name of the PersistentVolumeClaim (required) :param str namespace: object name and auth scope, such as for teams and projects (required) :param UNKNOWN_BASE_TYPE body: (required) :param str pretty: If 'true', then the output is pretty printed. :param str dry_run: When present, indicates that modifications should not be persisted. An invalid or unrecognized dryRun directive will result in an error response and no further processing of the request. Valid values are: - All: all dry run stages will be processed :return: V1PersistentVolumeClaim If the method is called asynchronously, returns the request thread. """ kwargs['_return_http_data_only'] = True if kwargs.get('async_req'): return self.patch_namespaced_persistent_volume_claim_with_http_info(name, namespace, body, **kwargs) # noqa: E501 else: (data) = self.patch_namespaced_persistent_volume_claim_with_http_info(name, namespace, body, **kwargs) # noqa: E501 return data
def function[patch_namespaced_persistent_volume_claim, parameter[self, name, namespace, body]]: constant[patch_namespaced_persistent_volume_claim # noqa: E501 partially update the specified PersistentVolumeClaim # noqa: E501 This method makes a synchronous HTTP request by default. To make an asynchronous HTTP request, please pass async_req=True >>> thread = api.patch_namespaced_persistent_volume_claim(name, namespace, body, async_req=True) >>> result = thread.get() :param async_req bool :param str name: name of the PersistentVolumeClaim (required) :param str namespace: object name and auth scope, such as for teams and projects (required) :param UNKNOWN_BASE_TYPE body: (required) :param str pretty: If 'true', then the output is pretty printed. :param str dry_run: When present, indicates that modifications should not be persisted. An invalid or unrecognized dryRun directive will result in an error response and no further processing of the request. Valid values are: - All: all dry run stages will be processed :return: V1PersistentVolumeClaim If the method is called asynchronously, returns the request thread. ] call[name[kwargs]][constant[_return_http_data_only]] assign[=] constant[True] if call[name[kwargs].get, parameter[constant[async_req]]] begin[:] return[call[name[self].patch_namespaced_persistent_volume_claim_with_http_info, parameter[name[name], name[namespace], name[body]]]]
keyword[def] identifier[patch_namespaced_persistent_volume_claim] ( identifier[self] , identifier[name] , identifier[namespace] , identifier[body] ,** identifier[kwargs] ): literal[string] identifier[kwargs] [ literal[string] ]= keyword[True] keyword[if] identifier[kwargs] . identifier[get] ( literal[string] ): keyword[return] identifier[self] . identifier[patch_namespaced_persistent_volume_claim_with_http_info] ( identifier[name] , identifier[namespace] , identifier[body] ,** identifier[kwargs] ) keyword[else] : ( identifier[data] )= identifier[self] . identifier[patch_namespaced_persistent_volume_claim_with_http_info] ( identifier[name] , identifier[namespace] , identifier[body] ,** identifier[kwargs] ) keyword[return] identifier[data]
def patch_namespaced_persistent_volume_claim(self, name, namespace, body, **kwargs): # noqa: E501 "patch_namespaced_persistent_volume_claim # noqa: E501\n\n partially update the specified PersistentVolumeClaim # noqa: E501\n This method makes a synchronous HTTP request by default. To make an\n asynchronous HTTP request, please pass async_req=True\n >>> thread = api.patch_namespaced_persistent_volume_claim(name, namespace, body, async_req=True)\n >>> result = thread.get()\n\n :param async_req bool\n :param str name: name of the PersistentVolumeClaim (required)\n :param str namespace: object name and auth scope, such as for teams and projects (required)\n :param UNKNOWN_BASE_TYPE body: (required)\n :param str pretty: If 'true', then the output is pretty printed.\n :param str dry_run: When present, indicates that modifications should not be persisted. An invalid or unrecognized dryRun directive will result in an error response and no further processing of the request. Valid values are: - All: all dry run stages will be processed\n :return: V1PersistentVolumeClaim\n If the method is called asynchronously,\n returns the request thread.\n " kwargs['_return_http_data_only'] = True if kwargs.get('async_req'): return self.patch_namespaced_persistent_volume_claim_with_http_info(name, namespace, body, **kwargs) # noqa: E501 # depends on [control=['if'], data=[]] else: data = self.patch_namespaced_persistent_volume_claim_with_http_info(name, namespace, body, **kwargs) # noqa: E501 return data
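This is the generated method from the official kubernetes Python client; a hedged usage sketch follows (the claim name, namespace, and patch body below are made up for illustration):

from kubernetes import client, config

config.load_kube_config()
v1 = client.CoreV1Api()
# Strategic-merge patch adding a label to a hypothetical claim.
body = {"metadata": {"labels": {"tier": "storage"}}}
pvc = v1.patch_namespaced_persistent_volume_claim(name="data-pvc",
                                                  namespace="default",
                                                  body=body)
print(pvc.metadata.labels)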
def load_skel(self, file_name): """ Loads an ASF file into a skeleton structure. :param file_name: The file name to load in. """ fid = open(file_name, 'r') self.read_skel(fid) fid.close() self.name = file_name
def function[load_skel, parameter[self, file_name]]: constant[ Loads an ASF file into a skeleton structure. :param file_name: The file name to load in. ] variable[fid] assign[=] call[name[open], parameter[name[file_name], constant[r]]] call[name[self].read_skel, parameter[name[fid]]] call[name[fid].close, parameter[]] name[self].name assign[=] name[file_name]
keyword[def] identifier[load_skel] ( identifier[self] , identifier[file_name] ): literal[string] identifier[fid] = identifier[open] ( identifier[file_name] , literal[string] ) identifier[self] . identifier[read_skel] ( identifier[fid] ) identifier[fid] . identifier[close] () identifier[self] . identifier[name] = identifier[file_name]
def load_skel(self, file_name): """ Loads an ASF file into a skeleton structure. :param file_name: The file name to load in. """ fid = open(file_name, 'r') self.read_skel(fid) fid.close() self.name = file_name
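load_skel() leaves the file open if read_skel() raises; a with block closes it on every exit path. A sketch of the same method written that way (identical behavior on the happy path):

def load_skel(self, file_name):
    """
    Loads an ASF file into a skeleton structure.

    :param file_name: The file name to load in.
    """
    with open(file_name, 'r') as fid:
        self.read_skel(fid)
    self.name = file_name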
def create(cls, type): """ Return the specified Filter """ if type == 0: return FilterDropShadow(id) elif type == 1: return FilterBlur(id) elif type == 2: return FilterGlow(id) elif type == 3: return FilterBevel(id) elif type == 4: return FilterGradientGlow(id) elif type == 5: return FilterConvolution(id) elif type == 6: return FilterColorMatrix(id) elif type == 7: return FilterGradientBevel(id) else: raise Exception("Unknown filter type: %d" % type)
def function[create, parameter[cls, type]]: constant[ Return the specified Filter ] if compare[name[type] equal[==] constant[0]] begin[:] return[call[name[FilterDropShadow], parameter[name[id]]]]
keyword[def] identifier[create] ( identifier[cls] , identifier[type] ): literal[string] keyword[if] identifier[type] == literal[int] : keyword[return] identifier[FilterDropShadow] ( identifier[id] ) keyword[elif] identifier[type] == literal[int] : keyword[return] identifier[FilterBlur] ( identifier[id] ) keyword[elif] identifier[type] == literal[int] : keyword[return] identifier[FilterGlow] ( identifier[id] ) keyword[elif] identifier[type] == literal[int] : keyword[return] identifier[FilterBevel] ( identifier[id] ) keyword[elif] identifier[type] == literal[int] : keyword[return] identifier[FilterGradientGlow] ( identifier[id] ) keyword[elif] identifier[type] == literal[int] : keyword[return] identifier[FilterConvolution] ( identifier[id] ) keyword[elif] identifier[type] == literal[int] : keyword[return] identifier[FilterColorMatrix] ( identifier[id] ) keyword[elif] identifier[type] == literal[int] : keyword[return] identifier[FilterGradientBevel] ( identifier[id] ) keyword[else] : keyword[raise] identifier[Exception] ( literal[string] % identifier[type] )
def create(cls, type): """ Return the specified Filter """ if type == 0: return FilterDropShadow(id) # depends on [control=['if'], data=[]] elif type == 1: return FilterBlur(id) # depends on [control=['if'], data=[]] elif type == 2: return FilterGlow(id) # depends on [control=['if'], data=[]] elif type == 3: return FilterBevel(id) # depends on [control=['if'], data=[]] elif type == 4: return FilterGradientGlow(id) # depends on [control=['if'], data=[]] elif type == 5: return FilterConvolution(id) # depends on [control=['if'], data=[]] elif type == 6: return FilterColorMatrix(id) # depends on [control=['if'], data=[]] elif type == 7: return FilterGradientBevel(id) # depends on [control=['if'], data=[]] else: raise Exception('Unknown filter type: %d' % type)
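The if/elif ladder in create() is a classic type-code factory; a dictionary dispatch is a common alternative. A sketch assuming the Filter* classes above are importable (the enclosing class name FilterFactory is hypothetical, and the id argument is kept exactly as in the original, where it resolves to the Python builtin because no local id is defined):

class FilterFactory(object):
    _FILTER_CLASSES = {
        0: FilterDropShadow, 1: FilterBlur, 2: FilterGlow, 3: FilterBevel,
        4: FilterGradientGlow, 5: FilterConvolution, 6: FilterColorMatrix,
        7: FilterGradientBevel,
    }

    @classmethod
    def create(cls, type):
        """ Return the specified Filter """
        try:
            return cls._FILTER_CLASSES[type](id)
        except KeyError:
            raise Exception("Unknown filter type: %d" % type)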
def push_pq(self, tokens):
        """ Creates a Load object, populates it with data, finds its
        Bus and adds it.
        """
        logger.debug("Pushing PQ data: %s" % tokens)

        bus = self.case.buses[tokens["bus_no"] - 1]
        bus.p_demand = tokens["p"]
        bus.q_demand = tokens["q"]
def function[push_pq, parameter[self, tokens]]: constant[ Creates and Load object, populates it with data, finds its Bus and adds it. ] call[name[logger].debug, parameter[binary_operation[constant[Pushing PQ data: %s] <ast.Mod object at 0x7da2590d6920> name[tokens]]]] variable[bus] assign[=] call[name[self].case.buses][binary_operation[call[name[tokens]][constant[bus_no]] - constant[1]]] name[bus].p_demand assign[=] call[name[tokens]][constant[p]] name[bus].q_demand assign[=] call[name[tokens]][constant[q]]
keyword[def] identifier[push_pq] ( identifier[self] , identifier[tokens] ): literal[string] identifier[logger] . identifier[debug] ( literal[string] % identifier[tokens] ) identifier[bus] = identifier[self] . identifier[case] . identifier[buses] [ identifier[tokens] [ literal[string] ]- literal[int] ] identifier[bus] . identifier[p_demand] = identifier[tokens] [ literal[string] ] identifier[bus] . identifier[q_demand] = identifier[tokens] [ literal[string] ]
def push_pq(self, tokens): """ Creates a Load object, populates it with data, finds its
        Bus and adds it.
    """ logger.debug('Pushing PQ data: %s' % tokens) bus = self.case.buses[tokens['bus_no'] - 1] bus.p_demand = tokens['p'] bus.q_demand = tokens['q']
def intersection(self, meta):
        """
        Get the intersection between the meta data given and the meta data contained within the plates.
        Since all of the streams have the same meta data keys (but differing values)
        we only need to consider the first stream.

        :param meta: The meta data to compare
        :return: A stream id with the intersection between this node's meta data and the given meta data
        :type meta: dict
        :rtype: StreamId
        """
        keys = self._streams[0].stream_id.meta_data.keys()
        return StreamId(self.node_id, dict((kk, meta[kk]) for kk in keys))
def function[intersection, parameter[self, meta]]: constant[ Get the intersection between the meta data given and the meta data contained within the plates. Since all of the streams have the same meta data keys (but differing values) we only need to consider the first stream. :param meta: The meta data to compare :return: A stream id with the intersection between this node's meta data and the given meta data :type meta: dict :rtype: StreamId ] variable[keys] assign[=] call[call[name[self]._streams][constant[0]].stream_id.meta_data.keys, parameter[]] return[call[name[StreamId], parameter[name[self].node_id, call[name[dict], parameter[<ast.Starred object at 0x7da1b23531f0>]]]]]
keyword[def] identifier[intersection] ( identifier[self] , identifier[meta] ): literal[string] identifier[keys] = identifier[self] . identifier[_streams] [ literal[int] ]. identifier[stream_id] . identifier[meta_data] . identifier[keys] () keyword[return] identifier[StreamId] ( identifier[self] . identifier[node_id] , identifier[dict] (* identifier[zip] (( identifier[kk] , identifier[meta] [ identifier[kk] ]) keyword[for] identifier[kk] keyword[in] identifier[keys] )))
def intersection(self, meta): """
    Get the intersection between the meta data given and the meta data contained within the plates.
    Since all of the streams have the same meta data keys (but differing values)
    we only need to consider the first stream.

    :param meta: The meta data to compare
    :return: A stream id with the intersection between this node's meta data and the given meta data
    :type meta: dict
    :rtype: StreamId
    """ keys = self._streams[0].stream_id.meta_data.keys() return StreamId(self.node_id, dict((kk, meta[kk]) for kk in keys))
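A minimal check of the corrected dict construction above; the original dict(*zip(...)) form raised TypeError whenever more than one meta data key was present:

keys = ["house", "wearable"]
meta = {"house": "1", "wearable": "A", "env": "kitchen"}
assert dict((kk, meta[kk]) for kk in keys) == {"house": "1", "wearable": "A"}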
def register_options(cls, register): """Register options not tied to any particular task or subsystem.""" # The bootstrap options need to be registered on the post-bootstrap Options instance, so it # won't choke on them on the command line, and also so we can access their values as regular # global-scope options, for convenience. cls.register_bootstrap_options(register) register('-x', '--time', type=bool, help='Output a timing report at the end of the run.') register('-e', '--explain', type=bool, help='Explain the execution of goals.') register('--tag', type=list, metavar='[+-]tag1,tag2,...', help="Include only targets with these tags (optional '+' prefix) or without these " "tags ('-' prefix). Useful with ::, to find subsets of targets " "(e.g., integration tests.)") # Toggles v1/v2 `Task` vs `@rule` pipelines on/off. register('--v1', advanced=True, type=bool, default=True, help='Enables execution of v1 Tasks.') register('--v2', advanced=True, type=bool, default=False, help='Enables execution of v2 @console_rules.') register('--v2-ui', default=False, type=bool, daemon=False, help='Whether to show v2 engine execution progress. ' 'This requires the --v2 flag to take effect.') loop_flag = '--loop' register(loop_flag, type=bool, help='Run v2 @console_rules continuously as file changes are detected. Requires ' '`--v2`, and is best utilized with `--v2 --no-v1`.') register('--loop-max', type=int, default=2**32, advanced=True, help='The maximum number of times to loop when `{}` is specified.'.format(loop_flag)) register('-t', '--timeout', advanced=True, type=int, metavar='<seconds>', help='Number of seconds to wait for http connections.') # TODO: After moving to the new options system these abstraction leaks can go away. register('-k', '--kill-nailguns', advanced=True, type=bool, help='Kill nailguns before exiting') register('--fail-fast', advanced=True, type=bool, recursive=True, help='Exit as quickly as possible on error, rather than attempting to continue ' 'to process the non-erroneous subset of the input.') register('--cache-key-gen-version', advanced=True, default='200', recursive=True, help='The cache key generation. Bump this to invalidate every artifact for a scope.') register('--workdir-max-build-entries', advanced=True, type=int, default=8, help='Maximum number of previous builds to keep per task target pair in workdir. ' 'If set, minimum 2 will always be kept to support incremental compilation.') register('--max-subprocess-args', advanced=True, type=int, default=100, recursive=True, help='Used to limit the number of arguments passed to some subprocesses by breaking ' 'the command up into multiple invocations.') register('--lock', advanced=True, type=bool, default=True, help='Use a global lock to exclude other versions of pants from running during ' 'critical operations.')
def function[register_options, parameter[cls, register]]: constant[Register options not tied to any particular task or subsystem.] call[name[cls].register_bootstrap_options, parameter[name[register]]] call[name[register], parameter[constant[-x], constant[--time]]] call[name[register], parameter[constant[-e], constant[--explain]]] call[name[register], parameter[constant[--tag]]] call[name[register], parameter[constant[--v1]]] call[name[register], parameter[constant[--v2]]] call[name[register], parameter[constant[--v2-ui]]] variable[loop_flag] assign[=] constant[--loop] call[name[register], parameter[name[loop_flag]]] call[name[register], parameter[constant[--loop-max]]] call[name[register], parameter[constant[-t], constant[--timeout]]] call[name[register], parameter[constant[-k], constant[--kill-nailguns]]] call[name[register], parameter[constant[--fail-fast]]] call[name[register], parameter[constant[--cache-key-gen-version]]] call[name[register], parameter[constant[--workdir-max-build-entries]]] call[name[register], parameter[constant[--max-subprocess-args]]] call[name[register], parameter[constant[--lock]]]
keyword[def] identifier[register_options] ( identifier[cls] , identifier[register] ): literal[string] identifier[cls] . identifier[register_bootstrap_options] ( identifier[register] ) identifier[register] ( literal[string] , literal[string] , identifier[type] = identifier[bool] , identifier[help] = literal[string] ) identifier[register] ( literal[string] , literal[string] , identifier[type] = identifier[bool] , identifier[help] = literal[string] ) identifier[register] ( literal[string] , identifier[type] = identifier[list] , identifier[metavar] = literal[string] , identifier[help] = literal[string] literal[string] literal[string] ) identifier[register] ( literal[string] , identifier[advanced] = keyword[True] , identifier[type] = identifier[bool] , identifier[default] = keyword[True] , identifier[help] = literal[string] ) identifier[register] ( literal[string] , identifier[advanced] = keyword[True] , identifier[type] = identifier[bool] , identifier[default] = keyword[False] , identifier[help] = literal[string] ) identifier[register] ( literal[string] , identifier[default] = keyword[False] , identifier[type] = identifier[bool] , identifier[daemon] = keyword[False] , identifier[help] = literal[string] literal[string] ) identifier[loop_flag] = literal[string] identifier[register] ( identifier[loop_flag] , identifier[type] = identifier[bool] , identifier[help] = literal[string] literal[string] ) identifier[register] ( literal[string] , identifier[type] = identifier[int] , identifier[default] = literal[int] ** literal[int] , identifier[advanced] = keyword[True] , identifier[help] = literal[string] . identifier[format] ( identifier[loop_flag] )) identifier[register] ( literal[string] , literal[string] , identifier[advanced] = keyword[True] , identifier[type] = identifier[int] , identifier[metavar] = literal[string] , identifier[help] = literal[string] ) identifier[register] ( literal[string] , literal[string] , identifier[advanced] = keyword[True] , identifier[type] = identifier[bool] , identifier[help] = literal[string] ) identifier[register] ( literal[string] , identifier[advanced] = keyword[True] , identifier[type] = identifier[bool] , identifier[recursive] = keyword[True] , identifier[help] = literal[string] literal[string] ) identifier[register] ( literal[string] , identifier[advanced] = keyword[True] , identifier[default] = literal[string] , identifier[recursive] = keyword[True] , identifier[help] = literal[string] ) identifier[register] ( literal[string] , identifier[advanced] = keyword[True] , identifier[type] = identifier[int] , identifier[default] = literal[int] , identifier[help] = literal[string] literal[string] ) identifier[register] ( literal[string] , identifier[advanced] = keyword[True] , identifier[type] = identifier[int] , identifier[default] = literal[int] , identifier[recursive] = keyword[True] , identifier[help] = literal[string] literal[string] ) identifier[register] ( literal[string] , identifier[advanced] = keyword[True] , identifier[type] = identifier[bool] , identifier[default] = keyword[True] , identifier[help] = literal[string] literal[string] )
def register_options(cls, register): """Register options not tied to any particular task or subsystem.""" # The bootstrap options need to be registered on the post-bootstrap Options instance, so it # won't choke on them on the command line, and also so we can access their values as regular # global-scope options, for convenience. cls.register_bootstrap_options(register) register('-x', '--time', type=bool, help='Output a timing report at the end of the run.') register('-e', '--explain', type=bool, help='Explain the execution of goals.') register('--tag', type=list, metavar='[+-]tag1,tag2,...', help="Include only targets with these tags (optional '+' prefix) or without these tags ('-' prefix). Useful with ::, to find subsets of targets (e.g., integration tests.)") # Toggles v1/v2 `Task` vs `@rule` pipelines on/off. register('--v1', advanced=True, type=bool, default=True, help='Enables execution of v1 Tasks.') register('--v2', advanced=True, type=bool, default=False, help='Enables execution of v2 @console_rules.') register('--v2-ui', default=False, type=bool, daemon=False, help='Whether to show v2 engine execution progress. This requires the --v2 flag to take effect.') loop_flag = '--loop' register(loop_flag, type=bool, help='Run v2 @console_rules continuously as file changes are detected. Requires `--v2`, and is best utilized with `--v2 --no-v1`.') register('--loop-max', type=int, default=2 ** 32, advanced=True, help='The maximum number of times to loop when `{}` is specified.'.format(loop_flag)) register('-t', '--timeout', advanced=True, type=int, metavar='<seconds>', help='Number of seconds to wait for http connections.') # TODO: After moving to the new options system these abstraction leaks can go away. register('-k', '--kill-nailguns', advanced=True, type=bool, help='Kill nailguns before exiting') register('--fail-fast', advanced=True, type=bool, recursive=True, help='Exit as quickly as possible on error, rather than attempting to continue to process the non-erroneous subset of the input.') register('--cache-key-gen-version', advanced=True, default='200', recursive=True, help='The cache key generation. Bump this to invalidate every artifact for a scope.') register('--workdir-max-build-entries', advanced=True, type=int, default=8, help='Maximum number of previous builds to keep per task target pair in workdir. If set, minimum 2 will always be kept to support incremental compilation.') register('--max-subprocess-args', advanced=True, type=int, default=100, recursive=True, help='Used to limit the number of arguments passed to some subprocesses by breaking the command up into multiple invocations.') register('--lock', advanced=True, type=bool, default=True, help='Use a global lock to exclude other versions of pants from running during critical operations.')
def _init_vertical(self): """Create and grid the widgets for a vertical orientation.""" self.scale.grid(row=0, sticky='ns') # showvalue padx1, padx2 = 0, 0 pady1, pady2 = 0, 0 if self._showvalue: self.label.configure(text=self._formatter.format(self._start)) if self._labelpos == 'w': self.label.place(in_=self.scale, bordermode='outside', relx=0, y=0, anchor='e') self.update_idletasks() padx1 = self.label.winfo_width() self.label.configure(text=self._formatter.format(self._start + self._extent)) self.update_idletasks() padx1 = max(self.label.winfo_width(), padx1) elif self._labelpos == 'e': self.label.place(in_=self.scale, bordermode='outside', relx=1, y=1, anchor='w') self.update_idletasks() padx2 = self.label.winfo_width() self.label.configure(text=self._formatter.format(self._start + self._extent)) self.update_idletasks() padx2 = max(self.label.winfo_width(), padx2) else: # self._labelpos in ['n', 's']: if self._labelpos == 'n': rely = 0 anchor = 's' pady1 = self.label.winfo_reqheight() else: rely = 1 anchor = 'n' pady2 = self.label.winfo_reqheight() self.label.place(in_=self.scale, bordermode='outside', relx=0.5, rely=rely, anchor=anchor) self.update_idletasks() w = self.label.winfo_width() self.label.configure(text=self._formatter.format(self._start + self._extent)) self.update_idletasks() w = max(w, self.label.winfo_width()) ws = self.scale.winfo_reqwidth() if w > ws: padx = (w - ws) // 2 if self._tickinterval: if self._tickpos == 'e': padx1 = padx else: # self._tickpos == 'w' padx2 = padx else: padx1, padx2 = padx, padx # ticks padx1_2, padx2_2 = 0, 0 if self._tickinterval: nb_interv = int(self._extent / self._tickinterval) if self._tickpos == 'w': for i in range(nb_interv + 1): tick = self._start + i * self._tickinterval self.ticks.append(tick) self.ticklabels.append(ttk.Label(self, style=self._style_name + ".TLabel", text=self._formatter.format(tick))) self.ticklabels[i].place(in_=self.scale, bordermode='outside', x=-1 - padx1, y=0, anchor='e') self.update_idletasks() padx1_2 = max(self.ticklabels[i].winfo_width(), padx1_2) else: # self._tickpos == 'e' w = self.scale.winfo_reqwidth() for i in range(nb_interv + 1): tick = self._start + i * self._tickinterval self.ticks.append(tick) self.ticklabels.append(ttk.Label(self, style=self._style_name + ".TLabel", text=self._formatter.format(tick))) self.ticklabels[i].place(in_=self.scale, bordermode='outside', x=w + 1 + padx2, y=0, anchor='w') self.update_idletasks() padx2_2 = max(self.ticklabels[i].winfo_width(), padx2_2) self.scale.grid_configure(padx=(padx1 + padx1_2 + 1, padx2 + padx2_2 + 1), pady=(pady1, pady2))
def function[_init_vertical, parameter[self]]: constant[Create and grid the widgets for a vertical orientation.] call[name[self].scale.grid, parameter[]] <ast.Tuple object at 0x7da1b2290880> assign[=] tuple[[<ast.Constant object at 0x7da1b2290940>, <ast.Constant object at 0x7da1b2290970>]] <ast.Tuple object at 0x7da1b22909d0> assign[=] tuple[[<ast.Constant object at 0x7da1b2290a90>, <ast.Constant object at 0x7da1b2290ac0>]] if name[self]._showvalue begin[:] call[name[self].label.configure, parameter[]] if compare[name[self]._labelpos equal[==] constant[w]] begin[:] call[name[self].label.place, parameter[]] call[name[self].update_idletasks, parameter[]] variable[padx1] assign[=] call[name[self].label.winfo_width, parameter[]] call[name[self].label.configure, parameter[]] call[name[self].update_idletasks, parameter[]] variable[padx1] assign[=] call[name[max], parameter[call[name[self].label.winfo_width, parameter[]], name[padx1]]] <ast.Tuple object at 0x7da1b2293e20> assign[=] tuple[[<ast.Constant object at 0x7da1b2293ee0>, <ast.Constant object at 0x7da1b2293f10>]] if name[self]._tickinterval begin[:] variable[nb_interv] assign[=] call[name[int], parameter[binary_operation[name[self]._extent / name[self]._tickinterval]]] if compare[name[self]._tickpos equal[==] constant[w]] begin[:] for taget[name[i]] in starred[call[name[range], parameter[binary_operation[name[nb_interv] + constant[1]]]]] begin[:] variable[tick] assign[=] binary_operation[name[self]._start + binary_operation[name[i] * name[self]._tickinterval]] call[name[self].ticks.append, parameter[name[tick]]] call[name[self].ticklabels.append, parameter[call[name[ttk].Label, parameter[name[self]]]]] call[call[name[self].ticklabels][name[i]].place, parameter[]] call[name[self].update_idletasks, parameter[]] variable[padx1_2] assign[=] call[name[max], parameter[call[call[name[self].ticklabels][name[i]].winfo_width, parameter[]], name[padx1_2]]] call[name[self].scale.grid_configure, parameter[]]
keyword[def] identifier[_init_vertical] ( identifier[self] ): literal[string] identifier[self] . identifier[scale] . identifier[grid] ( identifier[row] = literal[int] , identifier[sticky] = literal[string] ) identifier[padx1] , identifier[padx2] = literal[int] , literal[int] identifier[pady1] , identifier[pady2] = literal[int] , literal[int] keyword[if] identifier[self] . identifier[_showvalue] : identifier[self] . identifier[label] . identifier[configure] ( identifier[text] = identifier[self] . identifier[_formatter] . identifier[format] ( identifier[self] . identifier[_start] )) keyword[if] identifier[self] . identifier[_labelpos] == literal[string] : identifier[self] . identifier[label] . identifier[place] ( identifier[in_] = identifier[self] . identifier[scale] , identifier[bordermode] = literal[string] , identifier[relx] = literal[int] , identifier[y] = literal[int] , identifier[anchor] = literal[string] ) identifier[self] . identifier[update_idletasks] () identifier[padx1] = identifier[self] . identifier[label] . identifier[winfo_width] () identifier[self] . identifier[label] . identifier[configure] ( identifier[text] = identifier[self] . identifier[_formatter] . identifier[format] ( identifier[self] . identifier[_start] + identifier[self] . identifier[_extent] )) identifier[self] . identifier[update_idletasks] () identifier[padx1] = identifier[max] ( identifier[self] . identifier[label] . identifier[winfo_width] (), identifier[padx1] ) keyword[elif] identifier[self] . identifier[_labelpos] == literal[string] : identifier[self] . identifier[label] . identifier[place] ( identifier[in_] = identifier[self] . identifier[scale] , identifier[bordermode] = literal[string] , identifier[relx] = literal[int] , identifier[y] = literal[int] , identifier[anchor] = literal[string] ) identifier[self] . identifier[update_idletasks] () identifier[padx2] = identifier[self] . identifier[label] . identifier[winfo_width] () identifier[self] . identifier[label] . identifier[configure] ( identifier[text] = identifier[self] . identifier[_formatter] . identifier[format] ( identifier[self] . identifier[_start] + identifier[self] . identifier[_extent] )) identifier[self] . identifier[update_idletasks] () identifier[padx2] = identifier[max] ( identifier[self] . identifier[label] . identifier[winfo_width] (), identifier[padx2] ) keyword[else] : keyword[if] identifier[self] . identifier[_labelpos] == literal[string] : identifier[rely] = literal[int] identifier[anchor] = literal[string] identifier[pady1] = identifier[self] . identifier[label] . identifier[winfo_reqheight] () keyword[else] : identifier[rely] = literal[int] identifier[anchor] = literal[string] identifier[pady2] = identifier[self] . identifier[label] . identifier[winfo_reqheight] () identifier[self] . identifier[label] . identifier[place] ( identifier[in_] = identifier[self] . identifier[scale] , identifier[bordermode] = literal[string] , identifier[relx] = literal[int] , identifier[rely] = identifier[rely] , identifier[anchor] = identifier[anchor] ) identifier[self] . identifier[update_idletasks] () identifier[w] = identifier[self] . identifier[label] . identifier[winfo_width] () identifier[self] . identifier[label] . identifier[configure] ( identifier[text] = identifier[self] . identifier[_formatter] . identifier[format] ( identifier[self] . identifier[_start] + identifier[self] . identifier[_extent] )) identifier[self] . identifier[update_idletasks] () identifier[w] = identifier[max] ( identifier[w] , identifier[self] . identifier[label] . 
identifier[winfo_width] ()) identifier[ws] = identifier[self] . identifier[scale] . identifier[winfo_reqwidth] () keyword[if] identifier[w] > identifier[ws] : identifier[padx] =( identifier[w] - identifier[ws] )// literal[int] keyword[if] identifier[self] . identifier[_tickinterval] : keyword[if] identifier[self] . identifier[_tickpos] == literal[string] : identifier[padx1] = identifier[padx] keyword[else] : identifier[padx2] = identifier[padx] keyword[else] : identifier[padx1] , identifier[padx2] = identifier[padx] , identifier[padx] identifier[padx1_2] , identifier[padx2_2] = literal[int] , literal[int] keyword[if] identifier[self] . identifier[_tickinterval] : identifier[nb_interv] = identifier[int] ( identifier[self] . identifier[_extent] / identifier[self] . identifier[_tickinterval] ) keyword[if] identifier[self] . identifier[_tickpos] == literal[string] : keyword[for] identifier[i] keyword[in] identifier[range] ( identifier[nb_interv] + literal[int] ): identifier[tick] = identifier[self] . identifier[_start] + identifier[i] * identifier[self] . identifier[_tickinterval] identifier[self] . identifier[ticks] . identifier[append] ( identifier[tick] ) identifier[self] . identifier[ticklabels] . identifier[append] ( identifier[ttk] . identifier[Label] ( identifier[self] , identifier[style] = identifier[self] . identifier[_style_name] + literal[string] , identifier[text] = identifier[self] . identifier[_formatter] . identifier[format] ( identifier[tick] ))) identifier[self] . identifier[ticklabels] [ identifier[i] ]. identifier[place] ( identifier[in_] = identifier[self] . identifier[scale] , identifier[bordermode] = literal[string] , identifier[x] =- literal[int] - identifier[padx1] , identifier[y] = literal[int] , identifier[anchor] = literal[string] ) identifier[self] . identifier[update_idletasks] () identifier[padx1_2] = identifier[max] ( identifier[self] . identifier[ticklabels] [ identifier[i] ]. identifier[winfo_width] (), identifier[padx1_2] ) keyword[else] : identifier[w] = identifier[self] . identifier[scale] . identifier[winfo_reqwidth] () keyword[for] identifier[i] keyword[in] identifier[range] ( identifier[nb_interv] + literal[int] ): identifier[tick] = identifier[self] . identifier[_start] + identifier[i] * identifier[self] . identifier[_tickinterval] identifier[self] . identifier[ticks] . identifier[append] ( identifier[tick] ) identifier[self] . identifier[ticklabels] . identifier[append] ( identifier[ttk] . identifier[Label] ( identifier[self] , identifier[style] = identifier[self] . identifier[_style_name] + literal[string] , identifier[text] = identifier[self] . identifier[_formatter] . identifier[format] ( identifier[tick] ))) identifier[self] . identifier[ticklabels] [ identifier[i] ]. identifier[place] ( identifier[in_] = identifier[self] . identifier[scale] , identifier[bordermode] = literal[string] , identifier[x] = identifier[w] + literal[int] + identifier[padx2] , identifier[y] = literal[int] , identifier[anchor] = literal[string] ) identifier[self] . identifier[update_idletasks] () identifier[padx2_2] = identifier[max] ( identifier[self] . identifier[ticklabels] [ identifier[i] ]. identifier[winfo_width] (), identifier[padx2_2] ) identifier[self] . identifier[scale] . identifier[grid_configure] ( identifier[padx] =( identifier[padx1] + identifier[padx1_2] + literal[int] , identifier[padx2] + identifier[padx2_2] + literal[int] ), identifier[pady] =( identifier[pady1] , identifier[pady2] ))
def _init_vertical(self): """Create and grid the widgets for a vertical orientation.""" self.scale.grid(row=0, sticky='ns') # showvalue (padx1, padx2) = (0, 0) (pady1, pady2) = (0, 0) if self._showvalue: self.label.configure(text=self._formatter.format(self._start)) if self._labelpos == 'w': self.label.place(in_=self.scale, bordermode='outside', relx=0, y=0, anchor='e') self.update_idletasks() padx1 = self.label.winfo_width() self.label.configure(text=self._formatter.format(self._start + self._extent)) self.update_idletasks() padx1 = max(self.label.winfo_width(), padx1) # depends on [control=['if'], data=[]] elif self._labelpos == 'e': self.label.place(in_=self.scale, bordermode='outside', relx=1, y=1, anchor='w') self.update_idletasks() padx2 = self.label.winfo_width() self.label.configure(text=self._formatter.format(self._start + self._extent)) self.update_idletasks() padx2 = max(self.label.winfo_width(), padx2) # depends on [control=['if'], data=[]] else: # self._labelpos in ['n', 's']: if self._labelpos == 'n': rely = 0 anchor = 's' pady1 = self.label.winfo_reqheight() # depends on [control=['if'], data=[]] else: rely = 1 anchor = 'n' pady2 = self.label.winfo_reqheight() self.label.place(in_=self.scale, bordermode='outside', relx=0.5, rely=rely, anchor=anchor) self.update_idletasks() w = self.label.winfo_width() self.label.configure(text=self._formatter.format(self._start + self._extent)) self.update_idletasks() w = max(w, self.label.winfo_width()) ws = self.scale.winfo_reqwidth() if w > ws: padx = (w - ws) // 2 if self._tickinterval: if self._tickpos == 'e': padx1 = padx # depends on [control=['if'], data=[]] else: # self._tickpos == 'w' padx2 = padx # depends on [control=['if'], data=[]] else: (padx1, padx2) = (padx, padx) # depends on [control=['if'], data=['w', 'ws']] # depends on [control=['if'], data=[]] # ticks (padx1_2, padx2_2) = (0, 0) if self._tickinterval: nb_interv = int(self._extent / self._tickinterval) if self._tickpos == 'w': for i in range(nb_interv + 1): tick = self._start + i * self._tickinterval self.ticks.append(tick) self.ticklabels.append(ttk.Label(self, style=self._style_name + '.TLabel', text=self._formatter.format(tick))) self.ticklabels[i].place(in_=self.scale, bordermode='outside', x=-1 - padx1, y=0, anchor='e') self.update_idletasks() padx1_2 = max(self.ticklabels[i].winfo_width(), padx1_2) # depends on [control=['for'], data=['i']] # depends on [control=['if'], data=[]] else: # self._tickpos == 'e' w = self.scale.winfo_reqwidth() for i in range(nb_interv + 1): tick = self._start + i * self._tickinterval self.ticks.append(tick) self.ticklabels.append(ttk.Label(self, style=self._style_name + '.TLabel', text=self._formatter.format(tick))) self.ticklabels[i].place(in_=self.scale, bordermode='outside', x=w + 1 + padx2, y=0, anchor='w') self.update_idletasks() padx2_2 = max(self.ticklabels[i].winfo_width(), padx2_2) # depends on [control=['for'], data=['i']] # depends on [control=['if'], data=[]] self.scale.grid_configure(padx=(padx1 + padx1_2 + 1, padx2 + padx2_2 + 1), pady=(pady1, pady2))
def parse_seconds(value): ''' Parse string into Seconds instances. Handled formats: HH:MM:SS HH:MM SS ''' svalue = str(value) colons = svalue.count(':') if colons == 2: hours, minutes, seconds = [int(v) for v in svalue.split(':')] elif colons == 1: hours, minutes = [int(v) for v in svalue.split(':')] seconds = 0 elif colons == 0: hours = 0 minutes = 0 seconds = int(svalue) else: raise ValueError('Must be in seconds or HH:MM:SS format') return Seconds.from_hms(hours, minutes, seconds)
def function[parse_seconds, parameter[value]]: constant[ Parse string into Seconds instances. Handled formats: HH:MM:SS HH:MM SS ] variable[svalue] assign[=] call[name[str], parameter[name[value]]] variable[colons] assign[=] call[name[svalue].count, parameter[constant[:]]] if compare[name[colons] equal[==] constant[2]] begin[:] <ast.Tuple object at 0x7da18f58d4b0> assign[=] <ast.ListComp object at 0x7da18f58f490> return[call[name[Seconds].from_hms, parameter[name[hours], name[minutes], name[seconds]]]]
keyword[def] identifier[parse_seconds] ( identifier[value] ): literal[string] identifier[svalue] = identifier[str] ( identifier[value] ) identifier[colons] = identifier[svalue] . identifier[count] ( literal[string] ) keyword[if] identifier[colons] == literal[int] : identifier[hours] , identifier[minutes] , identifier[seconds] =[ identifier[int] ( identifier[v] ) keyword[for] identifier[v] keyword[in] identifier[svalue] . identifier[split] ( literal[string] )] keyword[elif] identifier[colons] == literal[int] : identifier[hours] , identifier[minutes] =[ identifier[int] ( identifier[v] ) keyword[for] identifier[v] keyword[in] identifier[svalue] . identifier[split] ( literal[string] )] identifier[seconds] = literal[int] keyword[elif] identifier[colons] == literal[int] : identifier[hours] = literal[int] identifier[minutes] = literal[int] identifier[seconds] = identifier[int] ( identifier[svalue] ) keyword[else] : keyword[raise] identifier[ValueError] ( literal[string] ) keyword[return] identifier[Seconds] . identifier[from_hms] ( identifier[hours] , identifier[minutes] , identifier[seconds] )
def parse_seconds(value): """ Parse string into Seconds instances. Handled formats: HH:MM:SS HH:MM SS """ svalue = str(value) colons = svalue.count(':') if colons == 2: (hours, minutes, seconds) = [int(v) for v in svalue.split(':')] # depends on [control=['if'], data=[]] elif colons == 1: (hours, minutes) = [int(v) for v in svalue.split(':')] seconds = 0 # depends on [control=['if'], data=[]] elif colons == 0: hours = 0 minutes = 0 seconds = int(svalue) # depends on [control=['if'], data=[]] else: raise ValueError('Must be in seconds or HH:MM:SS format') return Seconds.from_hms(hours, minutes, seconds)
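A quick usage sketch for the parse_seconds record above. The Seconds class it returns is not part of this record, so the stand-in below is an assumption: a minimal holder whose from_hms does the usual sexagesimal conversion.

# Hypothetical stand-in for the Seconds class used by parse_seconds above
# (the real class is not shown in this record).
class Seconds:
    def __init__(self, total):
        self.total = total

    @classmethod
    def from_hms(cls, hours, minutes, seconds):
        # Fold H:M:S into a total-seconds count.
        return cls(hours * 3600 + minutes * 60 + seconds)

print(parse_seconds('01:30:15').total)  # 5415
print(parse_seconds('90:00').total)     # 324000 (90 hours)
print(parse_seconds(42).total)          # 42 (non-strings are str()-ed first)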
def write_config_file(self, f, comments): """This method writes a sample file, with attributes, descriptions, sample values, required flags, using the configuration object properties. """ if self.conf_hidden: return False if comments: f.write("\n") f.write("# Attribute (") f.write(str(self.e_type.__name__)) f.write(") : ") f.write(self._name.upper()) f.write("\n") if self._desc and self._desc != argparse.SUPPRESS: f.write("# Description : ") for i in self._desc.split('\n'): f.write("# ") f.write(i) f.write("\n") f.write("\n") if not self.conf_required: f.write(";") f.write(self._name) f.write("=") if self.default is not None and not self.hidden: f.write(str(self.default)) f.write("\n")
def function[write_config_file, parameter[self, f, comments]]: constant[This method writes a sample file, with attributes, descriptions, sample values, required flags, using the configuration object properties. ] if name[self].conf_hidden begin[:] return[constant[False]] if name[comments] begin[:] call[name[f].write, parameter[constant[
]]] call[name[f].write, parameter[constant[# Attribute (]]] call[name[f].write, parameter[call[name[str], parameter[name[self].e_type.__name__]]]] call[name[f].write, parameter[constant[) : ]]] call[name[f].write, parameter[call[name[self]._name.upper, parameter[]]]] call[name[f].write, parameter[constant[
]]] if <ast.BoolOp object at 0x7da18f09cfa0> begin[:] call[name[f].write, parameter[constant[# Description : ]]] for taget[name[i]] in starred[call[name[self]._desc.split, parameter[constant[
]]]] begin[:] call[name[f].write, parameter[constant[# ]]] call[name[f].write, parameter[name[i]]] call[name[f].write, parameter[constant[
]]] call[name[f].write, parameter[constant[
]]] if <ast.UnaryOp object at 0x7da18f09ce50> begin[:] call[name[f].write, parameter[constant[;]]] call[name[f].write, parameter[name[self]._name]] call[name[f].write, parameter[constant[=]]] if <ast.BoolOp object at 0x7da18dc98be0> begin[:] call[name[f].write, parameter[call[name[str], parameter[name[self].default]]]] call[name[f].write, parameter[constant[
]]]
keyword[def] identifier[write_config_file] ( identifier[self] , identifier[f] , identifier[comments] ): literal[string] keyword[if] identifier[self] . identifier[conf_hidden] : keyword[return] keyword[False] keyword[if] identifier[comments] : identifier[f] . identifier[write] ( literal[string] ) identifier[f] . identifier[write] ( literal[string] ) identifier[f] . identifier[write] ( identifier[str] ( identifier[self] . identifier[e_type] . identifier[__name__] )) identifier[f] . identifier[write] ( literal[string] ) identifier[f] . identifier[write] ( identifier[self] . identifier[_name] . identifier[upper] ()) identifier[f] . identifier[write] ( literal[string] ) keyword[if] identifier[self] . identifier[_desc] keyword[and] identifier[self] . identifier[_desc] != identifier[argparse] . identifier[SUPPRESS] : identifier[f] . identifier[write] ( literal[string] ) keyword[for] identifier[i] keyword[in] identifier[self] . identifier[_desc] . identifier[split] ( literal[string] ): identifier[f] . identifier[write] ( literal[string] ) identifier[f] . identifier[write] ( identifier[i] ) identifier[f] . identifier[write] ( literal[string] ) identifier[f] . identifier[write] ( literal[string] ) keyword[if] keyword[not] identifier[self] . identifier[conf_required] : identifier[f] . identifier[write] ( literal[string] ) identifier[f] . identifier[write] ( identifier[self] . identifier[_name] ) identifier[f] . identifier[write] ( literal[string] ) keyword[if] identifier[self] . identifier[default] keyword[is] keyword[not] keyword[None] keyword[and] keyword[not] identifier[self] . identifier[hidden] : identifier[f] . identifier[write] ( identifier[str] ( identifier[self] . identifier[default] )) identifier[f] . identifier[write] ( literal[string] )
def write_config_file(self, f, comments): """This method writes a sample file, with attributes, descriptions, sample values, required flags, using the configuration object properties. """ if self.conf_hidden: return False # depends on [control=['if'], data=[]] if comments: f.write('\n') f.write('# Attribute (') f.write(str(self.e_type.__name__)) f.write(') : ') f.write(self._name.upper()) f.write('\n') if self._desc and self._desc != argparse.SUPPRESS: f.write('# Description : ') for i in self._desc.split('\n'): f.write('# ') f.write(i) f.write('\n') # depends on [control=['for'], data=['i']] f.write('\n') # depends on [control=['if'], data=[]] # depends on [control=['if'], data=[]] if not self.conf_required: f.write(';') # depends on [control=['if'], data=[]] f.write(self._name) f.write('=') if self.default is not None and (not self.hidden): f.write(str(self.default)) # depends on [control=['if'], data=[]] f.write('\n')
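A hypothetical driver for write_config_file above. The attribute object it expects (e_type, _name, _desc, the conf_* flags, default, hidden) is not shown in this record, so the stub below is an assumption that only illustrates the shape of the emitted sample file.

import argparse
import io

class OptionStub:
    """Bare-bones stand-in for the configuration attribute object."""
    e_type = int
    _name = 'timeout'
    _desc = 'Socket timeout in seconds'
    conf_hidden = False
    conf_required = False
    default = 30
    hidden = False
    write_config_file = write_config_file  # borrow the function above

buf = io.StringIO()
OptionStub().write_config_file(buf, comments=True)
print(buf.getvalue())
# # Attribute (int) : TIMEOUT
# # Description : # Socket timeout in seconds
#
# ;timeout=30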
def color2idx(self, red, green, blue): """Get an Excel index from""" xlwt_colors = [ (0, 0, 0), (255, 255, 255), (255, 0, 0), (0, 255, 0), (0, 0, 255), (255, 255, 0), (255, 0, 255), (0, 255, 255), (0, 0, 0), (255, 255, 255), (255, 0, 0), (0, 255, 0), (0, 0, 255), (255, 255, 0), (255, 0, 255), (0, 255, 255), (128, 0, 0), (0, 128, 0), (0, 0, 128), (128, 128, 0), (128, 0, 128), (0, 128, 128), (192, 192, 192), (128, 128, 128), (153, 153, 255), (153, 51, 102), (255, 255, 204), (204, 255, 255), (102, 0, 102), (255, 128, 128), (0, 102, 204), (204, 204, 255), (0, 0, 128), (255, 0, 255), (255, 255, 0), (0, 255, 255), (128, 0, 128), (128, 0, 0), (0, 128, 128), (0, 0, 255), (0, 204, 255), (204, 255, 255), (204, 255, 204), (255, 255, 153), (153, 204, 255), (255, 153, 204), (204, 153, 255), (255, 204, 153), (51, 102, 255), (51, 204, 204), (153, 204, 0), (255, 204, 0), (255, 153, 0), (255, 102, 0), (102, 102, 153), (150, 150, 150), (0, 51, 102), (51, 153, 102), (0, 51, 0), (51, 51, 0), (153, 51, 0), (153, 51, 102), (51, 51, 153), (51, 51, 51) ] distances = [abs(red - r) + abs(green - g) + abs(blue - b) for r, g, b in xlwt_colors] min_dist_idx = distances.index(min(distances)) return min_dist_idx
def function[color2idx, parameter[self, red, green, blue]]: constant[Get an Excel index from] variable[xlwt_colors] assign[=] list[[<ast.Tuple object at 0x7da2046238e0>, <ast.Tuple object at 0x7da204621b40>, <ast.Tuple object at 0x7da204620c70>, <ast.Tuple object at 0x7da204623fa0>, <ast.Tuple object at 0x7da204623640>, <ast.Tuple object at 0x7da204623040>, <ast.Tuple object at 0x7da2046208b0>, <ast.Tuple object at 0x7da204621cc0>, <ast.Tuple object at 0x7da204623cd0>, <ast.Tuple object at 0x7da204620580>, <ast.Tuple object at 0x7da204623190>, <ast.Tuple object at 0x7da204622aa0>, <ast.Tuple object at 0x7da204623130>, <ast.Tuple object at 0x7da2046235e0>, <ast.Tuple object at 0x7da204622620>, <ast.Tuple object at 0x7da204623610>, <ast.Tuple object at 0x7da2046225c0>, <ast.Tuple object at 0x7da204620430>, <ast.Tuple object at 0x7da204621990>, <ast.Tuple object at 0x7da204623eb0>, <ast.Tuple object at 0x7da204623ee0>, <ast.Tuple object at 0x7da204621240>, <ast.Tuple object at 0x7da204621210>, <ast.Tuple object at 0x7da1b16be3b0>, <ast.Tuple object at 0x7da1b16bdf00>, <ast.Tuple object at 0x7da1b16bfd30>, <ast.Tuple object at 0x7da1b16bd600>, <ast.Tuple object at 0x7da1b16be6e0>, <ast.Tuple object at 0x7da1b16bc4f0>, <ast.Tuple object at 0x7da1b16bfa60>, <ast.Tuple object at 0x7da1b16be3e0>, <ast.Tuple object at 0x7da1b16be9e0>, <ast.Tuple object at 0x7da1b16bd270>, <ast.Tuple object at 0x7da1b16bf700>, <ast.Tuple object at 0x7da1b16bcc40>, <ast.Tuple object at 0x7da1b16bdde0>, <ast.Tuple object at 0x7da1b16bea70>, <ast.Tuple object at 0x7da1b16becb0>, <ast.Tuple object at 0x7da1b16bc520>, <ast.Tuple object at 0x7da1b16bd750>, <ast.Tuple object at 0x7da1b16bd450>, <ast.Tuple object at 0x7da1b16be5f0>, <ast.Tuple object at 0x7da1b16bd150>, <ast.Tuple object at 0x7da1b16bf370>, <ast.Tuple object at 0x7da1b16bec80>, <ast.Tuple object at 0x7da1b16bcbe0>, <ast.Tuple object at 0x7da1b16bcdc0>, <ast.Tuple object at 0x7da1b16bc1f0>, <ast.Tuple object at 0x7da1b16be410>, <ast.Tuple object at 0x7da1b16bdff0>, <ast.Tuple object at 0x7da1b16bf250>, <ast.Tuple object at 0x7da1b16bd000>, <ast.Tuple object at 0x7da1b16bf460>, <ast.Tuple object at 0x7da1b16bf7c0>, <ast.Tuple object at 0x7da1b16bdcf0>, <ast.Tuple object at 0x7da1b16bc850>, <ast.Tuple object at 0x7da1b16bfca0>, <ast.Tuple object at 0x7da1b16bd6f0>, <ast.Tuple object at 0x7da1b16bc160>, <ast.Tuple object at 0x7da1b16bfee0>, <ast.Tuple object at 0x7da1b16bd690>, <ast.Tuple object at 0x7da1b16be710>, <ast.Tuple object at 0x7da1b16bead0>, <ast.Tuple object at 0x7da1b16beb60>]] variable[distances] assign[=] <ast.ListComp object at 0x7da1b16bfd90> variable[min_dist_idx] assign[=] call[name[distances].index, parameter[call[name[min], parameter[name[distances]]]]] return[name[min_dist_idx]]
keyword[def] identifier[color2idx] ( identifier[self] , identifier[red] , identifier[green] , identifier[blue] ): literal[string] identifier[xlwt_colors] =[ ( literal[int] , literal[int] , literal[int] ),( literal[int] , literal[int] , literal[int] ),( literal[int] , literal[int] , literal[int] ),( literal[int] , literal[int] , literal[int] ),( literal[int] , literal[int] , literal[int] ), ( literal[int] , literal[int] , literal[int] ),( literal[int] , literal[int] , literal[int] ),( literal[int] , literal[int] , literal[int] ),( literal[int] , literal[int] , literal[int] ), ( literal[int] , literal[int] , literal[int] ),( literal[int] , literal[int] , literal[int] ),( literal[int] , literal[int] , literal[int] ),( literal[int] , literal[int] , literal[int] ), ( literal[int] , literal[int] , literal[int] ),( literal[int] , literal[int] , literal[int] ),( literal[int] , literal[int] , literal[int] ),( literal[int] , literal[int] , literal[int] ), ( literal[int] , literal[int] , literal[int] ),( literal[int] , literal[int] , literal[int] ),( literal[int] , literal[int] , literal[int] ),( literal[int] , literal[int] , literal[int] ), ( literal[int] , literal[int] , literal[int] ),( literal[int] , literal[int] , literal[int] ),( literal[int] , literal[int] , literal[int] ),( literal[int] , literal[int] , literal[int] ), ( literal[int] , literal[int] , literal[int] ),( literal[int] , literal[int] , literal[int] ),( literal[int] , literal[int] , literal[int] ),( literal[int] , literal[int] , literal[int] ), ( literal[int] , literal[int] , literal[int] ),( literal[int] , literal[int] , literal[int] ),( literal[int] , literal[int] , literal[int] ),( literal[int] , literal[int] , literal[int] ), ( literal[int] , literal[int] , literal[int] ),( literal[int] , literal[int] , literal[int] ),( literal[int] , literal[int] , literal[int] ),( literal[int] , literal[int] , literal[int] ), ( literal[int] , literal[int] , literal[int] ),( literal[int] , literal[int] , literal[int] ),( literal[int] , literal[int] , literal[int] ),( literal[int] , literal[int] , literal[int] ), ( literal[int] , literal[int] , literal[int] ),( literal[int] , literal[int] , literal[int] ),( literal[int] , literal[int] , literal[int] ),( literal[int] , literal[int] , literal[int] ), ( literal[int] , literal[int] , literal[int] ),( literal[int] , literal[int] , literal[int] ),( literal[int] , literal[int] , literal[int] ),( literal[int] , literal[int] , literal[int] ), ( literal[int] , literal[int] , literal[int] ),( literal[int] , literal[int] , literal[int] ),( literal[int] , literal[int] , literal[int] ),( literal[int] , literal[int] , literal[int] ), ( literal[int] , literal[int] , literal[int] ),( literal[int] , literal[int] , literal[int] ),( literal[int] , literal[int] , literal[int] ),( literal[int] , literal[int] , literal[int] ), ( literal[int] , literal[int] , literal[int] ),( literal[int] , literal[int] , literal[int] ),( literal[int] , literal[int] , literal[int] ),( literal[int] , literal[int] , literal[int] ), ( literal[int] , literal[int] , literal[int] ),( literal[int] , literal[int] , literal[int] ),( literal[int] , literal[int] , literal[int] ) ] identifier[distances] =[ identifier[abs] ( identifier[red] - identifier[r] )+ identifier[abs] ( identifier[green] - identifier[g] )+ identifier[abs] ( identifier[blue] - identifier[b] ) keyword[for] identifier[r] , identifier[g] , identifier[b] keyword[in] identifier[xlwt_colors] ] identifier[min_dist_idx] = identifier[distances] . identifier[index] ( identifier[min] ( identifier[distances] )) keyword[return] identifier[min_dist_idx]
def color2idx(self, red, green, blue): """Get an Excel index from""" xlwt_colors = [(0, 0, 0), (255, 255, 255), (255, 0, 0), (0, 255, 0), (0, 0, 255), (255, 255, 0), (255, 0, 255), (0, 255, 255), (0, 0, 0), (255, 255, 255), (255, 0, 0), (0, 255, 0), (0, 0, 255), (255, 255, 0), (255, 0, 255), (0, 255, 255), (128, 0, 0), (0, 128, 0), (0, 0, 128), (128, 128, 0), (128, 0, 128), (0, 128, 128), (192, 192, 192), (128, 128, 128), (153, 153, 255), (153, 51, 102), (255, 255, 204), (204, 255, 255), (102, 0, 102), (255, 128, 128), (0, 102, 204), (204, 204, 255), (0, 0, 128), (255, 0, 255), (255, 255, 0), (0, 255, 255), (128, 0, 128), (128, 0, 0), (0, 128, 128), (0, 0, 255), (0, 204, 255), (204, 255, 255), (204, 255, 204), (255, 255, 153), (153, 204, 255), (255, 153, 204), (204, 153, 255), (255, 204, 153), (51, 102, 255), (51, 204, 204), (153, 204, 0), (255, 204, 0), (255, 153, 0), (255, 102, 0), (102, 102, 153), (150, 150, 150), (0, 51, 102), (51, 153, 102), (0, 51, 0), (51, 51, 0), (153, 51, 0), (153, 51, 102), (51, 51, 153), (51, 51, 51)] distances = [abs(red - r) + abs(green - g) + abs(blue - b) for (r, g, b) in xlwt_colors] min_dist_idx = distances.index(min(distances)) return min_dist_idx
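color2idx above reduces to a nearest-neighbour lookup under the Manhattan (L1) metric in RGB space. A standalone sketch follows; the trimmed palette is an assumption for brevity, whereas the method itself scans the full 64-entry xlwt palette.

def nearest_palette_idx(red, green, blue, palette):
    # L1 (Manhattan) distance against every palette entry, as in color2idx.
    distances = [abs(red - r) + abs(green - g) + abs(blue - b)
                 for r, g, b in palette]
    return distances.index(min(distances))

palette = [(0, 0, 0), (255, 255, 255), (255, 0, 0), (0, 255, 0), (0, 0, 255)]
print(nearest_palette_idx(250, 10, 10, palette))  # 2 -> nearest is pure red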
def _set_command_options(self, command_obj, option_dict=None): """ Set the options for 'command_obj' from 'option_dict'. Basically this means copying elements of a dictionary ('option_dict') to attributes of an instance ('command'). 'command_obj' must be a Command instance. If 'option_dict' is not supplied, uses the standard option dictionary for this command (from 'self.command_options'). (Adopted from distutils.dist.Distribution._set_command_options) """ command_name = command_obj.get_command_name() if option_dict is None: option_dict = self.get_option_dict(command_name) if DEBUG: self.announce(" setting options for '%s' command:" % command_name) for (option, (source, value)) in option_dict.items(): if DEBUG: self.announce(" %s = %s (from %s)" % (option, value, source)) try: bool_opts = [translate_longopt(o) for o in command_obj.boolean_options] except AttributeError: bool_opts = [] try: neg_opt = command_obj.negative_opt except AttributeError: neg_opt = {} try: is_string = isinstance(value, six.string_types) if option in neg_opt and is_string: setattr(command_obj, neg_opt[option], not strtobool(value)) elif option in bool_opts and is_string: setattr(command_obj, option, strtobool(value)) elif hasattr(command_obj, option): setattr(command_obj, option, value) else: raise DistutilsOptionError( "error in %s: command '%s' has no such option '%s'" % (source, command_name, option)) except ValueError as msg: raise DistutilsOptionError(msg)
def function[_set_command_options, parameter[self, command_obj, option_dict]]: constant[ Set the options for 'command_obj' from 'option_dict'. Basically this means copying elements of a dictionary ('option_dict') to attributes of an instance ('command'). 'command_obj' must be a Command instance. If 'option_dict' is not supplied, uses the standard option dictionary for this command (from 'self.command_options'). (Adopted from distutils.dist.Distribution._set_command_options) ] variable[command_name] assign[=] call[name[command_obj].get_command_name, parameter[]] if compare[name[option_dict] is constant[None]] begin[:] variable[option_dict] assign[=] call[name[self].get_option_dict, parameter[name[command_name]]] if name[DEBUG] begin[:] call[name[self].announce, parameter[binary_operation[constant[ setting options for '%s' command:] <ast.Mod object at 0x7da2590d6920> name[command_name]]]] for taget[tuple[[<ast.Name object at 0x7da1b1b140a0>, <ast.Tuple object at 0x7da1b1b17f10>]]] in starred[call[name[option_dict].items, parameter[]]] begin[:] if name[DEBUG] begin[:] call[name[self].announce, parameter[binary_operation[constant[ %s = %s (from %s)] <ast.Mod object at 0x7da2590d6920> tuple[[<ast.Name object at 0x7da1b1b17bb0>, <ast.Name object at 0x7da1b1b16e30>, <ast.Name object at 0x7da1b1b14160>]]]]] <ast.Try object at 0x7da1b1b17b50> <ast.Try object at 0x7da1b1b145e0> <ast.Try object at 0x7da1b1b16bf0>
keyword[def] identifier[_set_command_options] ( identifier[self] , identifier[command_obj] , identifier[option_dict] = keyword[None] ): literal[string] identifier[command_name] = identifier[command_obj] . identifier[get_command_name] () keyword[if] identifier[option_dict] keyword[is] keyword[None] : identifier[option_dict] = identifier[self] . identifier[get_option_dict] ( identifier[command_name] ) keyword[if] identifier[DEBUG] : identifier[self] . identifier[announce] ( literal[string] % identifier[command_name] ) keyword[for] ( identifier[option] ,( identifier[source] , identifier[value] )) keyword[in] identifier[option_dict] . identifier[items] (): keyword[if] identifier[DEBUG] : identifier[self] . identifier[announce] ( literal[string] %( identifier[option] , identifier[value] , identifier[source] )) keyword[try] : identifier[bool_opts] =[ identifier[translate_longopt] ( identifier[o] ) keyword[for] identifier[o] keyword[in] identifier[command_obj] . identifier[boolean_options] ] keyword[except] identifier[AttributeError] : identifier[bool_opts] =[] keyword[try] : identifier[neg_opt] = identifier[command_obj] . identifier[negative_opt] keyword[except] identifier[AttributeError] : identifier[neg_opt] ={} keyword[try] : identifier[is_string] = identifier[isinstance] ( identifier[value] , identifier[six] . identifier[string_types] ) keyword[if] identifier[option] keyword[in] identifier[neg_opt] keyword[and] identifier[is_string] : identifier[setattr] ( identifier[command_obj] , identifier[neg_opt] [ identifier[option] ], keyword[not] identifier[strtobool] ( identifier[value] )) keyword[elif] identifier[option] keyword[in] identifier[bool_opts] keyword[and] identifier[is_string] : identifier[setattr] ( identifier[command_obj] , identifier[option] , identifier[strtobool] ( identifier[value] )) keyword[elif] identifier[hasattr] ( identifier[command_obj] , identifier[option] ): identifier[setattr] ( identifier[command_obj] , identifier[option] , identifier[value] ) keyword[else] : keyword[raise] identifier[DistutilsOptionError] ( literal[string] %( identifier[source] , identifier[command_name] , identifier[option] )) keyword[except] identifier[ValueError] keyword[as] identifier[msg] : keyword[raise] identifier[DistutilsOptionError] ( identifier[msg] )
def _set_command_options(self, command_obj, option_dict=None): """ Set the options for 'command_obj' from 'option_dict'. Basically this means copying elements of a dictionary ('option_dict') to attributes of an instance ('command'). 'command_obj' must be a Command instance. If 'option_dict' is not supplied, uses the standard option dictionary for this command (from 'self.command_options'). (Adopted from distutils.dist.Distribution._set_command_options) """ command_name = command_obj.get_command_name() if option_dict is None: option_dict = self.get_option_dict(command_name) # depends on [control=['if'], data=['option_dict']] if DEBUG: self.announce(" setting options for '%s' command:" % command_name) # depends on [control=['if'], data=[]] for (option, (source, value)) in option_dict.items(): if DEBUG: self.announce(' %s = %s (from %s)' % (option, value, source)) # depends on [control=['if'], data=[]] try: bool_opts = [translate_longopt(o) for o in command_obj.boolean_options] # depends on [control=['try'], data=[]] except AttributeError: bool_opts = [] # depends on [control=['except'], data=[]] try: neg_opt = command_obj.negative_opt # depends on [control=['try'], data=[]] except AttributeError: neg_opt = {} # depends on [control=['except'], data=[]] try: is_string = isinstance(value, six.string_types) if option in neg_opt and is_string: setattr(command_obj, neg_opt[option], not strtobool(value)) # depends on [control=['if'], data=[]] elif option in bool_opts and is_string: setattr(command_obj, option, strtobool(value)) # depends on [control=['if'], data=[]] elif hasattr(command_obj, option): setattr(command_obj, option, value) # depends on [control=['if'], data=[]] else: raise DistutilsOptionError("error in %s: command '%s' has no such option '%s'" % (source, command_name, option)) # depends on [control=['try'], data=[]] except ValueError as msg: raise DistutilsOptionError(msg) # depends on [control=['except'], data=['msg']] # depends on [control=['for'], data=[]]
def _log_file_ind(self, inum): ''' Information about available profile.data or log.data files. Parameters ---------- inum : integer Number of the requested profile.data file. inum_max : integer Max number of profile.data or log.data files available. ''' self._profiles_index() if inum <= 0: print("Smallest argument is 1") return inum_max = len(self.log_ind) inum -= 1 if inum > inum_max: print('There are only '+str(inum_max)+' profile files available.') log_data_number = -1 return log_data_number else: log_data_number = self.log_ind[self.model[inum]] print('The '+str(inum+1)+'. profile.data file is '+ \ str(log_data_number)) return log_data_number
def function[_log_file_ind, parameter[self, inum]]: constant[ Information about available profile.data or log.data files. Parameters ---------- inum : integer Number of the requested profile.data file. inum_max : integer Max number of profile.data or log.data files available. ] call[name[self]._profiles_index, parameter[]] if compare[name[inum] less_or_equal[<=] constant[0]] begin[:] call[name[print], parameter[constant[Smallest argument is 1]]] return[None] variable[inum_max] assign[=] call[name[len], parameter[name[self].log_ind]] <ast.AugAssign object at 0x7da1b19a3400> if compare[name[inum] greater[>] name[inum_max]] begin[:] call[name[print], parameter[binary_operation[binary_operation[constant[There are only ] + call[name[str], parameter[name[inum_max]]]] + constant[ profile files available.]]]] variable[log_data_number] assign[=] <ast.UnaryOp object at 0x7da1b19a3910> return[name[log_data_number]]
keyword[def] identifier[_log_file_ind] ( identifier[self] , identifier[inum] ): literal[string] identifier[self] . identifier[_profiles_index] () keyword[if] identifier[inum] <= literal[int] : identifier[print] ( literal[string] ) keyword[return] identifier[inum_max] = identifier[len] ( identifier[self] . identifier[log_ind] ) identifier[inum] -= literal[int] keyword[if] identifier[inum] > identifier[inum_max] : identifier[print] ( literal[string] + identifier[str] ( identifier[inum_max] )+ literal[string] ) identifier[log_data_number] =- literal[int] keyword[return] identifier[log_data_number] keyword[else] : identifier[log_data_number] = identifier[self] . identifier[log_ind] [ identifier[self] . identifier[model] [ identifier[inum] ]] identifier[print] ( literal[string] + identifier[str] ( identifier[inum] + literal[int] )+ literal[string] + identifier[str] ( identifier[log_data_number] )) keyword[return] identifier[log_data_number]
def _log_file_ind(self, inum): """ Information about available profile.data or log.data files. Parameters ---------- inum : integer Number of the requested profile.data file. inum_max : integer Max number of profile.data or log.data files available. """ self._profiles_index() if inum <= 0: print('Smallest argument is 1') return # depends on [control=['if'], data=[]] inum_max = len(self.log_ind) inum -= 1 if inum > inum_max: print('There are only ' + str(inum_max) + ' profile files available.') log_data_number = -1 return log_data_number # depends on [control=['if'], data=['inum_max']] else: log_data_number = self.log_ind[self.model[inum]] print('The ' + str(inum + 1) + '. profile.data file is ' + str(log_data_number)) return log_data_number
def addFeatureSet(self, featureSet): """ Adds the specified featureSet to this dataset. """ id_ = featureSet.getId() self._featureSetIdMap[id_] = featureSet self._featureSetIds.append(id_) name = featureSet.getLocalId() self._featureSetNameMap[name] = featureSet
def function[addFeatureSet, parameter[self, featureSet]]: constant[ Adds the specified featureSet to this dataset. ] variable[id_] assign[=] call[name[featureSet].getId, parameter[]] call[name[self]._featureSetIdMap][name[id_]] assign[=] name[featureSet] call[name[self]._featureSetIds.append, parameter[name[id_]]] variable[name] assign[=] call[name[featureSet].getLocalId, parameter[]] call[name[self]._featureSetNameMap][name[name]] assign[=] name[featureSet]
keyword[def] identifier[addFeatureSet] ( identifier[self] , identifier[featureSet] ): literal[string] identifier[id_] = identifier[featureSet] . identifier[getId] () identifier[self] . identifier[_featureSetIdMap] [ identifier[id_] ]= identifier[featureSet] identifier[self] . identifier[_featureSetIds] . identifier[append] ( identifier[id_] ) identifier[name] = identifier[featureSet] . identifier[getLocalId] () identifier[self] . identifier[_featureSetNameMap] [ identifier[name] ]= identifier[featureSet]
def addFeatureSet(self, featureSet): """ Adds the specified featureSet to this dataset. """ id_ = featureSet.getId() self._featureSetIdMap[id_] = featureSet self._featureSetIds.append(id_) name = featureSet.getLocalId() self._featureSetNameMap[name] = featureSet
def split_by_proportionally_distribute_labels(self, proportions={}, use_lengths=True): """ Split the corpus into subsets, so the occurrence of the labels is distributed amongst the subsets according to the given proportions. Args: proportions (dict): A dictionary containing the relative size of the target subsets. The key is an identifier for the subset. use_lengths (bool): If True the lengths of the labels are considered for splitting proportionally, otherwise only the number of occurrences is taken into account. Returns: (dict): A dictionary containing the subsets with the identifier from the input as key. """ identifiers = {} for utterance in self.corpus.utterances.values(): if use_lengths: identifiers[utterance.idx] = {l: int(d * 100) for l, d in utterance.label_total_duration().items()} else: identifiers[utterance.idx] = utterance.label_count() splits = utils.get_identifiers_splitted_by_weights(identifiers, proportions) return self._subviews_from_utterance_splits(splits)
def function[split_by_proportionally_distribute_labels, parameter[self, proportions, use_lengths]]: constant[ Split the corpus into subsets, so the occurrence of the labels is distributed amongst the subsets according to the given proportions. Args: proportions (dict): A dictionary containing the relative size of the target subsets. The key is an identifier for the subset. use_lengths (bool): If True the lengths of the labels are considered for splitting proportionally, otherwise only the number of occurrences is taken into account. Returns: (dict): A dictionary containing the subsets with the identifier from the input as key. ] variable[identifiers] assign[=] dictionary[[], []] for taget[name[utterance]] in starred[call[name[self].corpus.utterances.values, parameter[]]] begin[:] if name[use_lengths] begin[:] call[name[identifiers]][name[utterance].idx] assign[=] <ast.DictComp object at 0x7da1b0b80460> variable[splits] assign[=] call[name[utils].get_identifiers_splitted_by_weights, parameter[name[identifiers], name[proportions]]] return[call[name[self]._subviews_from_utterance_splits, parameter[name[splits]]]]
keyword[def] identifier[split_by_proportionally_distribute_labels] ( identifier[self] , identifier[proportions] ={}, identifier[use_lengths] = keyword[True] ): literal[string] identifier[identifiers] ={} keyword[for] identifier[utterance] keyword[in] identifier[self] . identifier[corpus] . identifier[utterances] . identifier[values] (): keyword[if] identifier[use_lengths] : identifier[identifiers] [ identifier[utterance] . identifier[idx] ]={ identifier[l] : identifier[int] ( identifier[d] * literal[int] ) keyword[for] identifier[l] , identifier[d] keyword[in] identifier[utterance] . identifier[label_total_duration] (). identifier[items] ()} keyword[else] : identifier[identifiers] [ identifier[utterance] . identifier[idx] ]= identifier[utterance] . identifier[label_count] () identifier[splits] = identifier[utils] . identifier[get_identifiers_splitted_by_weights] ( identifier[identifiers] , identifier[proportions] ) keyword[return] identifier[self] . identifier[_subviews_from_utterance_splits] ( identifier[splits] )
def split_by_proportionally_distribute_labels(self, proportions={}, use_lengths=True): """ Split the corpus into subsets, so the occurrence of the labels is distributed amongst the subsets according to the given proportions. Args: proportions (dict): A dictionary containing the relative size of the target subsets. The key is an identifier for the subset. use_lengths (bool): If True the lengths of the labels are considered for splitting proportionally, otherwise only the number of occurrences is taken into account. Returns: (dict): A dictionary containing the subsets with the identifier from the input as key. """ identifiers = {} for utterance in self.corpus.utterances.values(): if use_lengths: identifiers[utterance.idx] = {l: int(d * 100) for (l, d) in utterance.label_total_duration().items()} # depends on [control=['if'], data=[]] else: identifiers[utterance.idx] = utterance.label_count() # depends on [control=['for'], data=['utterance']] splits = utils.get_identifiers_splitted_by_weights(identifiers, proportions) return self._subviews_from_utterance_splits(splits)
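utils.get_identifiers_splitted_by_weights is not included in this record, so the greedy scheme below is only a hypothetical illustration of what such a splitter could do: hand each utterance to the subset whose accumulated label weight lags its target proportion the most.

def greedy_split(identifiers, proportions):
    # identifiers: {utt_id: {label: weight}}, proportions: {subset: fraction}.
    grand_total = sum(sum(w.values()) for w in identifiers.values())
    totals = {subset: 0.0 for subset in proportions}
    splits = {subset: [] for subset in proportions}
    # Heaviest utterances first, so the small ones can balance the tail.
    for idx, weights in sorted(identifiers.items(),
                               key=lambda kv: -sum(kv[1].values())):
        target = max(proportions,
                     key=lambda s: proportions[s] * grand_total - totals[s])
        splits[target].append(idx)
        totals[target] += sum(weights.values())
    return splits

utts = {'utt-1': {'a': 4}, 'utt-2': {'a': 2}, 'utt-3': {'b': 2}}
print(greedy_split(utts, {'train': 0.75, 'test': 0.25}))
# {'train': ['utt-1', 'utt-2'], 'test': ['utt-3']}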
def isfile_strict(path): """Same as os.path.isfile() but does not swallow EACCES / EPERM exceptions, see: http://mail.python.org/pipermail/python-dev/2012-June/120787.html """ try: st = os.stat(path) except OSError: err = sys.exc_info()[1] if err.errno in (errno.EPERM, errno.EACCES): raise return False else: return stat.S_ISREG(st.st_mode)
def function[isfile_strict, parameter[path]]: constant[Same as os.path.isfile() but does not swallow EACCES / EPERM exceptions, see: http://mail.python.org/pipermail/python-dev/2012-June/120787.html ] <ast.Try object at 0x7da18f8137f0>
keyword[def] identifier[isfile_strict] ( identifier[path] ): literal[string] keyword[try] : identifier[st] = identifier[os] . identifier[stat] ( identifier[path] ) keyword[except] identifier[OSError] : identifier[err] = identifier[sys] . identifier[exc_info] ()[ literal[int] ] keyword[if] identifier[err] . identifier[errno] keyword[in] ( identifier[errno] . identifier[EPERM] , identifier[errno] . identifier[EACCES] ): keyword[raise] keyword[return] keyword[False] keyword[else] : keyword[return] identifier[stat] . identifier[S_ISREG] ( identifier[st] . identifier[st_mode] )
def isfile_strict(path): """Same as os.path.isfile() but does not swallow EACCES / EPERM exceptions, see: http://mail.python.org/pipermail/python-dev/2012-June/120787.html """ try: st = os.stat(path) # depends on [control=['try'], data=[]] except OSError: err = sys.exc_info()[1] if err.errno in (errno.EPERM, errno.EACCES): raise # depends on [control=['if'], data=[]] return False # depends on [control=['except'], data=[]] else: return stat.S_ISREG(st.st_mode)
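The point of the "strict" variant above: permission failures propagate instead of masquerading as "not a file". A small demonstration, with the imports the function itself relies on:

import errno
import os
import stat
import sys

print(isfile_strict(sys.executable))                    # True: a regular file
print(isfile_strict(os.path.dirname(sys.executable)))   # False: a directory
print(isfile_strict('/no/such/path'))                   # False: ENOENT is swallowed
# Only EPERM/EACCES re-raise, e.g. stat()-ing inside a mode-000 directory.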
def contact_advertiser(self, name, email, contact_number, message): """ This method allows you to contact the advertiser of a listing. :param name: Your name :param email: Your email address. :param contact_number: Your contact number. :param message: Your message. :return: """ req = Request(debug=self._debug) ad_search_type = self.search_type agent_id = self.agent_id ad_id = self.id response = req.post('https://www.daft.ie/ajax_endpoint.php?', params={ 'action': 'daft_contact_advertiser', 'from': name, 'email': email, 'message': message, 'contact_number': contact_number, 'type': ad_search_type, 'agent_id': agent_id, 'id': ad_id }) if self._debug: logging.info("Status code: %d" % response.status_code) logging.info("Response: %s" % response.content) if response.status_code != 200: logging.error("Status code: %d" % response.status_code) logging.error("Response: %s" % response.content) return response.status_code == 200
def function[contact_advertiser, parameter[self, name, email, contact_number, message]]: constant[ This method allows you to contact the advertiser of a listing. :param name: Your name :param email: Your email address. :param contact_number: Your contact number. :param message: Your message. :return: ] variable[req] assign[=] call[name[Request], parameter[]] variable[ad_search_type] assign[=] name[self].search_type variable[agent_id] assign[=] name[self].agent_id variable[ad_id] assign[=] name[self].id variable[response] assign[=] call[name[req].post, parameter[constant[https://www.daft.ie/ajax_endpoint.php?]]] if name[self]._debug begin[:] call[name[logging].info, parameter[binary_operation[constant[Status code: %d] <ast.Mod object at 0x7da2590d6920> name[response].status_code]]] call[name[logging].info, parameter[binary_operation[constant[Response: %s] <ast.Mod object at 0x7da2590d6920> name[response].content]]] if compare[name[response].status_code not_equal[!=] constant[200]] begin[:] call[name[logging].error, parameter[binary_operation[constant[Status code: %d] <ast.Mod object at 0x7da2590d6920> name[response].status_code]]] call[name[logging].error, parameter[binary_operation[constant[Response: %s] <ast.Mod object at 0x7da2590d6920> name[response].content]]] return[compare[name[response].status_code equal[==] constant[200]]]
keyword[def] identifier[contact_advertiser] ( identifier[self] , identifier[name] , identifier[email] , identifier[contact_number] , identifier[message] ): literal[string] identifier[req] = identifier[Request] ( identifier[debug] = identifier[self] . identifier[_debug] ) identifier[ad_search_type] = identifier[self] . identifier[search_type] identifier[agent_id] = identifier[self] . identifier[agent_id] identifier[ad_id] = identifier[self] . identifier[id] identifier[response] = identifier[req] . identifier[post] ( literal[string] , identifier[params] ={ literal[string] : literal[string] , literal[string] : identifier[name] , literal[string] : identifier[email] , literal[string] : identifier[message] , literal[string] : identifier[contact_number] , literal[string] : identifier[ad_search_type] , literal[string] : identifier[agent_id] , literal[string] : identifier[ad_id] }) keyword[if] identifier[self] . identifier[_debug] : identifier[logging] . identifier[info] ( literal[string] % identifier[response] . identifier[status_code] ) identifier[logging] . identifier[info] ( literal[string] % identifier[response] . identifier[content] ) keyword[if] identifier[response] . identifier[status_code] != literal[int] : identifier[logging] . identifier[error] ( literal[string] % identifier[response] . identifier[status_code] ) identifier[logging] . identifier[error] ( literal[string] % identifier[response] . identifier[content] ) keyword[return] identifier[response] . identifier[status_code] == literal[int]
def contact_advertiser(self, name, email, contact_number, message): """ This method allows you to contact the advertiser of a listing. :param name: Your name :param email: Your email address. :param contact_number: Your contact number. :param message: Your message. :return: """ req = Request(debug=self._debug) ad_search_type = self.search_type agent_id = self.agent_id ad_id = self.id response = req.post('https://www.daft.ie/ajax_endpoint.php?', params={'action': 'daft_contact_advertiser', 'from': name, 'email': email, 'message': message, 'contact_number': contact_number, 'type': ad_search_type, 'agent_id': agent_id, 'id': ad_id}) if self._debug: logging.info('Status code: %d' % response.status_code) logging.info('Response: %s' % response.content) # depends on [control=['if'], data=[]] if response.status_code != 200: logging.error('Status code: %d' % response.status_code) logging.error('Response: %s' % response.content) # depends on [control=['if'], data=[]] return response.status_code == 200
def trunc_list(s: List) -> List: """Truncate lists to maximum length.""" if len(s) > max_list_size: i = max_list_size // 2 j = i - 1 s = s[:i] + [ELLIPSIS] + s[-j:] return s
def function[trunc_list, parameter[s]]: constant[Truncate lists to maximum length.] if compare[call[name[len], parameter[name[s]]] greater[>] name[max_list_size]] begin[:] variable[i] assign[=] binary_operation[name[max_list_size] <ast.FloorDiv object at 0x7da2590d6bc0> constant[2]] variable[j] assign[=] binary_operation[name[i] - constant[1]] variable[s] assign[=] binary_operation[binary_operation[call[name[s]][<ast.Slice object at 0x7da1b2262dd0>] + list[[<ast.Name object at 0x7da1b2260d00>]]] + call[name[s]][<ast.Slice object at 0x7da1b2263a00>]] return[name[s]]
keyword[def] identifier[trunc_list] ( identifier[s] : identifier[List] )-> identifier[List] : literal[string] keyword[if] identifier[len] ( identifier[s] )> identifier[max_list_size] : identifier[i] = identifier[max_list_size] // literal[int] identifier[j] = identifier[i] - literal[int] identifier[s] = identifier[s] [: identifier[i] ]+[ identifier[ELLIPSIS] ]+ identifier[s] [- identifier[j] :] keyword[return] identifier[s]
def trunc_list(s: List) -> List: """Truncate lists to maximum length.""" if len(s) > max_list_size: i = max_list_size // 2 j = i - 1 s = s[:i] + [ELLIPSIS] + s[-j:] # depends on [control=['if'], data=['max_list_size']] return s
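trunc_list above leans on two module-level names not included in this record, max_list_size and ELLIPSIS; the values below are assumptions chosen for the demo. Note the head keeps one more element than the tail (i = max_list_size // 2, j = i - 1).

from typing import List  # needed by the annotation on trunc_list above

max_list_size = 6   # assumed value; not part of this record
ELLIPSIS = '...'    # assumed value; not part of this record

print(trunc_list(list(range(5))))   # [0, 1, 2, 3, 4]  (short lists pass through)
print(trunc_list(list(range(10))))  # [0, 1, 2, '...', 8, 9]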
def DeregisterPathSpec(cls, path_spec_type): """Deregisters a path specification. Args: path_spec_type (type): path specification type. Raises: KeyError: if path specification is not registered. """ type_indicator = path_spec_type.TYPE_INDICATOR if type_indicator not in cls._path_spec_types: raise KeyError( 'Path specification type: {0:s} not set.'.format(type_indicator)) del cls._path_spec_types[type_indicator] if type_indicator in cls._system_level_type_indicators: del cls._system_level_type_indicators[type_indicator]
def function[DeregisterPathSpec, parameter[cls, path_spec_type]]: constant[Deregisters a path specification. Args: path_spec_type (type): path specification type. Raises: KeyError: if path specification is not registered. ] variable[type_indicator] assign[=] name[path_spec_type].TYPE_INDICATOR if compare[name[type_indicator] <ast.NotIn object at 0x7da2590d7190> name[cls]._path_spec_types] begin[:] <ast.Raise object at 0x7da1b0655e10> <ast.Delete object at 0x7da1b0655cc0> if compare[name[type_indicator] in name[cls]._system_level_type_indicators] begin[:] <ast.Delete object at 0x7da1b07f6050>
keyword[def] identifier[DeregisterPathSpec] ( identifier[cls] , identifier[path_spec_type] ): literal[string] identifier[type_indicator] = identifier[path_spec_type] . identifier[TYPE_INDICATOR] keyword[if] identifier[type_indicator] keyword[not] keyword[in] identifier[cls] . identifier[_path_spec_types] : keyword[raise] identifier[KeyError] ( literal[string] . identifier[format] ( identifier[type_indicator] )) keyword[del] identifier[cls] . identifier[_path_spec_types] [ identifier[type_indicator] ] keyword[if] identifier[type_indicator] keyword[in] identifier[cls] . identifier[_system_level_type_indicators] : keyword[del] identifier[cls] . identifier[_system_level_type_indicators] [ identifier[type_indicator] ]
def DeregisterPathSpec(cls, path_spec_type): """Deregisters a path specification. Args: path_spec_type (type): path specification type. Raises: KeyError: if path specification is not registered. """ type_indicator = path_spec_type.TYPE_INDICATOR if type_indicator not in cls._path_spec_types: raise KeyError('Path specification type: {0:s} not set.'.format(type_indicator)) # depends on [control=['if'], data=['type_indicator']] del cls._path_spec_types[type_indicator] if type_indicator in cls._system_level_type_indicators: del cls._system_level_type_indicators[type_indicator] # depends on [control=['if'], data=['type_indicator']]
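DeregisterPathSpec above assumes a classmethod on a factory that keeps two class-level registries. A reduced, hypothetical harness (no dfVFS machinery) showing the double bookkeeping and the KeyError path:

class FakePathSpec:
    TYPE_INDICATOR = 'FAKE'

class Factory:
    _path_spec_types = {'FAKE': FakePathSpec}
    _system_level_type_indicators = {'FAKE': FakePathSpec}
    # Borrow the function above as a classmethod of this stub registry.
    DeregisterPathSpec = classmethod(DeregisterPathSpec)

Factory.DeregisterPathSpec(FakePathSpec)
print(Factory._path_spec_types)               # {}
print(Factory._system_level_type_indicators)  # {}
# A second call now raises KeyError('Path specification type: FAKE not set.')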
def compute_md5_for_data_asbase64(data): # type: (obj) -> str """Compute MD5 hash for bits and encode as Base64 :param any data: data to compute MD5 for :rtype: str :return: MD5 for data """ hasher = blobxfer.util.new_md5_hasher() hasher.update(data) return blobxfer.util.base64_encode_as_string(hasher.digest())
def function[compute_md5_for_data_asbase64, parameter[data]]: constant[Compute MD5 hash for bits and encode as Base64 :param any data: data to compute MD5 for :rtype: str :return: MD5 for data ] variable[hasher] assign[=] call[name[blobxfer].util.new_md5_hasher, parameter[]] call[name[hasher].update, parameter[name[data]]] return[call[name[blobxfer].util.base64_encode_as_string, parameter[call[name[hasher].digest, parameter[]]]]]
keyword[def] identifier[compute_md5_for_data_asbase64] ( identifier[data] ): literal[string] identifier[hasher] = identifier[blobxfer] . identifier[util] . identifier[new_md5_hasher] () identifier[hasher] . identifier[update] ( identifier[data] ) keyword[return] identifier[blobxfer] . identifier[util] . identifier[base64_encode_as_string] ( identifier[hasher] . identifier[digest] ())
def compute_md5_for_data_asbase64(data): # type: (obj) -> str 'Compute MD5 hash for bits and encode as Base64\n :param any data: data to compute MD5 for\n :rtype: str\n :return: MD5 for data\n ' hasher = blobxfer.util.new_md5_hasher() hasher.update(data) return blobxfer.util.base64_encode_as_string(hasher.digest())
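For reference, the same computation with only the standard library, assuming (as the names suggest) that blobxfer.util.new_md5_hasher wraps hashlib.md5() and base64_encode_as_string Base64-encodes the digest and decodes it to str:

import base64
import hashlib

def md5_as_base64(data: bytes) -> str:
    # MD5 digest of the raw bytes, then Base64 for a compact text form.
    return base64.b64encode(hashlib.md5(data).digest()).decode('ascii')

print(md5_as_base64(b'hello'))  # XUFAKrxLKna5cZ2REBfFkg==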