| body | body_hash | docstring | path | name | repository_name | lang | body_without_docstring |
|---|---|---|---|---|---|---|---|
| string, 26–98.2k chars | int64, −9,222,864,604,528,158,000 to ≈9.22e18 | string, 1–16.8k chars | string, 5–230 chars | string, 1–96 chars | string, 7–89 chars | 1 class: `python` | string, 20–98.2k chars |
def __init__(self, context, builder):
'\n Note: Maybe called multiple times when lowering a function\n '
from numba.core import boxing
self.context = context
self.builder = builder
self.module = builder.basic_block.function.module
try:
self.module.__serialized
except AttributeError:
self.module.__serialized = {}
self.pyobj = self.context.get_argument_type(types.pyobject)
self.pyobjptr = self.pyobj.as_pointer()
self.voidptr = Type.pointer(Type.int(8))
self.long = Type.int((ctypes.sizeof(ctypes.c_long) * 8))
self.ulong = self.long
self.longlong = Type.int((ctypes.sizeof(ctypes.c_ulonglong) * 8))
self.ulonglong = self.longlong
self.double = Type.double()
self.py_ssize_t = self.context.get_value_type(types.intp)
self.cstring = Type.pointer(Type.int(8))
self.gil_state = Type.int((_helperlib.py_gil_state_size * 8))
self.py_buffer_t = ir.ArrayType(ir.IntType(8), _helperlib.py_buffer_size)
self.py_hash_t = self.py_ssize_t
self.py_unicode_1byte_kind = _helperlib.py_unicode_1byte_kind
self.py_unicode_2byte_kind = _helperlib.py_unicode_2byte_kind
self.py_unicode_4byte_kind = _helperlib.py_unicode_4byte_kind
self.py_unicode_wchar_kind = _helperlib.py_unicode_wchar_kind | 720,668,537,472,889,200 | Note: Maybe called multiple times when lowering a function | numba/core/pythonapi.py | __init__ | DrTodd13/numba | python | def __init__(self, context, builder):
'\n \n '
from numba.core import boxing
self.context = context
self.builder = builder
self.module = builder.basic_block.function.module
try:
self.module.__serialized
except AttributeError:
self.module.__serialized = {}
self.pyobj = self.context.get_argument_type(types.pyobject)
self.pyobjptr = self.pyobj.as_pointer()
self.voidptr = Type.pointer(Type.int(8))
self.long = Type.int((ctypes.sizeof(ctypes.c_long) * 8))
self.ulong = self.long
self.longlong = Type.int((ctypes.sizeof(ctypes.c_ulonglong) * 8))
self.ulonglong = self.longlong
self.double = Type.double()
self.py_ssize_t = self.context.get_value_type(types.intp)
self.cstring = Type.pointer(Type.int(8))
self.gil_state = Type.int((_helperlib.py_gil_state_size * 8))
self.py_buffer_t = ir.ArrayType(ir.IntType(8), _helperlib.py_buffer_size)
self.py_hash_t = self.py_ssize_t
self.py_unicode_1byte_kind = _helperlib.py_unicode_1byte_kind
self.py_unicode_2byte_kind = _helperlib.py_unicode_2byte_kind
self.py_unicode_4byte_kind = _helperlib.py_unicode_4byte_kind
self.py_unicode_wchar_kind = _helperlib.py_unicode_wchar_kind |
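Not a corpus record: a minimal sketch, assuming records like the one above are published as a Hugging Face dataset (the dataset path below is a placeholder assumption), of how rows with this schema could be loaded and filtered.

```python
# Hedged sketch -- "user/python-code-docstrings" is a placeholder path, not the
# real location of this corpus.
from datasets import load_dataset

ds = load_dataset("user/python-code-docstrings", split="train")

row = ds[0]
print(row["repository_name"], row["path"], row["name"])   # e.g. DrTodd13/numba numba/core/pythonapi.py __init__
print(row["docstring"].splitlines()[0])                    # first line of the paired natural-language docstring
assert row["lang"] == "python"                             # `lang` has a single class: "python"

# Keep only rows whose docstring is reasonably descriptive.
descriptive = ds.filter(lambda r: len(r["docstring"].split()) >= 5)
print(len(descriptive), "of", len(ds), "rows kept")
```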
def emit_environment_sentry(self, envptr, return_pyobject=False, debug_msg=''):
'Emits LLVM code to ensure the `envptr` is not NULL\n '
is_null = cgutils.is_null(self.builder, envptr)
with cgutils.if_unlikely(self.builder, is_null):
if return_pyobject:
fnty = self.builder.function.type.pointee
assert (fnty.return_type == self.pyobj)
self.err_set_string('PyExc_RuntimeError', f'missing Environment: {debug_msg}')
self.builder.ret(self.get_null_object())
else:
self.context.call_conv.return_user_exc(self.builder, RuntimeError, (f'missing Environment: {debug_msg}',)) | -2,687,096,933,102,821,000 | Emits LLVM code to ensure the `envptr` is not NULL | numba/core/pythonapi.py | emit_environment_sentry | DrTodd13/numba | python | def emit_environment_sentry(self, envptr, return_pyobject=False, debug_msg=):
'\n '
is_null = cgutils.is_null(self.builder, envptr)
with cgutils.if_unlikely(self.builder, is_null):
if return_pyobject:
fnty = self.builder.function.type.pointee
assert (fnty.return_type == self.pyobj)
self.err_set_string('PyExc_RuntimeError', f'missing Environment: {debug_msg}')
self.builder.ret(self.get_null_object())
else:
self.context.call_conv.return_user_exc(self.builder, RuntimeError, (f'missing Environment: {debug_msg}',)) |
def raise_object(self, exc=None):
'\n Raise an arbitrary exception (type or value or (type, args)\n or None - if reraising). A reference to the argument is consumed.\n '
fnty = Type.function(Type.void(), [self.pyobj])
fn = self._get_function(fnty, name='numba_do_raise')
if (exc is None):
exc = self.make_none()
return self.builder.call(fn, (exc,)) | -6,708,990,820,519,976,000 | Raise an arbitrary exception (type or value or (type, args)
or None - if reraising). A reference to the argument is consumed. | numba/core/pythonapi.py | raise_object | DrTodd13/numba | python | def raise_object(self, exc=None):
'\n Raise an arbitrary exception (type or value or (type, args)\n or None - if reraising). A reference to the argument is consumed.\n '
fnty = Type.function(Type.void(), [self.pyobj])
fn = self._get_function(fnty, name='numba_do_raise')
if (exc is None):
exc = self.make_none()
return self.builder.call(fn, (exc,)) |
@contextlib.contextmanager
def err_push(self, keep_new=False):
'\n Temporarily push the current error indicator while the code\n block is executed. If *keep_new* is True and the code block\n raises a new error, the new error is kept, otherwise the old\n error indicator is restored at the end of the block.\n '
(pty, pval, ptb) = [cgutils.alloca_once(self.builder, self.pyobj) for i in range(3)]
self.err_fetch(pty, pval, ptb)
(yield)
ty = self.builder.load(pty)
val = self.builder.load(pval)
tb = self.builder.load(ptb)
if keep_new:
new_error = cgutils.is_not_null(self.builder, self.err_occurred())
with self.builder.if_else(new_error, likely=False) as (if_error, if_ok):
with if_error:
self.decref(ty)
self.decref(val)
self.decref(tb)
with if_ok:
self.err_restore(ty, val, tb)
else:
self.err_restore(ty, val, tb) | 613,919,192,489,648,100 | Temporarily push the current error indicator while the code
block is executed. If *keep_new* is True and the code block
raises a new error, the new error is kept, otherwise the old
error indicator is restored at the end of the block. | numba/core/pythonapi.py | err_push | DrTodd13/numba | python | @contextlib.contextmanager
def err_push(self, keep_new=False):
'\n Temporarily push the current error indicator while the code\n block is executed. If *keep_new* is True and the code block\n raises a new error, the new error is kept, otherwise the old\n error indicator is restored at the end of the block.\n '
(pty, pval, ptb) = [cgutils.alloca_once(self.builder, self.pyobj) for i in range(3)]
self.err_fetch(pty, pval, ptb)
(yield)
ty = self.builder.load(pty)
val = self.builder.load(pval)
tb = self.builder.load(ptb)
if keep_new:
new_error = cgutils.is_not_null(self.builder, self.err_occurred())
with self.builder.if_else(new_error, likely=False) as (if_error, if_ok):
with if_error:
self.decref(ty)
self.decref(val)
self.decref(tb)
with if_ok:
self.err_restore(ty, val, tb)
else:
self.err_restore(ty, val, tb) |
def get_c_object(self, name):
'\n Get a Python object through its C-accessible *name*\n (e.g. "PyExc_ValueError"). The underlying variable must be\n a `PyObject *`, and the value of that pointer is returned.\n '
return self.context.get_c_value(self.builder, self.pyobj.pointee, name, dllimport=True) | -877,358,383,236,135,400 | Get a Python object through its C-accessible *name*
(e.g. "PyExc_ValueError"). The underlying variable must be
a `PyObject *`, and the value of that pointer is returned. | numba/core/pythonapi.py | get_c_object | DrTodd13/numba | python | def get_c_object(self, name):
'\n Get a Python object through its C-accessible *name*\n (e.g. "PyExc_ValueError"). The underlying variable must be\n a `PyObject *`, and the value of that pointer is returned.\n '
return self.context.get_c_value(self.builder, self.pyobj.pointee, name, dllimport=True) |
def dict_getitem_string(self, dic, name):
'Lookup name inside dict\n\n Returns a borrowed reference\n '
fnty = Type.function(self.pyobj, [self.pyobj, self.cstring])
fn = self._get_function(fnty, name='PyDict_GetItemString')
cstr = self.context.insert_const_string(self.module, name)
return self.builder.call(fn, [dic, cstr]) | 7,638,780,850,412,745,000 | Lookup name inside dict
Returns a borrowed reference | numba/core/pythonapi.py | dict_getitem_string | DrTodd13/numba | python | def dict_getitem_string(self, dic, name):
'Lookup name inside dict\n\n Returns a borrowed reference\n '
fnty = Type.function(self.pyobj, [self.pyobj, self.cstring])
fn = self._get_function(fnty, name='PyDict_GetItemString')
cstr = self.context.insert_const_string(self.module, name)
return self.builder.call(fn, [dic, cstr]) |
def dict_getitem(self, dic, name):
'Lookup name inside dict\n\n Returns a borrowed reference\n '
fnty = Type.function(self.pyobj, [self.pyobj, self.pyobj])
fn = self._get_function(fnty, name='PyDict_GetItem')
return self.builder.call(fn, [dic, name]) | 8,226,964,351,746,974,000 | Lookup name inside dict
Returns a borrowed reference | numba/core/pythonapi.py | dict_getitem | DrTodd13/numba | python | def dict_getitem(self, dic, name):
'Lookup name inside dict\n\n Returns a borrowed reference\n '
fnty = Type.function(self.pyobj, [self.pyobj, self.pyobj])
fn = self._get_function(fnty, name='PyDict_GetItem')
return self.builder.call(fn, [dic, name]) |
def dict_pack(self, keyvalues):
'\n Args\n -----\n keyvalues: iterable of (str, llvm.Value of PyObject*)\n '
dictobj = self.dict_new()
with self.if_object_ok(dictobj):
for (k, v) in keyvalues:
self.dict_setitem_string(dictobj, k, v)
return dictobj | -6,079,025,890,302,506,000 | Args
-----
keyvalues: iterable of (str, llvm.Value of PyObject*) | numba/core/pythonapi.py | dict_pack | DrTodd13/numba | python | def dict_pack(self, keyvalues):
'\n Args\n -----\n keyvalues: iterable of (str, llvm.Value of PyObject*)\n '
dictobj = self.dict_new()
with self.if_object_ok(dictobj):
for (k, v) in keyvalues:
self.dict_setitem_string(dictobj, k, v)
return dictobj |
def long_as_voidptr(self, numobj):
"\n Convert the given Python integer to a void*. This is recommended\n over number_as_ssize_t as it isn't affected by signedness.\n "
fnty = Type.function(self.voidptr, [self.pyobj])
fn = self._get_function(fnty, name='PyLong_AsVoidPtr')
return self.builder.call(fn, [numobj]) | 3,385,580,637,444,274,700 | Convert the given Python integer to a void*. This is recommended
over number_as_ssize_t as it isn't affected by signedness. | numba/core/pythonapi.py | long_as_voidptr | DrTodd13/numba | python | def long_as_voidptr(self, numobj):
"\n Convert the given Python integer to a void*. This is recommended\n over number_as_ssize_t as it isn't affected by signedness.\n "
fnty = Type.function(self.voidptr, [self.pyobj])
fn = self._get_function(fnty, name='PyLong_AsVoidPtr')
return self.builder.call(fn, [numobj]) |
def long_from_signed_int(self, ival):
'\n Return a Python integer from any native integer value.\n '
bits = ival.type.width
if (bits <= self.long.width):
return self.long_from_long(self.builder.sext(ival, self.long))
elif (bits <= self.longlong.width):
return self.long_from_longlong(self.builder.sext(ival, self.longlong))
else:
raise OverflowError(('integer too big (%d bits)' % bits)) | -1,828,205,803,609,380,000 | Return a Python integer from any native integer value. | numba/core/pythonapi.py | long_from_signed_int | DrTodd13/numba | python | def long_from_signed_int(self, ival):
'\n \n '
bits = ival.type.width
if (bits <= self.long.width):
return self.long_from_long(self.builder.sext(ival, self.long))
elif (bits <= self.longlong.width):
return self.long_from_longlong(self.builder.sext(ival, self.longlong))
else:
raise OverflowError(('integer too big (%d bits)' % bits)) |
def long_from_unsigned_int(self, ival):
'\n Same as long_from_signed_int, but for unsigned values.\n '
bits = ival.type.width
if (bits <= self.ulong.width):
return self.long_from_ulong(self.builder.zext(ival, self.ulong))
elif (bits <= self.ulonglong.width):
return self.long_from_ulonglong(self.builder.zext(ival, self.ulonglong))
else:
raise OverflowError(('integer too big (%d bits)' % bits)) | 3,342,309,904,926,704,000 | Same as long_from_signed_int, but for unsigned values. | numba/core/pythonapi.py | long_from_unsigned_int | DrTodd13/numba | python | def long_from_unsigned_int(self, ival):
'\n \n '
bits = ival.type.width
if (bits <= self.ulong.width):
return self.long_from_ulong(self.builder.zext(ival, self.ulong))
elif (bits <= self.ulonglong.width):
return self.long_from_ulonglong(self.builder.zext(ival, self.ulonglong))
else:
raise OverflowError(('integer too big (%d bits)' % bits)) |
def bool_from_bool(self, bval):
'\n Get a Python bool from a LLVM boolean.\n '
longval = self.builder.zext(bval, self.long)
return self.bool_from_long(longval) | -8,639,948,724,539,030,000 | Get a Python bool from a LLVM boolean. | numba/core/pythonapi.py | bool_from_bool | DrTodd13/numba | python | def bool_from_bool(self, bval):
'\n \n '
longval = self.builder.zext(bval, self.long)
return self.bool_from_long(longval) |
def slice_as_ints(self, obj):
'\n Read the members of a slice of integers.\n\n Returns a (ok, start, stop, step) tuple where ok is a boolean and\n the following members are pointer-sized ints.\n '
pstart = cgutils.alloca_once(self.builder, self.py_ssize_t)
pstop = cgutils.alloca_once(self.builder, self.py_ssize_t)
pstep = cgutils.alloca_once(self.builder, self.py_ssize_t)
fnty = Type.function(Type.int(), ([self.pyobj] + ([self.py_ssize_t.as_pointer()] * 3)))
fn = self._get_function(fnty, name='numba_unpack_slice')
res = self.builder.call(fn, (obj, pstart, pstop, pstep))
start = self.builder.load(pstart)
stop = self.builder.load(pstop)
step = self.builder.load(pstep)
return (cgutils.is_null(self.builder, res), start, stop, step) | -8,755,883,046,290,695,000 | Read the members of a slice of integers.
Returns a (ok, start, stop, step) tuple where ok is a boolean and
the following members are pointer-sized ints. | numba/core/pythonapi.py | slice_as_ints | DrTodd13/numba | python | def slice_as_ints(self, obj):
'\n Read the members of a slice of integers.\n\n Returns a (ok, start, stop, step) tuple where ok is a boolean and\n the following members are pointer-sized ints.\n '
pstart = cgutils.alloca_once(self.builder, self.py_ssize_t)
pstop = cgutils.alloca_once(self.builder, self.py_ssize_t)
pstep = cgutils.alloca_once(self.builder, self.py_ssize_t)
fnty = Type.function(Type.int(), ([self.pyobj] + ([self.py_ssize_t.as_pointer()] * 3)))
fn = self._get_function(fnty, name='numba_unpack_slice')
res = self.builder.call(fn, (obj, pstart, pstop, pstep))
start = self.builder.load(pstart)
stop = self.builder.load(pstop)
step = self.builder.load(pstep)
return (cgutils.is_null(self.builder, res), start, stop, step) |
def list_setitem(self, lst, idx, val):
'\n Warning: Steals reference to ``val``\n '
fnty = Type.function(Type.int(), [self.pyobj, self.py_ssize_t, self.pyobj])
fn = self._get_function(fnty, name='PyList_SetItem')
return self.builder.call(fn, [lst, idx, val]) | 118,342,623,335,487,140 | Warning: Steals reference to ``val`` | numba/core/pythonapi.py | list_setitem | DrTodd13/numba | python | def list_setitem(self, lst, idx, val):
'\n \n '
fnty = Type.function(Type.int(), [self.pyobj, self.py_ssize_t, self.pyobj])
fn = self._get_function(fnty, name='PyList_SetItem')
return self.builder.call(fn, [lst, idx, val]) |
def list_getitem(self, lst, idx):
'\n Returns a borrowed reference.\n '
fnty = Type.function(self.pyobj, [self.pyobj, self.py_ssize_t])
fn = self._get_function(fnty, name='PyList_GetItem')
if isinstance(idx, int):
idx = self.context.get_constant(types.intp, idx)
return self.builder.call(fn, [lst, idx]) | -6,105,378,791,583,492,000 | Returns a borrowed reference. | numba/core/pythonapi.py | list_getitem | DrTodd13/numba | python | def list_getitem(self, lst, idx):
'\n \n '
fnty = Type.function(self.pyobj, [self.pyobj, self.py_ssize_t])
fn = self._get_function(fnty, name='PyList_GetItem')
if isinstance(idx, int):
idx = self.context.get_constant(types.intp, idx)
return self.builder.call(fn, [lst, idx]) |
def tuple_getitem(self, tup, idx):
'\n Borrow reference\n '
fnty = Type.function(self.pyobj, [self.pyobj, self.py_ssize_t])
fn = self._get_function(fnty, name='PyTuple_GetItem')
idx = self.context.get_constant(types.intp, idx)
return self.builder.call(fn, [tup, idx]) | -2,896,934,399,041,985,000 | Borrow reference | numba/core/pythonapi.py | tuple_getitem | DrTodd13/numba | python | def tuple_getitem(self, tup, idx):
'\n \n '
fnty = Type.function(self.pyobj, [self.pyobj, self.py_ssize_t])
fn = self._get_function(fnty, name='PyTuple_GetItem')
idx = self.context.get_constant(types.intp, idx)
return self.builder.call(fn, [tup, idx]) |
def tuple_setitem(self, tuple_val, index, item):
'\n Steals a reference to `item`.\n '
fnty = Type.function(Type.int(), [self.pyobj, Type.int(), self.pyobj])
setitem_fn = self._get_function(fnty, name='PyTuple_SetItem')
index = self.context.get_constant(types.int32, index)
self.builder.call(setitem_fn, [tuple_val, index, item]) | 7,715,622,929,549,538,000 | Steals a reference to `item`. | numba/core/pythonapi.py | tuple_setitem | DrTodd13/numba | python | def tuple_setitem(self, tuple_val, index, item):
'\n \n '
fnty = Type.function(Type.int(), [self.pyobj, Type.int(), self.pyobj])
setitem_fn = self._get_function(fnty, name='PyTuple_SetItem')
index = self.context.get_constant(types.int32, index)
self.builder.call(setitem_fn, [tuple_val, index, item]) |
def gil_ensure(self):
'\n Ensure the GIL is acquired.\n The returned value must be consumed by gil_release().\n '
gilptrty = Type.pointer(self.gil_state)
fnty = Type.function(Type.void(), [gilptrty])
fn = self._get_function(fnty, 'numba_gil_ensure')
gilptr = cgutils.alloca_once(self.builder, self.gil_state)
self.builder.call(fn, [gilptr])
return gilptr | 8,383,073,972,365,726,000 | Ensure the GIL is acquired.
The returned value must be consumed by gil_release(). | numba/core/pythonapi.py | gil_ensure | DrTodd13/numba | python | def gil_ensure(self):
'\n Ensure the GIL is acquired.\n The returned value must be consumed by gil_release().\n '
gilptrty = Type.pointer(self.gil_state)
fnty = Type.function(Type.void(), [gilptrty])
fn = self._get_function(fnty, 'numba_gil_ensure')
gilptr = cgutils.alloca_once(self.builder, self.gil_state)
self.builder.call(fn, [gilptr])
return gilptr |
def gil_release(self, gil):
'\n Release the acquired GIL by gil_ensure().\n Must be paired with a gil_ensure().\n '
gilptrty = Type.pointer(self.gil_state)
fnty = Type.function(Type.void(), [gilptrty])
fn = self._get_function(fnty, 'numba_gil_release')
return self.builder.call(fn, [gil]) | -6,266,615,649,002,853,000 | Release the acquired GIL by gil_ensure().
Must be paired with a gil_ensure(). | numba/core/pythonapi.py | gil_release | DrTodd13/numba | python | def gil_release(self, gil):
'\n Release the acquired GIL by gil_ensure().\n Must be paired with a gil_ensure().\n '
gilptrty = Type.pointer(self.gil_state)
fnty = Type.function(Type.void(), [gilptrty])
fn = self._get_function(fnty, 'numba_gil_release')
return self.builder.call(fn, [gil]) |
def save_thread(self):
'\n Release the GIL and return the former thread state\n (an opaque non-NULL pointer).\n '
fnty = Type.function(self.voidptr, [])
fn = self._get_function(fnty, name='PyEval_SaveThread')
return self.builder.call(fn, []) | 8,673,354,977,569,480,000 | Release the GIL and return the former thread state
(an opaque non-NULL pointer). | numba/core/pythonapi.py | save_thread | DrTodd13/numba | python | def save_thread(self):
'\n Release the GIL and return the former thread state\n (an opaque non-NULL pointer).\n '
fnty = Type.function(self.voidptr, [])
fn = self._get_function(fnty, name='PyEval_SaveThread')
return self.builder.call(fn, []) |
def restore_thread(self, thread_state):
'\n Restore the given thread state by reacquiring the GIL.\n '
fnty = Type.function(Type.void(), [self.voidptr])
fn = self._get_function(fnty, name='PyEval_RestoreThread')
self.builder.call(fn, [thread_state]) | -1,338,843,926,509,931,800 | Restore the given thread state by reacquiring the GIL. | numba/core/pythonapi.py | restore_thread | DrTodd13/numba | python | def restore_thread(self, thread_state):
'\n \n '
fnty = Type.function(Type.void(), [self.voidptr])
fn = self._get_function(fnty, name='PyEval_RestoreThread')
self.builder.call(fn, [thread_state]) |
def object_richcompare(self, lhs, rhs, opstr):
'\n Refer to Python source Include/object.h for macros definition\n of the opid.\n '
ops = ['<', '<=', '==', '!=', '>', '>=']
if (opstr in ops):
opid = ops.index(opstr)
fnty = Type.function(self.pyobj, [self.pyobj, self.pyobj, Type.int()])
fn = self._get_function(fnty, name='PyObject_RichCompare')
lopid = self.context.get_constant(types.int32, opid)
return self.builder.call(fn, (lhs, rhs, lopid))
elif (opstr == 'is'):
bitflag = self.builder.icmp(lc.ICMP_EQ, lhs, rhs)
return self.bool_from_bool(bitflag)
elif (opstr == 'is not'):
bitflag = self.builder.icmp(lc.ICMP_NE, lhs, rhs)
return self.bool_from_bool(bitflag)
elif (opstr in ('in', 'not in')):
fnty = Type.function(Type.int(), [self.pyobj, self.pyobj])
fn = self._get_function(fnty, name='PySequence_Contains')
status = self.builder.call(fn, (rhs, lhs))
negone = self.context.get_constant(types.int32, (- 1))
is_good = self.builder.icmp(lc.ICMP_NE, status, negone)
outptr = cgutils.alloca_once_value(self.builder, Constant.null(self.pyobj))
with cgutils.if_likely(self.builder, is_good):
if (opstr == 'not in'):
status = self.builder.not_(status)
truncated = self.builder.trunc(status, Type.int(1))
self.builder.store(self.bool_from_bool(truncated), outptr)
return self.builder.load(outptr)
else:
raise NotImplementedError('Unknown operator {op!r}'.format(op=opstr)) | -6,584,550,791,120,509,000 | Refer to Python source Include/object.h for macros definition
of the opid. | numba/core/pythonapi.py | object_richcompare | DrTodd13/numba | python | def object_richcompare(self, lhs, rhs, opstr):
'\n Refer to Python source Include/object.h for macros definition\n of the opid.\n '
ops = ['<', '<=', '==', '!=', '>', '>=']
if (opstr in ops):
opid = ops.index(opstr)
fnty = Type.function(self.pyobj, [self.pyobj, self.pyobj, Type.int()])
fn = self._get_function(fnty, name='PyObject_RichCompare')
lopid = self.context.get_constant(types.int32, opid)
return self.builder.call(fn, (lhs, rhs, lopid))
elif (opstr == 'is'):
bitflag = self.builder.icmp(lc.ICMP_EQ, lhs, rhs)
return self.bool_from_bool(bitflag)
elif (opstr == 'is not'):
bitflag = self.builder.icmp(lc.ICMP_NE, lhs, rhs)
return self.bool_from_bool(bitflag)
elif (opstr in ('in', 'not in')):
fnty = Type.function(Type.int(), [self.pyobj, self.pyobj])
fn = self._get_function(fnty, name='PySequence_Contains')
status = self.builder.call(fn, (rhs, lhs))
negone = self.context.get_constant(types.int32, (- 1))
is_good = self.builder.icmp(lc.ICMP_NE, status, negone)
outptr = cgutils.alloca_once_value(self.builder, Constant.null(self.pyobj))
with cgutils.if_likely(self.builder, is_good):
if (opstr == 'not in'):
status = self.builder.not_(status)
truncated = self.builder.trunc(status, Type.int(1))
self.builder.store(self.bool_from_bool(truncated), outptr)
return self.builder.load(outptr)
else:
raise NotImplementedError('Unknown operator {op!r}'.format(op=opstr)) |
def object_getitem(self, obj, key):
'\n Return obj[key]\n '
fnty = Type.function(self.pyobj, [self.pyobj, self.pyobj])
fn = self._get_function(fnty, name='PyObject_GetItem')
return self.builder.call(fn, (obj, key)) | 293,015,036,151,651,100 | Return obj[key] | numba/core/pythonapi.py | object_getitem | DrTodd13/numba | python | def object_getitem(self, obj, key):
'\n \n '
fnty = Type.function(self.pyobj, [self.pyobj, self.pyobj])
fn = self._get_function(fnty, name='PyObject_GetItem')
return self.builder.call(fn, (obj, key)) |
def object_setitem(self, obj, key, val):
'\n obj[key] = val\n '
fnty = Type.function(Type.int(), [self.pyobj, self.pyobj, self.pyobj])
fn = self._get_function(fnty, name='PyObject_SetItem')
return self.builder.call(fn, (obj, key, val)) | 3,589,557,668,659,497,000 | obj[key] = val | numba/core/pythonapi.py | object_setitem | DrTodd13/numba | python | def object_setitem(self, obj, key, val):
'\n \n '
fnty = Type.function(Type.int(), [self.pyobj, self.pyobj, self.pyobj])
fn = self._get_function(fnty, name='PyObject_SetItem')
return self.builder.call(fn, (obj, key, val)) |
def object_delitem(self, obj, key):
'\n del obj[key]\n '
fnty = Type.function(Type.int(), [self.pyobj, self.pyobj])
fn = self._get_function(fnty, name='PyObject_DelItem')
return self.builder.call(fn, (obj, key)) | -3,989,628,594,239,658,000 | del obj[key] | numba/core/pythonapi.py | object_delitem | DrTodd13/numba | python | def object_delitem(self, obj, key):
'\n \n '
fnty = Type.function(Type.int(), [self.pyobj, self.pyobj])
fn = self._get_function(fnty, name='PyObject_DelItem')
return self.builder.call(fn, (obj, key)) |
def string_as_string_and_size(self, strobj):
'\n Returns a tuple of ``(ok, buffer, length)``.\n The ``ok`` is i1 value that is set if ok.\n The ``buffer`` is a i8* of the output buffer.\n The ``length`` is a i32/i64 (py_ssize_t) of the length of the buffer.\n '
p_length = cgutils.alloca_once(self.builder, self.py_ssize_t)
fnty = Type.function(self.cstring, [self.pyobj, self.py_ssize_t.as_pointer()])
fname = 'PyUnicode_AsUTF8AndSize'
fn = self._get_function(fnty, name=fname)
buffer = self.builder.call(fn, [strobj, p_length])
ok = self.builder.icmp_unsigned('!=', ir.Constant(buffer.type, None), buffer)
return (ok, buffer, self.builder.load(p_length)) | 7,256,895,209,335,675,000 | Returns a tuple of ``(ok, buffer, length)``.
The ``ok`` is i1 value that is set if ok.
The ``buffer`` is a i8* of the output buffer.
The ``length`` is a i32/i64 (py_ssize_t) of the length of the buffer. | numba/core/pythonapi.py | string_as_string_and_size | DrTodd13/numba | python | def string_as_string_and_size(self, strobj):
'\n Returns a tuple of ``(ok, buffer, length)``.\n The ``ok`` is i1 value that is set if ok.\n The ``buffer`` is a i8* of the output buffer.\n The ``length`` is a i32/i64 (py_ssize_t) of the length of the buffer.\n '
p_length = cgutils.alloca_once(self.builder, self.py_ssize_t)
fnty = Type.function(self.cstring, [self.pyobj, self.py_ssize_t.as_pointer()])
fname = 'PyUnicode_AsUTF8AndSize'
fn = self._get_function(fnty, name=fname)
buffer = self.builder.call(fn, [strobj, p_length])
ok = self.builder.icmp_unsigned('!=', ir.Constant(buffer.type, None), buffer)
return (ok, buffer, self.builder.load(p_length)) |
def string_as_string_size_and_kind(self, strobj):
'\n Returns a tuple of ``(ok, buffer, length, kind)``.\n The ``ok`` is i1 value that is set if ok.\n The ``buffer`` is a i8* of the output buffer.\n The ``length`` is a i32/i64 (py_ssize_t) of the length of the buffer.\n The ``kind`` is a i32 (int32) of the Unicode kind constant\n The ``hash`` is a long/uint64_t (py_hash_t) of the Unicode constant hash\n '
p_length = cgutils.alloca_once(self.builder, self.py_ssize_t)
p_kind = cgutils.alloca_once(self.builder, Type.int())
p_ascii = cgutils.alloca_once(self.builder, Type.int())
p_hash = cgutils.alloca_once(self.builder, self.py_hash_t)
fnty = Type.function(self.cstring, [self.pyobj, self.py_ssize_t.as_pointer(), Type.int().as_pointer(), Type.int().as_pointer(), self.py_hash_t.as_pointer()])
fname = 'numba_extract_unicode'
fn = self._get_function(fnty, name=fname)
buffer = self.builder.call(fn, [strobj, p_length, p_kind, p_ascii, p_hash])
ok = self.builder.icmp_unsigned('!=', ir.Constant(buffer.type, None), buffer)
return (ok, buffer, self.builder.load(p_length), self.builder.load(p_kind), self.builder.load(p_ascii), self.builder.load(p_hash)) | -7,724,148,293,254,188,000 | Returns a tuple of ``(ok, buffer, length, kind)``.
The ``ok`` is i1 value that is set if ok.
The ``buffer`` is a i8* of the output buffer.
The ``length`` is a i32/i64 (py_ssize_t) of the length of the buffer.
The ``kind`` is a i32 (int32) of the Unicode kind constant
The ``hash`` is a long/uint64_t (py_hash_t) of the Unicode constant hash | numba/core/pythonapi.py | string_as_string_size_and_kind | DrTodd13/numba | python | def string_as_string_size_and_kind(self, strobj):
'\n Returns a tuple of ``(ok, buffer, length, kind)``.\n The ``ok`` is i1 value that is set if ok.\n The ``buffer`` is a i8* of the output buffer.\n The ``length`` is a i32/i64 (py_ssize_t) of the length of the buffer.\n The ``kind`` is a i32 (int32) of the Unicode kind constant\n The ``hash`` is a long/uint64_t (py_hash_t) of the Unicode constant hash\n '
p_length = cgutils.alloca_once(self.builder, self.py_ssize_t)
p_kind = cgutils.alloca_once(self.builder, Type.int())
p_ascii = cgutils.alloca_once(self.builder, Type.int())
p_hash = cgutils.alloca_once(self.builder, self.py_hash_t)
fnty = Type.function(self.cstring, [self.pyobj, self.py_ssize_t.as_pointer(), Type.int().as_pointer(), Type.int().as_pointer(), self.py_hash_t.as_pointer()])
fname = 'numba_extract_unicode'
fn = self._get_function(fnty, name=fname)
buffer = self.builder.call(fn, [strobj, p_length, p_kind, p_ascii, p_hash])
ok = self.builder.icmp_unsigned('!=', ir.Constant(buffer.type, None), buffer)
return (ok, buffer, self.builder.load(p_length), self.builder.load(p_kind), self.builder.load(p_ascii), self.builder.load(p_hash)) |
def object_dump(self, obj):
'\n Dump a Python object on C stderr. For debugging purposes.\n '
fnty = Type.function(Type.void(), [self.pyobj])
fn = self._get_function(fnty, name='_PyObject_Dump')
return self.builder.call(fn, (obj,)) | 3,248,548,755,625,892,400 | Dump a Python object on C stderr. For debugging purposes. | numba/core/pythonapi.py | object_dump | DrTodd13/numba | python | def object_dump(self, obj):
'\n \n '
fnty = Type.function(Type.void(), [self.pyobj])
fn = self._get_function(fnty, name='_PyObject_Dump')
return self.builder.call(fn, (obj,)) |
def nrt_meminfo_new_from_pyobject(self, data, pyobj):
'\n Allocate a new MemInfo with data payload borrowed from a python\n object.\n '
mod = self.builder.module
fnty = ir.FunctionType(cgutils.voidptr_t, [cgutils.voidptr_t, cgutils.voidptr_t])
fn = mod.get_or_insert_function(fnty, name='NRT_meminfo_new_from_pyobject')
fn.args[0].add_attribute(lc.ATTR_NO_CAPTURE)
fn.args[1].add_attribute(lc.ATTR_NO_CAPTURE)
fn.return_value.add_attribute('noalias')
return self.builder.call(fn, [data, pyobj]) | -1,801,179,579,027,648,800 | Allocate a new MemInfo with data payload borrowed from a python
object. | numba/core/pythonapi.py | nrt_meminfo_new_from_pyobject | DrTodd13/numba | python | def nrt_meminfo_new_from_pyobject(self, data, pyobj):
'\n Allocate a new MemInfo with data payload borrowed from a python\n object.\n '
mod = self.builder.module
fnty = ir.FunctionType(cgutils.voidptr_t, [cgutils.voidptr_t, cgutils.voidptr_t])
fn = mod.get_or_insert_function(fnty, name='NRT_meminfo_new_from_pyobject')
fn.args[0].add_attribute(lc.ATTR_NO_CAPTURE)
fn.args[1].add_attribute(lc.ATTR_NO_CAPTURE)
fn.return_value.add_attribute('noalias')
return self.builder.call(fn, [data, pyobj]) |
def alloca_buffer(self):
'\n Return a pointer to a stack-allocated, zero-initialized Py_buffer.\n '
ptr = cgutils.alloca_once_value(self.builder, lc.Constant.null(self.py_buffer_t))
return ptr | -67,644,426,667,876,630 | Return a pointer to a stack-allocated, zero-initialized Py_buffer. | numba/core/pythonapi.py | alloca_buffer | DrTodd13/numba | python | def alloca_buffer(self):
'\n \n '
ptr = cgutils.alloca_once_value(self.builder, lc.Constant.null(self.py_buffer_t))
return ptr |
def unserialize(self, structptr):
'\n Unserialize some data. *structptr* should be a pointer to\n a {i8* data, i32 length} structure.\n '
fnty = Type.function(self.pyobj, (self.voidptr, ir.IntType(32), self.voidptr))
fn = self._get_function(fnty, name='numba_unpickle')
ptr = self.builder.extract_value(self.builder.load(structptr), 0)
n = self.builder.extract_value(self.builder.load(structptr), 1)
hashed = self.builder.extract_value(self.builder.load(structptr), 2)
return self.builder.call(fn, (ptr, n, hashed)) | -7,920,713,274,060,330,000 | Unserialize some data. *structptr* should be a pointer to
a {i8* data, i32 length} structure. | numba/core/pythonapi.py | unserialize | DrTodd13/numba | python | def unserialize(self, structptr):
'\n Unserialize some data. *structptr* should be a pointer to\n a {i8* data, i32 length} structure.\n '
fnty = Type.function(self.pyobj, (self.voidptr, ir.IntType(32), self.voidptr))
fn = self._get_function(fnty, name='numba_unpickle')
ptr = self.builder.extract_value(self.builder.load(structptr), 0)
n = self.builder.extract_value(self.builder.load(structptr), 1)
hashed = self.builder.extract_value(self.builder.load(structptr), 2)
return self.builder.call(fn, (ptr, n, hashed)) |
def serialize_uncached(self, obj):
"\n Same as serialize_object(), but don't create a global variable,\n simply return a literal {i8* data, i32 length, i8* hashbuf} structure.\n "
data = serialize.dumps(obj)
assert (len(data) < (2 ** 31))
name = ('.const.pickledata.%s' % (id(obj) if (config.DIFF_IR == 0) else 'DIFF_IR'))
bdata = cgutils.make_bytearray(data)
hashed = cgutils.make_bytearray(hashlib.sha1(data).digest())
arr = self.context.insert_unique_const(self.module, name, bdata)
hasharr = self.context.insert_unique_const(self.module, f'{name}.sha1', hashed)
struct = ir.Constant.literal_struct([arr.bitcast(self.voidptr), ir.Constant(ir.IntType(32), arr.type.pointee.count), hasharr.bitcast(self.voidptr)])
return struct | -5,732,072,445,353,545,000 | Same as serialize_object(), but don't create a global variable,
simply return a literal {i8* data, i32 length, i8* hashbuf} structure. | numba/core/pythonapi.py | serialize_uncached | DrTodd13/numba | python | def serialize_uncached(self, obj):
"\n Same as serialize_object(), but don't create a global variable,\n simply return a literal {i8* data, i32 length, i8* hashbuf} structure.\n "
data = serialize.dumps(obj)
assert (len(data) < (2 ** 31))
name = ('.const.pickledata.%s' % (id(obj) if (config.DIFF_IR == 0) else 'DIFF_IR'))
bdata = cgutils.make_bytearray(data)
hashed = cgutils.make_bytearray(hashlib.sha1(data).digest())
arr = self.context.insert_unique_const(self.module, name, bdata)
hasharr = self.context.insert_unique_const(self.module, f'{name}.sha1', hashed)
struct = ir.Constant.literal_struct([arr.bitcast(self.voidptr), ir.Constant(ir.IntType(32), arr.type.pointee.count), hasharr.bitcast(self.voidptr)])
return struct |
def serialize_object(self, obj):
'\n Serialize the given object in the bitcode, and return it\n as a pointer to a {i8* data, i32 length}, structure constant\n (suitable for passing to unserialize()).\n '
try:
gv = self.module.__serialized[obj]
except KeyError:
struct = self.serialize_uncached(obj)
name = ('.const.picklebuf.%s' % (id(obj) if (config.DIFF_IR == 0) else 'DIFF_IR'))
gv = self.context.insert_unique_const(self.module, name, struct)
self.module.__serialized[obj] = gv
return gv | -4,735,269,214,832,265,000 | Serialize the given object in the bitcode, and return it
as a pointer to a {i8* data, i32 length}, structure constant
(suitable for passing to unserialize()). | numba/core/pythonapi.py | serialize_object | DrTodd13/numba | python | def serialize_object(self, obj):
'\n Serialize the given object in the bitcode, and return it\n as a pointer to a {i8* data, i32 length}, structure constant\n (suitable for passing to unserialize()).\n '
try:
gv = self.module.__serialized[obj]
except KeyError:
struct = self.serialize_uncached(obj)
name = ('.const.picklebuf.%s' % (id(obj) if (config.DIFF_IR == 0) else 'DIFF_IR'))
gv = self.context.insert_unique_const(self.module, name, struct)
self.module.__serialized[obj] = gv
return gv |
def to_native_value(self, typ, obj):
'\n Unbox the Python object as the given Numba type.\n A NativeValue instance is returned.\n '
from numba.core.boxing import unbox_unsupported
impl = _unboxers.lookup(typ.__class__, unbox_unsupported)
c = _UnboxContext(self.context, self.builder, self)
return impl(typ, obj, c) | 3,374,574,107,451,236,400 | Unbox the Python object as the given Numba type.
A NativeValue instance is returned. | numba/core/pythonapi.py | to_native_value | DrTodd13/numba | python | def to_native_value(self, typ, obj):
'\n Unbox the Python object as the given Numba type.\n A NativeValue instance is returned.\n '
from numba.core.boxing import unbox_unsupported
impl = _unboxers.lookup(typ.__class__, unbox_unsupported)
c = _UnboxContext(self.context, self.builder, self)
return impl(typ, obj, c) |
def from_native_value(self, typ, val, env_manager=None):
'\n Box the native value of the given Numba type. A Python object\n pointer is returned (NULL if an error occurred).\n This method steals any native (NRT) reference embedded in *val*.\n '
from numba.core.boxing import box_unsupported
impl = _boxers.lookup(typ.__class__, box_unsupported)
c = _BoxContext(self.context, self.builder, self, env_manager)
return impl(typ, val, c) | 1,871,072,068,320,334,800 | Box the native value of the given Numba type. A Python object
pointer is returned (NULL if an error occurred).
This method steals any native (NRT) reference embedded in *val*. | numba/core/pythonapi.py | from_native_value | DrTodd13/numba | python | def from_native_value(self, typ, val, env_manager=None):
'\n Box the native value of the given Numba type. A Python object\n pointer is returned (NULL if an error occurred).\n This method steals any native (NRT) reference embedded in *val*.\n '
from numba.core.boxing import box_unsupported
impl = _boxers.lookup(typ.__class__, box_unsupported)
c = _BoxContext(self.context, self.builder, self, env_manager)
return impl(typ, val, c) |
def reflect_native_value(self, typ, val, env_manager=None):
'\n Reflect the native value onto its Python original, if any.\n An error bit (as an LLVM value) is returned.\n '
impl = _reflectors.lookup(typ.__class__)
if (impl is None):
return cgutils.false_bit
is_error = cgutils.alloca_once_value(self.builder, cgutils.false_bit)
c = _ReflectContext(self.context, self.builder, self, env_manager, is_error)
impl(typ, val, c)
return self.builder.load(c.is_error) | 6,999,730,225,112,190,000 | Reflect the native value onto its Python original, if any.
An error bit (as an LLVM value) is returned. | numba/core/pythonapi.py | reflect_native_value | DrTodd13/numba | python | def reflect_native_value(self, typ, val, env_manager=None):
'\n Reflect the native value onto its Python original, if any.\n An error bit (as an LLVM value) is returned.\n '
impl = _reflectors.lookup(typ.__class__)
if (impl is None):
return cgutils.false_bit
is_error = cgutils.alloca_once_value(self.builder, cgutils.false_bit)
c = _ReflectContext(self.context, self.builder, self, env_manager, is_error)
impl(typ, val, c)
return self.builder.load(c.is_error) |
def to_native_generator(self, obj, typ):
'\n Extract the generator structure pointer from a generator *obj*\n (a _dynfunc.Generator instance).\n '
gen_ptr_ty = Type.pointer(self.context.get_data_type(typ))
value = self.context.get_generator_state(self.builder, obj, gen_ptr_ty)
return NativeValue(value) | 6,247,722,867,210,801,000 | Extract the generator structure pointer from a generator *obj*
(a _dynfunc.Generator instance). | numba/core/pythonapi.py | to_native_generator | DrTodd13/numba | python | def to_native_generator(self, obj, typ):
'\n Extract the generator structure pointer from a generator *obj*\n (a _dynfunc.Generator instance).\n '
gen_ptr_ty = Type.pointer(self.context.get_data_type(typ))
value = self.context.get_generator_state(self.builder, obj, gen_ptr_ty)
return NativeValue(value) |
def from_native_generator(self, val, typ, env=None):
'\n Make a Numba generator (a _dynfunc.Generator instance) from a\n generator structure pointer *val*.\n *env* is an optional _dynfunc.Environment instance to be wrapped\n in the generator.\n '
llty = self.context.get_data_type(typ)
assert (not llty.is_pointer)
gen_struct_size = self.context.get_abi_sizeof(llty)
gendesc = self.context.get_generator_desc(typ)
genfnty = Type.function(self.pyobj, [self.pyobj, self.pyobj, self.pyobj])
genfn = self._get_function(genfnty, name=gendesc.llvm_cpython_wrapper_name)
finalizerty = Type.function(Type.void(), [self.voidptr])
if typ.has_finalizer:
finalizer = self._get_function(finalizerty, name=gendesc.llvm_finalizer_name)
else:
finalizer = Constant.null(Type.pointer(finalizerty))
fnty = Type.function(self.pyobj, [self.py_ssize_t, self.voidptr, Type.pointer(genfnty), Type.pointer(finalizerty), self.voidptr])
fn = self._get_function(fnty, name='numba_make_generator')
state_size = ir.Constant(self.py_ssize_t, gen_struct_size)
initial_state = self.builder.bitcast(val, self.voidptr)
if (env is None):
env = self.get_null_object()
env = self.builder.bitcast(env, self.voidptr)
return self.builder.call(fn, (state_size, initial_state, genfn, finalizer, env)) | -5,887,978,113,968,761,000 | Make a Numba generator (a _dynfunc.Generator instance) from a
generator structure pointer *val*.
*env* is an optional _dynfunc.Environment instance to be wrapped
in the generator. | numba/core/pythonapi.py | from_native_generator | DrTodd13/numba | python | def from_native_generator(self, val, typ, env=None):
'\n Make a Numba generator (a _dynfunc.Generator instance) from a\n generator structure pointer *val*.\n *env* is an optional _dynfunc.Environment instance to be wrapped\n in the generator.\n '
llty = self.context.get_data_type(typ)
assert (not llty.is_pointer)
gen_struct_size = self.context.get_abi_sizeof(llty)
gendesc = self.context.get_generator_desc(typ)
genfnty = Type.function(self.pyobj, [self.pyobj, self.pyobj, self.pyobj])
genfn = self._get_function(genfnty, name=gendesc.llvm_cpython_wrapper_name)
finalizerty = Type.function(Type.void(), [self.voidptr])
if typ.has_finalizer:
finalizer = self._get_function(finalizerty, name=gendesc.llvm_finalizer_name)
else:
finalizer = Constant.null(Type.pointer(finalizerty))
fnty = Type.function(self.pyobj, [self.py_ssize_t, self.voidptr, Type.pointer(genfnty), Type.pointer(finalizerty), self.voidptr])
fn = self._get_function(fnty, name='numba_make_generator')
state_size = ir.Constant(self.py_ssize_t, gen_struct_size)
initial_state = self.builder.bitcast(val, self.voidptr)
if (env is None):
env = self.get_null_object()
env = self.builder.bitcast(env, self.voidptr)
return self.builder.call(fn, (state_size, initial_state, genfn, finalizer, env)) |
def call_jit_code(self, func, sig, args):
'Calls into Numba jitted code and propagate error using the Python\n calling convention.\n\n Parameters\n ----------\n func : function\n The Python function to be compiled. This function is compiled\n in nopython-mode.\n sig : numba.typing.Signature\n The function signature for *func*.\n args : Sequence[llvmlite.binding.Value]\n LLVM values to use as arguments.\n\n Returns\n -------\n (is_error, res) : 2-tuple of llvmlite.binding.Value.\n is_error : true iff *func* raised an exception.\n res : Returned value from *func* iff *is_error* is false.\n\n If *is_error* is true, this method will adapt the nopython exception\n into a Python exception. Caller should return NULL to Python to\n indicate an error.\n '
builder = self.builder
cres = self.context.compile_subroutine(builder, func, sig)
got_retty = cres.signature.return_type
retty = sig.return_type
if (got_retty != retty):
raise errors.LoweringError(f'''mismatching signature {got_retty} != {retty}.
''')
(status, res) = self.context.call_internal_no_propagate(builder, cres.fndesc, sig, args)
is_error_ptr = cgutils.alloca_once(builder, cgutils.bool_t, zfill=True)
res_type = self.context.get_value_type(sig.return_type)
res_ptr = cgutils.alloca_once(builder, res_type, zfill=True)
with builder.if_else(status.is_error) as (has_err, no_err):
with has_err:
builder.store(status.is_error, is_error_ptr)
self.context.call_conv.raise_error(builder, self, status)
with no_err:
res = imputils.fix_returning_optional(self.context, builder, sig, status, res)
builder.store(res, res_ptr)
is_error = builder.load(is_error_ptr)
res = builder.load(res_ptr)
return (is_error, res) | 3,316,632,359,420,841,000 | Calls into Numba jitted code and propagate error using the Python
calling convention.
Parameters
----------
func : function
The Python function to be compiled. This function is compiled
in nopython-mode.
sig : numba.typing.Signature
The function signature for *func*.
args : Sequence[llvmlite.binding.Value]
LLVM values to use as arguments.
Returns
-------
(is_error, res) : 2-tuple of llvmlite.binding.Value.
is_error : true iff *func* raised an exception.
res : Returned value from *func* iff *is_error* is false.
If *is_error* is true, this method will adapt the nopython exception
into a Python exception. Caller should return NULL to Python to
indicate an error. | numba/core/pythonapi.py | call_jit_code | DrTodd13/numba | python | def call_jit_code(self, func, sig, args):
'Calls into Numba jitted code and propagate error using the Python\n calling convention.\n\n Parameters\n ----------\n func : function\n The Python function to be compiled. This function is compiled\n in nopython-mode.\n sig : numba.typing.Signature\n The function signature for *func*.\n args : Sequence[llvmlite.binding.Value]\n LLVM values to use as arguments.\n\n Returns\n -------\n (is_error, res) : 2-tuple of llvmlite.binding.Value.\n is_error : true iff *func* raised an exception.\n res : Returned value from *func* iff *is_error* is false.\n\n If *is_error* is true, this method will adapt the nopython exception\n into a Python exception. Caller should return NULL to Python to\n indicate an error.\n '
builder = self.builder
cres = self.context.compile_subroutine(builder, func, sig)
got_retty = cres.signature.return_type
retty = sig.return_type
if (got_retty != retty):
raise errors.LoweringError(f'mismatching signature {got_retty} != {retty}.\n')
(status, res) = self.context.call_internal_no_propagate(builder, cres.fndesc, sig, args)
is_error_ptr = cgutils.alloca_once(builder, cgutils.bool_t, zfill=True)
res_type = self.context.get_value_type(sig.return_type)
res_ptr = cgutils.alloca_once(builder, res_type, zfill=True)
with builder.if_else(status.is_error) as (has_err, no_err):
with has_err:
builder.store(status.is_error, is_error_ptr)
self.context.call_conv.raise_error(builder, self, status)
with no_err:
res = imputils.fix_returning_optional(self.context, builder, sig, status, res)
builder.store(res, res_ptr)
is_error = builder.load(is_error_ptr)
res = builder.load(res_ptr)
return (is_error, res) |
def DescribeImageStat(self, request):
'控制台识别统计\n\n :param request: Request instance for DescribeImageStat.\n :type request: :class:`tencentcloud.ims.v20200713.models.DescribeImageStatRequest`\n :rtype: :class:`tencentcloud.ims.v20200713.models.DescribeImageStatResponse`\n\n '
try:
params = request._serialize()
body = self.call('DescribeImageStat', params)
response = json.loads(body)
if ('Error' not in response['Response']):
model = models.DescribeImageStatResponse()
model._deserialize(response['Response'])
return model
else:
code = response['Response']['Error']['Code']
message = response['Response']['Error']['Message']
reqid = response['Response']['RequestId']
raise TencentCloudSDKException(code, message, reqid)
except Exception as e:
if isinstance(e, TencentCloudSDKException):
raise
else:
raise TencentCloudSDKException(e.message, e.message) | -5,802,602,804,795,391,000 | Console recognition statistics
:param request: Request instance for DescribeImageStat.
:type request: :class:`tencentcloud.ims.v20200713.models.DescribeImageStatRequest`
:rtype: :class:`tencentcloud.ims.v20200713.models.DescribeImageStatResponse` | tencentcloud/ims/v20200713/ims_client.py | DescribeImageStat | HelloBarry/tencent_cloud_ops | python | def DescribeImageStat(self, request):
'控制台识别统计\n\n :param request: Request instance for DescribeImageStat.\n :type request: :class:`tencentcloud.ims.v20200713.models.DescribeImageStatRequest`\n :rtype: :class:`tencentcloud.ims.v20200713.models.DescribeImageStatResponse`\n\n '
try:
params = request._serialize()
body = self.call('DescribeImageStat', params)
response = json.loads(body)
if ('Error' not in response['Response']):
model = models.DescribeImageStatResponse()
model._deserialize(response['Response'])
return model
else:
code = response['Response']['Error']['Code']
message = response['Response']['Error']['Message']
reqid = response['Response']['RequestId']
raise TencentCloudSDKException(code, message, reqid)
except Exception as e:
if isinstance(e, TencentCloudSDKException):
raise
else:
raise TencentCloudSDKException(e.message, e.message) |
def DescribeImsList(self, request):
'图片机器审核明细\n\n :param request: Request instance for DescribeImsList.\n :type request: :class:`tencentcloud.ims.v20200713.models.DescribeImsListRequest`\n :rtype: :class:`tencentcloud.ims.v20200713.models.DescribeImsListResponse`\n\n '
try:
params = request._serialize()
body = self.call('DescribeImsList', params)
response = json.loads(body)
if ('Error' not in response['Response']):
model = models.DescribeImsListResponse()
model._deserialize(response['Response'])
return model
else:
code = response['Response']['Error']['Code']
message = response['Response']['Error']['Message']
reqid = response['Response']['RequestId']
raise TencentCloudSDKException(code, message, reqid)
except Exception as e:
if isinstance(e, TencentCloudSDKException):
raise
else:
raise TencentCloudSDKException(e.message, e.message) | 1,024,091,555,683,964,200 | Image machine-moderation details
:param request: Request instance for DescribeImsList.
:type request: :class:`tencentcloud.ims.v20200713.models.DescribeImsListRequest`
:rtype: :class:`tencentcloud.ims.v20200713.models.DescribeImsListResponse` | tencentcloud/ims/v20200713/ims_client.py | DescribeImsList | HelloBarry/tencent_cloud_ops | python | def DescribeImsList(self, request):
'图片机器审核明细\n\n :param request: Request instance for DescribeImsList.\n :type request: :class:`tencentcloud.ims.v20200713.models.DescribeImsListRequest`\n :rtype: :class:`tencentcloud.ims.v20200713.models.DescribeImsListResponse`\n\n '
try:
params = request._serialize()
body = self.call('DescribeImsList', params)
response = json.loads(body)
if ('Error' not in response['Response']):
model = models.DescribeImsListResponse()
model._deserialize(response['Response'])
return model
else:
code = response['Response']['Error']['Code']
message = response['Response']['Error']['Message']
reqid = response['Response']['RequestId']
raise TencentCloudSDKException(code, message, reqid)
except Exception as e:
if isinstance(e, TencentCloudSDKException):
raise
else:
raise TencentCloudSDKException(e.message, e.message) |
def ImageModeration(self, request):
'图片内容检测服务(Image Moderation, IM)能自动扫描图片,识别可能令人反感、不安全或不适宜的内容,同时支持用户配置图片黑名单,打击自定义识别类型的图片。\n\n <div class="rno-api-explorer" style="margin-bottom:20px">\n <div class="rno-api-explorer-inner">\n <div class="rno-api-explorer-hd">\n <div class="rno-api-explorer-title">\n 关于版本迭代的描述\n </div>\n </div>\n <div class="rno-api-explorer-body">\n <div class="rno-api-explorer-cont">\n <p>当前页面版本为图片内容安全2020版本,2020.11.3日前接入的图片内容安全接口为2019版本,在此时间前接入的用户可直接访问以下链接进行维护操作:<a href="https://cloud.tencent.com/document/product/1125/38206" target="_blank">图片内容安全-2019版本</a></p>\n <p>2020版本相对2019版本进行了升级,支持更灵活的多场景业务策略配置以及更丰富的识别回调信息,满足不同业务的识别需求,建议按照2020版本接入指引进行接口升级;同时,2019版本也会持续维护直至用户不再使用为止。</p>\n </div>\n </div>\n </div>\n </div>\n\n :param request: Request instance for ImageModeration.\n :type request: :class:`tencentcloud.ims.v20200713.models.ImageModerationRequest`\n :rtype: :class:`tencentcloud.ims.v20200713.models.ImageModerationResponse`\n\n '
try:
params = request._serialize()
body = self.call('ImageModeration', params)
response = json.loads(body)
if ('Error' not in response['Response']):
model = models.ImageModerationResponse()
model._deserialize(response['Response'])
return model
else:
code = response['Response']['Error']['Code']
message = response['Response']['Error']['Message']
reqid = response['Response']['RequestId']
raise TencentCloudSDKException(code, message, reqid)
except Exception as e:
if isinstance(e, TencentCloudSDKException):
raise
else:
raise TencentCloudSDKException(e.message, e.message) | 2,960,955,142,353,879,600 | The Image Moderation (IM) service automatically scans images to identify content that may be offensive, unsafe, or inappropriate, and also lets users configure an image blocklist to target custom categories of images.
<div class="rno-api-explorer" style="margin-bottom:20px">
<div class="rno-api-explorer-inner">
<div class="rno-api-explorer-hd">
<div class="rno-api-explorer-title">
Notes on version iteration
</div>
</div>
<div class="rno-api-explorer-body">
<div class="rno-api-explorer-cont">
<p>The current page documents the 2020 version of Image Content Moderation; the Image Content Moderation API integrated before November 3, 2020 is the 2019 version. Users who integrated before that date can visit the following link for maintenance: <a href="https://cloud.tencent.com/document/product/1125/38206" target="_blank">Image Content Moderation - 2019 version</a></p>
                <p>The 2020 version is an upgrade over the 2019 version: it supports more flexible multi-scenario business policy configuration and richer recognition callback information to meet different businesses' recognition needs. Upgrading the API according to the 2020 integration guide is recommended; meanwhile, the 2019 version will continue to be maintained until it is no longer in use.</p>
</div>
</div>
</div>
</div>
:param request: Request instance for ImageModeration.
:type request: :class:`tencentcloud.ims.v20200713.models.ImageModerationRequest`
:rtype: :class:`tencentcloud.ims.v20200713.models.ImageModerationResponse` | tencentcloud/ims/v20200713/ims_client.py | ImageModeration | HelloBarry/tencent_cloud_ops | python | def ImageModeration(self, request):
'图片内容检测服务(Image Moderation, IM)能自动扫描图片,识别可能令人反感、不安全或不适宜的内容,同时支持用户配置图片黑名单,打击自定义识别类型的图片。\n\n <div class="rno-api-explorer" style="margin-bottom:20px">\n <div class="rno-api-explorer-inner">\n <div class="rno-api-explorer-hd">\n <div class="rno-api-explorer-title">\n 关于版本迭代的描述\n </div>\n </div>\n <div class="rno-api-explorer-body">\n <div class="rno-api-explorer-cont">\n <p>当前页面版本为图片内容安全2020版本,2020.11.3日前接入的图片内容安全接口为2019版本,在此时间前接入的用户可直接访问以下链接进行维护操作:<a href="https://cloud.tencent.com/document/product/1125/38206" target="_blank">图片内容安全-2019版本</a></p>\n <p>2020版本相对2019版本进行了升级,支持更灵活的多场景业务策略配置以及更丰富的识别回调信息,满足不同业务的识别需求,建议按照2020版本接入指引进行接口升级;同时,2019版本也会持续维护直至用户不再使用为止。</p>\n </div>\n </div>\n </div>\n </div>\n\n :param request: Request instance for ImageModeration.\n :type request: :class:`tencentcloud.ims.v20200713.models.ImageModerationRequest`\n :rtype: :class:`tencentcloud.ims.v20200713.models.ImageModerationResponse`\n\n '
try:
params = request._serialize()
body = self.call('ImageModeration', params)
response = json.loads(body)
if ('Error' not in response['Response']):
model = models.ImageModerationResponse()
model._deserialize(response['Response'])
return model
else:
code = response['Response']['Error']['Code']
message = response['Response']['Error']['Message']
reqid = response['Response']['RequestId']
raise TencentCloudSDKException(code, message, reqid)
except Exception as e:
if isinstance(e, TencentCloudSDKException):
raise
else:
raise TencentCloudSDKException(e.message, e.message) |
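Not a corpus record: a hedged usage sketch of the `ImageModeration` call catalogued above, using the public tencentcloud-sdk-python client. The credentials, region, and the `FileUrl` field are placeholder assumptions, not values taken from the dataset.

```python
import json
from tencentcloud.common import credential
from tencentcloud.ims.v20200713 import ims_client, models

# Placeholder credentials and region -- replace with real values.
cred = credential.Credential("SECRET_ID", "SECRET_KEY")
client = ims_client.ImsClient(cred, "ap-guangzhou")

req = models.ImageModerationRequest()
# "FileUrl" is assumed here; the request accepts other fields per the API docs.
req.from_json_string(json.dumps({"FileUrl": "https://example.com/sample.jpg"}))

resp = client.ImageModeration(req)   # raises TencentCloudSDKException on API errors
print(resp.to_json_string())
```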
def perform_create(self, serializer):
'Sets the patient profile to the logged in user'
serializer.save(user_profile=self.request.user) | -2,440,925,486,062,093,300 | Sets the patient profile to the logged in user | backend/accounts/views.py | perform_create | eliefrancois/project2-diabetesapplication-api | python | def perform_create(self, serializer):
serializer.save(user_profile=self.request.user) |
def run_model(exe_name, namefile, model_ws='./', silent=False, pause=False, report=False, normal_msg='normal termination', use_async=False, cargs=None):
"\n This function will run the model using subprocess.Popen. It\n communicates with the model's stdout asynchronously and reports\n progress to the screen with timestamps\n\n Parameters\n ----------\n exe_name : str\n Executable name (with path, if necessary) to run.\n namefile : str\n Namefile of model to run. The namefile must be the\n filename of the namefile without the path. Namefile can be None\n to allow programs that do not require a control file (name file)\n to be passed as a command line argument.\n model_ws : str\n Path to the location of the namefile. (default is the\n current working directory - './')\n silent : boolean\n Echo run information to screen (default is True).\n pause : boolean, optional\n Pause upon completion (default is False).\n report : boolean, optional\n Save stdout lines to a list (buff) which is returned\n by the method . (default is False).\n normal_msg : str or list\n Normal termination message used to determine if the\n run terminated normally. More than one message can be provided using\n a list. (Default is 'normal termination')\n use_async : boolean\n asynchronously read model stdout and report with timestamps. good for\n models that take long time to run. not good for models that run\n really fast\n cargs : str or list of strings\n additional command line arguments to pass to the executable.\n Default is None\n Returns\n -------\n (success, buff)\n success : boolean\n buff : list of lines of stdout\n\n "
success = False
buff = []
if isinstance(normal_msg, str):
normal_msg = [normal_msg]
for (idx, s) in enumerate(normal_msg):
normal_msg[idx] = s.lower()
exe = which(exe_name)
if (exe is None):
import platform
if (platform.system() in 'Windows'):
if (not exe_name.lower().endswith('.exe')):
exe = which((exe_name + '.exe'))
elif exe_name.lower().endswith('.exe'):
exe = which(exe_name[:(- 4)])
if (exe is None):
raise Exception(f'The program {exe_name} does not exist or is not executable.')
elif (not silent):
print(f'FloPy is using the following executable to run the model: {exe}')
if (namefile is not None):
if (not os.path.isfile(os.path.join(model_ws, namefile))):
raise Exception(f'The namefile for this model does not exist: {namefile}')
def q_output(output, q):
for line in iter(output.readline, b''):
q.put(line)
argv = [exe_name]
if (namefile is not None):
argv.append(namefile)
if (cargs is not None):
if isinstance(cargs, str):
cargs = [cargs]
for t in cargs:
argv.append(t)
proc = Popen(argv, stdout=PIPE, stderr=STDOUT, cwd=model_ws)
if (not use_async):
while True:
line = proc.stdout.readline().decode('utf-8')
if ((line == '') and (proc.poll() is not None)):
break
if line:
for msg in normal_msg:
if (msg in line.lower()):
success = True
break
line = line.rstrip('\r\n')
if (not silent):
print(line)
if report:
buff.append(line)
else:
break
return (success, buff)
q = Queue.Queue()
thread = threading.Thread(target=q_output, args=(proc.stdout, q))
thread.daemon = True
thread.start()
failed_words = ['fail', 'error']
last = datetime.now()
lastsec = 0.0
while True:
try:
line = q.get_nowait()
except Queue.Empty:
pass
else:
if (line == ''):
break
line = line.decode().lower().strip()
if (line != ''):
now = datetime.now()
dt = (now - last)
tsecs = (dt.total_seconds() - lastsec)
line = f'(elapsed:{tsecs})-->{line}'
lastsec = (tsecs + lastsec)
buff.append(line)
if (not silent):
print(line)
for fword in failed_words:
if (fword in line):
success = False
break
if (proc.poll() is not None):
break
proc.wait()
thread.join(timeout=1)
buff.extend(proc.stdout.readlines())
proc.stdout.close()
for line in buff:
for msg in normal_msg:
if (msg in line.lower()):
print('success')
success = True
break
if pause:
input('Press Enter to continue...')
return (success, buff) | -5,725,676,336,444,749,000 | This function will run the model using subprocess.Popen. It
communicates with the model's stdout asynchronously and reports
progress to the screen with timestamps
Parameters
----------
exe_name : str
Executable name (with path, if necessary) to run.
namefile : str
Namefile of model to run. The namefile must be the
filename of the namefile without the path. Namefile can be None
to allow programs that do not require a control file (name file)
to be passed as a command line argument.
model_ws : str
Path to the location of the namefile. (default is the
current working directory - './')
silent : boolean
Echo run information to screen (default is True).
pause : boolean, optional
Pause upon completion (default is False).
report : boolean, optional
Save stdout lines to a list (buff) which is returned
by the method . (default is False).
normal_msg : str or list
Normal termination message used to determine if the
run terminated normally. More than one message can be provided using
a list. (Default is 'normal termination')
use_async : boolean
asynchronously read model stdout and report with timestamps. good for
models that take long time to run. not good for models that run
really fast
cargs : str or list of strings
additional command line arguments to pass to the executable.
Default is None
Returns
-------
(success, buff)
success : boolean
buff : list of lines of stdout | flopy/mbase.py | run_model | andrewcalderwood/flopy | python | def run_model(exe_name, namefile, model_ws='./', silent=False, pause=False, report=False, normal_msg='normal termination', use_async=False, cargs=None):
"\n This function will run the model using subprocess.Popen. It\n communicates with the model's stdout asynchronously and reports\n progress to the screen with timestamps\n\n Parameters\n ----------\n exe_name : str\n Executable name (with path, if necessary) to run.\n namefile : str\n Namefile of model to run. The namefile must be the\n filename of the namefile without the path. Namefile can be None\n to allow programs that do not require a control file (name file)\n to be passed as a command line argument.\n model_ws : str\n Path to the location of the namefile. (default is the\n current working directory - './')\n silent : boolean\n Echo run information to screen (default is True).\n pause : boolean, optional\n Pause upon completion (default is False).\n report : boolean, optional\n Save stdout lines to a list (buff) which is returned\n by the method . (default is False).\n normal_msg : str or list\n Normal termination message used to determine if the\n run terminated normally. More than one message can be provided using\n a list. (Default is 'normal termination')\n use_async : boolean\n asynchronously read model stdout and report with timestamps. good for\n models that take long time to run. not good for models that run\n really fast\n cargs : str or list of strings\n additional command line arguments to pass to the executable.\n Default is None\n Returns\n -------\n (success, buff)\n success : boolean\n buff : list of lines of stdout\n\n "
success = False
buff = []
if isinstance(normal_msg, str):
normal_msg = [normal_msg]
for (idx, s) in enumerate(normal_msg):
normal_msg[idx] = s.lower()
exe = which(exe_name)
if (exe is None):
import platform
if (platform.system() in 'Windows'):
if (not exe_name.lower().endswith('.exe')):
exe = which((exe_name + '.exe'))
elif exe_name.lower().endswith('.exe'):
exe = which(exe_name[:(- 4)])
if (exe is None):
raise Exception(f'The program {exe_name} does not exist or is not executable.')
elif (not silent):
print(f'FloPy is using the following executable to run the model: {exe}')
if (namefile is not None):
if (not os.path.isfile(os.path.join(model_ws, namefile))):
raise Exception(f'The namefile for this model does not exist: {namefile}')
def q_output(output, q):
for line in iter(output.readline, b''):
q.put(line)
argv = [exe_name]
if (namefile is not None):
argv.append(namefile)
if (cargs is not None):
if isinstance(cargs, str):
cargs = [cargs]
for t in cargs:
argv.append(t)
proc = Popen(argv, stdout=PIPE, stderr=STDOUT, cwd=model_ws)
if (not use_async):
while True:
line = proc.stdout.readline().decode('utf-8')
if ((line == '') and (proc.poll() is not None)):
break
if line:
for msg in normal_msg:
if (msg in line.lower()):
success = True
break
line = line.rstrip('\r\n')
if (not silent):
print(line)
if report:
buff.append(line)
else:
break
return (success, buff)
q = Queue.Queue()
thread = threading.Thread(target=q_output, args=(proc.stdout, q))
thread.daemon = True
thread.start()
failed_words = ['fail', 'error']
last = datetime.now()
lastsec = 0.0
while True:
try:
line = q.get_nowait()
except Queue.Empty:
pass
else:
if (line == ''):
break
line = line.decode().lower().strip()
if (line != ''):
now = datetime.now()
dt = (now - last)
tsecs = (dt.total_seconds() - lastsec)
line = f'(elapsed:{tsecs})-->{line}'
lastsec = (tsecs + lastsec)
buff.append(line)
if (not silent):
print(line)
for fword in failed_words:
if (fword in line):
success = False
break
if (proc.poll() is not None):
break
proc.wait()
thread.join(timeout=1)
buff.extend(proc.stdout.readlines())
proc.stdout.close()
for line in buff:
for msg in normal_msg:
if (msg in line.lower()):
print('success')
success = True
break
if pause:
input('Press Enter to continue...')
return (success, buff) |
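A hedged usage sketch of this module-level helper; the executable name and paths are placeholders, and mf2005 is assumed to be discoverable on the system PATH:

from flopy.mbase import run_model

# Run an existing MODFLOW model and collect its stdout.
success, buff = run_model(
    exe_name="mf2005",        # resolved via which(); must be on PATH or given with a path
    namefile="model.nam",     # name file only, without the directory
    model_ws="./model_dir",   # directory that contains model.nam
    silent=True,
    report=True,              # return stdout lines in buff
)
if not success:
    print("\n".join(buff))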
def __init__(self, error, location=''):
'Initialize exception.'
self.message = error
super().__init__(f'{error} ({location})') | 4,871,760,088,630,620,000 | Initialize exception. | flopy/mbase.py | __init__ | andrewcalderwood/flopy | python | def __init__(self, error, location=):
self.message = error
super().__init__(f'{error} ({location})') |
def get_package_list(self, ftype=None):
"\n Get a list of all the package names.\n\n Parameters\n ----------\n ftype : str\n Type of package, 'RIV', 'LPF', etc.\n\n Returns\n -------\n val : list of strings\n Can be used to see what packages are in the model, and can then\n be used with get_package to pull out individual packages.\n\n "
val = []
for pp in self.packagelist:
if (ftype is None):
val.append(pp.name[0].upper())
elif (pp.package_type.lower() == ftype):
val.append(pp.name[0].upper())
return val | 2,463,778,479,065,689,600 | Get a list of all the package names.
Parameters
----------
ftype : str
Type of package, 'RIV', 'LPF', etc.
Returns
-------
val : list of strings
Can be used to see what packages are in the model, and can then
be used with get_package to pull out individual packages. | flopy/mbase.py | get_package_list | andrewcalderwood/flopy | python | def get_package_list(self, ftype=None):
"\n Get a list of all the package names.\n\n Parameters\n ----------\n ftype : str\n Type of package, 'RIV', 'LPF', etc.\n\n Returns\n -------\n val : list of strings\n Can be used to see what packages are in the model, and can then\n be used with get_package to pull out individual packages.\n\n "
val = []
for pp in self.packagelist:
if (ftype is None):
val.append(pp.name[0].upper())
elif (pp.package_type.lower() == ftype):
val.append(pp.name[0].upper())
return val |
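A short sketch of listing the packages attached to a loaded model; the name file and workspace are placeholders:

import flopy

m = flopy.modflow.Modflow.load("model.nam", model_ws="./model_dir")
print(m.get_package_list())             # e.g. ['DIS', 'BAS6', 'LPF', 'WEL', 'PCG']
print(m.get_package_list(ftype="wel"))  # restrict to packages whose package_type is 'wel'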
def _check(self, chk, level=1):
"\n Check model data for common errors.\n\n Parameters\n ----------\n f : str or file handle\n String defining file name or file handle for summary file\n of check method output. If a string is passed a file handle\n is created. If f is None, check method does not write\n results to a summary file. (default is None)\n verbose : bool\n Boolean flag used to determine if check method results are\n written to the screen\n level : int\n Check method analysis level. If level=0, summary checks are\n performed. If level=1, full checks are performed.\n summarize : bool\n Boolean flag used to determine if summary of results is written\n to the screen\n\n Returns\n -------\n None\n\n Examples\n --------\n\n >>> import flopy\n >>> m = flopy.modflow.Modflow.load('model.nam')\n >>> m.check()\n "
results = {}
for p in self.packagelist:
if (chk.package_check_levels.get(p.name[0].lower(), 0) <= level):
results[p.name[0]] = p.check(f=None, verbose=False, level=(level - 1), checktype=chk.__class__)
if (self.version in chk.solver_packages.keys()):
solvers = set(chk.solver_packages[self.version]).intersection(set(self.get_package_list()))
if (not solvers):
chk._add_to_summary('Error', desc='\r No solver package', package='model')
elif (len(list(solvers)) > 1):
for s in solvers:
chk._add_to_summary('Error', desc='\r Multiple solver packages', package=s)
else:
chk.passed.append('Compatible solver package')
for r in results.values():
if ((r is not None) and (r.summary_array is not None)):
chk.summary_array = np.append(chk.summary_array, r.summary_array).view(np.recarray)
chk.passed += [f'{r.package.name[0]} package: {psd}' for psd in r.passed]
chk.summarize()
return chk | 2,392,933,745,744,231,400 | Check model data for common errors.
Parameters
----------
f : str or file handle
String defining file name or file handle for summary file
of check method output. If a string is passed a file handle
is created. If f is None, check method does not write
results to a summary file. (default is None)
verbose : bool
Boolean flag used to determine if check method results are
written to the screen
level : int
Check method analysis level. If level=0, summary checks are
performed. If level=1, full checks are performed.
summarize : bool
Boolean flag used to determine if summary of results is written
to the screen
Returns
-------
None
Examples
--------
>>> import flopy
>>> m = flopy.modflow.Modflow.load('model.nam')
>>> m.check() | flopy/mbase.py | _check | andrewcalderwood/flopy | python | def _check(self, chk, level=1):
"\n Check model data for common errors.\n\n Parameters\n ----------\n f : str or file handle\n String defining file name or file handle for summary file\n of check method output. If a string is passed a file handle\n is created. If f is None, check method does not write\n results to a summary file. (default is None)\n verbose : bool\n Boolean flag used to determine if check method results are\n written to the screen\n level : int\n Check method analysis level. If level=0, summary checks are\n performed. If level=1, full checks are performed.\n summarize : bool\n Boolean flag used to determine if summary of results is written\n to the screen\n\n Returns\n -------\n None\n\n Examples\n --------\n\n >>> import flopy\n >>> m = flopy.modflow.Modflow.load('model.nam')\n >>> m.check()\n "
results = {}
for p in self.packagelist:
if (chk.package_check_levels.get(p.name[0].lower(), 0) <= level):
results[p.name[0]] = p.check(f=None, verbose=False, level=(level - 1), checktype=chk.__class__)
if (self.version in chk.solver_packages.keys()):
solvers = set(chk.solver_packages[self.version]).intersection(set(self.get_package_list()))
if (not solvers):
chk._add_to_summary('Error', desc='\r No solver package', package='model')
elif (len(list(solvers)) > 1):
for s in solvers:
chk._add_to_summary('Error', desc='\r Multiple solver packages', package=s)
else:
chk.passed.append('Compatible solver package')
for r in results.values():
if ((r is not None) and (r.summary_array is not None)):
chk.summary_array = np.append(chk.summary_array, r.summary_array).view(np.recarray)
chk.passed += [f'{r.package.name[0]} package: {psd}' for psd in r.passed]
chk.summarize()
return chk |
def __init__(self, modelname='modflowtest', namefile_ext='nam', exe_name='mf2k.exe', model_ws=None, structured=True, verbose=False, **kwargs):
'Initialize BaseModel.'
super().__init__()
self.__name = modelname
self.namefile_ext = (namefile_ext or '')
self._namefile = ((self.__name + '.') + self.namefile_ext)
self._packagelist = []
self.heading = ''
self.exe_name = exe_name
self._verbose = verbose
self.external_path = None
self.external_extension = 'ref'
if (model_ws is None):
model_ws = os.getcwd()
if (not os.path.exists(model_ws)):
try:
os.makedirs(model_ws)
except:
print(f'''
{model_ws} not valid, workspace-folder was changed to {os.getcwd()}
''')
model_ws = os.getcwd()
self._model_ws = model_ws
self.structured = structured
self.pop_key_list = []
self.cl_params = ''
xll = kwargs.pop('xll', None)
yll = kwargs.pop('yll', None)
self._xul = kwargs.pop('xul', None)
self._yul = kwargs.pop('yul', None)
self._rotation = kwargs.pop('rotation', 0.0)
self._proj4_str = kwargs.pop('proj4_str', None)
self._start_datetime = kwargs.pop('start_datetime', '1-1-1970')
self._modelgrid = Grid(proj4=self._proj4_str, xoff=xll, yoff=yll, angrot=self._rotation)
self._modeltime = None
self.__onunit__ = 10
self.array_free_format = True
self.free_format_input = True
self.parameter_load = False
self.array_format = None
self.external_fnames = []
self.external_units = []
self.external_binflag = []
self.external_output = []
self.package_units = []
self._next_ext_unit = None
self.output_fnames = []
self.output_units = []
self.output_binflag = []
self.output_packages = []
return | -2,770,028,243,060,520,400 | Initialize BaseModel. | flopy/mbase.py | __init__ | andrewcalderwood/flopy | python | def __init__(self, modelname='modflowtest', namefile_ext='nam', exe_name='mf2k.exe', model_ws=None, structured=True, verbose=False, **kwargs):
super().__init__()
self.__name = modelname
self.namefile_ext = (namefile_ext or '')
self._namefile = ((self.__name + '.') + self.namefile_ext)
self._packagelist = []
self.heading = ''
self.exe_name = exe_name
self._verbose = verbose
self.external_path = None
self.external_extension = 'ref'
if (model_ws is None):
model_ws = os.getcwd()
if (not os.path.exists(model_ws)):
try:
os.makedirs(model_ws)
except:
print(f'''
{model_ws} not valid, workspace-folder was changed to {os.getcwd()}
''')
model_ws = os.getcwd()
self._model_ws = model_ws
self.structured = structured
self.pop_key_list = []
self.cl_params = ''
xll = kwargs.pop('xll', None)
yll = kwargs.pop('yll', None)
self._xul = kwargs.pop('xul', None)
self._yul = kwargs.pop('yul', None)
self._rotation = kwargs.pop('rotation', 0.0)
self._proj4_str = kwargs.pop('proj4_str', None)
self._start_datetime = kwargs.pop('start_datetime', '1-1-1970')
self._modelgrid = Grid(proj4=self._proj4_str, xoff=xll, yoff=yll, angrot=self._rotation)
self._modeltime = None
self.__onunit__ = 10
self.array_free_format = True
self.free_format_input = True
self.parameter_load = False
self.array_format = None
self.external_fnames = []
self.external_units = []
self.external_binflag = []
self.external_output = []
self.package_units = []
self._next_ext_unit = None
self.output_fnames = []
self.output_units = []
self.output_binflag = []
self.output_packages = []
return |
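BaseModel is not normally instantiated directly; a hedged sketch of how the keyword arguments popped here (xll, yll, rotation, proj4_str, start_datetime) reach it through a subclass such as flopy.modflow.Modflow, with placeholder coordinate values:

import flopy

m = flopy.modflow.Modflow(
    modelname="demo",
    model_ws="./demo_ws",      # created on the fly if it does not exist
    exe_name="mf2005",
    xll=619653.0,              # lower-left corner offsets (placeholders)
    yll=3353277.0,
    rotation=15.0,             # grid rotation in degrees
    proj4_str="EPSG:32614",
    start_datetime="1-1-2020",
)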
def next_ext_unit(self):
'\n Function to encapsulate next_ext_unit attribute\n\n '
next_unit = (self._next_ext_unit + 1)
self._next_ext_unit += 1
return next_unit | 1,073,153,636,386,303,000 | Function to encapsulate next_ext_unit attribute | flopy/mbase.py | next_ext_unit | andrewcalderwood/flopy | python | def next_ext_unit(self):
'\n \n\n '
next_unit = (self._next_ext_unit + 1)
self._next_ext_unit += 1
return next_unit |
def export(self, f, **kwargs):
'\n Method to export a model to netcdf or shapefile based on the\n extension of the file name (.shp for shapefile, .nc for netcdf)\n\n Parameters\n ----------\n f : str\n filename\n kwargs : keyword arguments\n modelgrid : flopy.discretization.Grid instance\n user supplied modelgrid which can be used for exporting\n in lieu of the modelgrid associated with the model object\n\n Returns\n -------\n None or Netcdf object\n\n '
from .export import utils
return utils.model_export(f, self, **kwargs) | -9,079,781,501,208,759,000 | Method to export a model to netcdf or shapefile based on the
extension of the file name (.shp for shapefile, .nc for netcdf)
Parameters
----------
f : str
filename
kwargs : keyword arguments
modelgrid : flopy.discretization.Grid instance
user supplied modelgrid which can be used for exporting
in lieu of the modelgrid associated with the model object
Returns
-------
None or Netcdf object | flopy/mbase.py | export | andrewcalderwood/flopy | python | def export(self, f, **kwargs):
'\n Method to export a model to netcdf or shapefile based on the\n extension of the file name (.shp for shapefile, .nc for netcdf)\n\n Parameters\n ----------\n f : str\n filename\n kwargs : keyword arguments\n modelgrid : flopy.discretization.Grid instance\n user supplied modelgrid which can be used for exporting\n in lieu of the modelgrid associated with the model object\n\n Returns\n -------\n None or Netcdf object\n\n '
from .export import utils
return utils.model_export(f, self, **kwargs) |
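A hedged sketch of the export hook; shapefile output needs an optional dependency such as pyshp and netCDF output needs netCDF4, so both calls are illustrative only:

import flopy

m = flopy.modflow.Modflow.load("model.nam", model_ws="./model_dir")
m.export("model_grid.shp")   # .shp extension routes to the shapefile exporter
# m.export("model.nc")       # .nc extension routes to the netCDF exporter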
def add_package(self, p):
'\n Add a package.\n\n Parameters\n ----------\n p : Package object\n\n '
for (idx, u) in enumerate(p.unit_number):
if (u != 0):
if ((u in self.package_units) or (u in self.external_units)):
try:
pn = p.name[idx]
except:
pn = p.name
if self.verbose:
print(f'''
WARNING:
unit {u} of package {pn} already in use.''')
self.package_units.append(u)
for (i, pp) in enumerate(self.packagelist):
if pp.allowDuplicates:
continue
elif isinstance(p, type(pp)):
if self.verbose:
print(f'''
WARNING:
Two packages of the same type, Replacing existing '{p.name[0]}' package.''')
self.packagelist[i] = p
return
if self.verbose:
print('adding Package: ', p.name[0])
self.packagelist.append(p) | 7,081,921,362,581,270,000 | Add a package.
Parameters
----------
p : Package object | flopy/mbase.py | add_package | andrewcalderwood/flopy | python | def add_package(self, p):
'\n Add a package.\n\n Parameters\n ----------\n p : Package object\n\n '
for (idx, u) in enumerate(p.unit_number):
if (u != 0):
if ((u in self.package_units) or (u in self.external_units)):
try:
pn = p.name[idx]
except:
pn = p.name
if self.verbose:
print(f'''
WARNING:
unit {u} of package {pn} already in use.''')
self.package_units.append(u)
for (i, pp) in enumerate(self.packagelist):
if pp.allowDuplicates:
continue
elif isinstance(p, type(pp)):
if self.verbose:
print(f'''
WARNING:
Two packages of the same type, Replacing existing '{p.name[0]}' package.''')
self.packagelist[i] = p
return
if self.verbose:
print('adding Package: ', p.name[0])
self.packagelist.append(p) |
def remove_package(self, pname):
"\n Remove a package from this model\n\n Parameters\n ----------\n pname : string\n Name of the package, such as 'RIV', 'BAS6', etc.\n\n "
for (i, pp) in enumerate(self.packagelist):
if (pname.upper() in pp.name):
if self.verbose:
print('removing Package: ', pp.name)
p = self.packagelist.pop(i)
for iu in p.unit_number:
if (iu in self.package_units):
self.package_units.remove(iu)
return
raise StopIteration((('Package name ' + pname) + ' not found in Package list')) | 1,808,959,202,091,257,300 | Remove a package from this model
Parameters
----------
pname : string
Name of the package, such as 'RIV', 'BAS6', etc. | flopy/mbase.py | remove_package | andrewcalderwood/flopy | python | def remove_package(self, pname):
"\n Remove a package from this model\n\n Parameters\n ----------\n pname : string\n Name of the package, such as 'RIV', 'BAS6', etc.\n\n "
for (i, pp) in enumerate(self.packagelist):
if (pname.upper() in pp.name):
if self.verbose:
print('removing Package: ', pp.name)
p = self.packagelist.pop(i)
for iu in p.unit_number:
if (iu in self.package_units):
self.package_units.remove(iu)
return
raise StopIteration((('Package name ' + pname) + ' not found in Package list')) |
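A short sketch of dropping a package before rewriting the input files; the method raises StopIteration when the name is not found, so it is guarded with has_package here:

import flopy

m = flopy.modflow.Modflow.load("model.nam", model_ws="./model_dir")
if m.has_package("WEL"):
    m.remove_package("WEL")   # also frees the package unit number
m.write_input()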
def __getattr__(self, item):
'\n __getattr__ - syntactic sugar\n\n Parameters\n ----------\n item : str\n 3 character package name (case insensitive) or "sr" to access\n the SpatialReference instance of the ModflowDis object\n\n\n Returns\n -------\n sr : SpatialReference instance\n pp : Package object\n Package object of type :class:`flopy.pakbase.Package`\n\n Note\n ----\n if self.dis is not None, then the spatial reference instance is updated\n using self.dis.delr, self.dis.delc, and self.dis.lenuni before being\n returned\n '
if ((item == 'output_packages') or (not hasattr(self, 'output_packages'))):
raise AttributeError(item)
if (item == 'tr'):
if (self.dis is not None):
return self.dis.tr
else:
return None
if (item == 'nper'):
if (self.dis is not None):
return self.dis.nper
else:
return 0
if (item == 'start_datetime'):
if (self.dis is not None):
return self.dis.start_datetime
else:
return None
if ((item == '_packagelist') or (item == 'packagelist')):
raise AttributeError(item)
pckg = self.get_package(item)
if ((pckg is not None) or (item in self.mfnam_packages)):
return pckg
if (item == 'modelgrid'):
return
raise AttributeError(item) | 8,095,475,259,741,164,000 | __getattr__ - syntactic sugar
Parameters
----------
item : str
3 character package name (case insensitive) or "sr" to access
the SpatialReference instance of the ModflowDis object
Returns
-------
sr : SpatialReference instance
pp : Package object
Package object of type :class:`flopy.pakbase.Package`
Note
----
if self.dis is not None, then the spatial reference instance is updated
using self.dis.delr, self.dis.delc, and self.dis.lenuni before being
returned | flopy/mbase.py | __getattr__ | andrewcalderwood/flopy | python | def __getattr__(self, item):
'\n __getattr__ - syntactic sugar\n\n Parameters\n ----------\n item : str\n 3 character package name (case insensitive) or "sr" to access\n the SpatialReference instance of the ModflowDis object\n\n\n Returns\n -------\n sr : SpatialReference instance\n pp : Package object\n Package object of type :class:`flopy.pakbase.Package`\n\n Note\n ----\n if self.dis is not None, then the spatial reference instance is updated\n using self.dis.delr, self.dis.delc, and self.dis.lenuni before being\n returned\n '
if ((item == 'output_packages') or (not hasattr(self, 'output_packages'))):
raise AttributeError(item)
if (item == 'tr'):
if (self.dis is not None):
return self.dis.tr
else:
return None
if (item == 'nper'):
if (self.dis is not None):
return self.dis.nper
else:
return 0
if (item == 'start_datetime'):
if (self.dis is not None):
return self.dis.start_datetime
else:
return None
if ((item == '_packagelist') or (item == 'packagelist')):
raise AttributeError(item)
pckg = self.get_package(item)
if ((pckg is not None) or (item in self.mfnam_packages)):
return pckg
if (item == 'modelgrid'):
return
raise AttributeError(item) |
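The syntactic sugar above lets attached packages and a few DIS-derived properties be read as attributes; a brief sketch with a placeholder model:

import flopy

m = flopy.modflow.Modflow.load("model.nam", model_ws="./model_dir")
dis = m.dis               # same object as m.get_package('DIS')
print(m.nper)             # number of stress periods, taken from the DIS package
print(m.start_datetime)   # also proxied through the DIS package
wel = m.wel               # None if WEL is absent but listed in mfnam_packages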
def add_output_file(self, unit, fname=None, extension='cbc', binflag=True, package=None):
'\n Add an ascii or binary output file for a package\n\n Parameters\n ----------\n unit : int\n unit number of external array\n fname : str\n filename of external array. (default is None)\n extension : str\n extension to use for the cell-by-cell file. Only used if fname\n is None. (default is cbc)\n binflag : bool\n boolean flag indicating if the output file is a binary file.\n Default is True\n package : str\n string that defines the package the output file is attached to.\n Default is None\n\n '
add_cbc = False
if (unit > 0):
add_cbc = True
if (abs(unit) in self.external_units):
idx = self.external_units.index(abs(unit))
if (fname is None):
fname = os.path.basename(self.external_fnames[idx])
binflag = self.external_binflag[idx]
self.remove_external(unit=abs(unit))
if (abs(unit) in self.output_units):
add_cbc = False
idx = self.output_units.index(abs(unit))
if (binflag is not self.output_binflag[idx]):
add_cbc = True
if add_cbc:
self.remove_output(unit=abs(unit))
elif (package is not None):
self.output_packages[idx].append(package)
if add_cbc:
if (fname is None):
fname = f'{self.name}.{extension}'
if (fname in self.output_fnames):
idx = self.output_fnames.index(fname)
iut = self.output_units[idx]
if (iut != unit):
if (package is None):
fname = f'{self.name}.{unit}.{extension}'
else:
fname = f'{self.name}.{package}.{extension}'
else:
fname = os.path.basename(fname)
self.add_output(fname, unit, binflag=binflag, package=package)
return | -1,661,629,637,262,426,400 | Add an ascii or binary output file for a package
Parameters
----------
unit : int
unit number of external array
fname : str
filename of external array. (default is None)
extension : str
extension to use for the cell-by-cell file. Only used if fname
is None. (default is cbc)
binflag : bool
boolean flag indicating if the output file is a binary file.
Default is True
package : str
string that defines the package the output file is attached to.
Default is None | flopy/mbase.py | add_output_file | andrewcalderwood/flopy | python | def add_output_file(self, unit, fname=None, extension='cbc', binflag=True, package=None):
'\n Add an ascii or binary output file for a package\n\n Parameters\n ----------\n unit : int\n unit number of external array\n fname : str\n filename of external array. (default is None)\n extension : str\n extension to use for the cell-by-cell file. Only used if fname\n is None. (default is cbc)\n binflag : bool\n boolean flag indicating if the output file is a binary file.\n Default is True\n package : str\n string that defines the package the output file is attached to.\n Default is None\n\n '
add_cbc = False
if (unit > 0):
add_cbc = True
if (abs(unit) in self.external_units):
idx = self.external_units.index(abs(unit))
if (fname is None):
fname = os.path.basename(self.external_fnames[idx])
binflag = self.external_binflag[idx]
self.remove_external(unit=abs(unit))
if (abs(unit) in self.output_units):
add_cbc = False
idx = self.output_units.index(abs(unit))
if (binflag is not self.output_binflag[idx]):
add_cbc = True
if add_cbc:
self.remove_output(unit=abs(unit))
elif (package is not None):
self.output_packages[idx].append(package)
if add_cbc:
if (fname is None):
fname = f'{self.name}.{extension}'
if (fname in self.output_fnames):
idx = self.output_fnames.index(fname)
iut = self.output_units[idx]
if (iut != unit):
if (package is None):
fname = f'{self.name}.{unit}.{extension}'
else:
fname = f'{self.name}.{package}.{extension}'
else:
fname = os.path.basename(fname)
self.add_output(fname, unit, binflag=binflag, package=package)
return |
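A hedged sketch of registering a binary cell-by-cell budget file for a package; the unit number and package tag are placeholders:

import flopy

m = flopy.modflow.Modflow("demo", model_ws="./demo_ws")
m.add_output_file(53, extension="cbc", binflag=True, package="LPF")
print(m.output_fnames)    # ['demo.cbc']
print(m.output_units)     # [53]
print(m.output_binflag)   # [True]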
def add_output(self, fname, unit, binflag=False, package=None):
'\n Assign an external array so that it will be listed as a DATA or\n DATA(BINARY) entry in the name file. This will allow an outside\n file package to refer to it.\n\n Parameters\n ----------\n fname : str\n filename of external array\n unit : int\n unit number of external array\n binflag : boolean\n binary or not. (default is False)\n\n '
if (fname in self.output_fnames):
if self.verbose:
print(f'BaseModel.add_output() warning: replacing existing filename {fname}')
idx = self.output_fnames.index(fname)
if self.verbose:
self._output_msg(idx, add=False)
self.output_fnames.pop(idx)
self.output_units.pop(idx)
self.output_binflag.pop(idx)
self.output_packages.pop(idx)
self.output_fnames.append(fname)
self.output_units.append(unit)
self.output_binflag.append(binflag)
if (package is not None):
self.output_packages.append([package])
else:
self.output_packages.append([])
if self.verbose:
self._output_msg((- 1), add=True)
return | 3,642,976,816,825,424,400 | Assign an external array so that it will be listed as a DATA or
DATA(BINARY) entry in the name file. This will allow an outside
file package to refer to it.
Parameters
----------
fname : str
filename of external array
unit : int
unit number of external array
binflag : boolean
binary or not. (default is False) | flopy/mbase.py | add_output | andrewcalderwood/flopy | python | def add_output(self, fname, unit, binflag=False, package=None):
'\n Assign an external array so that it will be listed as a DATA or\n DATA(BINARY) entry in the name file. This will allow an outside\n file package to refer to it.\n\n Parameters\n ----------\n fname : str\n filename of external array\n unit : int\n unit number of external array\n binflag : boolean\n binary or not. (default is False)\n\n '
if (fname in self.output_fnames):
if self.verbose:
print(f'BaseModel.add_output() warning: replacing existing filename {fname}')
idx = self.output_fnames.index(fname)
if self.verbose:
self._output_msg(idx, add=False)
self.output_fnames.pop(idx)
self.output_units.pop(idx)
self.output_binflag.pop(idx)
self.output_packages.pop(idx)
self.output_fnames.append(fname)
self.output_units.append(unit)
self.output_binflag.append(binflag)
if (package is not None):
self.output_packages.append([package])
else:
self.output_packages.append([])
if self.verbose:
self._output_msg((- 1), add=True)
return |
def remove_output(self, fname=None, unit=None):
'\n Remove an output file from the model by specifying either the\n file name or the unit number.\n\n Parameters\n ----------\n fname : str\n filename of output array\n unit : int\n unit number of output array\n\n '
if (fname is not None):
for (i, e) in enumerate(self.output_fnames):
if (fname in e):
if self.verbose:
self._output_msg(i, add=False)
self.output_fnames.pop(i)
self.output_units.pop(i)
self.output_binflag.pop(i)
self.output_packages.pop(i)
elif (unit is not None):
for (i, u) in enumerate(self.output_units):
if (u == unit):
if self.verbose:
self._output_msg(i, add=False)
self.output_fnames.pop(i)
self.output_units.pop(i)
self.output_binflag.pop(i)
self.output_packages.pop(i)
else:
msg = ' either fname or unit must be passed to remove_output()'
raise Exception(msg)
return | -5,755,566,078,614,364,000 | Remove an output file from the model by specifying either the
file name or the unit number.
Parameters
----------
fname : str
filename of output array
unit : int
unit number of output array | flopy/mbase.py | remove_output | andrewcalderwood/flopy | python | def remove_output(self, fname=None, unit=None):
'\n Remove an output file from the model by specifying either the\n file name or the unit number.\n\n Parameters\n ----------\n fname : str\n filename of output array\n unit : int\n unit number of output array\n\n '
if (fname is not None):
for (i, e) in enumerate(self.output_fnames):
if (fname in e):
if self.verbose:
self._output_msg(i, add=False)
self.output_fnames.pop(i)
self.output_units.pop(i)
self.output_binflag.pop(i)
self.output_packages.pop(i)
elif (unit is not None):
for (i, u) in enumerate(self.output_units):
if (u == unit):
if self.verbose:
self._output_msg(i, add=False)
self.output_fnames.pop(i)
self.output_units.pop(i)
self.output_binflag.pop(i)
self.output_packages.pop(i)
else:
msg = ' either fname or unit must be passed to remove_output()'
raise Exception(msg)
return |
def get_output(self, fname=None, unit=None):
'\n Get an output file from the model by specifying either the\n file name or the unit number.\n\n Parameters\n ----------\n fname : str\n filename of output array\n unit : int\n unit number of output array\n\n '
if (fname is not None):
for (i, e) in enumerate(self.output_fnames):
if (fname in e):
return self.output_units[i]
return None
elif (unit is not None):
for (i, u) in enumerate(self.output_units):
if (u == unit):
return self.output_fnames[i]
return None
else:
msg = ' either fname or unit must be passed to get_output()'
raise Exception(msg)
return | 7,864,843,972,654,850,000 | Get an output file from the model by specifying either the
file name or the unit number.
Parameters
----------
fname : str
filename of output array
unit : int
unit number of output array | flopy/mbase.py | get_output | andrewcalderwood/flopy | python | def get_output(self, fname=None, unit=None):
'\n Get an output file from the model by specifying either the\n file name or the unit number.\n\n Parameters\n ----------\n fname : str\n filename of output array\n unit : int\n unit number of output array\n\n '
if (fname is not None):
for (i, e) in enumerate(self.output_fnames):
if (fname in e):
return self.output_units[i]
return None
elif (unit is not None):
for (i, u) in enumerate(self.output_units):
if (u == unit):
return self.output_fnames[i]
return None
else:
msg = ' either fname or unit must be passed to get_output()'
raise Exception(msg)
return |
def set_output_attribute(self, fname=None, unit=None, attr=None):
'\n Set a variable in an output file from the model by specifying either\n the file name or the unit number and a dictionary with attributes\n to change.\n\n Parameters\n ----------\n fname : str\n filename of output array\n unit : int\n unit number of output array\n\n '
idx = None
if (fname is not None):
for (i, e) in enumerate(self.output_fnames):
if (fname in e):
idx = i
break
return None
elif (unit is not None):
for (i, u) in enumerate(self.output_units):
if (u == unit):
idx = i
break
else:
msg = ' either fname or unit must be passed to set_output_attribute()'
raise Exception(msg)
if (attr is not None):
if (idx is not None):
for (key, value) in attr.items():
if (key == 'binflag'):
self.output_binflag[idx] = value
elif (key == 'fname'):
self.output_fnames[idx] = value
elif (key == 'unit'):
self.output_units[idx] = value
return | 8,292,622,623,377,403,000 | Set a variable in an output file from the model by specifying either
the file name or the unit number and a dictionary with attributes
to change.
Parameters
----------
fname : str
filename of output array
unit : int
unit number of output array | flopy/mbase.py | set_output_attribute | andrewcalderwood/flopy | python | def set_output_attribute(self, fname=None, unit=None, attr=None):
'\n Set a variable in an output file from the model by specifying either\n the file name or the unit number and a dictionary with attributes\n to change.\n\n Parameters\n ----------\n fname : str\n filename of output array\n unit : int\n unit number of output array\n\n '
idx = None
if (fname is not None):
for (i, e) in enumerate(self.output_fnames):
if (fname in e):
idx = i
break
return None
elif (unit is not None):
for (i, u) in enumerate(self.output_units):
if (u == unit):
idx = i
break
else:
msg = ' either fname or unit must be passed to set_output_attribute()'
raise Exception(msg)
if (attr is not None):
if (idx is not None):
for (key, value) in attr.items():
if (key == 'binflag'):
self.output_binflag[idx] = value
elif (key == 'fname'):
self.output_fnames[idx] = value
elif (key == 'unit'):
self.output_units[idx] = value
return |
def get_output_attribute(self, fname=None, unit=None, attr=None):
'\n        Get an attribute for an output file from the model by specifying either\n        the file name or the unit number.\n\n        Parameters\n        ----------\n        fname : str\n            filename of output array\n        unit : int\n            unit number of output array\n\n        '
idx = None
if (fname is not None):
for (i, e) in enumerate(self.output_fnames):
if (fname in e):
idx = i
break
return None
elif (unit is not None):
for (i, u) in enumerate(self.output_units):
if (u == unit):
idx = i
break
else:
raise Exception(' either fname or unit must be passed to get_output_attribute()')
v = None
if (attr is not None):
if (idx is not None):
if (attr == 'binflag'):
v = self.output_binflag[idx]
elif (attr == 'fname'):
v = self.output_fnames[idx]
elif (attr == 'unit'):
v = self.output_units[idx]
return v | -6,229,907,047,886,764,000 | Get an attribute for an output file from the model by specifying either
the file name or the unit number.
Parameters
----------
fname : str
filename of output array
unit : int
unit number of output array | flopy/mbase.py | get_output_attribute | andrewcalderwood/flopy | python | def get_output_attribute(self, fname=None, unit=None, attr=None):
'\n        Get an attribute for an output file from the model by specifying either\n        the file name or the unit number.\n\n        Parameters\n        ----------\n        fname : str\n            filename of output array\n        unit : int\n            unit number of output array\n\n        '
idx = None
if (fname is not None):
for (i, e) in enumerate(self.output_fnames):
if (fname in e):
idx = i
break
return None
elif (unit is not None):
for (i, u) in enumerate(self.output_units):
if (u == unit):
idx = i
break
else:
raise Exception(' either fname or unit must be passed to get_output_attribute()')
v = None
if (attr is not None):
if (idx is not None):
if (attr == 'binflag'):
v = self.output_binflag[idx]
elif (attr == 'fname'):
v = self.output_fnames[idx]
elif (attr == 'unit'):
v = self.output_units[idx]
return v |
def add_external(self, fname, unit, binflag=False, output=False):
'\n Assign an external array so that it will be listed as a DATA or\n DATA(BINARY) entry in the name file. This will allow an outside\n file package to refer to it.\n\n Parameters\n ----------\n fname : str\n filename of external array\n unit : int\n unit number of external array\n binflag : boolean\n binary or not. (default is False)\n\n '
if (fname in self.external_fnames):
if self.verbose:
print(f'BaseModel.add_external() warning: replacing existing filename {fname}')
idx = self.external_fnames.index(fname)
self.external_fnames.pop(idx)
self.external_units.pop(idx)
self.external_binflag.pop(idx)
self.external_output.pop(idx)
if (unit in self.external_units):
if self.verbose:
msg = f'BaseModel.add_external() warning: replacing existing unit {unit}'
print(msg)
idx = self.external_units.index(unit)
self.external_fnames.pop(idx)
self.external_units.pop(idx)
self.external_binflag.pop(idx)
self.external_output.pop(idx)
self.external_fnames.append(fname)
self.external_units.append(unit)
self.external_binflag.append(binflag)
self.external_output.append(output)
return | 8,922,479,497,916,789,000 | Assign an external array so that it will be listed as a DATA or
DATA(BINARY) entry in the name file. This will allow an outside
file package to refer to it.
Parameters
----------
fname : str
filename of external array
unit : int
unit number of external array
binflag : boolean
binary or not. (default is False) | flopy/mbase.py | add_external | andrewcalderwood/flopy | python | def add_external(self, fname, unit, binflag=False, output=False):
'\n Assign an external array so that it will be listed as a DATA or\n DATA(BINARY) entry in the name file. This will allow an outside\n file package to refer to it.\n\n Parameters\n ----------\n fname : str\n filename of external array\n unit : int\n unit number of external array\n binflag : boolean\n binary or not. (default is False)\n\n '
if (fname in self.external_fnames):
if self.verbose:
print(f'BaseModel.add_external() warning: replacing existing filename {fname}')
idx = self.external_fnames.index(fname)
self.external_fnames.pop(idx)
self.external_units.pop(idx)
self.external_binflag.pop(idx)
self.external_output.pop(idx)
if (unit in self.external_units):
if self.verbose:
msg = f'BaseModel.add_external() warning: replacing existing unit {unit}'
print(msg)
idx = self.external_units.index(unit)
self.external_fnames.pop(idx)
self.external_units.pop(idx)
self.external_binflag.pop(idx)
self.external_output.pop(idx)
self.external_fnames.append(fname)
self.external_units.append(unit)
self.external_binflag.append(binflag)
self.external_output.append(output)
return |
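A hedged sketch of registering an external array so it is written as a DATA entry in the name file; the file name and unit number are placeholders:

import flopy

m = flopy.modflow.Modflow("demo", model_ws="./demo_ws")
m.add_external("rech_layer1.ref", unit=151, binflag=False)
print(m.external_fnames)   # ['rech_layer1.ref']
print(m.external_units)    # [151]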
def remove_external(self, fname=None, unit=None):
'\n Remove an external file from the model by specifying either the\n file name or the unit number.\n\n Parameters\n ----------\n fname : str\n filename of external array\n unit : int\n unit number of external array\n\n '
plist = []
if (fname is not None):
for (i, e) in enumerate(self.external_fnames):
if (fname in e):
plist.append(i)
elif (unit is not None):
for (i, u) in enumerate(self.external_units):
if (u == unit):
plist.append(i)
else:
msg = ' either fname or unit must be passed to remove_external()'
raise Exception(msg)
j = 0
for i in plist:
ipos = (i - j)
self.external_fnames.pop(ipos)
self.external_units.pop(ipos)
self.external_binflag.pop(ipos)
self.external_output.pop(ipos)
j += 1
return | 3,337,016,843,183,099,400 | Remove an external file from the model by specifying either the
file name or the unit number.
Parameters
----------
fname : str
filename of external array
unit : int
unit number of external array | flopy/mbase.py | remove_external | andrewcalderwood/flopy | python | def remove_external(self, fname=None, unit=None):
'\n Remove an external file from the model by specifying either the\n file name or the unit number.\n\n Parameters\n ----------\n fname : str\n filename of external array\n unit : int\n unit number of external array\n\n '
plist = []
if (fname is not None):
for (i, e) in enumerate(self.external_fnames):
if (fname in e):
plist.append(i)
elif (unit is not None):
for (i, u) in enumerate(self.external_units):
if (u == unit):
plist.append(i)
else:
msg = ' either fname or unit must be passed to remove_external()'
raise Exception(msg)
j = 0
for i in plist:
ipos = (i - j)
self.external_fnames.pop(ipos)
self.external_units.pop(ipos)
self.external_binflag.pop(ipos)
self.external_output.pop(ipos)
j += 1
return |
def add_existing_package(self, filename, ptype=None, copy_to_model_ws=True):
'\n Add an existing package to a model instance.\n\n Parameters\n ----------\n\n filename : str\n the name of the file to add as a package\n ptype : optional\n the model package type (e.g. "lpf", "wel", etc). If None,\n then the file extension of the filename arg is used\n copy_to_model_ws : bool\n flag to copy the package file into the model_ws directory.\n\n Returns\n -------\n None\n\n '
if (ptype is None):
ptype = filename.split('.')[(- 1)]
ptype = str(ptype).upper()
class Obj():
pass
fake_package = Obj()
fake_package.write_file = (lambda : None)
fake_package.name = [ptype]
fake_package.extension = [filename.split('.')[(- 1)]]
fake_package.unit_number = [self.next_ext_unit()]
if copy_to_model_ws:
base_filename = os.path.split(filename)[(- 1)]
fake_package.file_name = [base_filename]
shutil.copy2(filename, os.path.join(self.model_ws, base_filename))
else:
fake_package.file_name = [filename]
fake_package.allowDuplicates = True
self.add_package(fake_package) | 3,980,745,397,456,637,400 | Add an existing package to a model instance.
Parameters
----------
filename : str
the name of the file to add as a package
ptype : optional
the model package type (e.g. "lpf", "wel", etc). If None,
then the file extension of the filename arg is used
copy_to_model_ws : bool
flag to copy the package file into the model_ws directory.
Returns
-------
None | flopy/mbase.py | add_existing_package | andrewcalderwood/flopy | python | def add_existing_package(self, filename, ptype=None, copy_to_model_ws=True):
'\n Add an existing package to a model instance.\n\n Parameters\n ----------\n\n filename : str\n the name of the file to add as a package\n ptype : optional\n the model package type (e.g. "lpf", "wel", etc). If None,\n then the file extension of the filename arg is used\n copy_to_model_ws : bool\n flag to copy the package file into the model_ws directory.\n\n Returns\n -------\n None\n\n '
if (ptype is None):
ptype = filename.split('.')[(- 1)]
ptype = str(ptype).upper()
class Obj():
pass
fake_package = Obj()
fake_package.write_file = (lambda : None)
fake_package.name = [ptype]
fake_package.extension = [filename.split('.')[(- 1)]]
fake_package.unit_number = [self.next_ext_unit()]
if copy_to_model_ws:
base_filename = os.path.split(filename)[(- 1)]
fake_package.file_name = [base_filename]
shutil.copy2(filename, os.path.join(self.model_ws, base_filename))
else:
fake_package.file_name = [filename]
fake_package.allowDuplicates = True
self.add_package(fake_package) |
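A hedged sketch of attaching an already-written package file without parsing it; the source path is a placeholder:

import flopy

m = flopy.modflow.Modflow("demo", model_ws="./demo_ws")
# Copy a hand-edited well file into the workspace and list it in the name file.
m.add_existing_package("external_files/custom.wel", ptype="WEL", copy_to_model_ws=True)
m.write_name_file()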
def get_name_file_entries(self):
'\n Get a string representation of the name file.\n\n Parameters\n ----------\n\n '
lines = []
for p in self.packagelist:
for i in range(len(p.name)):
if (p.unit_number[i] == 0):
continue
s = f'{p.name[i]:14s} {p.unit_number[i]:5d} {p.file_name[i]}'
lines.append(s)
return ('\n'.join(lines) + '\n') | 163,448,600,108,196,480 | Get a string representation of the name file.
Parameters
---------- | flopy/mbase.py | get_name_file_entries | andrewcalderwood/flopy | python | def get_name_file_entries(self):
'\n Get a string representation of the name file.\n\n Parameters\n ----------\n\n '
lines = []
for p in self.packagelist:
for i in range(len(p.name)):
if (p.unit_number[i] == 0):
continue
s = f'{p.name[i]:14s} {p.unit_number[i]:5d} {p.file_name[i]}'
lines.append(s)
return ('\n'.join(lines) + '\n') |
def has_package(self, name):
"\n Check if package name is in package list.\n\n Parameters\n ----------\n name : str\n Name of the package, 'DIS', 'BAS6', etc. (case-insensitive).\n\n Returns\n -------\n bool\n True if package name exists, otherwise False if not found.\n\n "
if (not name):
raise ValueError('invalid package name')
name = name.upper()
for p in self.packagelist:
for pn in p.name:
if (pn.upper() == name):
return True
return False | 9,068,293,731,091,111,000 | Check if package name is in package list.
Parameters
----------
name : str
Name of the package, 'DIS', 'BAS6', etc. (case-insensitive).
Returns
-------
bool
True if package name exists, otherwise False if not found. | flopy/mbase.py | has_package | andrewcalderwood/flopy | python | def has_package(self, name):
"\n Check if package name is in package list.\n\n Parameters\n ----------\n name : str\n Name of the package, 'DIS', 'BAS6', etc. (case-insensitive).\n\n Returns\n -------\n bool\n True if package name exists, otherwise False if not found.\n\n "
if (not name):
raise ValueError('invalid package name')
name = name.upper()
for p in self.packagelist:
for pn in p.name:
if (pn.upper() == name):
return True
return False |
def get_package(self, name):
"\n Get a package.\n\n Parameters\n ----------\n name : str\n Name of the package, 'RIV', 'LPF', etc. (case-insensitive).\n\n Returns\n -------\n pp : Package object\n Package object of type :class:`flopy.pakbase.Package`\n\n "
if (not name):
raise ValueError('invalid package name')
name = name.upper()
for pp in self.packagelist:
if (pp.name[0].upper() == name):
return pp
return None | -3,867,293,366,743,963,000 | Get a package.
Parameters
----------
name : str
Name of the package, 'RIV', 'LPF', etc. (case-insensitive).
Returns
-------
pp : Package object
Package object of type :class:`flopy.pakbase.Package` | flopy/mbase.py | get_package | andrewcalderwood/flopy | python | def get_package(self, name):
"\n Get a package.\n\n Parameters\n ----------\n name : str\n Name of the package, 'RIV', 'LPF', etc. (case-insensitive).\n\n Returns\n -------\n pp : Package object\n Package object of type :class:`flopy.pakbase.Package`\n\n "
if (not name):
raise ValueError('invalid package name')
name = name.upper()
for pp in self.packagelist:
if (pp.name[0].upper() == name):
return pp
return None |
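A short sketch combining has_package and get_package; the name file is a placeholder and the attribute access assumes a standard ModflowRiv package:

import flopy

m = flopy.modflow.Modflow.load("model.nam", model_ws="./model_dir")
if m.has_package("riv"):              # case-insensitive lookup
    riv = m.get_package("RIV")
    print(riv.stress_period_data)     # package attributes are then available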
def change_model_ws(self, new_pth=None, reset_external=False):
'\n Change the model work space.\n\n Parameters\n ----------\n new_pth : str\n Location of new model workspace. If this path does not exist,\n it will be created. (default is None, which will be assigned to\n the present working directory).\n\n Returns\n -------\n val : list of strings\n Can be used to see what packages are in the model, and can then\n be used with get_package to pull out individual packages.\n\n '
if (new_pth is None):
new_pth = os.getcwd()
if (not os.path.exists(new_pth)):
try:
print(f'''
creating model workspace...
{new_pth}''')
os.makedirs(new_pth)
except:
raise OSError(f'{new_pth} not valid, workspace-folder')
old_pth = self._model_ws
self._model_ws = new_pth
if self.verbose:
print(f'''
changing model workspace...
{new_pth}''')
for pp in self.packagelist:
pp.fn_path = os.path.join(self.model_ws, pp.file_name[0])
if (hasattr(self, 'external_path') and (self.external_path is not None) and (not os.path.exists(os.path.join(self._model_ws, self.external_path)))):
pth = os.path.join(self._model_ws, self.external_path)
os.makedirs(pth)
if reset_external:
self._reset_external(pth, old_pth)
elif reset_external:
self._reset_external(self._model_ws, old_pth)
return None | 3,764,117,741,947,156,000 | Change the model work space.
Parameters
----------
new_pth : str
Location of new model workspace. If this path does not exist,
it will be created. (default is None, which will be assigned to
the present working directory).
Returns
-------
val : list of strings
Can be used to see what packages are in the model, and can then
be used with get_package to pull out individual packages. | flopy/mbase.py | change_model_ws | andrewcalderwood/flopy | python | def change_model_ws(self, new_pth=None, reset_external=False):
'\n Change the model work space.\n\n Parameters\n ----------\n new_pth : str\n Location of new model workspace. If this path does not exist,\n it will be created. (default is None, which will be assigned to\n the present working directory).\n\n Returns\n -------\n val : list of strings\n Can be used to see what packages are in the model, and can then\n be used with get_package to pull out individual packages.\n\n '
if (new_pth is None):
new_pth = os.getcwd()
if (not os.path.exists(new_pth)):
try:
print(f'''
creating model workspace...
{new_pth}''')
os.makedirs(new_pth)
except:
raise OSError(f'{new_pth} not valid, workspace-folder')
old_pth = self._model_ws
self._model_ws = new_pth
if self.verbose:
print(f'''
changing model workspace...
{new_pth}''')
for pp in self.packagelist:
pp.fn_path = os.path.join(self.model_ws, pp.file_name[0])
if (hasattr(self, 'external_path') and (self.external_path is not None) and (not os.path.exists(os.path.join(self._model_ws, self.external_path)))):
pth = os.path.join(self._model_ws, self.external_path)
os.makedirs(pth)
if reset_external:
self._reset_external(pth, old_pth)
elif reset_external:
self._reset_external(self._model_ws, old_pth)
return None |
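A hedged sketch of moving a loaded model into a fresh workspace before writing a scenario; the paths are placeholders:

import flopy

m = flopy.modflow.Modflow.load("model.nam", model_ws="./original_ws")
m.change_model_ws("./scenario_1", reset_external=True)
m.write_input()   # all package files are now written under ./scenario_1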
def _set_name(self, value):
'\n Set model name\n\n Parameters\n ----------\n value : str\n Name to assign to model.\n\n '
self.__name = str(value)
self.namefile = ((self.__name + '.') + self.namefile_ext)
for p in self.packagelist:
for i in range(len(p.extension)):
p.file_name[i] = ((self.__name + '.') + p.extension[i])
p.fn_path = os.path.join(self.model_ws, p.file_name[0]) | -533,643,582,773,746,240 | Set model name
Parameters
----------
value : str
Name to assign to model. | flopy/mbase.py | _set_name | andrewcalderwood/flopy | python | def _set_name(self, value):
'\n Set model name\n\n Parameters\n ----------\n value : str\n Name to assign to model.\n\n '
self.__name = str(value)
self.namefile = ((self.__name + '.') + self.namefile_ext)
for p in self.packagelist:
for i in range(len(p.extension)):
p.file_name[i] = ((self.__name + '.') + p.extension[i])
p.fn_path = os.path.join(self.model_ws, p.file_name[0]) |
def run_model(self, silent=False, pause=False, report=False, normal_msg='normal termination'):
"\n This method will run the model using subprocess.Popen.\n\n Parameters\n ----------\n silent : boolean\n Echo run information to screen (default is True).\n pause : boolean, optional\n Pause upon completion (default is False).\n report : boolean, optional\n Save stdout lines to a list (buff) which is returned\n by the method . (default is False).\n normal_msg : str\n Normal termination message used to determine if the\n run terminated normally. (default is 'normal termination')\n\n Returns\n -------\n (success, buff)\n success : boolean\n buff : list of lines of stdout\n\n "
return run_model(self.exe_name, self.namefile, model_ws=self.model_ws, silent=silent, pause=pause, report=report, normal_msg=normal_msg) | 741,409,845,867,963,900 | This method will run the model using subprocess.Popen.
Parameters
----------
silent : boolean
Echo run information to screen (default is True).
pause : boolean, optional
Pause upon completion (default is False).
report : boolean, optional
Save stdout lines to a list (buff) which is returned
by the method . (default is False).
normal_msg : str
Normal termination message used to determine if the
run terminated normally. (default is 'normal termination')
Returns
-------
(success, buff)
success : boolean
buff : list of lines of stdout | flopy/mbase.py | run_model | andrewcalderwood/flopy | python | def run_model(self, silent=False, pause=False, report=False, normal_msg='normal termination'):
"\n This method will run the model using subprocess.Popen.\n\n Parameters\n ----------\n silent : boolean\n Echo run information to screen (default is True).\n pause : boolean, optional\n Pause upon completion (default is False).\n report : boolean, optional\n Save stdout lines to a list (buff) which is returned\n by the method . (default is False).\n normal_msg : str\n Normal termination message used to determine if the\n run terminated normally. (default is 'normal termination')\n\n Returns\n -------\n (success, buff)\n success : boolean\n buff : list of lines of stdout\n\n "
return run_model(self.exe_name, self.namefile, model_ws=self.model_ws, silent=silent, pause=pause, report=report, normal_msg=normal_msg) |
def write_input(self, SelPackList=False, check=False):
'\n Write the input.\n\n Parameters\n ----------\n SelPackList : False or list of packages\n\n '
if check:
self.check(f=f'{self.name}.chk', verbose=self.verbose, level=1)
if (self.parameter_load and (not self.free_format_input)):
if self.verbose:
print('\nResetting free_format_input to True to preserve the precision of the parameter data.')
self.free_format_input = True
if self.verbose:
print('\nWriting packages:')
if (SelPackList == False):
for p in self.packagelist:
if self.verbose:
print(' Package: ', p.name[0])
try:
p.write_file(check=False)
except TypeError:
p.write_file()
else:
for pon in SelPackList:
for (i, p) in enumerate(self.packagelist):
if (pon in p.name):
if self.verbose:
print(' Package: ', p.name[0])
try:
p.write_file(check=False)
except TypeError:
p.write_file()
break
if self.verbose:
print(' ')
self.write_name_file()
return | -1,098,992,733,097,818,900 | Write the input.
Parameters
----------
SelPackList : False or list of packages | flopy/mbase.py | write_input | andrewcalderwood/flopy | python | def write_input(self, SelPackList=False, check=False):
'\n Write the input.\n\n Parameters\n ----------\n SelPackList : False or list of packages\n\n '
if check:
self.check(f=f'{self.name}.chk', verbose=self.verbose, level=1)
if (self.parameter_load and (not self.free_format_input)):
if self.verbose:
print('\nResetting free_format_input to True to preserve the precision of the parameter data.')
self.free_format_input = True
if self.verbose:
print('\nWriting packages:')
if (SelPackList == False):
for p in self.packagelist:
if self.verbose:
print(' Package: ', p.name[0])
try:
p.write_file(check=False)
except TypeError:
p.write_file()
else:
for pon in SelPackList:
for (i, p) in enumerate(self.packagelist):
if (pon in p.name):
if self.verbose:
print(' Package: ', p.name[0])
try:
p.write_file(check=False)
except TypeError:
p.write_file()
break
if self.verbose:
print(' ')
self.write_name_file()
return |
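A minimal end-to-end sketch built from standard packages, assuming mf2005 is installed; the grid dimensions and package defaults are placeholders rather than a meaningful model:

import flopy

m = flopy.modflow.Modflow("demo", model_ws="./demo_ws", exe_name="mf2005")
flopy.modflow.ModflowDis(m, nlay=1, nrow=10, ncol=10)
flopy.modflow.ModflowBas(m)
flopy.modflow.ModflowLpf(m)
flopy.modflow.ModflowPcg(m)
flopy.modflow.ModflowOc(m)
m.write_input()                              # packages first, then the name file
success, buff = m.run_model(silent=True, report=True)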
def write_name_file(self):
'\n Every Package needs its own writenamefile function\n\n '
raise Exception('IMPLEMENTATION ERROR: writenamefile must be overloaded') | -2,690,977,166,812,218,000 | Every Package needs its own writenamefile function | flopy/mbase.py | write_name_file | andrewcalderwood/flopy | python | def write_name_file(self):
'\n \n\n '
raise Exception('IMPLEMENTATION ERROR: writenamefile must be overloaded') |
def set_model_units(self):
'\n Every model needs its own set_model_units method\n\n '
raise Exception('IMPLEMENTATION ERROR: set_model_units must be overloaded') | 6,248,661,952,445,160,000 | Every model needs its own set_model_units method | flopy/mbase.py | set_model_units | andrewcalderwood/flopy | python | def set_model_units(self):
'\n \n\n '
raise Exception('IMPLEMENTATION ERROR: set_model_units must be overloaded') |
@property
def name(self):
'\n Get model name\n\n Returns\n -------\n name : str\n name of model\n\n '
return copy.deepcopy(self.__name) | 7,491,200,860,821,816,000 | Get model name
Returns
-------
name : str
name of model | flopy/mbase.py | name | andrewcalderwood/flopy | python | @property
def name(self):
'\n Get model name\n\n Returns\n -------\n name : str\n name of model\n\n '
return copy.deepcopy(self.__name) |
def add_pop_key_list(self, key):
'\n Add a external file unit number to a list that will be used to remove\n model output (typically binary) files from ext_unit_dict.\n\n Parameters\n ----------\n key : int\n file unit number\n\n Returns\n -------\n\n Examples\n --------\n\n '
if (key not in self.pop_key_list):
self.pop_key_list.append(key) | 2,541,391,672,276,510,700 | Add a external file unit number to a list that will be used to remove
model output (typically binary) files from ext_unit_dict.
Parameters
----------
key : int
file unit number
Returns
-------
Examples
-------- | flopy/mbase.py | add_pop_key_list | andrewcalderwood/flopy | python | def add_pop_key_list(self, key):
'\n Add a external file unit number to a list that will be used to remove\n model output (typically binary) files from ext_unit_dict.\n\n Parameters\n ----------\n key : int\n file unit number\n\n Returns\n -------\n\n Examples\n --------\n\n '
if (key not in self.pop_key_list):
self.pop_key_list.append(key) |
def check(self, f=None, verbose=True, level=1):
"\n Check model data for common errors.\n\n Parameters\n ----------\n f : str or file handle\n String defining file name or file handle for summary file\n of check method output. If a string is passed a file handle\n is created. If f is None, check method does not write\n results to a summary file. (default is None)\n verbose : bool\n Boolean flag used to determine if check method results are\n written to the screen\n level : int\n Check method analysis level. If level=0, summary checks are\n performed. If level=1, full checks are performed.\n\n Returns\n -------\n None\n\n Examples\n --------\n\n >>> import flopy\n >>> m = flopy.modflow.Modflow.load('model.nam')\n >>> m.check()\n "
chk = utils.check(self, f=f, verbose=verbose, level=level)
package_units = {}
duplicate_units = {}
for p in self.packagelist:
for i in range(len(p.name)):
if (p.unit_number[i] != 0):
if (p.unit_number[i] in package_units.values()):
duplicate_units[p.name[i]] = p.unit_number[i]
otherpackage = [k for (k, v) in package_units.items() if (v == p.unit_number[i])][0]
duplicate_units[otherpackage] = p.unit_number[i]
if (len(duplicate_units) > 0):
for (k, v) in duplicate_units.items():
chk._add_to_summary('Error', package=k, value=v, desc='unit number conflict')
else:
chk.passed.append('Unit number conflicts')
return self._check(chk, level) | -1,067,797,224,698,142,700 | Check model data for common errors.
Parameters
----------
f : str or file handle
String defining file name or file handle for summary file
of check method output. If a string is passed a file handle
is created. If f is None, check method does not write
results to a summary file. (default is None)
verbose : bool
Boolean flag used to determine if check method results are
written to the screen
level : int
Check method analysis level. If level=0, summary checks are
performed. If level=1, full checks are performed.
Returns
-------
None
Examples
--------
>>> import flopy
>>> m = flopy.modflow.Modflow.load('model.nam')
>>> m.check() | flopy/mbase.py | check | andrewcalderwood/flopy | python | def check(self, f=None, verbose=True, level=1):
"\n Check model data for common errors.\n\n Parameters\n ----------\n f : str or file handle\n String defining file name or file handle for summary file\n of check method output. If a string is passed a file handle\n is created. If f is None, check method does not write\n results to a summary file. (default is None)\n verbose : bool\n Boolean flag used to determine if check method results are\n written to the screen\n level : int\n Check method analysis level. If level=0, summary checks are\n performed. If level=1, full checks are performed.\n\n Returns\n -------\n None\n\n Examples\n --------\n\n >>> import flopy\n >>> m = flopy.modflow.Modflow.load('model.nam')\n >>> m.check()\n "
chk = utils.check(self, f=f, verbose=verbose, level=level)
package_units = {}
duplicate_units = {}
for p in self.packagelist:
for i in range(len(p.name)):
if (p.unit_number[i] != 0):
if (p.unit_number[i] in package_units.values()):
duplicate_units[p.name[i]] = p.unit_number[i]
otherpackage = [k for (k, v) in package_units.items() if (v == p.unit_number[i])][0]
duplicate_units[otherpackage] = p.unit_number[i]
if (len(duplicate_units) > 0):
for (k, v) in duplicate_units.items():
chk._add_to_summary('Error', package=k, value=v, desc='unit number conflict')
else:
chk.passed.append('Unit number conflicts')
return self._check(chk, level) |
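Building on the doctest already in the docstring above, a sketch of the summary-file variant; 'model.chk' is an assumed output name.

# Hypothetical sketch: write full check results to a summary file instead of the screen.
import flopy

m = flopy.modflow.Modflow.load('model.nam')
m.check(f='model.chk', verbose=False, level=1)   # level=1 runs the full checks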
def plot(self, SelPackList=None, **kwargs):
"\n Plot 2-D, 3-D, transient 2-D, and stress period list (MfList)\n model input data\n\n Parameters\n ----------\n SelPackList : bool or list\n List of of packages to plot. If SelPackList=None all packages\n are plotted. (default is None)\n **kwargs : dict\n filename_base : str\n Base file name that will be used to automatically generate file\n names for output image files. Plots will be exported as image\n files if file_name_base is not None. (default is None)\n file_extension : str\n Valid matplotlib.pyplot file extension for savefig(). Only used\n if filename_base is not None. (default is 'png')\n mflay : int\n MODFLOW zero-based layer number to return. If None, then all\n all layers will be included. (default is None)\n kper : int\n MODFLOW zero-based stress period number to return.\n (default is zero)\n key : str\n MfList dictionary key. (default is None)\n\n Returns\n ----------\n axes : list\n Empty list is returned if filename_base is not None. Otherwise\n a list of matplotlib.pyplot.axis are returned.\n\n See Also\n --------\n\n Notes\n -----\n\n Examples\n --------\n >>> import flopy\n >>> ml = flopy.modflow.Modflow.load('test.nam')\n >>> ml.plot()\n\n "
from flopy.plot import PlotUtilities
axes = PlotUtilities._plot_model_helper(self, SelPackList=SelPackList, **kwargs)
return axes | -8,959,228,950,895,840,000 | Plot 2-D, 3-D, transient 2-D, and stress period list (MfList)
model input data
Parameters
----------
SelPackList : bool or list
List of packages to plot. If SelPackList=None all packages
are plotted. (default is None)
**kwargs : dict
filename_base : str
Base file name that will be used to automatically generate file
names for output image files. Plots will be exported as image
files if file_name_base is not None. (default is None)
file_extension : str
Valid matplotlib.pyplot file extension for savefig(). Only used
if filename_base is not None. (default is 'png')
mflay : int
MODFLOW zero-based layer number to return. If None, then all
layers will be included. (default is None)
kper : int
MODFLOW zero-based stress period number to return.
(default is zero)
key : str
MfList dictionary key. (default is None)
Returns
----------
axes : list
Empty list is returned if filename_base is not None. Otherwise
a list of matplotlib.pyplot.axis are returned.
See Also
--------
Notes
-----
Examples
--------
>>> import flopy
>>> ml = flopy.modflow.Modflow.load('test.nam')
>>> ml.plot() | flopy/mbase.py | plot | andrewcalderwood/flopy | python | def plot(self, SelPackList=None, **kwargs):
"\n Plot 2-D, 3-D, transient 2-D, and stress period list (MfList)\n model input data\n\n Parameters\n ----------\n SelPackList : bool or list\n List of of packages to plot. If SelPackList=None all packages\n are plotted. (default is None)\n **kwargs : dict\n filename_base : str\n Base file name that will be used to automatically generate file\n names for output image files. Plots will be exported as image\n files if file_name_base is not None. (default is None)\n file_extension : str\n Valid matplotlib.pyplot file extension for savefig(). Only used\n if filename_base is not None. (default is 'png')\n mflay : int\n MODFLOW zero-based layer number to return. If None, then all\n all layers will be included. (default is None)\n kper : int\n MODFLOW zero-based stress period number to return.\n (default is zero)\n key : str\n MfList dictionary key. (default is None)\n\n Returns\n ----------\n axes : list\n Empty list is returned if filename_base is not None. Otherwise\n a list of matplotlib.pyplot.axis are returned.\n\n See Also\n --------\n\n Notes\n -----\n\n Examples\n --------\n >>> import flopy\n >>> ml = flopy.modflow.Modflow.load('test.nam')\n >>> ml.plot()\n\n "
from flopy.plot import PlotUtilities
axes = PlotUtilities._plot_model_helper(self, SelPackList=SelPackList, **kwargs)
return axes |
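A sketch of the keyword options documented above; the base file name and layer index are illustrative assumptions.

# Hypothetical sketch: interactive axes vs. exported image files.
import flopy

ml = flopy.modflow.Modflow.load('test.nam')
axes = ml.plot()                            # all packages, returns matplotlib axes
ml.plot(SelPackList=['LPF'], mflay=0,       # one package, one layer,
        filename_base='test',               # exported as image files derived from
        file_extension='png')               # filename_base; an empty list comes back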
def to_shapefile(self, filename, package_names=None, **kwargs):
'\n Wrapper function for writing a shapefile for the model grid. If\n package_names is not None, then search through the requested packages\n looking for arrays that can be added to the shapefile as attributes\n\n Parameters\n ----------\n filename : string\n name of the shapefile to write\n package_names : list of package names (e.g. ["dis","lpf"])\n Packages to export data arrays to shapefile. (default is None)\n\n Returns\n -------\n None\n\n Examples\n --------\n >>> import flopy\n >>> m = flopy.modflow.Modflow()\n >>> m.to_shapefile(\'model.shp\', SelPackList)\n\n '
warnings.warn('to_shapefile() is deprecated. use .export()')
self.export(filename, package_names=package_names)
return | -2,578,493,498,774,151,700 | Wrapper function for writing a shapefile for the model grid. If
package_names is not None, then search through the requested packages
looking for arrays that can be added to the shapefile as attributes
Parameters
----------
filename : string
name of the shapefile to write
package_names : list of package names (e.g. ["dis","lpf"])
Packages to export data arrays to shapefile. (default is None)
Returns
-------
None
Examples
--------
>>> import flopy
>>> m = flopy.modflow.Modflow()
>>> m.to_shapefile('model.shp', SelPackList) | flopy/mbase.py | to_shapefile | andrewcalderwood/flopy | python | def to_shapefile(self, filename, package_names=None, **kwargs):
'\n Wrapper function for writing a shapefile for the model grid. If\n package_names is not None, then search through the requested packages\n looking for arrays that can be added to the shapefile as attributes\n\n Parameters\n ----------\n filename : string\n name of the shapefile to write\n package_names : list of package names (e.g. ["dis","lpf"])\n Packages to export data arrays to shapefile. (default is None)\n\n Returns\n -------\n None\n\n Examples\n --------\n >>> import flopy\n >>> m = flopy.modflow.Modflow()\n >>> m.to_shapefile(\'model.shp\', SelPackList)\n\n '
warnings.warn('to_shapefile() is deprecated. use .export()')
self.export(filename, package_names=package_names)
return |
def make_docstring_obj(docstr, default='google', template_order=False):
"Detect docstring style and create a Docstring object\n\n Parameters:\n docstr (str): source docstring\n default (str, class): 'google', 'numpy' or subclass\n of Docstring\n template_order (bool, optional): iff True, reorder the\n sections to match the order they appear in the template\n\n Returns:\n subclass of Docstring\n "
typ = detect_style(docstr)
logger.info('[make_docstring_obj] from {} to {}'.format((typ.__name__ if (typ is not None) else None), default.__name__))
if (typ is None):
if issubclass(default, Docstring):
typ = default
else:
typ = STYLE_LOOKUP[default.lower()]
return typ(docstr, template_order=template_order) | -3,128,637,298,797,223,000 | Detect docstring style and create a Docstring object
Parameters:
docstr (str): source docstring
default (str, class): 'google', 'numpy' or subclass
of Docstring
template_order (bool, optional): iff True, reorder the
sections to match the order they appear in the template
Returns:
subclass of Docstring | docstring_styles.py | make_docstring_obj | KristoforMaynard/SublimeAutoDocstring | python | def make_docstring_obj(docstr, default='google', template_order=False):
"Detect docstring style and create a Docstring object\n\n Parameters:\n docstr (str): source docstring\n default (str, class): 'google', 'numpy' or subclass\n of Docstring\n template_order (bool, optional): iff True, reorder the\n sections to match the order they appear in the template\n\n Returns:\n subclass of Docstring\n "
typ = detect_style(docstr)
logger.info('[make_docstring_obj] from {} to {}'.format((typ.__name__ if (typ is not None) else None), default.__name__))
if (typ is None):
if issubclass(default, Docstring):
typ = default
else:
typ = STYLE_LOOKUP[default.lower()]
return typ(docstr, template_order=template_order) |
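A usage sketch for make_docstring_obj; the import assumes docstring_styles.py is importable as a plain module, and the sample docstring is illustrative.

# Hypothetical sketch: parse an existing docstring, falling back to numpy style
# only if no style is detected.
from docstring_styles import make_docstring_obj

raw = """Summary line.

Args:
    x (int): first value
"""
ds = make_docstring_obj(raw, default='numpy', template_order=True)
print(type(ds).__name__)   # GoogleDocstring if detection matches, else the numpy default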
def detect_style(docstr):
'Detect docstr style from existing docstring\n\n Parameters:\n docstr (str): docstring whose style we want to know\n\n Returns:\n class: one of [GoogleDocstring, NumpyDocstring, None]; None\n means no match\n '
docstr = dedent_docstr(docstr)
for c in STYLE_LOOKUP.values():
if c.detect_style(docstr):
return c
return None | 4,776,598,432,322,184,000 | Detect docstr style from existing docstring
Parameters:
docstr (str): docstring whose style we want to know
Returns:
class: one of [GoogleDocstring, NumpyDocstring, None]; None
means no match | docstring_styles.py | detect_style | KristoforMaynard/SublimeAutoDocstring | python | def detect_style(docstr):
'Detect docstr style from existing docstring\n\n Parameters:\n docstr (str): docstring whose style we want to know\n\n Returns:\n class: one of [GoogleDocstring, NumpyDocstring, None]; None\n means no match\n '
docstr = dedent_docstr(docstr)
for c in STYLE_LOOKUP.values():
if c.detect_style(docstr):
return c
return None |
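A short companion sketch for detect_style, under the same import assumption; whether a given snippet matches depends on each class's detect_style method, which is not shown in this excerpt.

from docstring_styles import detect_style

cls = detect_style("Args:\n    x (int): a value\n")
print(cls.__name__ if cls is not None else 'no match')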
def dedent_docstr(s, n=1):
'Dedent all lines except first n lines\n\n Args:\n s (type): some text to dedent\n n (int): number of lines to skip, (n == 0 is a normal dedent,\n n == 1 is useful for whole docstrings)\n '
lines = s.splitlines(keepends=True)
if lines:
first_n_lines = ''.join([l.lstrip(' \t') for l in lines[:n]])
dedented = dedent(''.join(lines[n:]))
return (first_n_lines + dedented)
else:
return '' | -1,250,882,212,853,893,400 | Dedent all lines except first n lines
Args:
s (type): some text to dedent
n (int): number of lines to skip, (n == 0 is a normal dedent,
n == 1 is useful for whole docstrings) | docstring_styles.py | dedent_docstr | KristoforMaynard/SublimeAutoDocstring | python | def dedent_docstr(s, n=1):
'Dedent all lines except first n lines\n\n Args:\n s (type): some text to dedent\n n (int): number of lines to skip, (n == 0 is a normal dedent,\n n == 1 is useful for whole docstrings)\n '
lines = s.splitlines(keepends=True)
if lines:
first_n_lines = .join([l.lstrip(' \t') for l in lines[:n]])
dedented = dedent(.join(lines[n:]))
return (first_n_lines + dedented)
else:
return |
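A worked example of the dedent behaviour: the first n lines are only lstripped, while the remainder goes through textwrap.dedent (import path assumed as before).

from docstring_styles import dedent_docstr

s = "    Summary line\n        body one\n        body two\n"
print(dedent_docstr(s, n=1))
# Summary line
# body one
# body two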
def indent_docstr(s, indent, n=1, trim=True):
"Add common indentation to all lines except first\n\n Args:\n s (str): docstring starting at indentation level 0\n indent (str): text used for indentation, in practice\n this will be the level of the declaration + 1\n n (int): don't indent first n lines\n trim (bool): trim whitespace (' \t') out of blank lines\n\n Returns:\n s with common indentation applied\n "
lines = s.splitlines(keepends=True)
for i in range(n, len(lines)):
if (lines[i].strip() or (not trim)):
lines[i] = '{0}{1}'.format(indent, lines[i])
else:
lines[i] = lines[i].strip(' \t')
return ''.join(lines) | 732,585,054,161,065,600 | Add common indentation to all lines except first
Args:
s (str): docstring starting at indentation level 0
indent (str): text used for indentation, in practice
this will be the level of the declaration + 1
n (int): don't indent first n lines
trim (bool): trim whitespace (' \t') out of blank lines
Returns:
s with common indentation applied | docstring_styles.py | indent_docstr | KristoforMaynard/SublimeAutoDocstring | python | def indent_docstr(s, indent, n=1, trim=True):
"Add common indentation to all lines except first\n\n Args:\n s (str): docstring starting at indentation level 0\n indent (str): text used for indentation, in practice\n this will be the level of the declaration + 1\n n (int): don't indent first n lines\n trim (bool): trim whitespace (' \t') out of blank lines\n\n Returns:\n s with common indentation applied\n "
lines = s.splitlines(keepends=True)
for i in range(n, len(lines)):
if (lines[i].strip() or (not trim)):
lines[i] = '{0}{1}'.format(indent, lines[i])
else:
lines[i] = lines[i].strip(' \t')
return .join(lines) |
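A worked example of indent_docstr: every line except the first n gets the indent, and blank lines are trimmed rather than padded (same import assumption).

from docstring_styles import indent_docstr

s = "Summary line\n\nbody line\n"
print(repr(indent_docstr(s, '    ')))
# 'Summary line\n\n    body line\n'  -- blank line stays blank, body is indented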
def count_leading_newlines(s):
'count number of leading newlines\n\n this includes newlines that are separated by other whitespace\n '
return s[:(- len(s.lstrip()))].count('\n') | -4,997,131,412,159,201,000 | count number of leading newlines
this includes newlines that are separated by other whitespace | docstring_styles.py | count_leading_newlines | KristoforMaynard/SublimeAutoDocstring | python | def count_leading_newlines(s):
'count number of leading newlines\n\n this includes newlines that are separated by other whitespace\n '
return s[:(- len(s.lstrip()))].count('\n') |
def count_trailing_newlines(s):
'count number of trailing newlines\n\n this includes newlines that are separated by other whitespace\n '
return s[len(s.rstrip()):].count('\n') | 3,625,316,195,696,609,000 | count number of trailing newlines
this includes newlines that are separated by other whitespace | docstring_styles.py | count_trailing_newlines | KristoforMaynard/SublimeAutoDocstring | python | def count_trailing_newlines(s):
'count number of trailing newlines\n\n this includes newlines that are separated by other whitespace\n '
return s[len(s.rstrip()):].count('\n') |
def with_bounding_newlines(s, nleading=0, ntrailing=0, nl='\n'):
'return s with at least # leading and # trailing newlines\n\n this includes newlines that are separated by other whitespace\n '
return '{0}{1}{2}'.format((nl * (nleading - count_leading_newlines(s))), s, (nl * (ntrailing - count_trailing_newlines(s)))) | -1,088,108,019,192,048,300 | return s with at least # leading and # trailing newlines
this includes newlines that are separated by other whitespace | docstring_styles.py | with_bounding_newlines | KristoforMaynard/SublimeAutoDocstring | python | def with_bounding_newlines(s, nleading=0, ntrailing=0, nl='\n'):
'return s with at least # leading and # trailing newlines\n\n this includes newlines that are separated by other whitespace\n '
return '{0}{1}{2}'.format((nl * (nleading - count_leading_newlines(s))), s, (nl * (ntrailing - count_trailing_newlines(s)))) |
def strip_newlines(s, nleading=0, ntrailing=0):
'strip at most nleading and ntrailing newlines from s'
for _ in range(nleading):
if (s.lstrip(' \t')[0] == '\n'):
s = s.lstrip(' \t')[1:]
elif (s.lstrip(' \t')[0] == '\r\n'):
s = s.lstrip(' \t')[2:]
for _ in range(ntrailing):
if (s.rstrip(' \t')[(- 2):] == '\r\n'):
s = s.rstrip(' \t')[:(- 2)]
elif (s.rstrip(' \t')[(- 1):] == '\n'):
s = s.rstrip(' \t')[:(- 1)]
return s | 2,890,142,498,404,516,000 | strip at most nleading and ntrailing newlines from s | docstring_styles.py | strip_newlines | KristoforMaynard/SublimeAutoDocstring | python | def strip_newlines(s, nleading=0, ntrailing=0):
for _ in range(nleading):
if (s.lstrip(' \t')[0] == '\n'):
s = s.lstrip(' \t')[1:]
elif (s.lstrip(' \t')[0] == '\r\n'):
s = s.lstrip(' \t')[2:]
for _ in range(ntrailing):
if (s.rstrip(' \t')[(- 2):] == '\r\n'):
s = s.rstrip(' \t')[:(- 2)]
elif (s.rstrip(' \t')[(- 1):] == '\n'):
s = s.rstrip(' \t')[:(- 1)]
return s |
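Quick checks of the four newline helpers above, under the same import assumption.

from docstring_styles import (count_leading_newlines, count_trailing_newlines,
                              with_bounding_newlines, strip_newlines)

s = "\n  \nbody\n"
print(count_leading_newlines(s))    # 2 -- whitespace-only lines still count
print(count_trailing_newlines(s))   # 1
print(repr(with_bounding_newlines('body', nleading=1, ntrailing=2)))   # '\nbody\n\n'
print(repr(strip_newlines('\n\nbody\n', nleading=1, ntrailing=1)))     # '\nbody'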
def __init__(self, names, types, description, tag=None, descr_only=False, annotated=False, **kwargs):
'\n Args:\n names (list): list of names\n types (str): string describing data types\n description (str): description text\n tag (int): some meaningful index? not fleshed out yet\n descr_only (bool): only description is useful\n **kwargs: Description\n '
assert (names is not None)
if (description is None):
description = ''
self.names = names
self.types = types
self.description = description
self.tag = tag
self.descr_only = descr_only
self.annotated = annotated
self.meta = kwargs | -8,682,166,393,122,643,000 | Args:
names (list): list of names
types (str): string describing data types
description (str): description text
tag (int): some meaningful index? not fleshed out yet
descr_only (bool): only description is useful
**kwargs: Description | docstring_styles.py | __init__ | KristoforMaynard/SublimeAutoDocstring | python | def __init__(self, names, types, description, tag=None, descr_only=False, annotated=False, **kwargs):
'\n Args:\n names (list): list of names\n types (str): string describing data types\n description (str): description text\n tag (int): some meaningful index? not fleshed out yet\n descr_only (bool): only description is useful\n **kwargs: Description\n '
assert (names is not None)
if (description is None):
description =
self.names = names
self.types = types
self.description = description
self.tag = tag
self.descr_only = descr_only
self.annotated = annotated
self.meta = kwargs |
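A direct construction sketch for the Parameter container above; the field values are illustrative, and the import path is assumed as before.

from docstring_styles import Parameter

p = Parameter(['x', 'y'], 'int', 'grid coordinates', tag=0)
print(p.names, p.types, p.description)   # ['x', 'y'] int grid coordinates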
def __init__(self, heading, text='', indent=None, **kwargs):
'\n Args:\n heading (str): heading of the section (should be title case)\n text (str, optional): section text\n indent (str, optional): used by some formatters\n '
self.heading = heading
self.alias = self.resolve_alias(heading)
if (self.alias in self.PARSERS):
(parser, formatter) = self.PARSERS[self.alias]
self.args_parser = parser
self.args_formatter = formatter
self.is_formatted = True
else:
self.is_formatted = False
if (indent is not None):
self.indent = indent
self.text = text
self.meta = kwargs
logger.debug("create section '{}' ({}) with args : '{}'".format(self.heading, self.alias, self.args)) | -7,361,021,358,198,873,000 | Args:
heading (str): heading of the section (should be title case)
text (str, optional): section text
indent (str, optional): used by some formatters | docstring_styles.py | __init__ | KristoforMaynard/SublimeAutoDocstring | python | def __init__(self, heading, text=, indent=None, **kwargs):
'\n Args:\n heading (str): heading of the section (should be title case)\n text (str, optional): section text\n indent (str, optional): used by some formatters\n '
self.heading = heading
self.alias = self.resolve_alias(heading)
if (self.alias in self.PARSERS):
(parser, formatter) = self.PARSERS[self.alias]
self.args_parser = parser
self.args_formatter = formatter
self.is_formatted = True
else:
self.is_formatted = False
if (indent is not None):
self.indent = indent
self.text = text
self.meta = kwargs
logger.debug("create section '{}' ({}) with args : '{}'".format(self.heading, self.alias, self.args)) |
@staticmethod
def finalize_param(s, tag):
'\n Args:\n s (type): Description\n tag (int): index of param? not fleshed out yet\n '
meta = {}
_r = '([^,\\s]+(?:\\s*,\\s*[^,\\s]+)*\\s*)(?:\\((.*)\\))?\\s*:\\s*(.*)'
m = re.match(_r, s, (re.DOTALL | re.MULTILINE))
if m:
(names, typ, descr) = m.groups()
names = [n.strip() for n in names.split(',')]
(meta['indent'], descr) = dedent_verbose(descr, n=1)
descr_only = False
else:
names = ['{0}'.format(tag)]
typ = ''
descr = s
descr_only = True
return Parameter(names, typ, descr, tag=tag, descr_only=descr_only, **meta) | -357,955,744,281,116,600 | Args:
s (type): Description
tag (int): index of param? not fleshed out yet | docstring_styles.py | finalize_param | KristoforMaynard/SublimeAutoDocstring | python | @staticmethod
def finalize_param(s, tag):
'\n Args:\n s (type): Description\n tag (int): index of param? not fleshed out yet\n '
meta = {}
_r = '([^,\\s]+(?:\\s*,\\s*[^,\\s]+)*\\s*)(?:\\((.*)\\))?\\s*:\\s*(.*)'
m = re.match(_r, s, (re.DOTALL | re.MULTILINE))
if m:
(names, typ, descr) = m.groups()
names = [n.strip() for n in names.split(',')]
(meta['indent'], descr) = dedent_verbose(descr, n=1)
descr_only = False
else:
names = ['{0}'.format(tag)]
typ =
descr = s
descr_only = True
return Parameter(names, typ, descr, tag=tag, descr_only=descr_only, **meta) |
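The parameter-splitting regex from finalize_param, applied on its own to show the three captured groups (names, optional type, description); the sample line is illustrative.

import re

_r = '([^,\\s]+(?:\\s*,\\s*[^,\\s]+)*\\s*)(?:\\((.*)\\))?\\s*:\\s*(.*)'
m = re.match(_r, 'x, y (int): coordinates', re.DOTALL | re.MULTILINE)
print(m.groups())   # ('x, y ', 'int', 'coordinates')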
def __init__(self, docstr, template_order=False):
'\n Parameters:\n docstr (Docstring or str): some existing docstring\n template_order (bool, optional): iff True, reorder the\n sections to match the order they appear in the template\n '
if isinstance(docstr, Docstring):
self.sections = docstr.sections
self.trailing_newlines = docstr.trailing_newlines
if (not isinstance(docstr, type(self))):
make_new_sec = self.SECTION_STYLE.from_section
for (sec_name, sec) in docstr.sections.items():
if sec:
docstr.sections[sec_name] = make_new_sec(sec)
else:
del docstr.sections[sec_name]
if ('Parameters' in docstr.sections):
self.get_section('Parameters').heading = self.PREFERRED_PARAMS_ALIAS
for arg in self.get_section('Parameters').args.values():
arg.meta['indent'] = self.get_section('Parameters').indent
if ('Returns' in docstr.sections):
for arg in self.get_section('Returns').args.values():
arg.meta['indent'] = self.get_section('Returns').indent
if ('Yields' in docstr.sections):
for arg in self.get_section('Yields').args.values():
arg.meta['indent'] = self.get_section('Yields').indent
elif isinstance(docstr, string_types):
if template_order:
self.sections = self.TEMPLATE.copy()
else:
self.sections = OrderedDict()
self._parse(docstr) | 3,052,008,837,252,113,000 | Parameters:
docstr (Docstring or str): some existing docstring
template_order (bool, optional): iff True, reorder the
sections to match the order they appear in the template | docstring_styles.py | __init__ | KristoforMaynard/SublimeAutoDocstring | python | def __init__(self, docstr, template_order=False):
'\n Parameters:\n docstr (Docstring or str): some existing docstring\n template_order (bool, optional): iff True, reorder the\n sections to match the order they appear in the template\n '
if isinstance(docstr, Docstring):
self.sections = docstr.sections
self.trailing_newlines = docstr.trailing_newlines
if (not isinstance(docstr, type(self))):
make_new_sec = self.SECTION_STYLE.from_section
for (sec_name, sec) in docstr.sections.items():
if sec:
docstr.sections[sec_name] = make_new_sec(sec)
else:
del docstr.sections[sec_name]
if ('Parameters' in docstr.sections):
self.get_section('Parameters').heading = self.PREFERRED_PARAMS_ALIAS
for arg in self.get_section('Parameters').args.values():
arg.meta['indent'] = self.get_section('Parameters').indent
if ('Returns' in docstr.sections):
for arg in self.get_section('Returns').args.values():
arg.meta['indent'] = self.get_section('Returns').indent
if ('Yields' in docstr.sections):
for arg in self.get_section('Yields').args.values():
arg.meta['indent'] = self.get_section('Yields').indent
elif isinstance(docstr, string_types):
if template_order:
self.sections = self.TEMPLATE.copy()
else:
self.sections = OrderedDict()
self._parse(docstr) |
def _parse(self, s):
'Parse docstring into meta data\n\n Parameters:\n s (str): docstring\n '
raise NotImplementedError('_parse is an abstract method') | -1,244,231,716,986,868,000 | Parse docstring into meta data
Parameters:
s (str): docstring | docstring_styles.py | _parse | KristoforMaynard/SublimeAutoDocstring | python | def _parse(self, s):
'Parse docstring into meta data\n\n Parameters:\n s (str): docstring\n '
raise NotImplementedError('_parse is an abstract method') |
def format(self, top_indent):
'Format docstring into a string\n\n Parameters:\n top_indent (str): indentation added to all but the first\n lines\n\n Returns:\n str: properly formatted\n '
raise NotImplementedError('format is an abstract method') | -4,791,040,278,329,210,000 | Format docstring into a string
Parameters:
top_indent (str): indentation added to all but the first
lines
Returns:
str: properly formatted | docstring_styles.py | format | KristoforMaynard/SublimeAutoDocstring | python | def format(self, top_indent):
'Format docstring into a string\n\n Parameters:\n top_indent (str): indentation added to all but the first\n lines\n\n Returns:\n str: properly formatted\n '
raise NotImplementedError('format is an abstract method') |
def update_attributes(self, attribs, alpha_order=True):
'\n Args:\n params (OrderedDict): params objects keyed by their names\n '
raise NotImplementedError('update_attributes is an abstract method') | 8,362,530,383,108,397,000 | Args:
params (OrderedDict): params objects keyed by their names | docstring_styles.py | update_attributes | KristoforMaynard/SublimeAutoDocstring | python | def update_attributes(self, attribs, alpha_order=True):
'\n Args:\n params (OrderedDict): params objects keyed by their names\n '
raise NotImplementedError('update_attributes is an abstract method') |
def update_exceptions(self, attribs, alpha_order=True):
'\n Args:\n params (OrderedDict): params objects keyed by their names\n '
raise NotImplementedError('update_exceptions is an abstract method') | 4,752,826,982,414,909,000 | Args:
params (OrderedDict): params objects keyed by their names | docstring_styles.py | update_exceptions | KristoforMaynard/SublimeAutoDocstring | python | def update_exceptions(self, attribs, alpha_order=True):
'\n Args:\n params (OrderedDict): params objects keyed by their names\n '
raise NotImplementedError('update_exceptions is an abstract method') |
def finalize_section(self, heading, text):
'\n Args:\n heading (type): Description\n text (type): Description\n '
section = self.SECTION_STYLE(heading, text)
self.sections[section.alias] = section | 7,268,294,569,221,583,000 | Args:
heading (type): Description
text (type): Description | docstring_styles.py | finalize_section | KristoforMaynard/SublimeAutoDocstring | python | def finalize_section(self, heading, text):
'\n Args:\n heading (type): Description\n text (type): Description\n '
section = self.SECTION_STYLE(heading, text)
self.sections[section.alias] = section |
def section_exists(self, section_name):
'returns True iff section exists, and was finalized'
sec = None
if (section_name in self.sections):
sec = self.sections[section_name]
elif (section_name in self.SECTION_STYLE.ALIASES):
alias = self.SECTION_STYLE.resolve_alias(section_name)
if (alias in self.sections):
sec = self.sections[alias]
if (sec is not None):
return True
return False | 508,379,813,229,340,800 | returns True iff section exists, and was finalized | docstring_styles.py | section_exists | KristoforMaynard/SublimeAutoDocstring | python | def section_exists(self, section_name):
sec = None
if (section_name in self.sections):
sec = self.sections[section_name]
elif (section_name in self.SECTION_STYLE.ALIASES):
alias = self.SECTION_STYLE.resolve_alias(section_name)
if (alias in self.sections):
sec = self.sections[alias]
if (sec is not None):
return True
return False |
def _parse(self, s):
'\n Args:\n s (type): Description\n '
logger.info('[NapoleonDocstring] starts parsing text')
self.trailing_newlines = count_trailing_newlines(s)
s = dedent_docstr(s)
sec_starts = [(m.start(), m.end(), m.string[m.start():m.end()]) for m in re.finditer(self.SECTION_RE, s, re.MULTILINE)]
sec_starts.insert(0, (0, 0, 'Summary'))
sec_starts.append((len(s), len(s), ''))
for (current_sec, next_sec) in zip(sec_starts[:(- 1)], sec_starts[1:]):
sec_name = self._extract_section_name(current_sec[2])
sec_body = s[current_sec[1]:next_sec[0]]
self.finalize_section(sec_name, sec_body) | 9,075,237,344,088,898,000 | Args:
s (type): Description | docstring_styles.py | _parse | KristoforMaynard/SublimeAutoDocstring | python | def _parse(self, s):
'\n Args:\n s (type): Description\n '
logger.info('[NapoleonDocstring] starts parsing text')
self.trailing_newlines = count_trailing_newlines(s)
s = dedent_docstr(s)
sec_starts = [(m.start(), m.end(), m.string[m.start():m.end()]) for m in re.finditer(self.SECTION_RE, s, re.MULTILINE)]
sec_starts.insert(0, (0, 0, 'Summary'))
sec_starts.append((len(s), len(s), ))
for (current_sec, next_sec) in zip(sec_starts[:(- 1)], sec_starts[1:]):
sec_name = self._extract_section_name(current_sec[2])
sec_body = s[current_sec[1]:next_sec[0]]
self.finalize_section(sec_name, sec_body) |
def format(self, top_indent):
'\n Args:\n top_indent (type): Description\n\n '
logger.info('[NapoleonDocstring] starts formatting')
s = ''
if self.section_exists('Summary'):
sec_text = self.get_section('Summary').text
if sec_text.strip():
s += with_bounding_newlines(sec_text, nleading=0, ntrailing=1)
for (_, section) in islice(self.sections.items(), 1, None):
if (section is None):
continue
sec_body = indent_docstr(section.text, section.section_indent, n=0)
sec_text = self._format_section_text(section.heading, sec_body)
s += with_bounding_newlines(sec_text, nleading=1, ntrailing=1)
if self.trailing_newlines:
s = with_bounding_newlines(s, ntrailing=self.trailing_newlines)
s = indent_docstr(s, top_indent)
return s | -4,111,122,369,448,023,600 | Args:
top_indent (type): Description | docstring_styles.py | format | KristoforMaynard/SublimeAutoDocstring | python | def format(self, top_indent):
'\n Args:\n top_indent (type): Description\n\n '
logger.info('[NapoleonDocstring] starts formatting')
s =
if self.section_exists('Summary'):
sec_text = self.get_section('Summary').text
if sec_text.strip():
s += with_bounding_newlines(sec_text, nleading=0, ntrailing=1)
for (_, section) in islice(self.sections.items(), 1, None):
if (section is None):
continue
sec_body = indent_docstr(section.text, section.section_indent, n=0)
sec_text = self._format_section_text(section.heading, sec_body)
s += with_bounding_newlines(sec_text, nleading=1, ntrailing=1)
if self.trailing_newlines:
s = with_bounding_newlines(s, ntrailing=self.trailing_newlines)
s = indent_docstr(s, top_indent)
return s |
def _update_section(self, params, sec_name, sec_alias=None, del_prefix='Deleted ', alpha_order=False, other_sections=()):
'Update section to add / remove params\n\n As a failsafe, params that are removed are placed in a\n "Deleted ..." section\n\n Args:\n params (OrderedDict): dict of Parameter objects\n sec_name (str): generic section name\n sec_alias (str): section name that appears in teh docstring\n del_prefix (str): prefix for section that holds params that\n no longer exist.\n alpha_order (bool): whether or not to alphabetically sort\n the params\n '
if (not sec_alias):
sec_alias = sec_name
if ((not self.section_exists(sec_name)) and (len(params) == 0)):
return None
elif (not self.section_exists(sec_name)):
self.finalize_section(sec_alias, '')
_other = []
for _secname in other_sections:
if self.section_exists(_secname):
_other.append(self.get_section(_secname))
other_sections = _other
if alpha_order:
sorted_params = OrderedDict()
for k in sorted(list(params.keys()), key=str.lower):
sorted_params[k] = params[k]
params = sorted_params
current_dict = self.get_section(sec_name).args
tags_seen = dict()
new = OrderedDict()
for (name, param) in params.items():
if (name in current_dict):
def_param = param
param = current_dict.pop(name)
if (param.tag in tags_seen):
param = None
else:
tags_seen[param.tag] = True
if def_param.annotated:
param.types = def_param.types
else:
for sec in other_sections:
if (name in sec.args):
if param.annotated:
sec.args[name].types = param.types
param = None
if param:
new[name] = param
for (key, param) in current_dict.items():
if param.descr_only:
new[key] = current_dict.pop(key)
if ('' in current_dict):
del current_dict['']
if len(current_dict):
del_sec_name = (del_prefix + sec_name)
del_sec_alias = (del_prefix + sec_alias)
logger.warn('killing parameters named: {}'.format(current_dict.keys()))
if (not self.section_exists(self.SECTION_STYLE.resolve_alias(del_sec_name))):
self.finalize_section(del_sec_name, '')
deled_params = self.get_section(del_sec_name)
deleted_tags = dict()
for (key, val) in current_dict.items():
if (key in deled_params.args):
logger.warn("Stronger Warning: Killing old deleted param: '{0}'".format(key))
val.names.remove(key)
if (val.tag in deleted_tags):
deleted_tags[val.tag].names.append(key)
else:
new_val = Parameter([key], val.types, val.description)
deleted_tags[val.tag] = new_val
deled_params.args[key] = new_val
if (len(new) == 0):
self.sections[sec_name] = None
else:
self.sections[sec_name].args = new | 8,557,606,910,817,434,000 | Update section to add / remove params
As a failsafe, params that are removed are placed in a
"Deleted ..." section
Args:
params (OrderedDict): dict of Parameter objects
sec_name (str): generic section name
sec_alias (str): section name that appears in the docstring
del_prefix (str): prefix for section that holds params that
no longer exist.
alpha_order (bool): whether or not to alphabetically sort
the params | docstring_styles.py | _update_section | KristoforMaynard/SublimeAutoDocstring | python | def _update_section(self, params, sec_name, sec_alias=None, del_prefix='Deleted ', alpha_order=False, other_sections=()):
'Update section to add / remove params\n\n As a failsafe, params that are removed are placed in a\n "Deleted ..." section\n\n Args:\n params (OrderedDict): dict of Parameter objects\n sec_name (str): generic section name\n sec_alias (str): section name that appears in teh docstring\n del_prefix (str): prefix for section that holds params that\n no longer exist.\n alpha_order (bool): whether or not to alphabetically sort\n the params\n '
if (not sec_alias):
sec_alias = sec_name
if ((not self.section_exists(sec_name)) and (len(params) == 0)):
return None
elif (not self.section_exists(sec_name)):
self.finalize_section(sec_alias, )
_other = []
for _secname in other_sections:
if self.section_exists(_secname):
_other.append(self.get_section(_secname))
other_sections = _other
if alpha_order:
sorted_params = OrderedDict()
for k in sorted(list(params.keys()), key=str.lower):
sorted_params[k] = params[k]
params = sorted_params
current_dict = self.get_section(sec_name).args
tags_seen = dict()
new = OrderedDict()
for (name, param) in params.items():
if (name in current_dict):
def_param = param
param = current_dict.pop(name)
if (param.tag in tags_seen):
param = None
else:
tags_seen[param.tag] = True
if def_param.annotated:
param.types = def_param.types
else:
for sec in other_sections:
if (name in sec.args):
if param.annotated:
sec.args[name].types = param.types
param = None
if param:
new[name] = param
for (key, param) in current_dict.items():
if param.descr_only:
new[key] = current_dict.pop(key)
if ( in current_dict):
del current_dict[]
if len(current_dict):
del_sec_name = (del_prefix + sec_name)
del_sec_alias = (del_prefix + sec_alias)
logger.warn('killing parameters named: {}'.format(current_dict.keys()))
if (not self.section_exists(self.SECTION_STYLE.resolve_alias(del_sec_name))):
self.finalize_section(del_sec_name, )
deled_params = self.get_section(del_sec_name)
deleted_tags = dict()
for (key, val) in current_dict.items():
if (key in deled_params.args):
logger.warn("Stronger Warning: Killing old deleted param: '{0}'".format(key))
val.names.remove(key)
if (val.tag in deleted_tags):
deleted_tags[val.tag].names.append(key)
else:
new_val = Parameter([key], val.types, val.description)
deleted_tags[val.tag] = new_val
deled_params.args[key] = new_val
if (len(new) == 0):
self.sections[sec_name] = None
else:
self.sections[sec_name].args = new |
def update_parameters(self, params):
'\n Args:\n params (OrderedDict): params objects keyed by their names\n '
logger.info('[NapoleonDocstring] update parameters')
other_sections = ['Other Parameters', 'Keyword Parameters']
self._update_section(params, 'Parameters', self.PREFERRED_PARAMS_ALIAS, other_sections=other_sections) | -3,149,674,930,633,215,500 | Args:
params (OrderedDict): params objects keyed by their names | docstring_styles.py | update_parameters | KristoforMaynard/SublimeAutoDocstring | python | def update_parameters(self, params):
'\n Args:\n params (OrderedDict): params objects keyed by their names\n '
logger.info('[NapoleonDocstring] update parameters')
other_sections = ['Other Parameters', 'Keyword Parameters']
self._update_section(params, 'Parameters', self.PREFERRED_PARAMS_ALIAS, other_sections=other_sections) |
def update_attributes(self, attribs, alpha_order=True):
'\n Args:\n params (OrderedDict): params objects keyed by their names\n '
logger.info('[NapoleonDocstring] update attributes')
self._update_section(attribs, 'Attributes', alpha_order=alpha_order) | 192,604,979,378,733,470 | Args:
params (OrderedDict): params objects keyed by their names | docstring_styles.py | update_attributes | KristoforMaynard/SublimeAutoDocstring | python | def update_attributes(self, attribs, alpha_order=True):
'\n Args:\n params (OrderedDict): params objects keyed by their names\n '
logger.info('[NapoleonDocstring] update attributes')
self._update_section(attribs, 'Attributes', alpha_order=alpha_order) |
def update_exceptions(self, attribs, alpha_order=True):
'\n Args:\n params (OrderedDict): params objects keyed by their names\n '
logger.info('[NapoleonDocstring] update exceptions')
self._update_section(attribs, 'Raises', del_prefix='No Longer ', alpha_order=alpha_order) | -4,194,534,135,168,882,000 | Args:
params (OrderedDict): params objects keyed by their names | docstring_styles.py | update_exceptions | KristoforMaynard/SublimeAutoDocstring | python | def update_exceptions(self, attribs, alpha_order=True):
'\n Args:\n params (OrderedDict): params objects keyed by their names\n '
logger.info('[NapoleonDocstring] update exceptions')
self._update_section(attribs, 'Raises', del_prefix='No Longer ', alpha_order=alpha_order) |