hexsha: stringlengths 40-40 | size: int64 1-1.03M | ext: stringclasses 10 values | lang: stringclasses 1 value | max_stars_repo_path: stringlengths 3-239 | max_stars_repo_name: stringlengths 5-130 | max_stars_repo_head_hexsha: stringlengths 40-78 | max_stars_repo_licenses: sequencelengths 1-10 | max_stars_count: int64 1-191k ⌀ | max_stars_repo_stars_event_min_datetime: stringlengths 24-24 ⌀ | max_stars_repo_stars_event_max_datetime: stringlengths 24-24 ⌀ | max_issues_repo_path: stringlengths 3-239 | max_issues_repo_name: stringlengths 5-130 | max_issues_repo_head_hexsha: stringlengths 40-78 | max_issues_repo_licenses: sequencelengths 1-10 | max_issues_count: int64 1-67k ⌀ | max_issues_repo_issues_event_min_datetime: stringlengths 24-24 ⌀ | max_issues_repo_issues_event_max_datetime: stringlengths 24-24 ⌀ | max_forks_repo_path: stringlengths 3-239 | max_forks_repo_name: stringlengths 5-130 | max_forks_repo_head_hexsha: stringlengths 40-78 | max_forks_repo_licenses: sequencelengths 1-10 | max_forks_count: int64 1-105k ⌀ | max_forks_repo_forks_event_min_datetime: stringlengths 24-24 ⌀ | max_forks_repo_forks_event_max_datetime: stringlengths 24-24 ⌀ | content: stringlengths 1-1.03M | avg_line_length: float64 1-958k | max_line_length: int64 1-1.03M | alphanum_fraction: float64 0-1 |
4a1ebec65b56e62e91bc61cc5cc4368c35e03e3c | 633 | py | Python | setup.py | Arkh42/GreatPublicationPlanner | d8745c2c1586fda82a4c2d852ee64c3c3417982d | ["MIT"] | 1 | 2020-01-22T07:41:25.000Z | 2020-01-22T07:41:25.000Z | setup.py | Arkh42/GreatPublicationPlanner | d8745c2c1586fda82a4c2d852ee64c3c3417982d | ["MIT"] | 9 | 2020-01-15T16:22:32.000Z | 2020-02-17T14:15:21.000Z | setup.py | Arkh42/GreatPublicationPlanner | d8745c2c1586fda82a4c2d852ee64c3c3417982d | ["MIT"] | null | null | null |
import setuptools
with open("README.md", "r") as fh:
long_description = fh.read()
setuptools.setup(
name="GreatPublicationPlanner",
version="0.1.8",
author="Alexandre Quenon",
author_email="[email protected]",
description="A package to display target publications and allow to choose wisely",
long_description=long_description,
long_description_content_type="text/markdown",
url="https://github.com/Arkh42/GreatPublicationPlanner",
packages=setuptools.find_packages(),
classifiers=[
"Programming Language :: Python :: 3",
"Operating System :: OS Independent",
],
)
4a1ebf08dcf1ec58842efa384d9e895b769490b0 | 265 | py | Python | Lib/test/test_compiler/sbs_code_tests/94_strformat.py | diogommartins/cinder | 79103e9119cbecef3b085ccf2878f00c26e1d175 | ["CNRI-Python-GPL-Compatible"] | 1,886 | 2021-05-03T23:58:43.000Z | 2022-03-31T19:15:58.000Z | Lib/test/test_compiler/sbs_code_tests/94_strformat.py | diogommartins/cinder | 79103e9119cbecef3b085ccf2878f00c26e1d175 | ["CNRI-Python-GPL-Compatible"] | 70 | 2021-05-04T23:25:35.000Z | 2022-03-31T18:42:08.000Z | Lib/test/test_compiler/sbs_code_tests/94_strformat.py | diogommartins/cinder | 79103e9119cbecef3b085ccf2878f00c26e1d175 | ["CNRI-Python-GPL-Compatible"] | 52 | 2021-05-04T21:26:03.000Z | 2022-03-08T18:02:56.000Z |
def f(name, args):
return f"foo.{name}({', '.join(args)})"
# EXPECTED:
[
...,
CODE_START('f'),
LOAD_CONST('foo.'),
...,
FORMAT_VALUE(0),
LOAD_CONST('('),
...,
FORMAT_VALUE(0),
LOAD_CONST(')'),
BUILD_STRING(5),
...,
]
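# Note (added commentary, not part of the original test file): the f-string in
# f() concatenates five pieces - the constant 'foo.', the formatted {name}, the
# constant '(', the formatted join of args, and the constant ')' - which is why
# the expected bytecode listing above ends with BUILD_STRING(5).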
4a1ebf66c03ec7336dee4697aac71bf88215c3cf | 4,290 | py | Python | sdk/python/feast/core/JobService_pb2_grpc.py | davidheryanto/feast | 72cc8bd2cd0040f7bc44df255f95bad00cacd720 | ["Apache-2.0"] | null | null | null | sdk/python/feast/core/JobService_pb2_grpc.py | davidheryanto/feast | 72cc8bd2cd0040f7bc44df255f95bad00cacd720 | ["Apache-2.0"] | null | null | null | sdk/python/feast/core/JobService_pb2_grpc.py | davidheryanto/feast | 72cc8bd2cd0040f7bc44df255f95bad00cacd720 | ["Apache-2.0"] | 2 | 2020-05-20T22:07:11.000Z | 2021-07-25T17:28:24.000Z |
# Generated by the gRPC Python protocol compiler plugin. DO NOT EDIT!
import grpc
from feast.core import JobService_pb2 as feast_dot_core_dot_JobService__pb2
from google.protobuf import empty_pb2 as google_dot_protobuf_dot_empty__pb2
class JobServiceStub(object):
# missing associated documentation comment in .proto file
pass
def __init__(self, channel):
"""Constructor.
Args:
channel: A grpc.Channel.
"""
self.SubmitJob = channel.unary_unary(
'/feast.core.JobService/SubmitJob',
request_serializer=feast_dot_core_dot_JobService__pb2.JobServiceTypes.SubmitImportJobRequest.SerializeToString,
response_deserializer=feast_dot_core_dot_JobService__pb2.JobServiceTypes.SubmitImportJobResponse.FromString,
)
self.ListJobs = channel.unary_unary(
'/feast.core.JobService/ListJobs',
request_serializer=google_dot_protobuf_dot_empty__pb2.Empty.SerializeToString,
response_deserializer=feast_dot_core_dot_JobService__pb2.JobServiceTypes.ListJobsResponse.FromString,
)
self.GetJob = channel.unary_unary(
'/feast.core.JobService/GetJob',
request_serializer=feast_dot_core_dot_JobService__pb2.JobServiceTypes.GetJobRequest.SerializeToString,
response_deserializer=feast_dot_core_dot_JobService__pb2.JobServiceTypes.GetJobResponse.FromString,
)
self.AbortJob = channel.unary_unary(
'/feast.core.JobService/AbortJob',
request_serializer=feast_dot_core_dot_JobService__pb2.JobServiceTypes.AbortJobRequest.SerializeToString,
response_deserializer=feast_dot_core_dot_JobService__pb2.JobServiceTypes.AbortJobResponse.FromString,
)
class JobServiceServicer(object):
# missing associated documentation comment in .proto file
pass
def SubmitJob(self, request, context):
"""Submit a job to feast to run. Returns the job id.
"""
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details('Method not implemented!')
raise NotImplementedError('Method not implemented!')
def ListJobs(self, request, context):
"""List all jobs submitted to feast.
"""
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details('Method not implemented!')
raise NotImplementedError('Method not implemented!')
def GetJob(self, request, context):
"""Get Job with ID
"""
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details('Method not implemented!')
raise NotImplementedError('Method not implemented!')
def AbortJob(self, request, context):
"""Abort job with given ID
"""
context.set_code(grpc.StatusCode.UNIMPLEMENTED)
context.set_details('Method not implemented!')
raise NotImplementedError('Method not implemented!')
def add_JobServiceServicer_to_server(servicer, server):
rpc_method_handlers = {
'SubmitJob': grpc.unary_unary_rpc_method_handler(
servicer.SubmitJob,
request_deserializer=feast_dot_core_dot_JobService__pb2.JobServiceTypes.SubmitImportJobRequest.FromString,
response_serializer=feast_dot_core_dot_JobService__pb2.JobServiceTypes.SubmitImportJobResponse.SerializeToString,
),
'ListJobs': grpc.unary_unary_rpc_method_handler(
servicer.ListJobs,
request_deserializer=google_dot_protobuf_dot_empty__pb2.Empty.FromString,
response_serializer=feast_dot_core_dot_JobService__pb2.JobServiceTypes.ListJobsResponse.SerializeToString,
),
'GetJob': grpc.unary_unary_rpc_method_handler(
servicer.GetJob,
request_deserializer=feast_dot_core_dot_JobService__pb2.JobServiceTypes.GetJobRequest.FromString,
response_serializer=feast_dot_core_dot_JobService__pb2.JobServiceTypes.GetJobResponse.SerializeToString,
),
'AbortJob': grpc.unary_unary_rpc_method_handler(
servicer.AbortJob,
request_deserializer=feast_dot_core_dot_JobService__pb2.JobServiceTypes.AbortJobRequest.FromString,
response_serializer=feast_dot_core_dot_JobService__pb2.JobServiceTypes.AbortJobResponse.SerializeToString,
),
}
generic_handler = grpc.method_handlers_generic_handler(
'feast.core.JobService', rpc_method_handlers)
server.add_generic_rpc_handlers((generic_handler,))
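# Illustrative usage sketch (added commentary, not part of the generated module):
# a concrete servicer subclass would typically be registered on a grpc server as
# below. 'MyJobServiceServicer', the port and the executor size are assumptions.
#
#   from concurrent import futures
#   server = grpc.server(futures.ThreadPoolExecutor(max_workers=10))
#   add_JobServiceServicer_to_server(MyJobServiceServicer(), server)
#   server.add_insecure_port('[::]:6565')
#   server.start()
#   server.wait_for_termination()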
4a1ec03a059a0230a3bb045a4d1a4b0092c2b5ca | 33,730 | py | Python | git_cache.py | superisaac/depot_tools | ea1884b651e69975e897b15cf2063f36317538a4 | ["BSD-3-Clause"] | null | null | null | git_cache.py | superisaac/depot_tools | ea1884b651e69975e897b15cf2063f36317538a4 | ["BSD-3-Clause"] | null | null | null | git_cache.py | superisaac/depot_tools | ea1884b651e69975e897b15cf2063f36317538a4 | ["BSD-3-Clause"] | null | null | null |
#!/usr/bin/env python
# Copyright 2014 The Chromium Authors. All rights reserved.
# Use of this source code is governed by a BSD-style license that can be
# found in the LICENSE file.
"""A git command for managing a local cache of git repositories."""
from __future__ import print_function
import contextlib
import errno
import logging
import optparse
import os
import re
import subprocess
import sys
import tempfile
import threading
import time
try:
import urlparse
except ImportError: # For Py3 compatibility
import urllib.parse as urlparse
from download_from_google_storage import Gsutil
import gclient_utils
import subcommand
# Analogous to gc.autopacklimit git config.
GC_AUTOPACKLIMIT = 50
GIT_CACHE_CORRUPT_MESSAGE = 'WARNING: The Git cache is corrupt.'
try:
# pylint: disable=undefined-variable
WinErr = WindowsError
except NameError:
class WinErr(Exception):
pass
class LockError(Exception):
pass
class ClobberNeeded(Exception):
pass
def exponential_backoff_retry(fn, excs=(Exception,), name=None, count=10,
sleep_time=0.25, printerr=None):
"""Executes |fn| up to |count| times, backing off exponentially.
Args:
fn (callable): The function to execute. If this raises a handled
exception, the function will retry with exponential backoff.
excs (tuple): A tuple of Exception types to handle. If one of these is
raised by |fn|, a retry will be attempted. If |fn| raises an Exception
that is not in this list, it will immediately pass through. If |excs|
is empty, the Exception base class will be used.
name (str): Optional operation name to print in the retry string.
count (int): The number of times to try before allowing the exception to
pass through.
sleep_time (float): The initial number of seconds to sleep in between
retries. This will be doubled each retry.
printerr (callable): Function that will be called with the error string upon
failures. If None, |logging.warning| will be used.
Returns: The return value of the successful fn.
"""
printerr = printerr or logging.warning
for i in range(count):
try:
return fn()
except excs as e:
if (i+1) >= count:
raise
printerr('Retrying %s in %.2f second(s) (%d / %d attempts): %s' % (
(name or 'operation'), sleep_time, (i+1), count, e))
time.sleep(sleep_time)
sleep_time *= 2
class Lockfile(object):
"""Class to represent a cross-platform process-specific lockfile."""
def __init__(self, path, timeout=0):
self.path = os.path.abspath(path)
self.timeout = timeout
self.lockfile = self.path + ".lock"
self.pid = os.getpid()
def _read_pid(self):
"""Read the pid stored in the lockfile.
Note: This method is potentially racy. By the time it returns the lockfile
may have been unlocked, removed, or stolen by some other process.
"""
try:
with open(self.lockfile, 'r') as f:
pid = int(f.readline().strip())
except (IOError, ValueError):
pid = None
return pid
def _make_lockfile(self):
"""Safely creates a lockfile containing the current pid."""
open_flags = (os.O_CREAT | os.O_EXCL | os.O_WRONLY)
fd = os.open(self.lockfile, open_flags, 0o644)
f = os.fdopen(fd, 'w')
print(self.pid, file=f)
f.close()
def _remove_lockfile(self):
"""Delete the lockfile. Complains (implicitly) if it doesn't exist.
See gclient_utils.py:rmtree docstring for more explanation on the
windows case.
"""
if sys.platform == 'win32':
lockfile = os.path.normcase(self.lockfile)
def delete():
exitcode = subprocess.call(['cmd.exe', '/c',
'del', '/f', '/q', lockfile])
if exitcode != 0:
raise LockError('Failed to remove lock: %s' % (lockfile,))
exponential_backoff_retry(
delete,
excs=(LockError,),
name='del [%s]' % (lockfile,))
else:
os.remove(self.lockfile)
def lock(self):
"""Acquire the lock.
This will block with a deadline of self.timeout seconds.
"""
elapsed = 0
while True:
try:
self._make_lockfile()
return
except OSError as e:
if elapsed < self.timeout:
sleep_time = max(10, min(3, self.timeout - elapsed))
logging.info('Could not create git cache lockfile; '
'will retry after sleep(%d).', sleep_time);
elapsed += sleep_time
time.sleep(sleep_time)
continue
if e.errno == errno.EEXIST:
raise LockError("%s is already locked" % self.path)
else:
raise LockError("Failed to create %s (err %s)" % (self.path, e.errno))
def unlock(self):
"""Release the lock."""
try:
if not self.is_locked():
raise LockError("%s is not locked" % self.path)
if not self.i_am_locking():
raise LockError("%s is locked, but not by me" % self.path)
self._remove_lockfile()
except WinErr:
# Windows is unreliable when it comes to file locking. YMMV.
pass
def break_lock(self):
"""Remove the lock, even if it was created by someone else."""
try:
self._remove_lockfile()
return True
except OSError as exc:
if exc.errno == errno.ENOENT:
return False
else:
raise
def is_locked(self):
"""Test if the file is locked by anyone.
Note: This method is potentially racy. By the time it returns the lockfile
may have been unlocked, removed, or stolen by some other process.
"""
return os.path.exists(self.lockfile)
def i_am_locking(self):
"""Test if the file is locked by this process."""
return self.is_locked() and self.pid == self._read_pid()
class Mirror(object):
git_exe = 'git.bat' if sys.platform.startswith('win') else 'git'
gsutil_exe = os.path.join(
os.path.dirname(os.path.abspath(__file__)), 'gsutil.py')
cachepath_lock = threading.Lock()
UNSET_CACHEPATH = object()
# Used for tests
_GIT_CONFIG_LOCATION = []
@staticmethod
def parse_fetch_spec(spec):
"""Parses and canonicalizes a fetch spec.
Returns (fetchspec, value_regex), where value_regex can be used
with 'git config --replace-all'.
"""
parts = spec.split(':', 1)
src = parts[0].lstrip('+').rstrip('/')
if not src.startswith('refs/'):
src = 'refs/heads/%s' % src
dest = parts[1].rstrip('/') if len(parts) > 1 else src
regex = r'\+%s:.*' % src.replace('*', r'\*')
return ('+%s:%s' % (src, dest), regex)
def __init__(self, url, refs=None, print_func=None):
self.url = url
self.fetch_specs = set([self.parse_fetch_spec(ref) for ref in (refs or [])])
self.basedir = self.UrlToCacheDir(url)
self.mirror_path = os.path.join(self.GetCachePath(), self.basedir)
if print_func:
self.print = self.print_without_file
self.print_func = print_func
else:
self.print = print
def print_without_file(self, message, **_kwargs):
self.print_func(message)
@contextlib.contextmanager
def print_duration_of(self, what):
start = time.time()
try:
yield
finally:
self.print('%s took %.1f minutes' % (what, (time.time() - start) / 60.0))
@property
def bootstrap_bucket(self):
b = os.getenv('OVERRIDE_BOOTSTRAP_BUCKET')
if b:
return b
u = urlparse.urlparse(self.url)
if u.netloc == 'chromium.googlesource.com':
return 'chromium-git-cache'
# TODO(tandrii): delete once LUCI migration is completed.
# Only public hosts will be supported going forward.
elif u.netloc == 'chrome-internal.googlesource.com':
return 'chrome-git-cache'
# Not recognized.
return None
@property
def _gs_path(self):
return 'gs://%s/v2/%s' % (self.bootstrap_bucket, self.basedir)
@classmethod
def FromPath(cls, path):
return cls(cls.CacheDirToUrl(path))
@staticmethod
def UrlToCacheDir(url):
"""Convert a git url to a normalized form for the cache dir path."""
parsed = urlparse.urlparse(url)
# Get rid of the port. This is only needed for Windows tests, since tests
# serve git from git://localhost:port/git, but Windows doesn't like ':' in
# paths.
netloc = parsed.netloc
if ':' in netloc:
netloc = netloc.split(':', 1)[0]
norm_url = netloc + parsed.path
if norm_url.endswith('.git'):
norm_url = norm_url[:-len('.git')]
# Use the same dir for authenticated URLs and unauthenticated URLs.
norm_url = norm_url.replace('googlesource.com/a/', 'googlesource.com/')
return norm_url.replace('-', '--').replace('/', '-').lower()
@staticmethod
def CacheDirToUrl(path):
"""Convert a cache dir path to its corresponding url."""
netpath = re.sub(r'\b-\b', '/', os.path.basename(path)).replace('--', '-')
return 'https://%s' % netpath
@classmethod
def SetCachePath(cls, cachepath):
with cls.cachepath_lock:
setattr(cls, 'cachepath', cachepath)
@classmethod
def GetCachePath(cls):
with cls.cachepath_lock:
if not hasattr(cls, 'cachepath'):
try:
cachepath = subprocess.check_output(
[cls.git_exe, 'config'] +
cls._GIT_CONFIG_LOCATION +
['cache.cachepath']).strip()
except subprocess.CalledProcessError:
cachepath = os.environ.get('GIT_CACHE_PATH', cls.UNSET_CACHEPATH)
setattr(cls, 'cachepath', cachepath)
ret = getattr(cls, 'cachepath')
if ret is cls.UNSET_CACHEPATH:
raise RuntimeError('No cache.cachepath git configuration or '
'$GIT_CACHE_PATH is set.')
return ret
@staticmethod
def _GetMostRecentCacheDirectory(ls_out_set):
ready_file_pattern = re.compile(r'.*/(\d+).ready$')
ready_dirs = []
for name in ls_out_set:
m = ready_file_pattern.match(name)
# Given <path>/<number>.ready,
# we are interested in <path>/<number> directory
if m and (name[:-len('.ready')] + '/') in ls_out_set:
ready_dirs.append((int(m.group(1)), name[:-len('.ready')]))
if not ready_dirs:
return None
return max(ready_dirs)[1]
def Rename(self, src, dst):
# This is somehow racy on Windows.
# Catching OSError because WindowsError isn't portable and
# pylint complains.
exponential_backoff_retry(
lambda: os.rename(src, dst),
excs=(OSError,),
name='rename [%s] => [%s]' % (src, dst),
printerr=self.print)
def RunGit(self, cmd, **kwargs):
"""Run git in a subprocess."""
cwd = kwargs.setdefault('cwd', self.mirror_path)
kwargs.setdefault('print_stdout', False)
kwargs.setdefault('filter_fn', self.print)
env = kwargs.get('env') or kwargs.setdefault('env', os.environ.copy())
env.setdefault('GIT_ASKPASS', 'true')
env.setdefault('SSH_ASKPASS', 'true')
self.print('running "git %s" in "%s"' % (' '.join(cmd), cwd))
gclient_utils.CheckCallAndFilter([self.git_exe] + cmd, **kwargs)
def config(self, cwd=None, reset_fetch_config=False):
if cwd is None:
cwd = self.mirror_path
# Print diagnostics and ignore errors.
try:
self.print('git exe: %s' % (self.git_exe,))
self.RunGit(['version'], cwd=cwd)
self.RunGit(['config', 'protocol.version'], cwd=cwd)
except subprocess.CalledProcessError as e:
pass
if reset_fetch_config:
try:
self.RunGit(['config', '--unset-all', 'remote.origin.fetch'], cwd=cwd)
except subprocess.CalledProcessError as e:
# If exit code was 5, it means we attempted to unset a config that
# didn't exist. Ignore it.
if e.returncode != 5:
raise
# Don't run git-gc in a daemon. Bad things can happen if it gets killed.
try:
self.RunGit(['config', 'gc.autodetach', '0'], cwd=cwd)
except subprocess.CalledProcessError:
# Hard error, need to clobber.
raise ClobberNeeded()
# Don't combine pack files into one big pack file. It's really slow for
# repositories, and there's no way to track progress and make sure it's
# not stuck.
if self.supported_project():
self.RunGit(['config', 'gc.autopacklimit', '0'], cwd=cwd)
# Allocate more RAM for cache-ing delta chains, for better performance
# of "Resolving deltas".
self.RunGit(['config', 'core.deltaBaseCacheLimit',
gclient_utils.DefaultDeltaBaseCacheLimit()], cwd=cwd)
self.RunGit(['config', 'remote.origin.url', self.url], cwd=cwd)
self.RunGit(['config', '--replace-all', 'remote.origin.fetch',
'+refs/heads/*:refs/heads/*', r'\+refs/heads/\*:.*'], cwd=cwd)
for spec, value_regex in self.fetch_specs:
self.RunGit(
['config', '--replace-all', 'remote.origin.fetch', spec, value_regex],
cwd=cwd)
def bootstrap_repo(self, directory):
"""Bootstrap the repo from Google Storage if possible.
More apt-ly named bootstrap_repo_from_cloud_if_possible_else_do_nothing().
"""
if not self.bootstrap_bucket:
return False
gsutil = Gsutil(self.gsutil_exe, boto_path=None)
# Get the most recent version of the directory.
# This is determined from the most recent version of a .ready file.
# The .ready file is only uploaded when an entire directory has been
# uploaded to GS.
_, ls_out, ls_err = gsutil.check_call('ls', self._gs_path)
ls_out_set = set(ls_out.strip().splitlines())
latest_dir = self._GetMostRecentCacheDirectory(ls_out_set)
if not latest_dir:
self.print('No bootstrap file for %s found in %s, stderr:\n %s' %
(self.mirror_path, self.bootstrap_bucket,
' '.join((ls_err or '').splitlines(True))))
return False
try:
# create new temporary directory locally
tempdir = tempfile.mkdtemp(prefix='_cache_tmp', dir=self.GetCachePath())
self.RunGit(['init', '--bare'], cwd=tempdir)
self.print('Downloading files in %s/* into %s.' %
(latest_dir, tempdir))
with self.print_duration_of('download'):
code = gsutil.call('-m', 'cp', '-r', latest_dir + "/*",
tempdir)
if code:
return False
except Exception as e:
self.print('Encountered error: %s' % str(e), file=sys.stderr)
gclient_utils.rmtree(tempdir)
return False
# delete the old directory
if os.path.exists(directory):
gclient_utils.rmtree(directory)
self.Rename(tempdir, directory)
return True
def contains_revision(self, revision):
if not self.exists():
return False
if sys.platform.startswith('win'):
# Windows .bat scripts use ^ as escape sequence, which means we have to
# escape it with itself for every .bat invocation.
needle = '%s^^^^{commit}' % revision
else:
needle = '%s^{commit}' % revision
try:
# cat-file exits with 0 on success, that is git object of given hash was
# found.
self.RunGit(['cat-file', '-e', needle])
return True
except subprocess.CalledProcessError:
return False
def exists(self):
return os.path.isfile(os.path.join(self.mirror_path, 'config'))
def supported_project(self):
"""Returns true if this repo is known to have a bootstrap zip file."""
u = urlparse.urlparse(self.url)
return u.netloc in [
'chromium.googlesource.com',
'chrome-internal.googlesource.com']
def _preserve_fetchspec(self):
"""Read and preserve remote.origin.fetch from an existing mirror.
This modifies self.fetch_specs.
"""
if not self.exists():
return
try:
config_fetchspecs = subprocess.check_output(
[self.git_exe, 'config', '--get-all', 'remote.origin.fetch'],
cwd=self.mirror_path)
for fetchspec in config_fetchspecs.splitlines():
self.fetch_specs.add(self.parse_fetch_spec(fetchspec))
except subprocess.CalledProcessError:
logging.warn('Tried and failed to preserve remote.origin.fetch from the '
'existing cache directory. You may need to manually edit '
'%s and "git cache fetch" again.'
% os.path.join(self.mirror_path, 'config'))
def _ensure_bootstrapped(self, depth, bootstrap, force=False):
pack_dir = os.path.join(self.mirror_path, 'objects', 'pack')
pack_files = []
if os.path.isdir(pack_dir):
pack_files = [f for f in os.listdir(pack_dir) if f.endswith('.pack')]
self.print('%s has %d .pack files, re-bootstrapping if >%d' %
(self.mirror_path, len(pack_files), GC_AUTOPACKLIMIT))
should_bootstrap = (force or
not self.exists() or
len(pack_files) > GC_AUTOPACKLIMIT)
if not should_bootstrap:
if depth and os.path.exists(os.path.join(self.mirror_path, 'shallow')):
logging.warn(
'Shallow fetch requested, but repo cache already exists.')
return
if self.exists():
# Re-bootstrapping an existing mirror; preserve existing fetch spec.
self._preserve_fetchspec()
else:
if os.path.exists(self.mirror_path):
# If the mirror path exists but self.exists() returns false, we're
# in an unexpected state. Nuke the previous mirror directory and
# start fresh.
gclient_utils.rmtree(self.mirror_path)
os.mkdir(self.mirror_path)
bootstrapped = (not depth and bootstrap and
self.bootstrap_repo(self.mirror_path))
if not bootstrapped:
if not self.exists() or not self.supported_project():
# Bootstrap failed due to:
# 1. No previous cache.
# 2. Project doesn't have a bootstrap folder.
# Start with a bare git dir.
self.RunGit(['init', '--bare'], cwd=self.mirror_path)
else:
# Bootstrap failed, previous cache exists; warn and continue.
logging.warn(
'Git cache has a lot of pack files (%d). Tried to re-bootstrap '
'but failed. Continuing with non-optimized repository.'
% len(pack_files))
def _fetch(self, rundir, verbose, depth, no_fetch_tags, reset_fetch_config):
self.config(rundir, reset_fetch_config)
v = []
d = []
t = []
if verbose:
v = ['-v', '--progress']
if depth:
d = ['--depth', str(depth)]
if no_fetch_tags:
t = ['--no-tags']
fetch_cmd = ['fetch'] + v + d + t + ['origin']
fetch_specs = subprocess.check_output(
[self.git_exe, 'config', '--get-all', 'remote.origin.fetch'],
cwd=rundir).strip().splitlines()
for spec in fetch_specs:
spec = spec.decode()
try:
self.print('Fetching %s' % spec)
env = os.environ.copy()
env.update({
'GIT_TRACE_PACKET': '1',
'GIT_TR2_EVENT': '1',
'GIT_TRACE2_EVENT': '1',
'GIT_TRACE_CURL': '1',
'GIT_TRACE_CURL_NO_DATA': '1'
})
# Only print first 30000 packets. We can use nonlocal keyword once we
# switch to python 3.
packet_count = [0]
def FilterPacket(log_line):
if 'packet:' in log_line:
packet_count[0] += 1
if packet_count[0] == 30000:
self.print('Truncating remaining packets')
if packet_count[0] >= 30000:
return
self.print(log_line)
with self.print_duration_of('fetch %s' % spec):
self.RunGit(
fetch_cmd + [spec],
cwd=rundir,
retry=True,
env=env,
filter_fn=FilterPacket)
except subprocess.CalledProcessError:
if spec == '+refs/heads/*:refs/heads/*':
raise ClobberNeeded() # Corrupted cache.
logging.warn('Fetch of %s failed' % spec)
def populate(self,
depth=None,
no_fetch_tags=False,
shallow=False,
bootstrap=False,
verbose=False,
ignore_lock=False,
lock_timeout=0,
reset_fetch_config=False):
assert self.GetCachePath()
if shallow and not depth:
depth = 10000
gclient_utils.safe_makedirs(self.GetCachePath())
lockfile = Lockfile(self.mirror_path, lock_timeout)
if not ignore_lock:
lockfile.lock()
try:
self._ensure_bootstrapped(depth, bootstrap)
self._fetch(self.mirror_path, verbose, depth, no_fetch_tags,
reset_fetch_config)
except ClobberNeeded:
# This is a major failure, we need to clean and force a bootstrap.
gclient_utils.rmtree(self.mirror_path)
self.print(GIT_CACHE_CORRUPT_MESSAGE)
self._ensure_bootstrapped(depth, bootstrap, force=True)
self._fetch(self.mirror_path, verbose, depth, no_fetch_tags,
reset_fetch_config)
finally:
if not ignore_lock:
lockfile.unlock()
def update_bootstrap(self, prune=False, gc_aggressive=False):
# The folder is <git number>
gen_number = subprocess.check_output(
[self.git_exe, 'number', 'master'], cwd=self.mirror_path).strip()
gsutil = Gsutil(path=self.gsutil_exe, boto_path=None)
src_name = self.mirror_path
dest_prefix = '%s/%s' % (self._gs_path, gen_number)
# ls_out lists contents in the format: gs://blah/blah/123...
_, ls_out, _ = gsutil.check_call('ls', self._gs_path)
# Check to see if folder already exists in gs
ls_out_set = set(ls_out.strip().splitlines())
if (dest_prefix + '/' in ls_out_set and
dest_prefix + '.ready' in ls_out_set):
print('Cache %s already exists.' % dest_prefix)
return
# Run Garbage Collect to compress packfile.
gc_args = ['gc', '--prune=all']
if gc_aggressive:
gc_args.append('--aggressive')
self.RunGit(gc_args)
gsutil.call('-m', 'cp', '-r', src_name, dest_prefix)
# Create .ready file and upload
_, ready_file_name = tempfile.mkstemp(suffix='.ready')
try:
gsutil.call('cp', ready_file_name, '%s.ready' % (dest_prefix))
finally:
os.remove(ready_file_name)
# remove all other directory/.ready files in the same gs_path
# except for the directory/.ready file previously created
# which can be used for bootstrapping while the current one is
# being uploaded
if not prune:
return
prev_dest_prefix = self._GetMostRecentCacheDirectory(ls_out_set)
if not prev_dest_prefix:
return
for path in ls_out_set:
if (path == prev_dest_prefix + '/' or
path == prev_dest_prefix + '.ready'):
continue
if path.endswith('.ready'):
gsutil.call('rm', path)
continue
gsutil.call('-m', 'rm', '-r', path)
@staticmethod
def DeleteTmpPackFiles(path):
pack_dir = os.path.join(path, 'objects', 'pack')
if not os.path.isdir(pack_dir):
return
pack_files = [f for f in os.listdir(pack_dir) if
f.startswith('.tmp-') or f.startswith('tmp_pack_')]
for f in pack_files:
f = os.path.join(pack_dir, f)
try:
os.remove(f)
logging.warn('Deleted stale temporary pack file %s' % f)
except OSError:
logging.warn('Unable to delete temporary pack file %s' % f)
@classmethod
def BreakLocks(cls, path):
did_unlock = False
lf = Lockfile(path)
if lf.break_lock():
did_unlock = True
# Look for lock files that might have been left behind by an interrupted
# git process.
lf = os.path.join(path, 'config.lock')
if os.path.exists(lf):
os.remove(lf)
did_unlock = True
cls.DeleteTmpPackFiles(path)
return did_unlock
def unlock(self):
return self.BreakLocks(self.mirror_path)
@classmethod
def UnlockAll(cls):
cachepath = cls.GetCachePath()
if not cachepath:
return
dirlist = os.listdir(cachepath)
repo_dirs = set([os.path.join(cachepath, path) for path in dirlist
if os.path.isdir(os.path.join(cachepath, path))])
for dirent in dirlist:
if dirent.startswith('_cache_tmp') or dirent.startswith('tmp'):
gclient_utils.rm_file_or_tree(os.path.join(cachepath, dirent))
elif (dirent.endswith('.lock') and
os.path.isfile(os.path.join(cachepath, dirent))):
repo_dirs.add(os.path.join(cachepath, dirent[:-5]))
unlocked_repos = []
for repo_dir in repo_dirs:
if cls.BreakLocks(repo_dir):
unlocked_repos.append(repo_dir)
return unlocked_repos
@subcommand.usage('[url of repo to check for caching]')
def CMDexists(parser, args):
"""Check to see if there already is a cache of the given repo."""
_, args = parser.parse_args(args)
if not len(args) == 1:
parser.error('git cache exists only takes exactly one repo url.')
url = args[0]
mirror = Mirror(url)
if mirror.exists():
print(mirror.mirror_path)
return 0
return 1
@subcommand.usage('[url of repo to create a bootstrap zip file]')
def CMDupdate_bootstrap(parser, args):
"""Create and uploads a bootstrap tarball."""
# Lets just assert we can't do this on Windows.
if sys.platform.startswith('win'):
print('Sorry, update bootstrap will not work on Windows.', file=sys.stderr)
return 1
parser.add_option('--skip-populate', action='store_true',
help='Skips "populate" step if mirror already exists.')
parser.add_option('--gc-aggressive', action='store_true',
help='Run aggressive repacking of the repo.')
parser.add_option('--prune', action='store_true',
help='Prune all other cached bundles of the same repo.')
populate_args = args[:]
options, args = parser.parse_args(args)
url = args[0]
mirror = Mirror(url)
if not options.skip_populate or not mirror.exists():
CMDpopulate(parser, populate_args)
else:
print('Skipped populate step.')
# Get the repo directory.
options, args = parser.parse_args(args)
url = args[0]
mirror = Mirror(url)
mirror.update_bootstrap(options.prune, options.gc_aggressive)
return 0
@subcommand.usage('[url of repo to add to or update in cache]')
def CMDpopulate(parser, args):
"""Ensure that the cache has all up-to-date objects for the given repo."""
parser.add_option('--depth', type='int',
help='Only cache DEPTH commits of history')
parser.add_option(
'--no-fetch-tags',
action='store_true',
help=('Don\'t fetch tags from the server. This can speed up '
'fetch considerably when there are many tags.'))
parser.add_option('--shallow', '-s', action='store_true',
help='Only cache 10000 commits of history')
parser.add_option('--ref', action='append',
help='Specify additional refs to be fetched')
parser.add_option('--no_bootstrap', '--no-bootstrap',
action='store_true',
help='Don\'t bootstrap from Google Storage')
parser.add_option('--ignore_locks', '--ignore-locks',
action='store_true',
help='Don\'t try to lock repository')
parser.add_option('--break-locks',
action='store_true',
help='Break any existing lock instead of just ignoring it')
parser.add_option('--reset-fetch-config', action='store_true', default=False,
help='Reset the fetch config before populating the cache.')
options, args = parser.parse_args(args)
if not len(args) == 1:
parser.error('git cache populate only takes exactly one repo url.')
url = args[0]
mirror = Mirror(url, refs=options.ref)
if options.break_locks:
mirror.unlock()
kwargs = {
'no_fetch_tags': options.no_fetch_tags,
'verbose': options.verbose,
'shallow': options.shallow,
'bootstrap': not options.no_bootstrap,
'ignore_lock': options.ignore_locks,
'lock_timeout': options.timeout,
'reset_fetch_config': options.reset_fetch_config,
}
if options.depth:
kwargs['depth'] = options.depth
mirror.populate(**kwargs)
@subcommand.usage('Fetch new commits into cache and current checkout')
def CMDfetch(parser, args):
"""Update mirror, and fetch in cwd."""
parser.add_option('--all', action='store_true', help='Fetch all remotes')
parser.add_option('--no_bootstrap', '--no-bootstrap',
action='store_true',
help='Don\'t (re)bootstrap from Google Storage')
parser.add_option(
'--no-fetch-tags',
action='store_true',
help=('Don\'t fetch tags from the server. This can speed up '
'fetch considerably when there are many tags.'))
options, args = parser.parse_args(args)
# Figure out which remotes to fetch. This mimics the behavior of regular
# 'git fetch'. Note that in the case of "stacked" or "pipelined" branches,
# this will NOT try to traverse up the branching structure to find the
# ultimate remote to update.
remotes = []
if options.all:
assert not args, 'fatal: fetch --all does not take a repository argument'
remotes = subprocess.check_output([Mirror.git_exe, 'remote']).splitlines()
elif args:
remotes = args
else:
current_branch = subprocess.check_output(
[Mirror.git_exe, 'rev-parse', '--abbrev-ref', 'HEAD']).strip()
if current_branch != 'HEAD':
upstream = subprocess.check_output(
[Mirror.git_exe, 'config', 'branch.%s.remote' % current_branch]
).strip()
if upstream and upstream != '.':
remotes = [upstream]
if not remotes:
remotes = ['origin']
cachepath = Mirror.GetCachePath()
git_dir = os.path.abspath(subprocess.check_output(
[Mirror.git_exe, 'rev-parse', '--git-dir']))
git_dir = os.path.abspath(git_dir)
if git_dir.startswith(cachepath):
mirror = Mirror.FromPath(git_dir)
mirror.populate(
bootstrap=not options.no_bootstrap,
no_fetch_tags=options.no_fetch_tags,
lock_timeout=options.timeout)
return 0
for remote in remotes:
remote_url = subprocess.check_output(
[Mirror.git_exe, 'config', 'remote.%s.url' % remote]).strip()
if remote_url.startswith(cachepath):
mirror = Mirror.FromPath(remote_url)
mirror.print = lambda *args: None
print('Updating git cache...')
mirror.populate(
bootstrap=not options.no_bootstrap,
no_fetch_tags=options.no_fetch_tags,
lock_timeout=options.timeout)
subprocess.check_call([Mirror.git_exe, 'fetch', remote])
return 0
@subcommand.usage('[url of repo to unlock, or -a|--all]')
def CMDunlock(parser, args):
"""Unlock one or all repos if their lock files are still around."""
parser.add_option('--force', '-f', action='store_true',
help='Actually perform the action')
parser.add_option('--all', '-a', action='store_true',
help='Unlock all repository caches')
options, args = parser.parse_args(args)
if len(args) > 1 or (len(args) == 0 and not options.all):
parser.error('git cache unlock takes exactly one repo url, or --all')
if not options.force:
cachepath = Mirror.GetCachePath()
lockfiles = [os.path.join(cachepath, path)
for path in os.listdir(cachepath)
if path.endswith('.lock') and os.path.isfile(path)]
parser.error('git cache unlock requires -f|--force to do anything. '
'Refusing to unlock the following repo caches: '
', '.join(lockfiles))
unlocked_repos = []
if options.all:
unlocked_repos.extend(Mirror.UnlockAll())
else:
m = Mirror(args[0])
if m.unlock():
unlocked_repos.append(m.mirror_path)
if unlocked_repos:
logging.info('Broke locks on these caches:\n %s' % '\n '.join(
unlocked_repos))
class OptionParser(optparse.OptionParser):
"""Wrapper class for OptionParser to handle global options."""
def __init__(self, *args, **kwargs):
optparse.OptionParser.__init__(self, *args, prog='git cache', **kwargs)
self.add_option('-c', '--cache-dir',
help=(
'Path to the directory containing the caches. Normally '
'deduced from git config cache.cachepath or '
'$GIT_CACHE_PATH.'))
self.add_option('-v', '--verbose', action='count', default=1,
help='Increase verbosity (can be passed multiple times)')
self.add_option('-q', '--quiet', action='store_true',
help='Suppress all extraneous output')
self.add_option('--timeout', type='int', default=0,
help='Timeout for acquiring cache lock, in seconds')
def parse_args(self, args=None, values=None):
options, args = optparse.OptionParser.parse_args(self, args, values)
if options.quiet:
options.verbose = 0
levels = [logging.ERROR, logging.WARNING, logging.INFO, logging.DEBUG]
logging.basicConfig(level=levels[min(options.verbose, len(levels) - 1)])
try:
global_cache_dir = Mirror.GetCachePath()
except RuntimeError:
global_cache_dir = None
if options.cache_dir:
if global_cache_dir and (
os.path.abspath(options.cache_dir) !=
os.path.abspath(global_cache_dir)):
logging.warn('Overriding globally-configured cache directory.')
Mirror.SetCachePath(options.cache_dir)
return options, args
def main(argv):
dispatcher = subcommand.CommandDispatcher(__name__)
return dispatcher.execute(OptionParser(), argv)
if __name__ == '__main__':
try:
sys.exit(main(sys.argv[1:]))
except KeyboardInterrupt:
sys.stderr.write('interrupted\n')
sys.exit(1)
4a1ec05df32efd93a4f3adaef7ef33ca809e8fb9 | 44,118 | py | Python | virtual/lib/python3.8/site-packages/pip/_vendor/distlib/wheel.py | Barrack-coder/News | 1bfb1b441cb2abb3d5edeebfefafc3b624a01e6c | ["MIT"] | 32 | 2021-05-03T09:03:57.000Z | 2022-03-17T09:18:59.000Z | virtual/lib/python3.8/site-packages/pip/_vendor/distlib/wheel.py | Barrack-coder/News | 1bfb1b441cb2abb3d5edeebfefafc3b624a01e6c | ["MIT"] | 4 | 2021-05-29T20:42:52.000Z | 2022-03-16T03:01:12.000Z | virtual/lib/python3.8/site-packages/pip/_vendor/distlib/wheel.py | Barrack-coder/News | 1bfb1b441cb2abb3d5edeebfefafc3b624a01e6c | ["MIT"] | 27 | 2021-11-10T08:44:10.000Z | 2022-03-30T08:19:46.000Z |
# -*- coding: utf-8 -*-
#
# Copyright (C) 2013-2020 Vinay Sajip.
# Licensed to the Python Software Foundation under a contributor agreement.
# See LICENSE.txt and CONTRIBUTORS.txt.
#
from __future__ import unicode_literals
import base64
import codecs
import datetime
from email import message_from_file
import hashlib
import imp
import json
import logging
import os
import posixpath
import re
import shutil
import sys
import tempfile
import zipfile
from . import __version__, DistlibException
from .compat import sysconfig, ZipFile, fsdecode, text_type, filter
from .database import InstalledDistribution
from .metadata import (Metadata, METADATA_FILENAME, WHEEL_METADATA_FILENAME,
LEGACY_METADATA_FILENAME)
from .util import (FileOperator, convert_path, CSVReader, CSVWriter, Cache,
cached_property, get_cache_base, read_exports, tempdir,
get_platform)
from .version import NormalizedVersion, UnsupportedVersionError
logger = logging.getLogger(__name__)
cache = None # created when needed
if hasattr(sys, 'pypy_version_info'): # pragma: no cover
IMP_PREFIX = 'pp'
elif sys.platform.startswith('java'): # pragma: no cover
IMP_PREFIX = 'jy'
elif sys.platform == 'cli': # pragma: no cover
IMP_PREFIX = 'ip'
else:
IMP_PREFIX = 'cp'
VER_SUFFIX = sysconfig.get_config_var('py_version_nodot')
if not VER_SUFFIX: # pragma: no cover
if sys.version_info[1] >= 10:
VER_SUFFIX = '%s_%s' % sys.version_info[:2] # PEP 641 (draft)
else:
VER_SUFFIX = '%s%s' % sys.version_info[:2]
PYVER = 'py' + VER_SUFFIX
IMPVER = IMP_PREFIX + VER_SUFFIX
ARCH = get_platform().replace('-', '_').replace('.', '_')
ABI = sysconfig.get_config_var('SOABI')
if ABI and ABI.startswith('cpython-'):
ABI = ABI.replace('cpython-', 'cp').split('-')[0]
else:
def _derive_abi():
parts = ['cp', VER_SUFFIX]
if sysconfig.get_config_var('Py_DEBUG'):
parts.append('d')
if sysconfig.get_config_var('WITH_PYMALLOC'):
parts.append('m')
if sysconfig.get_config_var('Py_UNICODE_SIZE') == 4:
parts.append('u')
return ''.join(parts)
ABI = _derive_abi()
del _derive_abi
FILENAME_RE = re.compile(r'''
(?P<nm>[^-]+)
-(?P<vn>\d+[^-]*)
(-(?P<bn>\d+[^-]*))?
-(?P<py>\w+\d+(\.\w+\d+)*)
-(?P<bi>\w+)
-(?P<ar>\w+(\.\w+)*)
\.whl$
''', re.IGNORECASE | re.VERBOSE)
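# Illustrative example (added commentary, not part of the original source):
# FILENAME_RE matches a wheel name such as 'distlib-0.3.1-py2.py3-none-any.whl',
# capturing nm='distlib', vn='0.3.1', py='py2.py3', bi='none' and ar='any'.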
NAME_VERSION_RE = re.compile(r'''
(?P<nm>[^-]+)
-(?P<vn>\d+[^-]*)
(-(?P<bn>\d+[^-]*))?$
''', re.IGNORECASE | re.VERBOSE)
SHEBANG_RE = re.compile(br'\s*#![^\r\n]*')
SHEBANG_DETAIL_RE = re.compile(br'^(\s*#!("[^"]+"|\S+))\s+(.*)$')
SHEBANG_PYTHON = b'#!python'
SHEBANG_PYTHONW = b'#!pythonw'
if os.sep == '/':
to_posix = lambda o: o
else:
to_posix = lambda o: o.replace(os.sep, '/')
class Mounter(object):
def __init__(self):
self.impure_wheels = {}
self.libs = {}
def add(self, pathname, extensions):
self.impure_wheels[pathname] = extensions
self.libs.update(extensions)
def remove(self, pathname):
extensions = self.impure_wheels.pop(pathname)
for k, v in extensions:
if k in self.libs:
del self.libs[k]
def find_module(self, fullname, path=None):
if fullname in self.libs:
result = self
else:
result = None
return result
def load_module(self, fullname):
if fullname in sys.modules:
result = sys.modules[fullname]
else:
if fullname not in self.libs:
raise ImportError('unable to find extension for %s' % fullname)
result = imp.load_dynamic(fullname, self.libs[fullname])
result.__loader__ = self
parts = fullname.rsplit('.', 1)
if len(parts) > 1:
result.__package__ = parts[0]
return result
_hook = Mounter()
class Wheel(object):
"""
Class to build and install from Wheel files (PEP 427).
"""
wheel_version = (1, 1)
hash_kind = 'sha256'
def __init__(self, filename=None, sign=False, verify=False):
"""
Initialise an instance using a (valid) filename.
"""
self.sign = sign
self.should_verify = verify
self.buildver = ''
self.pyver = [PYVER]
self.abi = ['none']
self.arch = ['any']
self.dirname = os.getcwd()
if filename is None:
self.name = 'dummy'
self.version = '0.1'
self._filename = self.filename
else:
m = NAME_VERSION_RE.match(filename)
if m:
info = m.groupdict('')
self.name = info['nm']
# Reinstate the local version separator
self.version = info['vn'].replace('_', '-')
self.buildver = info['bn']
self._filename = self.filename
else:
dirname, filename = os.path.split(filename)
m = FILENAME_RE.match(filename)
if not m:
raise DistlibException('Invalid name or '
'filename: %r' % filename)
if dirname:
self.dirname = os.path.abspath(dirname)
self._filename = filename
info = m.groupdict('')
self.name = info['nm']
self.version = info['vn']
self.buildver = info['bn']
self.pyver = info['py'].split('.')
self.abi = info['bi'].split('.')
self.arch = info['ar'].split('.')
@property
def filename(self):
"""
Build and return a filename from the various components.
"""
if self.buildver:
buildver = '-' + self.buildver
else:
buildver = ''
pyver = '.'.join(self.pyver)
abi = '.'.join(self.abi)
arch = '.'.join(self.arch)
# replace - with _ as a local version separator
version = self.version.replace('-', '_')
return '%s-%s%s-%s-%s-%s.whl' % (self.name, version, buildver,
pyver, abi, arch)
@property
def exists(self):
path = os.path.join(self.dirname, self.filename)
return os.path.isfile(path)
@property
def tags(self):
for pyver in self.pyver:
for abi in self.abi:
for arch in self.arch:
yield pyver, abi, arch
@cached_property
def metadata(self):
pathname = os.path.join(self.dirname, self.filename)
name_ver = '%s-%s' % (self.name, self.version)
info_dir = '%s.dist-info' % name_ver
wrapper = codecs.getreader('utf-8')
with ZipFile(pathname, 'r') as zf:
wheel_metadata = self.get_wheel_metadata(zf)
wv = wheel_metadata['Wheel-Version'].split('.', 1)
file_version = tuple([int(i) for i in wv])
# if file_version < (1, 1):
# fns = [WHEEL_METADATA_FILENAME, METADATA_FILENAME,
# LEGACY_METADATA_FILENAME]
# else:
# fns = [WHEEL_METADATA_FILENAME, METADATA_FILENAME]
fns = [WHEEL_METADATA_FILENAME, LEGACY_METADATA_FILENAME]
result = None
for fn in fns:
try:
metadata_filename = posixpath.join(info_dir, fn)
with zf.open(metadata_filename) as bf:
wf = wrapper(bf)
result = Metadata(fileobj=wf)
if result:
break
except KeyError:
pass
if not result:
raise ValueError('Invalid wheel, because metadata is '
'missing: looked in %s' % ', '.join(fns))
return result
def get_wheel_metadata(self, zf):
name_ver = '%s-%s' % (self.name, self.version)
info_dir = '%s.dist-info' % name_ver
metadata_filename = posixpath.join(info_dir, 'WHEEL')
with zf.open(metadata_filename) as bf:
wf = codecs.getreader('utf-8')(bf)
message = message_from_file(wf)
return dict(message)
@cached_property
def info(self):
pathname = os.path.join(self.dirname, self.filename)
with ZipFile(pathname, 'r') as zf:
result = self.get_wheel_metadata(zf)
return result
def process_shebang(self, data):
m = SHEBANG_RE.match(data)
if m:
end = m.end()
shebang, data_after_shebang = data[:end], data[end:]
# Preserve any arguments after the interpreter
if b'pythonw' in shebang.lower():
shebang_python = SHEBANG_PYTHONW
else:
shebang_python = SHEBANG_PYTHON
m = SHEBANG_DETAIL_RE.match(shebang)
if m:
args = b' ' + m.groups()[-1]
else:
args = b''
shebang = shebang_python + args
data = shebang + data_after_shebang
else:
cr = data.find(b'\r')
lf = data.find(b'\n')
if cr < 0 or cr > lf:
term = b'\n'
else:
if data[cr:cr + 2] == b'\r\n':
term = b'\r\n'
else:
term = b'\r'
data = SHEBANG_PYTHON + term + data
return data
def get_hash(self, data, hash_kind=None):
if hash_kind is None:
hash_kind = self.hash_kind
try:
hasher = getattr(hashlib, hash_kind)
except AttributeError:
raise DistlibException('Unsupported hash algorithm: %r' % hash_kind)
result = hasher(data).digest()
result = base64.urlsafe_b64encode(result).rstrip(b'=').decode('ascii')
return hash_kind, result
def write_record(self, records, record_path, base):
records = list(records) # make a copy, as mutated
p = to_posix(os.path.relpath(record_path, base))
records.append((p, '', ''))
with CSVWriter(record_path) as writer:
for row in records:
writer.writerow(row)
def write_records(self, info, libdir, archive_paths):
records = []
distinfo, info_dir = info
hasher = getattr(hashlib, self.hash_kind)
for ap, p in archive_paths:
with open(p, 'rb') as f:
data = f.read()
digest = '%s=%s' % self.get_hash(data)
size = os.path.getsize(p)
records.append((ap, digest, size))
p = os.path.join(distinfo, 'RECORD')
self.write_record(records, p, libdir)
ap = to_posix(os.path.join(info_dir, 'RECORD'))
archive_paths.append((ap, p))
def build_zip(self, pathname, archive_paths):
with ZipFile(pathname, 'w', zipfile.ZIP_DEFLATED) as zf:
for ap, p in archive_paths:
logger.debug('Wrote %s to %s in wheel', p, ap)
zf.write(p, ap)
def build(self, paths, tags=None, wheel_version=None):
"""
Build a wheel from files in specified paths, and use any specified tags
when determining the name of the wheel.
"""
if tags is None:
tags = {}
libkey = list(filter(lambda o: o in paths, ('purelib', 'platlib')))[0]
if libkey == 'platlib':
is_pure = 'false'
default_pyver = [IMPVER]
default_abi = [ABI]
default_arch = [ARCH]
else:
is_pure = 'true'
default_pyver = [PYVER]
default_abi = ['none']
default_arch = ['any']
self.pyver = tags.get('pyver', default_pyver)
self.abi = tags.get('abi', default_abi)
self.arch = tags.get('arch', default_arch)
libdir = paths[libkey]
name_ver = '%s-%s' % (self.name, self.version)
data_dir = '%s.data' % name_ver
info_dir = '%s.dist-info' % name_ver
archive_paths = []
# First, stuff which is not in site-packages
for key in ('data', 'headers', 'scripts'):
if key not in paths:
continue
path = paths[key]
if os.path.isdir(path):
for root, dirs, files in os.walk(path):
for fn in files:
p = fsdecode(os.path.join(root, fn))
rp = os.path.relpath(p, path)
ap = to_posix(os.path.join(data_dir, key, rp))
archive_paths.append((ap, p))
if key == 'scripts' and not p.endswith('.exe'):
with open(p, 'rb') as f:
data = f.read()
data = self.process_shebang(data)
with open(p, 'wb') as f:
f.write(data)
# Now, stuff which is in site-packages, other than the
# distinfo stuff.
path = libdir
distinfo = None
for root, dirs, files in os.walk(path):
if root == path:
# At the top level only, save distinfo for later
# and skip it for now
for i, dn in enumerate(dirs):
dn = fsdecode(dn)
if dn.endswith('.dist-info'):
distinfo = os.path.join(root, dn)
del dirs[i]
break
assert distinfo, '.dist-info directory expected, not found'
for fn in files:
# comment out next suite to leave .pyc files in
if fsdecode(fn).endswith(('.pyc', '.pyo')):
continue
p = os.path.join(root, fn)
rp = to_posix(os.path.relpath(p, path))
archive_paths.append((rp, p))
# Now distinfo. Assumed to be flat, i.e. os.listdir is enough.
files = os.listdir(distinfo)
for fn in files:
if fn not in ('RECORD', 'INSTALLER', 'SHARED', 'WHEEL'):
p = fsdecode(os.path.join(distinfo, fn))
ap = to_posix(os.path.join(info_dir, fn))
archive_paths.append((ap, p))
wheel_metadata = [
'Wheel-Version: %d.%d' % (wheel_version or self.wheel_version),
'Generator: distlib %s' % __version__,
'Root-Is-Purelib: %s' % is_pure,
]
for pyver, abi, arch in self.tags:
wheel_metadata.append('Tag: %s-%s-%s' % (pyver, abi, arch))
p = os.path.join(distinfo, 'WHEEL')
with open(p, 'w') as f:
f.write('\n'.join(wheel_metadata))
ap = to_posix(os.path.join(info_dir, 'WHEEL'))
archive_paths.append((ap, p))
# sort the entries by archive path. Not needed by any spec, but it
# keeps the archive listing and RECORD tidier than they would otherwise
# be. Use the number of path segments to keep directory entries together,
# and keep the dist-info stuff at the end.
def sorter(t):
ap = t[0]
n = ap.count('/')
if '.dist-info' in ap:
n += 10000
return (n, ap)
archive_paths = sorted(archive_paths, key=sorter)
# Now, at last, RECORD.
# Paths in here are archive paths - nothing else makes sense.
self.write_records((distinfo, info_dir), libdir, archive_paths)
# Now, ready to build the zip file
pathname = os.path.join(self.dirname, self.filename)
self.build_zip(pathname, archive_paths)
return pathname
def skip_entry(self, arcname):
"""
Determine whether an archive entry should be skipped when verifying
or installing.
"""
# The signature file won't be in RECORD,
        # and we don't currently do anything with it
# We also skip directories, as they won't be in RECORD
# either. See:
#
# https://github.com/pypa/wheel/issues/294
# https://github.com/pypa/wheel/issues/287
# https://github.com/pypa/wheel/pull/289
#
return arcname.endswith(('/', '/RECORD.jws'))
def install(self, paths, maker, **kwargs):
"""
Install a wheel to the specified paths. If kwarg ``warner`` is
specified, it should be a callable, which will be called with two
tuples indicating the wheel version of this software and the wheel
version in the file, if there is a discrepancy in the versions.
        This can be used to issue any warnings or raise any exceptions.
If kwarg ``lib_only`` is True, only the purelib/platlib files are
installed, and the headers, scripts, data and dist-info metadata are
not written. If kwarg ``bytecode_hashed_invalidation`` is True, written
bytecode will try to use file-hash based invalidation (PEP-552) on
supported interpreter versions (CPython 2.7+).
The return value is a :class:`InstalledDistribution` instance unless
``options.lib_only`` is True, in which case the return value is ``None``.
"""
dry_run = maker.dry_run
warner = kwargs.get('warner')
lib_only = kwargs.get('lib_only', False)
bc_hashed_invalidation = kwargs.get('bytecode_hashed_invalidation', False)
pathname = os.path.join(self.dirname, self.filename)
name_ver = '%s-%s' % (self.name, self.version)
data_dir = '%s.data' % name_ver
info_dir = '%s.dist-info' % name_ver
metadata_name = posixpath.join(info_dir, LEGACY_METADATA_FILENAME)
wheel_metadata_name = posixpath.join(info_dir, 'WHEEL')
record_name = posixpath.join(info_dir, 'RECORD')
wrapper = codecs.getreader('utf-8')
with ZipFile(pathname, 'r') as zf:
with zf.open(wheel_metadata_name) as bwf:
wf = wrapper(bwf)
message = message_from_file(wf)
wv = message['Wheel-Version'].split('.', 1)
file_version = tuple([int(i) for i in wv])
if (file_version != self.wheel_version) and warner:
warner(self.wheel_version, file_version)
if message['Root-Is-Purelib'] == 'true':
libdir = paths['purelib']
else:
libdir = paths['platlib']
records = {}
with zf.open(record_name) as bf:
with CSVReader(stream=bf) as reader:
for row in reader:
p = row[0]
records[p] = row
data_pfx = posixpath.join(data_dir, '')
info_pfx = posixpath.join(info_dir, '')
script_pfx = posixpath.join(data_dir, 'scripts', '')
# make a new instance rather than a copy of maker's,
# as we mutate it
fileop = FileOperator(dry_run=dry_run)
fileop.record = True # so we can rollback if needed
bc = not sys.dont_write_bytecode # Double negatives. Lovely!
outfiles = [] # for RECORD writing
# for script copying/shebang processing
workdir = tempfile.mkdtemp()
# set target dir later
# we default add_launchers to False, as the
# Python Launcher should be used instead
maker.source_dir = workdir
maker.target_dir = None
try:
for zinfo in zf.infolist():
arcname = zinfo.filename
if isinstance(arcname, text_type):
u_arcname = arcname
else:
u_arcname = arcname.decode('utf-8')
if self.skip_entry(u_arcname):
continue
row = records[u_arcname]
if row[2] and str(zinfo.file_size) != row[2]:
raise DistlibException('size mismatch for '
'%s' % u_arcname)
if row[1]:
kind, value = row[1].split('=', 1)
with zf.open(arcname) as bf:
data = bf.read()
_, digest = self.get_hash(data, kind)
if digest != value:
raise DistlibException('digest mismatch for '
'%s' % arcname)
if lib_only and u_arcname.startswith((info_pfx, data_pfx)):
logger.debug('lib_only: skipping %s', u_arcname)
continue
is_script = (u_arcname.startswith(script_pfx)
and not u_arcname.endswith('.exe'))
if u_arcname.startswith(data_pfx):
_, where, rp = u_arcname.split('/', 2)
outfile = os.path.join(paths[where], convert_path(rp))
else:
# meant for site-packages.
if u_arcname in (wheel_metadata_name, record_name):
continue
outfile = os.path.join(libdir, convert_path(u_arcname))
if not is_script:
with zf.open(arcname) as bf:
fileop.copy_stream(bf, outfile)
# Issue #147: permission bits aren't preserved. Using
# zf.extract(zinfo, libdir) should have worked, but didn't,
# see https://www.thetopsites.net/article/53834422.shtml
# So ... manually preserve permission bits as given in zinfo
if os.name == 'posix':
# just set the normal permission bits
os.chmod(outfile, (zinfo.external_attr >> 16) & 0x1FF)
outfiles.append(outfile)
# Double check the digest of the written file
if not dry_run and row[1]:
with open(outfile, 'rb') as bf:
data = bf.read()
_, newdigest = self.get_hash(data, kind)
if newdigest != digest:
raise DistlibException('digest mismatch '
'on write for '
'%s' % outfile)
if bc and outfile.endswith('.py'):
try:
pyc = fileop.byte_compile(outfile,
hashed_invalidation=bc_hashed_invalidation)
outfiles.append(pyc)
except Exception:
# Don't give up if byte-compilation fails,
# but log it and perhaps warn the user
logger.warning('Byte-compilation failed',
exc_info=True)
else:
fn = os.path.basename(convert_path(arcname))
workname = os.path.join(workdir, fn)
with zf.open(arcname) as bf:
fileop.copy_stream(bf, workname)
dn, fn = os.path.split(outfile)
maker.target_dir = dn
filenames = maker.make(fn)
fileop.set_executable_mode(filenames)
outfiles.extend(filenames)
if lib_only:
logger.debug('lib_only: returning None')
dist = None
else:
# Generate scripts
# Try to get pydist.json so we can see if there are
# any commands to generate. If this fails (e.g. because
# of a legacy wheel), log a warning but don't give up.
commands = None
file_version = self.info['Wheel-Version']
if file_version == '1.0':
# Use legacy info
ep = posixpath.join(info_dir, 'entry_points.txt')
try:
with zf.open(ep) as bwf:
epdata = read_exports(bwf)
commands = {}
for key in ('console', 'gui'):
k = '%s_scripts' % key
if k in epdata:
commands['wrap_%s' % key] = d = {}
for v in epdata[k].values():
s = '%s:%s' % (v.prefix, v.suffix)
if v.flags:
s += ' [%s]' % ','.join(v.flags)
d[v.name] = s
except Exception:
logger.warning('Unable to read legacy script '
'metadata, so cannot generate '
'scripts')
else:
try:
with zf.open(metadata_name) as bwf:
wf = wrapper(bwf)
commands = json.load(wf).get('extensions')
if commands:
commands = commands.get('python.commands')
except Exception:
logger.warning('Unable to read JSON metadata, so '
'cannot generate scripts')
if commands:
console_scripts = commands.get('wrap_console', {})
gui_scripts = commands.get('wrap_gui', {})
if console_scripts or gui_scripts:
script_dir = paths.get('scripts', '')
if not os.path.isdir(script_dir):
raise ValueError('Valid script path not '
'specified')
maker.target_dir = script_dir
for k, v in console_scripts.items():
script = '%s = %s' % (k, v)
filenames = maker.make(script)
fileop.set_executable_mode(filenames)
if gui_scripts:
options = {'gui': True }
for k, v in gui_scripts.items():
script = '%s = %s' % (k, v)
filenames = maker.make(script, options)
fileop.set_executable_mode(filenames)
p = os.path.join(libdir, info_dir)
dist = InstalledDistribution(p)
# Write SHARED
paths = dict(paths) # don't change passed in dict
del paths['purelib']
del paths['platlib']
paths['lib'] = libdir
p = dist.write_shared_locations(paths, dry_run)
if p:
outfiles.append(p)
# Write RECORD
dist.write_installed_files(outfiles, paths['prefix'],
dry_run)
return dist
except Exception: # pragma: no cover
logger.exception('installation failed.')
fileop.rollback()
raise
finally:
shutil.rmtree(workdir)
def _get_dylib_cache(self):
global cache
if cache is None:
# Use native string to avoid issues on 2.x: see Python #20140.
base = os.path.join(get_cache_base(), str('dylib-cache'),
'%s.%s' % sys.version_info[:2])
cache = Cache(base)
return cache
def _get_extensions(self):
pathname = os.path.join(self.dirname, self.filename)
name_ver = '%s-%s' % (self.name, self.version)
info_dir = '%s.dist-info' % name_ver
arcname = posixpath.join(info_dir, 'EXTENSIONS')
wrapper = codecs.getreader('utf-8')
result = []
with ZipFile(pathname, 'r') as zf:
try:
with zf.open(arcname) as bf:
wf = wrapper(bf)
extensions = json.load(wf)
cache = self._get_dylib_cache()
prefix = cache.prefix_to_dir(pathname)
cache_base = os.path.join(cache.base, prefix)
if not os.path.isdir(cache_base):
os.makedirs(cache_base)
for name, relpath in extensions.items():
dest = os.path.join(cache_base, convert_path(relpath))
if not os.path.exists(dest):
extract = True
else:
file_time = os.stat(dest).st_mtime
file_time = datetime.datetime.fromtimestamp(file_time)
info = zf.getinfo(relpath)
wheel_time = datetime.datetime(*info.date_time)
extract = wheel_time > file_time
if extract:
zf.extract(relpath, cache_base)
result.append((name, dest))
except KeyError:
pass
return result
def is_compatible(self):
"""
Determine if a wheel is compatible with the running system.
"""
return is_compatible(self)
def is_mountable(self):
"""
Determine if a wheel is asserted as mountable by its metadata.
"""
return True # for now - metadata details TBD
def mount(self, append=False):
pathname = os.path.abspath(os.path.join(self.dirname, self.filename))
if not self.is_compatible():
msg = 'Wheel %s not compatible with this Python.' % pathname
raise DistlibException(msg)
if not self.is_mountable():
msg = 'Wheel %s is marked as not mountable.' % pathname
raise DistlibException(msg)
if pathname in sys.path:
logger.debug('%s already in path', pathname)
else:
if append:
sys.path.append(pathname)
else:
sys.path.insert(0, pathname)
extensions = self._get_extensions()
if extensions:
if _hook not in sys.meta_path:
sys.meta_path.append(_hook)
_hook.add(pathname, extensions)
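    # Illustrative sketch (not part of distlib): mounting makes a compatible wheel
    # importable without installing it. The path and package name below are hypothetical.
    #
    #   w = Wheel('dist/mypkg-1.0-py3-none-any.whl')
    #   w.mount()        # inserts the wheel at the front of sys.path (append=True appends)
    #   import mypkg     # packages inside the wheel are now importable
    #   w.unmount()      # removes it again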
def unmount(self):
pathname = os.path.abspath(os.path.join(self.dirname, self.filename))
if pathname not in sys.path:
logger.debug('%s not in path', pathname)
else:
sys.path.remove(pathname)
if pathname in _hook.impure_wheels:
_hook.remove(pathname)
if not _hook.impure_wheels:
if _hook in sys.meta_path:
sys.meta_path.remove(_hook)
def verify(self):
pathname = os.path.join(self.dirname, self.filename)
name_ver = '%s-%s' % (self.name, self.version)
data_dir = '%s.data' % name_ver
info_dir = '%s.dist-info' % name_ver
metadata_name = posixpath.join(info_dir, LEGACY_METADATA_FILENAME)
wheel_metadata_name = posixpath.join(info_dir, 'WHEEL')
record_name = posixpath.join(info_dir, 'RECORD')
wrapper = codecs.getreader('utf-8')
with ZipFile(pathname, 'r') as zf:
with zf.open(wheel_metadata_name) as bwf:
wf = wrapper(bwf)
message = message_from_file(wf)
wv = message['Wheel-Version'].split('.', 1)
file_version = tuple([int(i) for i in wv])
# TODO version verification
records = {}
with zf.open(record_name) as bf:
with CSVReader(stream=bf) as reader:
for row in reader:
p = row[0]
records[p] = row
for zinfo in zf.infolist():
arcname = zinfo.filename
if isinstance(arcname, text_type):
u_arcname = arcname
else:
u_arcname = arcname.decode('utf-8')
# See issue #115: some wheels have .. in their entries, but
# in the filename ... e.g. __main__..py ! So the check is
# updated to look for .. in the directory portions
p = u_arcname.split('/')
if '..' in p:
raise DistlibException('invalid entry in '
'wheel: %r' % u_arcname)
if self.skip_entry(u_arcname):
continue
row = records[u_arcname]
if row[2] and str(zinfo.file_size) != row[2]:
raise DistlibException('size mismatch for '
'%s' % u_arcname)
if row[1]:
kind, value = row[1].split('=', 1)
with zf.open(arcname) as bf:
data = bf.read()
_, digest = self.get_hash(data, kind)
if digest != value:
raise DistlibException('digest mismatch for '
'%s' % arcname)
def update(self, modifier, dest_dir=None, **kwargs):
"""
Update the contents of a wheel in a generic way. The modifier should
be a callable which expects a dictionary argument: its keys are
archive-entry paths, and its values are absolute filesystem paths
where the contents the corresponding archive entries can be found. The
modifier is free to change the contents of the files pointed to, add
new entries and remove entries, before returning. This method will
extract the entire contents of the wheel to a temporary location, call
the modifier, and then use the passed (and possibly updated)
dictionary to write a new wheel. If ``dest_dir`` is specified, the new
wheel is written there -- otherwise, the original wheel is overwritten.
The modifier should return True if it updated the wheel, else False.
This method returns the same value the modifier returns.
"""
def get_version(path_map, info_dir):
version = path = None
key = '%s/%s' % (info_dir, LEGACY_METADATA_FILENAME)
if key not in path_map:
key = '%s/PKG-INFO' % info_dir
if key in path_map:
path = path_map[key]
version = Metadata(path=path).version
return version, path
def update_version(version, path):
updated = None
try:
v = NormalizedVersion(version)
i = version.find('-')
if i < 0:
updated = '%s+1' % version
else:
parts = [int(s) for s in version[i + 1:].split('.')]
parts[-1] += 1
updated = '%s+%s' % (version[:i],
'.'.join(str(i) for i in parts))
except UnsupportedVersionError:
logger.debug('Cannot update non-compliant (PEP-440) '
'version %r', version)
if updated:
md = Metadata(path=path)
md.version = updated
legacy = path.endswith(LEGACY_METADATA_FILENAME)
md.write(path=path, legacy=legacy)
logger.debug('Version updated from %r to %r', version,
updated)
pathname = os.path.join(self.dirname, self.filename)
name_ver = '%s-%s' % (self.name, self.version)
info_dir = '%s.dist-info' % name_ver
record_name = posixpath.join(info_dir, 'RECORD')
with tempdir() as workdir:
with ZipFile(pathname, 'r') as zf:
path_map = {}
for zinfo in zf.infolist():
arcname = zinfo.filename
if isinstance(arcname, text_type):
u_arcname = arcname
else:
u_arcname = arcname.decode('utf-8')
if u_arcname == record_name:
continue
if '..' in u_arcname:
raise DistlibException('invalid entry in '
'wheel: %r' % u_arcname)
zf.extract(zinfo, workdir)
path = os.path.join(workdir, convert_path(u_arcname))
path_map[u_arcname] = path
# Remember the version.
original_version, _ = get_version(path_map, info_dir)
# Files extracted. Call the modifier.
modified = modifier(path_map, **kwargs)
if modified:
# Something changed - need to build a new wheel.
current_version, path = get_version(path_map, info_dir)
if current_version and (current_version == original_version):
# Add or update local version to signify changes.
update_version(current_version, path)
# Decide where the new wheel goes.
if dest_dir is None:
fd, newpath = tempfile.mkstemp(suffix='.whl',
prefix='wheel-update-',
dir=workdir)
os.close(fd)
else:
if not os.path.isdir(dest_dir):
raise DistlibException('Not a directory: %r' % dest_dir)
newpath = os.path.join(dest_dir, self.filename)
archive_paths = list(path_map.items())
distinfo = os.path.join(workdir, info_dir)
info = distinfo, info_dir
self.write_records(info, workdir, archive_paths)
self.build_zip(newpath, archive_paths)
if dest_dir is None:
shutil.copyfile(newpath, pathname)
return modified
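# Illustrative sketch (not part of distlib): a modifier passed to Wheel.update()
# receives the {archive entry: extracted path} mapping described in the docstring
# above and returns True if it changed anything. The names below are hypothetical.
#
#   def strip_trailing_whitespace(path_map):
#       changed = False
#       for entry, path in path_map.items():
#           if entry.endswith('.py'):
#               with open(path) as f:
#                   text = f.read()
#               cleaned = '\n'.join(line.rstrip() for line in text.splitlines()) + '\n'
#               if cleaned != text:
#                   with open(path, 'w') as f:
#                       f.write(cleaned)
#                   changed = True
#       return changed
#
#   Wheel('dist/mypkg-1.0-py3-none-any.whl').update(strip_trailing_whitespace)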
def _get_glibc_version():
import platform
ver = platform.libc_ver()
result = []
if ver[0] == 'glibc':
for s in ver[1].split('.'):
result.append(int(s) if s.isdigit() else 0)
result = tuple(result)
return result
def compatible_tags():
"""
Return (pyver, abi, arch) tuples compatible with this Python.
"""
versions = [VER_SUFFIX]
major = VER_SUFFIX[0]
for minor in range(sys.version_info[1] - 1, - 1, -1):
versions.append(''.join([major, str(minor)]))
abis = []
for suffix, _, _ in imp.get_suffixes():
if suffix.startswith('.abi'):
abis.append(suffix.split('.', 2)[1])
abis.sort()
if ABI != 'none':
abis.insert(0, ABI)
abis.append('none')
result = []
arches = [ARCH]
if sys.platform == 'darwin':
m = re.match(r'(\w+)_(\d+)_(\d+)_(\w+)$', ARCH)
if m:
name, major, minor, arch = m.groups()
minor = int(minor)
matches = [arch]
if arch in ('i386', 'ppc'):
matches.append('fat')
if arch in ('i386', 'ppc', 'x86_64'):
matches.append('fat3')
if arch in ('ppc64', 'x86_64'):
matches.append('fat64')
if arch in ('i386', 'x86_64'):
matches.append('intel')
if arch in ('i386', 'x86_64', 'intel', 'ppc', 'ppc64'):
matches.append('universal')
while minor >= 0:
for match in matches:
s = '%s_%s_%s_%s' % (name, major, minor, match)
if s != ARCH: # already there
arches.append(s)
minor -= 1
# Most specific - our Python version, ABI and arch
for abi in abis:
for arch in arches:
result.append((''.join((IMP_PREFIX, versions[0])), abi, arch))
# manylinux
if abi != 'none' and sys.platform.startswith('linux'):
arch = arch.replace('linux_', '')
parts = _get_glibc_version()
if len(parts) == 2:
if parts >= (2, 5):
result.append((''.join((IMP_PREFIX, versions[0])), abi,
'manylinux1_%s' % arch))
if parts >= (2, 12):
result.append((''.join((IMP_PREFIX, versions[0])), abi,
'manylinux2010_%s' % arch))
if parts >= (2, 17):
result.append((''.join((IMP_PREFIX, versions[0])), abi,
'manylinux2014_%s' % arch))
result.append((''.join((IMP_PREFIX, versions[0])), abi,
'manylinux_%s_%s_%s' % (parts[0], parts[1],
arch)))
# where no ABI / arch dependency, but IMP_PREFIX dependency
for i, version in enumerate(versions):
result.append((''.join((IMP_PREFIX, version)), 'none', 'any'))
if i == 0:
result.append((''.join((IMP_PREFIX, version[0])), 'none', 'any'))
# no IMP_PREFIX, ABI or arch dependency
for i, version in enumerate(versions):
result.append((''.join(('py', version)), 'none', 'any'))
if i == 0:
result.append((''.join(('py', version[0])), 'none', 'any'))
return set(result)
COMPATIBLE_TAGS = compatible_tags()
del compatible_tags
def is_compatible(wheel, tags=None):
if not isinstance(wheel, Wheel):
wheel = Wheel(wheel) # assume it's a filename
result = False
if tags is None:
tags = COMPATIBLE_TAGS
for ver, abi, arch in tags:
if ver in wheel.pyver and abi in wheel.abi and arch in wheel.arch:
result = True
break
return result
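# Illustrative usage (not part of distlib): COMPATIBLE_TAGS is computed once at import
# time, and is_compatible() checks a wheel's pyver/abi/arch against it. The filename
# below is hypothetical.
#
#   if is_compatible('mypkg-1.0-cp38-cp38-manylinux1_x86_64.whl'):
#       ...  # safe to install or mount on this interpreter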
| 41.738884 | 102 | 0.486989 |
4a1ec1ae9e998e0e1eee42173c10874cd072fcf5 | 442 | py | Python | zsh/serverlog.py | liuyang1/dotfiles | d43c051c2daff0f21cc1921f267786dcafcc7cca | [
"MIT"
] | 1 | 2021-01-15T03:41:46.000Z | 2021-01-15T03:41:46.000Z | zsh/serverlog.py | liuyang1/dotfiles | d43c051c2daff0f21cc1921f267786dcafcc7cca | [
"MIT"
] | null | null | null | zsh/serverlog.py | liuyang1/dotfiles | d43c051c2daff0f21cc1921f267786dcafcc7cca | [
"MIT"
] | 1 | 2019-05-27T11:39:08.000Z | 2019-05-27T11:39:08.000Z | import logging
log_file = "/tmp/fast-agnoster.log"
log_level = logging.DEBUG
logger = logging.getLogger("logging.NormalLogger")
handler = logging.FileHandler(log_file)
fmt = "[%(asctime)s] [%(filename)s:%(lineno)s] [%(levelname)s] %(message)s"
formatter = logging.Formatter(fmt)
handler.setFormatter(formatter)
logger.addHandler(handler)
logger.setLevel(log_level)
def inst():
return logger
def shutdown():
logging.shutdown()
| 19.217391 | 75 | 0.737557 |
4a1ec1cd3d9a51faa830ad55bbee26a2f34de02f | 2,719 | py | Python | clumioapi/models/vcenter_links.py | clumio-code/clumio-python-sdk | 63bfaf3afed5c0ab4bae3dd1be52271249d07c51 | [
"Apache-2.0"
] | null | null | null | clumioapi/models/vcenter_links.py | clumio-code/clumio-python-sdk | 63bfaf3afed5c0ab4bae3dd1be52271249d07c51 | [
"Apache-2.0"
] | 1 | 2021-09-16T05:56:05.000Z | 2021-09-16T05:56:05.000Z | clumioapi/models/vcenter_links.py | clumio-code/clumio-python-sdk | 63bfaf3afed5c0ab4bae3dd1be52271249d07c51 | [
"Apache-2.0"
] | null | null | null | #
# Copyright 2021. Clumio, Inc.
#
from typing import Any, Dict, Mapping, Optional, Sequence, Type, TypeVar
from clumioapi.models import hateoas_self_link
from clumioapi.models import read_v_center_object_protection_stats_hateoas_link
T = TypeVar('T', bound='VcenterLinks')
class VcenterLinks:
"""Implementation of the 'VcenterLinks' model.
URLs to pages related to the resource.
Attributes:
p_self:
The HATEOAS link to this resource.
read_vmware_vcenter_compliance_stats:
A HATEOAS link to the compliance statistics of VMs in the folders and subfolders
of this vCenter resource.
"""
# Create a mapping from Model property names to API property names
_names = {
'p_self': '_self',
'read_vmware_vcenter_compliance_stats': 'read-vmware-vcenter-compliance-stats',
}
def __init__(
self,
p_self: hateoas_self_link.HateoasSelfLink = None,
read_vmware_vcenter_compliance_stats: read_v_center_object_protection_stats_hateoas_link.ReadVCenterObjectProtectionStatsHateoasLink = None,
) -> None:
"""Constructor for the VcenterLinks class."""
# Initialize members of the class
self.p_self: hateoas_self_link.HateoasSelfLink = p_self
self.read_vmware_vcenter_compliance_stats: read_v_center_object_protection_stats_hateoas_link.ReadVCenterObjectProtectionStatsHateoasLink = (
read_vmware_vcenter_compliance_stats
)
@classmethod
def from_dictionary(cls: Type, dictionary: Mapping[str, Any]) -> Optional[T]:
"""Creates an instance of this model from a dictionary
Args:
dictionary: A dictionary representation of the object as obtained
from the deserialization of the server's response. The keys
MUST match property names in the API description.
Returns:
object: An instance of this structure class.
"""
if not dictionary:
return None
# Extract variables from the dictionary
key = '_self'
p_self = (
hateoas_self_link.HateoasSelfLink.from_dictionary(dictionary.get(key))
if dictionary.get(key)
else None
)
key = 'read-vmware-vcenter-compliance-stats'
read_vmware_vcenter_compliance_stats = (
read_v_center_object_protection_stats_hateoas_link.ReadVCenterObjectProtectionStatsHateoasLink.from_dictionary(
dictionary.get(key)
)
if dictionary.get(key)
else None
)
# Return an object of this model
return cls(p_self, read_vmware_vcenter_compliance_stats)
| 34.417722 | 149 | 0.679294 |
4a1ec21808ec2dee6f58c985323253a45b4506f1 | 979 | py | Python | mallard/config.py | zomatree/mallard | 0f25577f57213e7bff2d5daf75e39ea8096e365c | [
"MIT"
] | 9 | 2017-08-24T00:20:04.000Z | 2021-09-26T10:28:02.000Z | mallard/config.py | zomatree/mallard | 0f25577f57213e7bff2d5daf75e39ea8096e365c | [
"MIT"
] | 13 | 2017-08-27T19:32:27.000Z | 2020-09-03T02:52:11.000Z | mallard/config.py | zomatree/mallard | 0f25577f57213e7bff2d5daf75e39ea8096e365c | [
"MIT"
] | 10 | 2017-08-28T13:54:13.000Z | 2021-04-28T22:18:58.000Z | #
# config.py
#
# mallard - DDG Instant Answers bot for the Programming Server
# Copyright (c) 2017 Johannes Christ, Ammon Smith et al.
#
# mallard is available free of charge under the terms of the MIT
# License. You are free to redistribute and/or modify it under those
# terms. It is distributed in the hopes that it will be useful, but
# WITHOUT ANY WARRANTY. See the LICENSE file for more details.
#
import yaml
def load_config(path):
with open(path, "r") as fh:
obj = yaml.safe_load(fh)
# Required fields
if not isinstance(obj["bot"]["token"], str):
raise ValueError("Configuration file doesn't specify bot token")
if "ratelimit" not in obj:
raise ValueError("Configuration file doesn't specify ratelimit information")
# Optional fields
if not isinstance(obj.get("mentions", None), list):
obj["mentions"] = []
if "colour" in obj and "color" not in obj:
obj["color"] = obj["colour"]
return obj
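# Illustrative configuration file accepted by load_config(). The top-level field names
# come from the checks above; the nested ratelimit keys and all values are assumptions,
# not part of mallard:
#
#   bot:
#     token: "YOUR-DISCORD-BOT-TOKEN"
#   ratelimit:
#     seconds: 30
#   mentions: []
#   colour: 0x00FF00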
| 27.971429 | 84 | 0.679265 |
4a1ec2b1b63578ef353d155f634afb1698e9ef3d | 4,267 | py | Python | tests/test_corpora_reddit_reader.py | nigeljyng/textacy | 17833f63103850f9ea7e8e22b16378758dae6fce | [
"Apache-2.0"
] | 2 | 2021-04-13T16:00:30.000Z | 2022-03-15T00:36:48.000Z | tests/test_corpora_reddit_reader.py | nigeljyng/textacy | 17833f63103850f9ea7e8e22b16378758dae6fce | [
"Apache-2.0"
] | null | null | null | tests/test_corpora_reddit_reader.py | nigeljyng/textacy | 17833f63103850f9ea7e8e22b16378758dae6fce | [
"Apache-2.0"
] | null | null | null | from __future__ import absolute_import, unicode_literals
import os
import shutil
import tempfile
import unittest
from textacy.compat import PY2, unicode_
from textacy.corpora import RedditReader
from textacy.fileio import write_json_lines
REDDIT_COMMENTS = [
{"author_flair_css_class": None, "created_utc": "1420070400", "controversiality": 0, "parent_id": "t3_2qyr1a", "score": 14, "author": "YoungModern", "subreddit_id": "t5_2r0gj", "gilded": 0, "distinguished": None, "id": "cnas8zv", "link_id": "t3_2qyr1a", "name": "t1_cnas8zv", "author_flair_text": None, "downs": 0, "subreddit": "exmormon", "ups": 14, "edited": False, "retrieved_on": 1425124282, "body": "Most of us have some family members like this. *Most* of my family is like this. ", "score_hidden": False, "archived": False},
{"author_flair_css_class": "on", "created_utc": "1420070400", "parent_id": "t1_cnas2b6", "downs": 0, "score": 3, "author_flair_text": "Ontario", "subreddit_id": "t5_2s4gt", "gilded": 0, "distinguished": None, "id": "cnas8zw", "link_id": "t3_2qv6c6", "author": "RedCoatsForever", "controversiality": 0, "retrieved_on": 1425124282, "archived": False, "subreddit": "CanadaPolitics", "edited": False, "ups": 3, "body": "But Mill's career was way better. Bentham is like, the Joseph Smith to Mill's Brigham Young.", "score_hidden": False, "name": "t1_cnas8zw"},
{"author_flair_css_class": None, "created_utc": "1420070400", "parent_id": "t3_2qxefp", "author_flair_text": None, "score": 1, "subreddit_id": "t5_2s7tt", "gilded": 0, "distinguished": None, "id": "cnas8zx", "link_id": "t3_2qxefp", "author": "vhisic", "name": "t1_cnas8zx", "retrieved_on": 1425124282, "downs": 0, "subreddit": "AdviceAnimals", "controversiality": 0, "edited": False, "ups": 1, "body": "Mine uses a strait razor, and as much as i love the clippers i love the razor so much more. Then he follows it up with a warm towel. \nI think i might go get a hair cut this week.", "score_hidden": False, "archived": False},
]
class RedditReaderTestCase(unittest.TestCase):
def setUp(self):
self.tempdir = tempfile.mkdtemp(
prefix='test_corpora', dir=os.path.dirname(os.path.abspath(__file__)))
reddit_fname = os.path.join(self.tempdir, 'RC_test.bz2')
if PY2 is False:
write_json_lines(REDDIT_COMMENTS, reddit_fname, mode='wt',
auto_make_dirs=True)
else:
write_json_lines(REDDIT_COMMENTS, reddit_fname, mode='wb',
auto_make_dirs=True)
self.redditreader = RedditReader(reddit_fname)
def test_texts(self):
for text in self.redditreader.texts():
self.assertIsInstance(text, unicode_)
def test_texts_limit(self):
texts = list(self.redditreader.texts(limit=1))
self.assertEqual(len(texts), 1)
def test_texts_min_len(self):
for text in self.redditreader.texts(min_len=100):
self.assertTrue(len(text) >= 100)
def test_records(self):
for record in self.redditreader.records():
self.assertIsInstance(record, dict)
def test_records_limit(self):
records = list(self.redditreader.records(limit=1))
self.assertEqual(len(records), 1)
def test_records_score_range(self):
score_ranges = [(-2, 2), (5, None), (None, 2)]
for score_range in score_ranges:
records = list(self.redditreader.records(score_range=score_range))
self.assertEqual(len(records), 1)
for record in records:
if score_range[0]:
self.assertTrue(record['score'] >= score_range[0])
if score_range[1]:
self.assertTrue(record['score'] <= score_range[1])
def test_records_subreddit(self):
subreddits = [('exmormon',), {'CanadaPolitics', 'AdviceAnimals'}]
expected_lens = (1, 2)
for subreddit, expected_len in zip(subreddits, expected_lens):
records = list(self.redditreader.records(subreddit=subreddit))
self.assertEqual(len(records), expected_len)
for record in records:
self.assertTrue(record['subreddit'] in subreddit)
def tearDown(self):
shutil.rmtree(self.tempdir)
| 56.144737 | 631 | 0.660183 |
4a1ec2ebd1bfd9fee761945af9a87c0d3ed2af55 | 1,736 | py | Python | Others/Source/19/19.1/plot_line_legend2.py | silence0201/Learn-Python | 662da7c0e74221cedb445ba17d5cb1cd3af41c86 | [
"MIT"
] | 1 | 2018-05-30T01:38:23.000Z | 2018-05-30T01:38:23.000Z | Others/Source/19/19.1/plot_line_legend2.py | silence0201/Learn-Python | 662da7c0e74221cedb445ba17d5cb1cd3af41c86 | [
"MIT"
] | null | null | null | Others/Source/19/19.1/plot_line_legend2.py | silence0201/Learn-Python | 662da7c0e74221cedb445ba17d5cb1cd3af41c86 | [
"MIT"
] | null | null | null | # coding: utf-8
#########################################################################
#    Website: <a href="http://www.crazyit.org">疯狂Java联盟</a>                      #
# author yeeku.H.lee [email protected] #
# #
# version 1.0 #
# #
# Copyright (C), 2001-2018, yeeku.H.Lee #
# #
# This program is protected by copyright laws. #
# #
# Program Name: #
# #
# <br>Date: #
#########################################################################
import matplotlib.pyplot as plt
x_data = ['2011', '2012', '2013', '2014', '2015', '2016', '2017']
# Define two lists to serve as the Y-axis data for the two lines
y_data = [58000, 60200, 63000, 71000, 84000, 90500, 107000]
y_data2 = [52000, 54200, 51500,58300, 56800, 59500, 62700]
# Specify each line's color, width and style
plt.plot(x_data, y_data, color = 'red', linewidth = 2.0,
linestyle = '--', label='疯狂Java讲义年销量')
plt.plot(x_data, y_data2, color = 'blue', linewidth = 3.0,
linestyle = '-.', label='疯狂Android讲义年销量')
import matplotlib.font_manager as fm
# Use Matplotlib's font manager to load a Chinese font
my_font=fm.FontProperties(fname="C:\Windows\Fonts\msyh.ttf")
# Call the legend function to configure the legend (pass the Chinese font so the labels render)
plt.legend(loc='best', prop=my_font)
# Call show() to display the figure
plt.show()
| 49.6 | 74 | 0.358871 |
4a1ec30a09f7e8098b00804b1acbb3fbc75685e3 | 9,199 | py | Python | bk_py_torch_libs/pt_core.py | wbkdef/CodeSamples | b7b020dee97230479a741e8d067b473957faa546 | [
"Unlicense"
] | null | null | null | bk_py_torch_libs/pt_core.py | wbkdef/CodeSamples | b7b020dee97230479a741e8d067b473957faa546 | [
"Unlicense"
] | null | null | null | bk_py_torch_libs/pt_core.py | wbkdef/CodeSamples | b7b020dee97230479a741e8d067b473957faa546 | [
"Unlicense"
] | null | null | null | import os
import os.path as osp
import re
import sys
import enum
import typing as tp
import itertools as it
from typing import Union, List, Tuple, Dict, Sequence, Iterable, TypeVar, Any, Callable, Sized, NamedTuple, Optional
from functools import partial
import numpy as np
import pandas as pd
import sklearn as sk
from sklearn.model_selection import train_test_split
import matplotlib.pyplot as plt
import torch
import torch.nn as nn
from torch.autograd import Variable
import torch.nn.functional as F # noinspection PyPep8Naming
import torch.utils.data as data
import torch.optim
from bk_ds_libs import bk_utils_ds as utd
from bk_ds_libs import bk_data_sets
import bk_general_libs.bk_typing as tp_bk
from bk_general_libs.bk_typing import SelfSequence, SelfList, SelfTuple, SelfIterable, SelfList_Recursive, SelfSequence_Recursive, SelfIterable_Recursive, NonNegInt, NonNegFloat, Probability, NNI, NNF, PBT, TV
from bk_general_libs import bk_itertools
from bk_general_libs import bk_decorators
from bk_general_libs import bk_strings
# noinspection PyProtectedMember
TensorOrVariable = tp.Union[torch._TensorBase, Variable]
Type = tp.TypeVar('Type')
ItselfOrListOf = tp.Union[Type, tp.List[Type]]
TensorOrVariableOrListOf = ItselfOrListOf[TensorOrVariable]
ArrayLike = tp.Iterable  # Usually a list, a numpy array, etc., that you want to convert to a torch.Tensor/Variable
tests_to_run = []
# __t Jeremy's numpy/pytorch converters from: C:\Users\wbruc\Desktop\git_repos\fast.ai2\fastai\core.py
# noinspection PyArgumentList
# pylint: disable=invalid-name
def T(arr: ArrayLike) -> tp.Union[torch.cuda.LongTensor, torch.cuda.FloatTensor]:
"""Converts input to arr torch Long/Float tensor, usually on the GPU"""
if torch.is_tensor(arr): res = arr
else:
arr_np: np.ndarray = np.array(np.ascontiguousarray(arr))
if arr_np.dtype in (np.bool, np.int8, np.int16, np.int32, np.int64):
res = torch.LongTensor(arr_np.astype(np.int64))
elif arr_np.dtype in (np.float32, np.float64):
res = torch.FloatTensor(arr_np.astype(np.float32))
else:
raise NotImplementedError(f"numpy type of not recognized for arr_np: {arr_np}")
if isinstance(res, (torch.IntTensor, torch.cuda.IntTensor)):
res = res.long()
elif isinstance(res, (torch.DoubleTensor, torch.cuda.DoubleTensor)):
res = res.float()
assert isinstance(res, (torch.LongTensor, torch.cuda.LongTensor, torch.FloatTensor, torch.cuda.FloatTensor))
# noinspection PyTypeChecker
    return to_gpu(res, non_blocking=True)  # 'async' was renamed to 'non_blocking' in PyTorch; 'async' is a keyword from Python 3.7
# pylint: disable=invalid-name
def TEST_T():
"""Tests for function "T" """
double_tensor = torch.from_numpy(np.arange(5) / 5)
# double_tensor.dtype
res2 = T(double_tensor)
assert isinstance(res2, torch.cuda.FloatTensor)
to_np(res2) == np.arange(5)/5
res2
float_tensor = torch.from_numpy((np.arange(5) / 5).astype('float32'))
res2 = T(float_tensor)
assert isinstance(res2, torch.cuda.FloatTensor)
to_np(res2) == np.arange(5)/5
res2
double_range = np.arange(5) / 5
double_range.dtype
res2 = T(double_range)
assert isinstance(res2, torch.cuda.FloatTensor)
to_np(res2) == np.arange(5)/5
res2
float_range = np.arange(5) / 5
float_range = float_range.astype('float32')
double_range.dtype
res2 = T(float_range)
assert isinstance(res2, torch.cuda.FloatTensor)
to_np(res2) == np.arange(5)/5
res2
res = T(range(5))
assert isinstance(res, torch.cuda.LongTensor)
to_np(res) == np.arange(5)
res
# pylint: disable=unnecessary-lambda
tests_to_run.append(lambda: TEST_T())
def create_variable(x: ArrayLike, volatile=False, requires_grad=False) -> Variable:
"""Converts x to a Tensor, then to a Variable, usually on the GPU"""
if not isinstance(x, Variable):
x = Variable(T(x), volatile=volatile, requires_grad=requires_grad)
# return to_gpu(x, async=True)
return x
def V_(x: ArrayLike, requires_grad=False) -> Variable:
"""Converts x to a Tensor, then to a Variable, usually on the GPU"""
return create_variable(x, False, requires_grad=requires_grad)
# @tp.overload
# def V(x: List[ArrayLike]) -> List[Variable]: pass
# @tp.overload
# def V(x: ArrayLike) -> Variable: pass
def V(x: ItselfOrListOf[ArrayLike], requires_grad=False) -> Variable:
"""Applies V_ to x or, if x is a list, to each element of x. REQUIRES_GRAD FALSE BY DEFAULT!
V_ Converts x to a Tensor, then to a Variable, usually on the GPU"""
return [V_(o, requires_grad) for o in x] if isinstance(x, list) else V_(x, requires_grad)
def VV_(x: ArrayLike) -> Variable:
"""Converts x to a Tensor, then to a Variable, with volatile=True!!!, usually on the GPU"""
return create_variable(x, volatile=True)
# @tp.overload
# def VV(x: List[ArrayLike]) -> List[Variable]: pass
# @tp.overload
# def VV(x: ArrayLike) -> Variable: pass
def VV(x: ItselfOrListOf[ArrayLike]) -> ItselfOrListOf[Variable]:
"""Converts x or List[x] to a Tensor, then to a Variable, with volatile=True!!!, usually on the GPU"""
return [VV_(o) for o in x] if isinstance(x, list) else VV_(x)
# @tp.overload # __c This is giving an error message I don't understand - also, PyCharm less helpful when overloaded, cause just gives first defn back!
# def to_np(values: TensorOrVariable) -> np.ndarray: pass
# @tp.overload
# def to_np(values: List[TensorOrVariable]) -> List[np.ndarray]: pass
def to_np(*values: SelfList[Union[TensorOrVariable, str]]) -> SelfList[np.ndarray]:
"""Converts a Tensor/Variable/List-thereof to numpy array(s) on the CPU"""
if len(values) == 0: raise ValueError("At least 1 argument required!")
if len(values) == 1: values = values[0] # get rid of the list it's packed in!
if isinstance(values, (list, tuple)): return [to_np(o) for o in values]
if isinstance(values, Variable): values = values.data
if isinstance(values, str): return _str_to_np(values)
return values.cpu().numpy()
from bk_general_libs import bk_utils
def _str_to_np(s: str) -> np.ndarray:
lines = s.splitlines()
rows = [re.split("[\s,]+", line.strip()) for line in lines if line.strip() != '']
# __c Could also use "max(rows, key=len)", but might as well compare the numbers, rather than the values.
assert min(map(len, rows)) == max(map(len, rows))
arr = np.array([[bk_utils.to_num(num) for num in row] for row in rows])
return arr
def TEST__str_to_np() -> None:
"""Simple tests of function "_str_to_np" to help with development in debug mode, as well as for finding bugs"""
res = _str_to_np("""
1 2
3 4
5 9
""")
correct = np.array([[1, 2], [3, 4], [5, 9]])
assert res.dtype == correct.dtype
assert np.allclose(res, correct)
print(f"PC:KEY TEST__str_to_np done")
exit(1)
if __name__ == '__main__':
TEST__str_to_np()
USE_GPU = True
def to_gpu(x: TensorOrVariableOrListOf, *args, **kwargs) -> Any:
"""Usually puts the Variable/Tensor (or list thereof) "x" on the gpu (using arguments *args, **kwargs)
If the gpu is not available or USE_GPU == False, then it won't be put on the GPU
"""
if isinstance(x, (list, tuple)): return [to_gpu(o, *args, **kwargs) for o in x]
if torch.cuda.is_available() and USE_GPU:
return x.cuda(*args, **kwargs)
return x
def noop(*args, **kwargs): return
def trainable_params_(m: nn.Module) -> List[nn.Parameter]:
""" Extract, from an 'nn.Module', a list of just the trainable parameters """
return [p for p in m.parameters() if p.requires_grad]
def chain_trainable_params(p: Union[nn.Module, List, Tuple]) -> List[nn.Parameter]:
"""Extracts out all trainable parameters from an nn.Module or a list/tuple thereof
todo Refactor so accepts any iterable (just checks if isinstance(nn.Module) first)?
todo Replace the chain with summing over lists
"""
if isinstance(p, (list,tuple)):
return list(it.chain(*[trainable_params_(o) for o in p]))
return trainable_params_(p)
# def set_trainable_attr(m: nn.Module,b: bool):
# """Saves you doing a for loop through the parameters to set requires_grad"""
# m.trainable=b # __c This doesn't seem to be used in PyTorch, or in fast.ai!
# for p in m.parameters(): p.requires_grad=b
# Don't expect to need this - This is for use with his learner class, which maybe doesn't have the .modules functionality.
# def apply_leaf(m, f):
# c = children(m)
# if isinstance(m, nn.Module): f(m)
# if len(c)>0:
# for l in c: apply_leaf(l,f)
# def set_trainable(l, b):
# apply_leaf(l, lambda m: set_trainable_attr(m,b))
# Used by learner
# def SGD_Momentum(momentum):
# return lambda *args, **kwargs: optim.SGD(*args, momentum=momentum, **kwargs)
# def one_hot(a,c): return np.eye(c)[a]
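# `split_by_idxs` is referenced by TEST_ALL below but is neither defined nor imported
# above (fast.ai 0.7 ships a helper of the same name in fastai/core.py). A minimal
# stand-in is assumed here, matching the expected output of TEST_split_by_idxs.
def split_by_idxs(seq: Sequence, idxs: Sequence[int]):
    """Yield pieces of `seq` split at each index in `idxs`, e.g. (2, 5) -> seq[:2], seq[2:5], seq[5:]."""
    last = 0
    for idx in idxs:
        yield seq[last:idx]
        last = idx
    yield seq[last:]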
class TEST_ALL:
def __init__(self) -> None:
super().__init__()
self.TEST_split_by_idxs()
def TEST_split_by_idxs(self):
res = list(split_by_idxs(list(range(6)), (2, 5)))
assert res == [[0, 1], [2, 3, 4], [5]]
if __name__ == '__main__':
TEST_ALL()
for test in tests_to_run:
test()
| 37.70082 | 209 | 0.693119 |
4a1ec31069b22c78de2a915b5711e5323c9ac806 | 1,506 | py | Python | scripts/extract_ltr.py | stajichlab/localizaTE | 31dd4fa22b2a38703855f7a86595f68547298048 | [
"MIT"
] | 1 | 2017-09-13T14:27:37.000Z | 2017-09-13T14:27:37.000Z | scripts/extract_ltr.py | stajichlab/localizaTE | 31dd4fa22b2a38703855f7a86595f68547298048 | [
"MIT"
] | null | null | null | scripts/extract_ltr.py | stajichlab/localizaTE | 31dd4fa22b2a38703855f7a86595f68547298048 | [
"MIT"
] | null | null | null |
######################################################################################################
# Program for extracting full-length LTRs from the ltrharvest output.
# Change the name of the assembly file in the "genoma" label. Sequences must be named as scaffold_1
#######################################################################################################
from Bio import SeqIO
from Bio.Seq import Seq
from Bio.Alphabet import generic_protein
from Bio.SeqFeature import SeqFeature, FeatureLocation
genoma='renamed_Cryptococcus_neoformans_H99.fasta' # Type here the name of the input file
infile=open('ltrharvest.out', 'r')
outfile1=open('listaLTR', 'w')
outfile2=open('LTRs.fasta', 'w')
def filtrar(filename):
for line in filename.readlines():
if '#' in line:
pass
else:
dividir=line.split(' ')
scaffold=int(dividir[-1])+1
inicio=dividir[0]
final=dividir[1]
outfile1.write('scaffold_'+str(scaffold)+'\t'+inicio+'\t'+final+'\n')
filtrar(infile)
outfile1.close()
ltr=open('listaLTR', 'r')
recorrido=ltr.readlines()
x=0
for line in recorrido:
line=line.strip()
line=line.split('\t')
start=int(line[1])
end=int(line[2])
#print end-start
x=x+1
for record in SeqIO.parse(genoma, 'fasta'):
if record.id == str(line[0]):
print '>'+'LTR_'+str(x)+'_'+record.id+'_'+str(start)+'_'+str(end)
outfile2.write('>'+'LTR_'+str(x)+'_'+str(record.id)+'_'+str(start)+'_'+str(end)+'\n'+str(record.seq[start:end])+'\n')
ltr.close()
outfile2.close()
| 28.415094 | 120 | 0.596946 |
4a1ec33dde09cb301c45805c630b583cc4f33eda | 4,747 | py | Python | purity_fb/purity_fb_1dot4/models/pure_error.py | tlewis-ps/purity_fb_python_client | 652835cbd485c95a86da27f8b661679727ec6ea0 | [
"Apache-2.0"
] | 5 | 2017-09-08T20:47:22.000Z | 2021-06-29T02:11:05.000Z | purity_fb/purity_fb_1dot4/models/pure_error.py | tlewis-ps/purity_fb_python_client | 652835cbd485c95a86da27f8b661679727ec6ea0 | [
"Apache-2.0"
] | 16 | 2017-11-27T20:57:48.000Z | 2021-11-23T18:46:43.000Z | purity_fb/purity_fb_1dot4/models/pure_error.py | tlewis-ps/purity_fb_python_client | 652835cbd485c95a86da27f8b661679727ec6ea0 | [
"Apache-2.0"
] | 22 | 2017-10-13T15:33:05.000Z | 2021-11-08T19:56:21.000Z | # coding: utf-8
"""
Pure Storage FlashBlade REST 1.4 Python SDK
Pure Storage FlashBlade REST 1.4 Python SDK, developed by [Pure Storage, Inc](http://www.purestorage.com/). Documentations can be found at [purity-fb.readthedocs.io](http://purity-fb.readthedocs.io/).
OpenAPI spec version: 1.4
Contact: [email protected]
Generated by: https://github.com/swagger-api/swagger-codegen.git
"""
import pprint
import re # noqa: F401
import six
class PureError(object):
"""NOTE: This class is auto generated by the swagger code generator program.
Do not edit the class manually.
"""
#BEGIN_CUSTOM
# IR-51527: Prevent Pytest from attempting to collect this class based on name.
__test__ = False
#END_CUSTOM
"""
Attributes:
swagger_types (dict): The key is attribute name
and the value is attribute type.
attribute_map (dict): The key is attribute name
and the value is json key in definition.
"""
swagger_types = {
'code': 'int',
'context': 'str',
'message': 'str'
}
attribute_map = {
'code': 'code',
'context': 'context',
'message': 'message'
}
def __init__(self, code=None, context=None, message=None): # noqa: E501
"""PureError - a model defined in Swagger""" # noqa: E501
self._code = None
self._context = None
self._message = None
self.discriminator = None
if code is not None:
self.code = code
if context is not None:
self.context = context
if message is not None:
self.message = message
@property
def code(self):
"""Gets the code of this PureError. # noqa: E501
error code # noqa: E501
:return: The code of this PureError. # noqa: E501
:rtype: int
"""
return self._code
@code.setter
def code(self, code):
"""Sets the code of this PureError.
error code # noqa: E501
:param code: The code of this PureError. # noqa: E501
:type: int
"""
self._code = code
@property
def context(self):
"""Gets the context of this PureError. # noqa: E501
context of the error # noqa: E501
:return: The context of this PureError. # noqa: E501
:rtype: str
"""
return self._context
@context.setter
def context(self, context):
"""Sets the context of this PureError.
context of the error # noqa: E501
:param context: The context of this PureError. # noqa: E501
:type: str
"""
self._context = context
@property
def message(self):
"""Gets the message of this PureError. # noqa: E501
error message # noqa: E501
:return: The message of this PureError. # noqa: E501
:rtype: str
"""
return self._message
@message.setter
def message(self, message):
"""Sets the message of this PureError.
error message # noqa: E501
:param message: The message of this PureError. # noqa: E501
:type: str
"""
self._message = message
def to_dict(self):
"""Returns the model properties as a dict"""
result = {}
for attr, _ in six.iteritems(self.swagger_types):
value = getattr(self, attr)
if isinstance(value, list):
result[attr] = list(map(
lambda x: x.to_dict() if hasattr(x, "to_dict") else x,
value
))
elif hasattr(value, "to_dict"):
result[attr] = value.to_dict()
elif isinstance(value, dict):
result[attr] = dict(map(
lambda item: (item[0], item[1].to_dict())
if hasattr(item[1], "to_dict") else item,
value.items()
))
else:
result[attr] = value
if issubclass(PureError, dict):
for key, value in self.items():
result[key] = value
return result
def to_str(self):
"""Returns the string representation of the model"""
return pprint.pformat(self.to_dict())
def __repr__(self):
"""For `print` and `pprint`"""
return self.to_str()
def __eq__(self, other):
"""Returns true if both objects are equal"""
if not isinstance(other, PureError):
return False
return self.__dict__ == other.__dict__
def __ne__(self, other):
"""Returns true if both objects are not equal"""
return not self == other
| 26.519553 | 204 | 0.557405 |
4a1ec39ed3504230148163c6fdb12d5d3e932e0d | 129 | py | Python | inteirosentra.py | fabianocardosodev/exercicios-Python-cursoIntens | aae9f334007afe5e5210c19f092bb35659db1332 | [
"MIT"
] | null | null | null | inteirosentra.py | fabianocardosodev/exercicios-Python-cursoIntens | aae9f334007afe5e5210c19f092bb35659db1332 | [
"MIT"
] | null | null | null | inteirosentra.py | fabianocardosodev/exercicios-Python-cursoIntens | aae9f334007afe5e5210c19f092bb35659db1332 | [
"MIT"
] | null | null | null | # run in the terminal
age = input("Qual sua idade?")
# prompts the user for the input
age
# stored for later use, but treated as a string | 25.8 | 51 | 0.782946 |
4a1ec4917013ef2dae0f75d60ed6ad0c571b4094 | 13,018 | py | Python | youtubesearchpython/__init__.py | urhprimozic/youtube-search-python | 70046eb5a070a7a8ac7198acddca8c8bd695d8c2 | [
"MIT"
] | null | null | null | youtubesearchpython/__init__.py | urhprimozic/youtube-search-python | 70046eb5a070a7a8ac7198acddca8c8bd695d8c2 | [
"MIT"
] | null | null | null | youtubesearchpython/__init__.py | urhprimozic/youtube-search-python | 70046eb5a070a7a8ac7198acddca8c8bd695d8c2 | [
"MIT"
] | null | null | null | import urllib.parse
import urllib.request
import json
import html
class searchYoutube:
#########https://github.com/alexmercerind/youtube-search-python#########
__networkError = False
__validResponse = False
def __init__(self, keyword, offset = 1, mode = "json", max_results = 20):
#########CLASS CONSTRUCTOR#########
#########Setting Feilds#########
self.__keyword = urllib.parse.quote(keyword)
self.__offset = offset
self.__mode = mode
self.__max_results = max_results
#########Executing Entry Point Of Class#########
self.__exec()
def __request(self):
try:
#########Network request is packed in try: except: for network related error handling.#########
request = "https://www.youtube.com/results?search_query=%s&page=%d" %(self.__keyword, self.__offset)
#########Making Network Request#########
self.__page = urllib.request.urlopen(request).read().decode('utf-8')
#########Identifying the type of response returned.#########
if self.__page[0:29] == ' <!DOCTYPE html><html lang="':
self.__validResponse = True
except:
#########Setting Network Error In Case Of Network Related Errors.#########
self.__networkError = True
def __scriptResponseHandler(self):
#########MAIN PROPERTY#########
#########This property is later called in the another property exec() of the class. #########
temp = 0
#########Defining Result Arrays.#########
self.__links = []
self.__ids = []
self.__titles = []
self.__channels = []
self.__views = []
self.__durations = []
self.__thumbnails = []
#########Transversing Through Network Request Array.#########
self.__pageSource = self.__page.split('":"')
for index in range(0, len(self.__pageSource)-1, 1):
#########Setting Video Links, IDs And Thumbnails.#########
if self.__pageSource[index][-98:] == '"commandMetadata":{"webCommandMetadata":{}},"addToPlaylistCommand":{"openMiniplayer":true,"videoId':
temp+=1
if temp % 2 == 0:
id = self.__pageSource[index+1][0:11]
thumbnailbuffer = []
modes = ["default", "hqdefault", "mqdefault", "sddefault", "maxresdefault"]
self.__ids+=[id]
self.__links+=['https://www.youtube.com/watch?v='+ id]
for mode in modes:
thumbnailbuffer+=["https://img.youtube.com/vi/" + id + "/" + mode + ".jpg"]
self.__thumbnails+=[thumbnailbuffer]
#########Setting Video Channels, Titles, Views And Durations#########
if self.__pageSource[index][-44:] == '"accessibility":{"accessibilityData":{"label' and self.__pageSource[index+1][-36:] == '"descriptionSnippet":{"runs":[{"text':
infoString = self.__pageSource[index+1].split("}}}")[0]
titleBuffer = ""
channelBuffer = ""
durationBuffer = ""
durationBool = False
viewBuffer = 0
channelBool = True
separators = ["second", "minute", "hour", "day", "week", "month", "year", "seconds", "minutes", "hours", "days", "weeks", "months", "years"]
timeBool = True
for element in infoString.split()[-2]:
if element.isnumeric():
viewBuffer = viewBuffer * 10 + int(element)
for index in range(len(infoString.split())):
element = infoString.split()[index]
if element != "by" and channelBool:
titleBuffer+=element+" "
if element == "by":
channelBool = False
if element != "by" and not channelBool and index < len(infoString.split())-1:
if infoString.split()[index+1] in separators:
timeBool = False
else:
if timeBool:
channelBuffer+=element+" "
else:
break
for index in range(len(infoString.split())):
if infoString.split()[index] == "ago":
durationBool = True
continue
if durationBool:
if index <= len(infoString.split())-3:
if infoString.split()[index].isnumeric():
durationBuffer += infoString.split()[index] + ":"
else:
break
self.__channels+=[channelBuffer.rstrip().encode('utf-8').decode('unicode_escape')]
self.__titles+=[titleBuffer.rstrip().encode('utf-8').decode('unicode_escape')]
self.__views+=[viewBuffer]
self.__durations+=[durationBuffer.rstrip(":")]
if len(self.__ids) > self.__max_results:
break
def __pageResponseHandler(self):
#########MAIN PROPERTY#########
#########This property is later called in the another property exec() of the class. #########
temp = 0
#########Defining Result Arrays.#########
self.__links = []
self.__ids = []
self.__titles = []
self.__views = []
self.__durations = []
self.__thumbnails = []
#########Transversing Through Network Request Array.#########
self.__pageSource = self.__page.split()
for index in range(0, len(self.__pageSource)-1, 1):
element = self.__pageSource[index]
elementNext = self.__pageSource[index+1]
elementPrev = self.__pageSource[index-1]
#########Setting Video View Counts.#########
if element == "views</li></ul></div><div":
viewCount = 0
for character in elementPrev:
if character.isnumeric():
viewCount = viewCount * 10 + int(character)
self.__views+=[viewCount]
#########Setting Video Links, IDs And Thumbnails.#########
if element[0:15] == 'href="/watch?v=' and len('www.youtube.com'+element[6:len(element)-1]) == 35:
thumbnailbuffer = []
modes = ["default", "hqdefault", "mqdefault", "sddefault", "maxresdefault"]
temp+=1
if temp%2 ==0:
self.__links+=['https://www.youtube.com'+element[6:len(element)-1]]
self.__ids+=[element[15:len(element) - 1]]
for mode in modes:
thumbnailbuffer+=["https://img.youtube.com/vi/" + element[15:len(element) - 1] + "/" + mode + ".jpg"]
self.__thumbnails+=[thumbnailbuffer]
#########Setting Video Durations.#########
if element[0:19] == 'aria-hidden="true">' and element[19].isnumeric():
buffer = ""
bufferBool = False
for character in element:
if character == ">":
bufferBool = True
if bufferBool and character!= "<":
buffer+=character
if character == "<":
break
self.__durations+=[buffer[1::]]
#########Setting Video Titles.#########
if (element[0:23] == 'data-sessionlink="itct=') and (elementNext[0:7] == 'title="'):
buffer = ""
init = self.__pageSource[index+1]
buffer+=init
subIndex = index+2
end = index+22
while subIndex<end:
this_element = self.__pageSource[subIndex]
next_element = self.__pageSource[subIndex+1]
if (this_element[len(this_element)-1])== '"':
if next_element == 'rel="spf-prefetch"':
buffer+=(" "+this_element)
self.__titles+=[html.unescape(buffer[7:-1])]
break
else:
buffer+=(" "+this_element)
subIndex+=1
if len(self.__ids) > self.__max_results:
break
def __exec(self):
#########EXEC PROPERTY#########
#########We are calling main property within this exec property because, YouTube randomly returns two types of#########
#########responses, one having content as HTML and another as script, and this algorithm is designed to work #########
        #########with both of them. So, we have no choice but to just look if the script response is received i.e          #########
#########self.validResponse = False then we execute self.scriptResponseHandler() instead of #########
#########self.pageResponseHandler(), finally, we call self.main() and return result to the user. #########
#########We will seek potential fixes in future.#########
#########Calling the main property.#########
self.__request()
if self.__networkError:
self.__networkError = True
else:
if not self.__validResponse:
self.__scriptResponseHandler()
if self.__validResponse:
self.__pageResponseHandler()
def result(self):
#########RESULT PROPERTY#########
#########Checking for network error and returning None to the user in case of it.#########
if self.__networkError:
return None
#########Returning Result.#########
else:
result = []
#########JSON Result Handling.#########
if self.__mode in ["json", "dict"]:
for index in range(len(self.__ids)):
if not self.__validResponse:
thisResult = {
"index": index,
"id": self.__ids[index],
"link": self.__links[index],
"title": self.__titles[index],
"channel": self.__channels[index],
"duration": self.__durations[index],
"views": self.__views[index],
"thumbnails": self.__thumbnails[index]
}
else:
thisResult = {
"index": index,
"id": self.__ids[index],
"link": self.__links[index],
"title": self.__titles[index],
"duration": self.__durations[index],
"views": self.__views[index],
"thumbnails": self.__thumbnails[index]
}
result+=[thisResult]
return json.dumps({"search_result": result}, indent=4) \
if self.__mode == "json" else {"search_result": result}
#########List Result Handling.#########
elif self.__mode == "list":
for index in range(len(self.__ids)):
if not self.__validResponse:
thisResult=[
index,
self.__ids[index],
self.__links[index],
self.__titles[index],
self.__channels[index],
self.__durations[index],
self.__views[index],
self.__thumbnails[index]
]
else:
thisResult=[
index,
self.__ids[index],
self.__links[index],
self.__titles[index],
self.__durations[index],
self.__views[index],
self.__thumbnails[index]
]
result+=[thisResult]
return result
| 40.179012 | 176 | 0.444462 |
4a1ec5871c82f79ac1726f452103d9470dadd43f | 2,741 | py | Python | pychecker/check/experiment/detection_in_the_field.py | PyVCEchecker/PyVCEchecker | 879756927c82a2612c817bdffbae25234ff62558 | [
"MIT"
] | null | null | null | pychecker/check/experiment/detection_in_the_field.py | PyVCEchecker/PyVCEchecker | 879756927c82a2612c817bdffbae25234ff62558 | [
"MIT"
] | null | null | null | pychecker/check/experiment/detection_in_the_field.py | PyVCEchecker/PyVCEchecker | 879756927c82a2612c817bdffbae25234ff62558 | [
"MIT"
] | null | null | null | import os
import logging
from datetime import datetime
from tqdm import tqdm
import pychecker.config as config
from pychecker.utils import read_object_from_file, write_object_to_file
from pychecker.check.no_avl_resource_detection import parse_comp_expr
from pychecker.check.visit_pypi import get_metadata
from pychecker.check.check import check_pkgver
from pychecker.check.experiment.exp_config import IN_THE_FIELD_ROOT
now = datetime.now()
log_name = "Detection.{}{:02d}{:02d}.log".format(now.year, now.month, now.day)
logging.basicConfig(filename=os.path.join(config.LOG_DIR, log_name),
level=logging.INFO, format=config.LOG_FORMAT)
class Detection:
def __init__(self):
self.root = IN_THE_FIELD_ROOT
self.data = self.read_data()
def read_data(self):
data_path = os.path.join(self.root, "data.json")
data = read_object_from_file(data_path)
if not data:
data = self.prepare_data(data_path)
return data
def prepare_data(self, path):
data = dict() # key: pkg#ver, val: comp_expr
pkg_count = 500 # 500 pkgs in the field
pkg_dict = read_object_from_file(os.path.join(self.root, "packages_10000.json"))
pkgver_dict = read_object_from_file(os.path.join(self.root, "package_versions.json"))
pkgs = list(pkg_dict.keys())[3010:] # pkgs in the field
count = 0
for pkg in tqdm(pkgs):
try:
latest = list(pkgver_dict[pkg].keys())[-1]
except (KeyError, IndexError):
continue
metadata = get_metadata(pkg, latest)
if not metadata:
continue
comp_expr = metadata["requires_python"]
key = f"{pkg}#{latest}"
data[key] = comp_expr
count += 1
if count >= pkg_count:
break
write_object_to_file(path, data)
return data
def detect(self, start=None, end=None):
if not start:
start = 0
if not end:
end = len(self.data.items())
detected_count = 0
for key, value in tqdm(list(self.data.items())[start:end]):
pkg, ver = key.split("#")
pyvers = set(parse_comp_expr(value, config.PY_VERSIONS))
result = check_pkgver(pkg, ver, cache_path=self.root, save_files=True)
if True not in result:
continue
# print(key, result)
logging.log(logging.INFO, f"{pkg}-{ver}, {int(result[0])}, {int(result[1])}, {int(result[2])}")
detected_count += 1
print(f"{detected_count} errors detected.")
if __name__ == '__main__':
detection = Detection()
detection.detect()
| 35.597403 | 107 | 0.619847 |
4a1ec5a0ffab785b54b0abc5d06930111343ef6a | 37 | py | Python | controller/__init__.py | NMITTutor/SDV602-2021 | 18f9bdbf6c2785b62e9af3543f989a8b5a22f3b6 | [
"MIT"
] | null | null | null | controller/__init__.py | NMITTutor/SDV602-2021 | 18f9bdbf6c2785b62e9af3543f989a8b5a22f3b6 | [
"MIT"
] | null | null | null | controller/__init__.py | NMITTutor/SDV602-2021 | 18f9bdbf6c2785b62e9af3543f989a8b5a22f3b6 | [
"MIT"
] | 1 | 2021-09-29T09:59:32.000Z | 2021-09-29T09:59:32.000Z | __all__ = ["DES","uploader","User"]
| 18.5 | 36 | 0.594595 |
4a1ec5d5368f3e2b156bb4a5bef54023f0055c6f | 1,439 | py | Python | OnlineDB/SiStripO2O/test/testPopCon/read_UnitTest_ApvGain_cfg.py | ckamtsikis/cmssw | ea19fe642bb7537cbf58451dcf73aa5fd1b66250 | [
"Apache-2.0"
] | 852 | 2015-01-11T21:03:51.000Z | 2022-03-25T21:14:00.000Z | OnlineDB/SiStripO2O/test/testPopCon/read_UnitTest_ApvGain_cfg.py | ckamtsikis/cmssw | ea19fe642bb7537cbf58451dcf73aa5fd1b66250 | [
"Apache-2.0"
] | 30,371 | 2015-01-02T00:14:40.000Z | 2022-03-31T23:26:05.000Z | OnlineDB/SiStripO2O/test/testPopCon/read_UnitTest_ApvGain_cfg.py | ckamtsikis/cmssw | ea19fe642bb7537cbf58451dcf73aa5fd1b66250 | [
"Apache-2.0"
] | 3,240 | 2015-01-02T05:53:18.000Z | 2022-03-31T17:24:21.000Z | # The following comments couldn't be translated into the new config version:
# upload to database
#string timetype = "timestamp"
import FWCore.ParameterSet.Config as cms
process = cms.Process("Reader")
process.MessageLogger = cms.Service("MessageLogger",
cerr = cms.untracked.PSet(
enable = cms.untracked.bool(False)
),
debugModules = cms.untracked.vstring(''),
files = cms.untracked.PSet(
SiStripApvGainReader = cms.untracked.PSet(
)
)
)
process.maxEvents = cms.untracked.PSet(
input = cms.untracked.int32(1)
)
process.source = cms.Source("EmptySource",
numberEventsInRun = cms.untracked.uint32(1),
firstRun = cms.untracked.uint32(1)
)
process.poolDBESSource = cms.ESSource("PoolDBESSource",
BlobStreamerName = cms.untracked.string('TBufferBlobStreamingService'),
DBParameters = cms.PSet(
messageLevel = cms.untracked.int32(2),
authenticationPath = cms.untracked.string('/afs/cern.ch/cms/DB/conddb')
),
timetype = cms.untracked.string('runnumber'),
connect = cms.string('sqlite_file:dbfile.db'),
toGet = cms.VPSet(cms.PSet(
record = cms.string('SiStripApvGainRcd'),
tag = cms.string('SiStripApvGain_test')
))
)
process.reader = cms.EDFilter("SiStripApvGainReader",
printDebug = cms.untracked.uint32(5)
)
process.p1 = cms.Path(process.reader)
| 27.150943 | 79 | 0.663655 |
4a1ec5f10eb75103d7b7edd7934f0816a20f68f7 | 7,466 | py | Python | testscripts/RDKB/component/WIFIHAL/TS_WIFIHAL_5GHzGetRadioResetCount.py | cablelabs/tools-tdkb | 1fd5af0f6b23ce6614a4cfcbbaec4dde430fad69 | [
"Apache-2.0"
] | null | null | null | testscripts/RDKB/component/WIFIHAL/TS_WIFIHAL_5GHzGetRadioResetCount.py | cablelabs/tools-tdkb | 1fd5af0f6b23ce6614a4cfcbbaec4dde430fad69 | [
"Apache-2.0"
] | null | null | null | testscripts/RDKB/component/WIFIHAL/TS_WIFIHAL_5GHzGetRadioResetCount.py | cablelabs/tools-tdkb | 1fd5af0f6b23ce6614a4cfcbbaec4dde430fad69 | [
"Apache-2.0"
] | null | null | null | ##########################################################################
# If not stated otherwise in this file or this component's Licenses.txt
# file the following copyright and licenses apply:
#
# Copyright 2018 RDK Management
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
##########################################################################
'''
<?xml version='1.0' encoding='utf-8'?>
<xml>
<id></id>
<!-- Do not edit id. This will be auto filled while exporting. If you are adding a new script keep the id empty -->
<version>1</version>
<!-- Do not edit version. This will be auto incremented while updating. If you are adding a new script you can keep the vresion as 1 -->
<name>TS_WIFIHAL_5GHzGetRadioResetCount</name>
<!-- If you are adding a new script you can specify the script name. Script Name should be unique same as this file name with out .py extension -->
<primitive_test_id> </primitive_test_id>
<!-- Do not change primitive_test_id if you are editing an existing script. -->
<primitive_test_name>WIFIHAL_GetOrSetParamULongValue</primitive_test_name>
<!-- -->
<primitive_test_version>2</primitive_test_version>
<!-- -->
<status>FREE</status>
<!-- -->
<synopsis>To get the radio reset count and verify that it is incremented after a reset operation for 5GHz</synopsis>
<!-- -->
<groups_id />
<!-- -->
<execution_time>1</execution_time>
<!-- -->
<long_duration>false</long_duration>
<!-- -->
<advanced_script>false</advanced_script>
<!-- execution_time is the time out time for test execution -->
<remarks></remarks>
<!-- Reason for skipping the tests if marked to skip -->
<skip>false</skip>
<!-- -->
<box_types>
<box_type>Broadband</box_type>
<!-- -->
</box_types>
<rdk_versions>
<rdk_version>RDKB</rdk_version>
<!-- -->
</rdk_versions>
<test_cases>
<test_case_id>TC_WIFIHAL_173</test_case_id>
<test_objective>To get the radio reset count and verify that it is incremented after a reset operation for 5GHz</test_objective>
<test_type>Positive</test_type>
<test_setup>Broadband</test_setup>
<pre_requisite>1.Ccsp Components should be in a running state else invoke cosa_start.sh manually that includes all the ccsp components and TDK Component
2.TDK Agent should be in running state or invoke it through StartTdk.sh script</pre_requisite>
<api_or_interface_used>wifi_getRadioResetCount()
wifi_reset()</api_or_interface_used>
<input_parameters>methodName : getRadioResetCount
RadioIndex : 1</input_parameters>
<automation_approch>1. Load wifihal module
2. Using WIFIHAL_GetOrSetParamULongValue invoke wifi_getRadioResetCount() and save the value returned.
3. Using WIFIHAL_Reset invoke wifi_reset()
4. Invoke wifi_getRadioResetCount() and check if the value returned has incremented by 1
5. Depending upon the value returned, return SUCCESS or FAILURE
6. Unload wifihal module</automation_approch>
<except_output>Should increment by 1 after reset operation</except_output>
<priority>High</priority>
<test_stub_interface>WIFIHAL</test_stub_interface>
<test_script>TS_WIFIHAL_5GHzGetRadioResetCount</test_script>
<skipped>No</skipped>
<release_version></release_version>
<remarks></remarks>
</test_cases>
</xml>
'''
# use tdklib library,which provides a wrapper for tdk testcase script
import tdklib;
from wifiUtility import *;
#Test component to be tested
obj = tdklib.TDKScriptingLibrary("wifihal","1");
#IP and Port of box, No need to change,
#This will be replaced with the corresponding Box IP and port while executing the script
ip = <ipaddress>
port = <port>
obj.configureTestCase(ip,port,'TS_WIFIHAL_5GHzGetRadioResetCount');
loadmodulestatus =obj.getLoadModuleResult();
print "[LIB LOAD STATUS] : %s" %loadmodulestatus
if "SUCCESS" in loadmodulestatus.upper():
obj.setLoadModuleStatus("SUCCESS");
expectedresult="SUCCESS";
radioIndex = 1
getMethod = "getRadioResetCount"
primitive = 'WIFIHAL_GetOrSetParamULongValue'
#Calling the method to execute wifi_getRadioResetCount()
tdkTestObj, actualresult, details = ExecuteWIFIHalCallMethod(obj, primitive, radioIndex, 0, getMethod)
if expectedresult in actualresult:
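        # 'details' is expected to contain the current reset count after a ':' separator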
initCount = details.split(":")[1].strip()
#Script to load the configuration file of the component
tdkTestObj = obj.createTestStep("WIFIHAL_Reset");
expectedresult="SUCCESS";
tdkTestObj.executeTestCase(expectedresult);
actualresult = tdkTestObj.getResult();
details = tdkTestObj.getResultDetails();
if expectedresult in actualresult:
print "Reset operation SUCCESS"
tdkTestObj.setResultStatus("SUCCESS");
expectedresult="SUCCESS";
radioIndex = 1
getMethod = "getRadioResetCount"
primitive = 'WIFIHAL_GetOrSetParamULongValue'
#Calling the method to execute wifi_getRadioResetCount()
tdkTestObj, actualresult, details = ExecuteWIFIHalCallMethod(obj, primitive, radioIndex, 0, getMethod)
if expectedresult in actualresult:
finalCount = details.split(":")[1].strip()
if int(finalCount) == int(initCount)+1:
#Set the result status of execution
tdkTestObj.setResultStatus("SUCCESS");
print "TEST STEP: Check if the ResetCount is incremented by 1 after reset operation"
print "EXPECTED RESULT : Final count should increment by 1"
print "ACTUAL RESULT : Final count is incremented by 1"
print "Initial RadioResetCount = %s" %initCount
print "RadioResetCount after reset operation = %s" %finalCount
print "TEST EXECUTION RESULT : SUCCESS"
else:
#Set the result status of execution
tdkTestObj.setResultStatus("FAILURE");
print "TEST STEP: Check if the ResetCount is incremented by 1 after reset operation"
print "EXPECTED RESULT : Final count should increment by 1"
print "ACTUAL RESULT : Final count is NOT incremented by 1"
print "Initial RadioResetCount = %s" %initCount
print "RadioResetCount after reset operation = %s" %finalCount
print "TEST EXECUTION RESULT : FAILURE"
else:
tdkTestObj.setResultStatus("FAILURE");
print "wifi_getRadioResetCount() call failed after reset operation"
else:
tdkTestObj.setResultStatus("FAILURE");
print "wifi_reset() call failed"
else:
tdkTestObj.setResultStatus("FAILURE");
print "wifi_getRadioResetCount() call failed"
obj.unloadModule("wifihal");
else:
print "Failed to load the module";
obj.setLoadModuleStatus("FAILURE");
print "Module loading failed";
| 43.917647 | 157 | 0.677605 |
4a1ec695d079e0272ffabe87d15f4faea71172b6 | 4,081 | py | Python | test/functional/feature_includeconf.py | KaSt/emircoin | 5d05003dfde81eb6cacc8505f55b2e6b816e698a | [
"MIT"
] | null | null | null | test/functional/feature_includeconf.py | KaSt/emircoin | 5d05003dfde81eb6cacc8505f55b2e6b816e698a | [
"MIT"
] | null | null | null | test/functional/feature_includeconf.py | KaSt/emircoin | 5d05003dfde81eb6cacc8505f55b2e6b816e698a | [
"MIT"
] | null | null | null |
#!/usr/bin/env python3
# Copyright (c) 2018 The Bitcoin Core developers
# Distributed under the MIT software license, see the accompanying
# file COPYING or http://www.opensource.org/licenses/mit-license.php.
"""Tests the includeconf argument
Verify that:
1. adding includeconf to the configuration file causes the includeconf
file to be loaded in the correct order.
2. includeconf cannot be used as a command line argument.
3. includeconf cannot be used recursively (ie includeconf can only
be used from the base config file).
4. multiple includeconf arguments can be specified in the main config
file.
"""
import os
from test_framework.test_framework import BitcoinTestFramework
class IncludeConfTest(BitcoinTestFramework):
def set_test_params(self):
self.setup_clean_chain = False
self.num_nodes = 1
def setup_chain(self):
super().setup_chain()
# Create additional config files
# - tmpdir/node0/relative.conf
with open(os.path.join(self.options.tmpdir, "node0", "relative.conf"), "w", encoding="utf8") as f:
f.write("uacomment=relative\n")
# - tmpdir/node0/relative2.conf
with open(os.path.join(self.options.tmpdir, "node0", "relative2.conf"), "w", encoding="utf8") as f:
f.write("uacomment=relative2\n")
with open(os.path.join(self.options.tmpdir, "node0", "emircoin.conf"), "a", encoding='utf8') as f:
f.write("uacomment=main\nincludeconf=relative.conf\n")
def run_test(self):
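        # Exercise the behaviours listed in the module docstring, plus rejection
        # of an invalid include path.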
self.log.info("-includeconf works from config file. subversion should end with 'main; relative)/'")
subversion = self.nodes[0].getnetworkinfo()["subversion"]
assert subversion.endswith("main; relative)/")
self.log.info("-includeconf cannot be used as command-line arg")
self.stop_node(0)
self.nodes[0].assert_start_raises_init_error(extra_args=["-includeconf=relative2.conf"], expected_msg="Error parsing command line arguments: -includeconf cannot be used from commandline; -includeconf=relative2.conf")
self.log.info("-includeconf cannot be used recursively. subversion should end with 'main; relative)/'")
with open(os.path.join(self.options.tmpdir, "node0", "relative.conf"), "a", encoding="utf8") as f:
f.write("includeconf=relative2.conf\n")
self.start_node(0)
subversion = self.nodes[0].getnetworkinfo()["subversion"]
assert subversion.endswith("main; relative)/")
self.stop_node(0, expected_stderr="warning: -includeconf cannot be used from included files; ignoring -includeconf=relative2.conf")
self.log.info("-includeconf cannot contain invalid arg")
# Commented out as long as we ignore invalid arguments in configuration files
#with open(os.path.join(self.options.tmpdir, "node0", "relative.conf"), "w", encoding="utf8") as f:
# f.write("foo=bar\n")
#self.nodes[0].assert_start_raises_init_error(expected_msg="Error reading configuration file: Invalid configuration value foo")
self.log.info("-includeconf cannot be invalid path")
os.remove(os.path.join(self.options.tmpdir, "node0", "relative.conf"))
self.nodes[0].assert_start_raises_init_error(expected_msg="Error reading configuration file: Failed to include configuration file relative.conf")
self.log.info("multiple -includeconf args can be used from the base config file. subversion should end with 'main; relative; relative2)/'")
with open(os.path.join(self.options.tmpdir, "node0", "relative.conf"), "w", encoding="utf8") as f:
# Restore initial file contents
f.write("uacomment=relative\n")
with open(os.path.join(self.options.tmpdir, "node0", "emircoin.conf"), "a", encoding='utf8') as f:
f.write("includeconf=relative2.conf\n")
self.start_node(0)
subversion = self.nodes[0].getnetworkinfo()["subversion"]
assert subversion.endswith("main; relative; relative2)/")
if __name__ == '__main__':
IncludeConfTest().main()
| 49.168675 | 224 | 0.694683 |
4a1ec69fb94ee08b0d819dd73ffadf19602b0602 | 6,193 | py | Python | python/level2_simple_inference/1_classification/googlenet_imagenet_multi_batch/src/classify.py | Ascend/samples | 5e060ddf8c502cf0e248ecbe1c8986e95351cbbd | [
"Apache-2.0"
] | 25 | 2020-11-20T09:01:35.000Z | 2022-03-29T10:35:38.000Z | python/level2_simple_inference/1_classification/googlenet_imagenet_multi_batch/src/classify.py | Ascend/samples | 5e060ddf8c502cf0e248ecbe1c8986e95351cbbd | [
"Apache-2.0"
] | 5 | 2021-02-28T20:49:37.000Z | 2022-03-04T21:50:27.000Z | python/level2_simple_inference/1_classification/googlenet_imagenet_multi_batch/src/classify.py | Ascend/samples | 5e060ddf8c502cf0e248ecbe1c8986e95351cbbd | [
"Apache-2.0"
] | 16 | 2020-12-06T07:26:13.000Z | 2022-03-01T07:51:55.000Z | import sys
import os
import numpy
import acl
path = os.path.dirname(os.path.abspath(__file__))
sys.path.append(os.path.join(path, ".."))
sys.path.append(os.path.join(path, "../../../../common/"))
sys.path.append(os.path.join(path, "../../../../common/acllite"))
from utils import check_ret
from constants import ACL_MEM_MALLOC_HUGE_FIRST, ACL_MEMCPY_DEVICE_TO_DEVICE, IMG_EXT
from acllite_imageproc import AclLiteImageProc
from acllite_model import AclLiteModel
from acllite_image import AclLiteImage
from acllite_resource import AclLiteResource
from image_net_classes import get_image_net_class
from PIL import Image, ImageDraw, ImageFont
class Classify(object):
def __init__(self, acl_resource, model_path, model_width, model_height):
self.total_buffer = None
self._model_path = model_path
self._model_width = model_width
self._model_height = model_height
self._model = AclLiteModel(model_path)
self._dvpp = AclLiteImageProc(acl_resource)
print("The App arg is __init__")
def __del__(self):
if self.total_buffer:
acl.rt.free(self.total_buffer)
if self._dvpp:
del self._dvpp
        print("[Sample] class Sample released resources successfully")
def pre_process(self, image):
yuv_image = self._dvpp.jpegd(image)
print("decode jpeg end")
resized_image = self._dvpp.resize(yuv_image,
self._model_width, self._model_height)
print("resize yuv end")
return resized_image
def batch_process(self, resized_image_list, batch):
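        # Copy every preprocessed image into one contiguous device buffer so the
        # whole batch can be passed to the model in a single execute() call.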
resized_img_data_list = []
resized_img_size = resized_image_list[0].size
total_size = batch * resized_img_size
stride = 0
for resized_image in resized_image_list:
resized_img_data_list.append(resized_image.data())
self.total_buffer, ret = acl.rt.malloc(total_size, ACL_MEM_MALLOC_HUGE_FIRST)
check_ret("acl.rt.malloc", ret)
for i in range(len(resized_image_list)):
ret = acl.rt.memcpy(self.total_buffer + stride, resized_img_size,\
resized_img_data_list[i], resized_img_size,\
ACL_MEMCPY_DEVICE_TO_DEVICE)
check_ret("acl.rt.memcpy", ret)
stride += resized_img_size
return total_size
def inference(self, resized_image_list, batch):
total_size = self.batch_process(resized_image_list, batch)
batch_buffer = {'data': self.total_buffer, 'size':total_size}
return self._model.execute([batch_buffer, ])
def post_process(self, infer_output, batch_image_files, number_of_images):
print("post process")
datas = infer_output[0]
for number in range(number_of_images):
data = datas[number]
vals = data.flatten()
top_k = vals.argsort()[-1:-6:-1]
print("images:{}".format(batch_image_files[number]))
print("======== top5 inference results: =============")
for n in top_k:
object_class = get_image_net_class(n)
print("label:%d confidence: %f, class: %s" % (n, vals[n], object_class))
#Use Pillow to write the categories with the highest confidence on the image and save them locally
if len(top_k):
object_class = get_image_net_class(top_k[0])
output_path = os.path.join("../out", os.path.basename(batch_image_files[number]))
origin_img = Image.open(batch_image_files[number])
draw = ImageDraw.Draw(origin_img)
font = ImageFont.truetype("/usr/share/fonts/truetype/dejavu/DejaVuSans-Bold.ttf", size=20)
draw.text((10, 50), object_class, font=font, fill=255)
origin_img.save(output_path)
MODEL_PATH = "../model/googlenet_yuv.om"
MODEL_WIDTH = 224
MODEL_HEIGHT = 224
#Batch number to 10
BATCH = 10
def main():
"""
Program execution with picture directory parameters
"""
if (len(sys.argv) != 2):
print("The App arg is invalid")
exit(1)
acl_resource = AclLiteResource()
acl_resource.init()
    #Instantiate the classifier, passing in the OM model path and the model input width and height
classify = Classify(acl_resource, MODEL_PATH, MODEL_WIDTH, MODEL_HEIGHT)
    #Read the image directory from the command-line argument and run inference on the images in it
image_dir = sys.argv[1]
images_list = [os.path.join(image_dir, img)
for img in os.listdir(image_dir)
if os.path.splitext(img)[1] in IMG_EXT]
#Create a directory to store the inference results
if not os.path.isdir('../out'):
os.mkdir('../out')
resized_image_list = []
batch_image_files = []
num = 0
batch_amount = len(images_list) // BATCH
left = len(images_list) % BATCH
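    # Full batches of BATCH images are inferred first; any leftover images reuse
    # the same batch-sized buffer and only their 'left' outputs are post-processed.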
for image_file in images_list:
num += 1
#Read the pictures
image = AclLiteImage(image_file)
image_dvpp = image.copy_to_dvpp()
#preprocess image
resized_image = classify.pre_process(image_dvpp)
print("pre process end")
batch_image_files.append(image_file)
resized_image_list.append(resized_image)
if batch_amount > 0:
#Each set of BATCH pictures, reasoning and post-processing
if num == BATCH:
                #Run inference on the current batch of images
result = classify.inference(resized_image_list, BATCH)
#process inference results
classify.post_process(result, batch_image_files, BATCH)
batch_amount -= 1
num = 0
batch_image_files = []
resized_image_list = []
else:
#remaining images are inferred and post-processed
if num == left:
                #Run inference on the remaining images
result = classify.inference(resized_image_list, BATCH)
#The inference results are processed
classify.post_process(result, batch_image_files, left)
if __name__ == '__main__':
main()
| 38.70625 | 116 | 0.633619 |
4a1ec6deda221c0cf17ca59165c7f5e673002942 | 1,970 | py | Python | Code/DrumSynth/Samples/float2mozzi_uint8.py | ctag-fh-kiel/troll-8 | 18b872b5b0290dbb0e9f514edea392601a896346 | [
"CC-BY-4.0"
] | 11 | 2017-11-01T14:47:33.000Z | 2022-01-31T09:04:44.000Z | Code/SequenceRecorder/Samples/float2mozzi_uint8.py | ctag-fh-kiel/troll-8 | 18b872b5b0290dbb0e9f514edea392601a896346 | [
"CC-BY-4.0"
] | null | null | null | Code/SequenceRecorder/Samples/float2mozzi_uint8.py | ctag-fh-kiel/troll-8 | 18b872b5b0290dbb0e9f514edea392601a896346 | [
"CC-BY-4.0"
] | 3 | 2017-11-20T17:22:12.000Z | 2021-11-08T23:23:13.000Z |
## for converting 32 bit float raw files from Audacity, with values > 0, to 0-255 uint8 Mozzi table
import sys, array, os, textwrap, math
if len(sys.argv) != 5:
    print 'Usage: float2mozzi_uint8.py <infile outfile tablename samplerate>'
sys.exit(1)
[infile, outfile, tablename, samplerate] = sys.argv[1:]
def float2mozzi_uint8(infile, outfile, tablename,samplerate):
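    # Read raw 32-bit float samples from infile, scale them to signed 8-bit values
    # and emit them as a C header containing a PROGMEM lookup table for Mozzi.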
fin = open(os.path.expanduser(infile), "rb")
print "opened " + infile
valuesetad = os.path.getsize(os.path.expanduser(infile))/4 ## adjust for number format
##print valuesetad
valuesfromfile = array.array('f')## array of floats
try:
valuesfromfile.fromfile(fin,valuesetad)
finally:
fin.close()
values=valuesfromfile.tolist()
## print values[0]
## print values[len(values)-1]
## print len(values)
fout = open(os.path.expanduser(outfile), "w")
fout.write('#ifndef ' + tablename + '_H_' + '\n')
fout.write('#define ' + tablename + '_H_' + '\n \n')
fout.write('#if ARDUINO >= 100'+'\n')
fout.write('#include "Arduino.h"'+'\n')
fout.write('#else'+'\n')
fout.write('#include "WProgram.h"'+'\n')
fout.write('#endif'+'\n')
fout.write('#include <avr/pgmspace.h>'+'\n \n')
fout.write('#define ' + tablename + '_NUM_CELLS '+ str(len(values))+'\n')
fout.write('#define ' + tablename + '_SAMPLERATE '+ str(samplerate)+'\n \n')
outstring = 'const int8_t __attribute__((section(".progmem.data"))) ' + tablename + '_DATA [] = {'
try:
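        # Scale each float sample by 256 and round it (add 0.5, then truncate)
        # to obtain the signed 8-bit table entry.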
for num in values:
outstring += str(math.trunc((num*256)+0.5)) + ", "
## outstring += str(num) + ", "
##values.fromfile(fin, uint8_tsetad)
finally:
outstring += "};"
outstring = textwrap.fill(outstring, 80)
fout.write(outstring)
fout.write('\n \n #endif /* ' + tablename + '_H_ */\n')
fout.close()
print "wrote " + outfile
float2mozzi_uint8(infile, outfile, tablename, samplerate)
| 37.884615 | 102 | 0.613198 |
4a1ec72bedc484d284da0b1a09f2e7cac2c23d37 | 190,063 | py | Python | src/sage/combinat/root_system/root_lattice_realizations.py | nikmihale/sage | e2dcdeeabb578c37bcf0361c0be3079315e9252c | [
"BSL-1.0"
] | null | null | null | src/sage/combinat/root_system/root_lattice_realizations.py | nikmihale/sage | e2dcdeeabb578c37bcf0361c0be3079315e9252c | [
"BSL-1.0"
] | null | null | null | src/sage/combinat/root_system/root_lattice_realizations.py | nikmihale/sage | e2dcdeeabb578c37bcf0361c0be3079315e9252c | [
"BSL-1.0"
] | null | null | null |
# -*- coding: utf-8 -*-
"""
Root lattice realizations
"""
# ****************************************************************************
# Copyright (C) 2007-2013 Nicolas M. Thiery <nthiery at users.sf.net>
# 2012 Nicolas Borie <nicolas.borie at univ-mlv.fr>
#
# (with contributions of many others)
#
# Distributed under the terms of the GNU General Public License (GPL)
# https://www.gnu.org/licenses/
# ****************************************************************************
from __future__ import print_function, absolute_import
from sage.misc.abstract_method import abstract_method, AbstractMethod
from sage.misc.misc import attrcall
from sage.misc.cachefunc import cached_method, cached_in_parent_method
from sage.misc.lazy_attribute import lazy_attribute
from sage.misc.lazy_import import LazyImport
from sage.categories.coxeter_groups import CoxeterGroups
from sage.categories.category_types import Category_over_base_ring
from sage.categories.modules_with_basis import ModulesWithBasis
from sage.structure.element import Element
from sage.sets.family import Family
from sage.rings.all import ZZ, QQ
from sage.matrix.constructor import matrix
from sage.modules.free_module_element import vector
from sage.sets.recursively_enumerated_set import RecursivelyEnumeratedSet
from sage.combinat.root_system.plot import PlotOptions, barycentric_projection_matrix
from itertools import combinations_with_replacement
class RootLatticeRealizations(Category_over_base_ring):
r"""
The category of root lattice realizations over a given base ring
A *root lattice realization* `L` over a base ring `R` is a free
module (or vector space if `R` is a field) endowed with an embedding
of the root lattice of some root system.
Typical root lattice realizations over `\ZZ` include the root
lattice, weight lattice, and ambient lattice. Typical root lattice
realizations over `\QQ` include the root space, weight space, and
ambient space.
To describe the embedding, a root lattice realization must
implement a method
:meth:`~RootLatticeRealizations.ParentMethods.simple_root`
returning for each `i` in the index set the image of the simple root
`\alpha_i` under the embedding.
A root lattice realization must further implement a method on elements
:meth:`~RootLatticeRealizations.ElementMethods.scalar`, computing
the scalar product with elements of the coroot lattice or coroot space.
Using those, this category provides tools for reflections, roots,
the Weyl group and its action, ...
.. SEEALSO::
- :class:`~sage.combinat.root_system.root_system.RootSystem`
- :class:`~sage.combinat.root_system.weight_lattice_realizations.WeightLatticeRealizations`
- :class:`~sage.combinat.root_system.root_space.RootSpace`
- :class:`~sage.combinat.root_system.weight_space.WeightSpace`
- :class:`~sage.combinat.root_system.ambient_space.AmbientSpace`
EXAMPLES:
Here, we consider the root system of type `A_7`, and embed the root
lattice element `x = \alpha_2 + 2 \alpha_6` in several root lattice
realizations::
sage: R = RootSystem(["A",7])
sage: alpha = R.root_lattice().simple_roots()
sage: x = alpha[2] + 2 * alpha[5]
sage: L = R.root_space()
sage: L(x)
alpha[2] + 2*alpha[5]
sage: L = R.weight_lattice()
sage: L(x)
-Lambda[1] + 2*Lambda[2] - Lambda[3] - 2*Lambda[4] + 4*Lambda[5] - 2*Lambda[6]
sage: L = R.ambient_space()
sage: L(x)
(0, 1, -1, 0, 2, -2, 0, 0)
We embed the root space element `x = \alpha_2 + 1/2 \alpha_6` in
several root lattice realizations::
sage: alpha = R.root_space().simple_roots()
sage: x = alpha[2] + 1/2 * alpha[5]
sage: L = R.weight_space()
sage: L(x)
-Lambda[1] + 2*Lambda[2] - Lambda[3] - 1/2*Lambda[4] + Lambda[5] - 1/2*Lambda[6]
sage: L = R.ambient_space()
sage: L(x)
(0, 1, -1, 0, 1/2, -1/2, 0, 0)
Of course, one can't embed the root space in the weight lattice::
sage: L = R.weight_lattice()
sage: L(x)
Traceback (most recent call last):
...
TypeError: do not know how to make x (= alpha[2] + 1/2*alpha[5]) an element of self (=Weight lattice of the Root system of type ['A', 7])
If `K_1` is a subring of `K_2`, then one could in theory have
an embedding from the root space over `K_1` to any root
lattice realization over `K_2`; this is not implemented::
sage: K1 = QQ
sage: K2 = QQ['q']
sage: L = R.weight_space(K2)
sage: alpha = R.root_space(K2).simple_roots()
sage: L(alpha[1])
2*Lambda[1] - Lambda[2]
sage: alpha = R.root_space(K1).simple_roots()
sage: L(alpha[1])
Traceback (most recent call last):
...
TypeError: do not know how to make x (= alpha[1]) an element of self (=Weight space over the Univariate Polynomial Ring in q over Rational Field of the Root system of type ['A', 7])
By a slight abuse, the embedding of the root lattice is not actually
required to be faithful. Typically for an affine root system, the
null root of the root lattice is killed in the non extended weight
lattice::
sage: R = RootSystem(["A", 3, 1])
sage: delta = R.root_lattice().null_root()
sage: L = R.weight_lattice()
sage: L(delta)
0
TESTS::
sage: TestSuite(L).run()
"""
@cached_method
def super_categories(self):
"""
EXAMPLES::
sage: from sage.combinat.root_system.root_lattice_realizations import RootLatticeRealizations
sage: RootLatticeRealizations(QQ).super_categories()
[Category of vector spaces with basis over Rational Field]
"""
return [ModulesWithBasis(self.base_ring())]
Algebras = LazyImport('sage.combinat.root_system.root_lattice_realization_algebras', 'Algebras')
class ParentMethods:
def __init_extra__(self):
r"""
Register the embedding of the root lattice into ``self``.
Also registers the embedding of the root space over the same
base field `K` into ``self`` if `K` is not `\ZZ`.
EXAMPLES:
We embed the simple root `\alpha_2` of the root lattice in
the weight lattice::
sage: R = RootSystem(["A",3])
sage: alpha = R.root_lattice().simple_roots()
sage: L = R.weight_lattice()
sage: L(alpha[2])
-Lambda[1] + 2*Lambda[2] - Lambda[3]
.. NOTE::
More examples are given in :class:`RootLatticeRealizations`;
The embeddings are systematically tested in
:meth:`_test_root_lattice_realization`.
"""
from .root_space import RootSpace
K = self.base_ring()
# If self is the root lattice or the root space, we don't want
# to register its trivial embedding into itself. This builds
# the domains from which we want to register an embedding.
domains = []
if not isinstance(self, RootSpace) or K is not ZZ:
domains.append(self.root_system.root_lattice())
if not isinstance(self, RootSpace):
domains.append(self.root_system.root_space(K))
# Build and register the embeddings
for domain in domains:
domain.module_morphism(self.simple_root,
codomain = self
).register_as_coercion()
if self.cartan_type().is_affine():
self._to_classical.register_as_conversion()
def cartan_type(self):
"""
EXAMPLES::
sage: r = RootSystem(['A',4]).root_space()
sage: r.cartan_type()
['A', 4]
"""
return self.root_system.cartan_type()
def index_set(self):
"""
EXAMPLES::
sage: r = RootSystem(['A',4]).root_space()
sage: r.index_set()
(1, 2, 3, 4)
"""
return self.root_system.index_set()
def dynkin_diagram(self):
"""
EXAMPLES::
sage: r = RootSystem(['A',4]).root_space()
sage: r.dynkin_diagram()
O---O---O---O
1 2 3 4
A4
"""
return self.root_system.dynkin_diagram()
def _name_string_helper(self, name, capitalize=True, base_ring=True, type=True, prefix=""):
"""
EXAMPLES::
sage: r = RootSystem(['A',4]).root_space()
sage: r._name_string_helper("root")
"Root space over the Rational Field of the Root system of type ['A', 4]"
sage: r._name_string_helper("root", base_ring=False)
"Root space of the Root system of type ['A', 4]"
sage: r._name_string_helper("root", base_ring=False, type=False)
'Root space'
sage: r._name_string_helper("root", capitalize=False, base_ring=False, type=False)
'root space'
sage: r = RootSystem(['A',4]).coroot_space()
sage: r._name_string_helper("weight", prefix="extended ")
"Extended coweight space over the Rational Field of the Root system of type ['A', 4]"
"""
s = prefix
if self.root_system.dual_side:
s += "co"
s += name + " "
if self.base_ring() == ZZ:
s += "lattice "
else:
s += "space "
if base_ring:
s += "over the %s "%self.base_ring()
if type:
s += "of the "
if self.root_system.dual_side:
s += repr(self.root_system.dual)
else:
s += repr(self.root_system)
if capitalize:
s = s[:1].upper() + s[1:]
return s.strip()
def some_elements(self):
"""
Return some elements of this root lattice realization
EXAMPLES::
sage: L = RootSystem(["A",2]).weight_lattice()
sage: L.some_elements()
[2*Lambda[1] + 2*Lambda[2], 2*Lambda[1] - Lambda[2], -Lambda[1] + 2*Lambda[2], Lambda[1], Lambda[2]]
sage: L = RootSystem(["A",2]).root_lattice()
sage: L.some_elements()
[2*alpha[1] + 2*alpha[2], alpha[1], alpha[2]]
"""
result = [self.an_element()]+list(self.simple_roots())
if hasattr(self, "fundamental_weights"):
result += list(self.fundamental_weights())
return result
##########################################################################
# checks
##########################################################################
def _test_root_lattice_realization(self, **options):
"""
Runs sanity checks on this root lattice realization
- embedding of the root lattice
- embedding of the root space over the same base ring
- scalar products between simple roots and simple coroots
- ...
.. SEEALSO:: :class:`TestSuite`
EXAMPLES::
sage: RootSystem(['A',3]).root_lattice()._test_root_lattice_realization()
"""
tester = self._tester(**options)
alpha = self.simple_roots()
alphacheck = self.simple_coroots()
R = self.base_ring()
tester.assertEqual(alpha .keys(), self.index_set())
tester.assertEqual(alphacheck.keys(), self.index_set())
# Check the consistency between simple_root and simple_roots
for i in self.index_set():
tester.assertEqual(self.simple_root(i), alpha[i])
# Check the embeddings from the root lattice and the root space over the same base ring
root_lattice = self.root_system.root_lattice()
root_space = self.root_system.root_space (R)
tester.assertIsNot(self.coerce_map_from(root_lattice), None)
tester.assertIsNot(self.coerce_map_from(root_space), None)
for i in self.index_set():
# This embedding maps simple roots to simple roots
tester.assertEqual(self(root_lattice.simple_root(i)), alpha[i])
tester.assertEqual(self(root_space .simple_root(i)), alpha[i])
# Check that the scalar products match with the Dynkin diagram
dynkin_diagram = self.dynkin_diagram()
for i in self.index_set():
for j in self.index_set():
tester.assertEqual(alpha[j].scalar(alphacheck[i]), R(dynkin_diagram[i,j]))
# Check associated_coroot, if it is implemented
if not isinstance(self.element_class.associated_coroot, AbstractMethod):
for i in self.index_set():
tester.assertEqual(alpha[i].associated_coroot(), alphacheck[i])
if self.cartan_type().is_affine():
# Check that the null root is orthogonal to all coroots
# and similarly for the null coroot
nullroot = self.null_root()
nullcoroot = self.null_coroot()
special_node = self.cartan_type().special_node()
for i in alpha.keys():
tester.assertTrue(nullroot.scalar(alphacheck[i]).is_zero())
tester.assertTrue(alpha[i].scalar(nullcoroot).is_zero())
# Check the projection on the classical space
classical = self.classical()
alpha_classical = classical.alpha()
for i in alpha.keys():
if i != special_node or self.cartan_type().is_untwisted_affine():
tester.assertEqual(classical(alpha[i]), alpha_classical[i])
# Todo: add tests of highest root, roots, has_descent, ...
##########################################################################
# highest root
##########################################################################
@cached_method
def highest_root(self):
"""
Returns the highest root (for an irreducible finite root system)
EXAMPLES::
sage: RootSystem(['A',4]).ambient_space().highest_root()
(1, 0, 0, 0, -1)
sage: RootSystem(['E',6]).weight_space().highest_root()
Lambda[2]
"""
if not self.root_system.is_finite():
raise ValueError("The root system of %s is not of finite Cartan type"%self)
if not self.root_system.is_irreducible():
raise ValueError("The root system of %s is reducible"%self)
return self.a_long_simple_root().to_dominant_chamber()
@cached_method
def a_long_simple_root(self):
"""
Returns a long simple root, corresponding to the highest outgoing edge
in the Dynkin diagram.
            Caveat: this may break in affine type `A_{2n}^{(2)}`
Caveat: meaningful/broken for non irreducible?
TODO: implement CartanType.nodes_by_length as in
MuPAD-Combinat (using CartanType.symmetrizer), and use it
here.
TESTS::
sage: X=RootSystem(['A',1]).weight_space()
sage: X.a_long_simple_root()
2*Lambda[1]
sage: X=RootSystem(['A',5]).weight_space()
sage: X.a_long_simple_root()
2*Lambda[1] - Lambda[2]
"""
if self.dynkin_diagram().rank() == 1:
return self.simple_roots()[self.index_set()[0]]
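            # Otherwise scan the Dynkin diagram for the edge with the largest label;
            # the simple root attached to its first endpoint is a long root.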
longest=next(self.dynkin_diagram().edge_iterator())
for j in self.dynkin_diagram().edge_iterator():
if j[2]>longest[2]:
longest=j
return self.simple_roots()[longest[0]]
##########################################################################
# simple roots
##########################################################################
@abstract_method
def simple_root(self, i):
"""
Returns the `i^{th}` simple root.
This should be overridden by any subclass, and typically
implemented as a cached method for efficiency.
EXAMPLES::
sage: r = RootSystem(["A",3]).root_lattice()
sage: r.simple_root(1)
alpha[1]
TESTS::
sage: super(sage.combinat.root_system.root_space.RootSpace, r).simple_root(1)
Traceback (most recent call last):
...
NotImplementedError: <abstract method simple_root at ...>
"""
@cached_method
def simple_roots(self):
r"""
Returns the family `(\alpha_i)_{i\in I}` of the simple roots.
EXAMPLES::
sage: alpha = RootSystem(["A",3]).root_lattice().simple_roots()
sage: [alpha[i] for i in [1,2,3]]
[alpha[1], alpha[2], alpha[3]]
"""
if not hasattr(self,"_simple_roots"):
self._simple_roots = Family(self.index_set(), self.simple_root)
# Should we use rename to set a nice name for this family?
# self._simple_roots.rename("alpha")
            # This breaks some doctests
return self._simple_roots
@cached_method
def alpha(self):
r"""
Returns the family `(\alpha_i)_{i\in I}` of the simple roots,
with the extra feature that, for simple irreducible root
systems, `\alpha_0` yields the opposite of the highest root.
EXAMPLES::
sage: alpha = RootSystem(["A",2]).root_lattice().alpha()
sage: alpha[1]
alpha[1]
sage: alpha[0]
-alpha[1] - alpha[2]
"""
if self.root_system.is_finite() and self.root_system.is_irreducible():
return Family(self.index_set(), self.simple_root, \
hidden_keys = [0], hidden_function = lambda i: - self.highest_root())
else:
return self.simple_roots()
@cached_method
def basic_imaginary_roots(self):
r"""
Return the basic imaginary roots of ``self``.
The basic imaginary roots `\delta` are the set of imaginary roots
in `-C^{\vee}` where `C` is the dominant chamber (i.e.,
`\langle \beta, \alpha_i^{\vee} \rangle \leq 0` for all `i \in I`).
All imaginary roots are `W`-conjugate to a simple imaginary root.
EXAMPLES::
sage: RootSystem(['A', 2]).root_lattice().basic_imaginary_roots()
()
sage: Q = RootSystem(['A', 2, 1]).root_lattice()
sage: Q.basic_imaginary_roots()
(alpha[0] + alpha[1] + alpha[2],)
sage: delta = Q.basic_imaginary_roots()[0]
sage: all(delta.scalar(Q.simple_coroot(i)) <= 0 for i in Q.index_set())
True
"""
if self.cartan_type().is_finite():
return ()
if self.cartan_type().is_affine():
return (self.null_root(),)
raise ValueError("only implemented for finite and affine types")
@cached_method
def simple_roots_tilde(self):
r"""
Return the family `(\tilde\alpha_i)_{i\in I}` of the simple roots.
INPUT:
- ``self`` -- an affine root lattice realization
The `\tilde \alpha_i` give the embedding of the root
lattice of the other affinization of the same classical
root lattice into this root lattice (space?).
This uses the fact that `\alpha_i = \tilde \alpha_i` for
`i` not a special node, and that
.. MATH::
\delta = \sum a_i \alpha_i = \sum b_i \tilde \alpha_i
EXAMPLES:
In simply laced cases, this is boring::
sage: RootSystem(["A",3, 1]).root_lattice().simple_roots_tilde()
Finite family {0: alpha[0], 1: alpha[1], 2: alpha[2], 3: alpha[3]}
This was checked by hand::
sage: RootSystem(["C",2,1]).coroot_lattice().simple_roots_tilde()
Finite family {0: alphacheck[0] - alphacheck[2], 1: alphacheck[1], 2: alphacheck[2]}
sage: RootSystem(["B",2,1]).coroot_lattice().simple_roots_tilde()
Finite family {0: alphacheck[0] - alphacheck[1], 1: alphacheck[1], 2: alphacheck[2]}
What about type BC?
"""
i0 = self.cartan_type().special_node()
I0 = self.cartan_type().classical().index_set()
other_affinization = self.cartan_type().other_affinization()
b = other_affinization.col_annihilator()
alpha = self.simple_roots()
result = { i: alpha[i] for i in I0 }
result[i0] = (self.null_root() - self.linear_combination( (alpha[i], b[i]) for i in I0))/ b[i0]
return Family(result)
##########################################################################
# roots
##########################################################################
def roots(self):
"""
Return the roots of ``self``.
EXAMPLES::
sage: RootSystem(['A',2]).ambient_lattice().roots()
[(1, -1, 0), (1, 0, -1), (0, 1, -1), (-1, 1, 0), (-1, 0, 1), (0, -1, 1)]
This matches with :wikipedia:`Root_systems`::
sage: for T in CartanType.samples(finite = True, crystallographic = True):
....: print("%s %3s %3s"%(T, len(RootSystem(T).root_lattice().roots()), len(RootSystem(T).weight_lattice().roots())))
['A', 1] 2 2
['A', 5] 30 30
['B', 1] 2 2
['B', 5] 50 50
['C', 1] 2 2
['C', 5] 50 50
['D', 2] 4 4
['D', 3] 12 12
['D', 5] 40 40
['E', 6] 72 72
['E', 7] 126 126
['E', 8] 240 240
['F', 4] 48 48
['G', 2] 12 12
.. TODO::
The result should be an enumerated set, and handle
infinite root systems.
"""
if not self.cartan_type().is_finite():
from sage.sets.disjoint_union_enumerated_sets \
import DisjointUnionEnumeratedSets
D = DisjointUnionEnumeratedSets([self.positive_roots(),
self.negative_roots()])
D.rename("All roots of type {}".format(self.cartan_type()))
return D
return list(self.positive_roots()) + list(self.negative_roots())
def short_roots(self):
"""
Return a list of the short roots of ``self``.
EXAMPLES::
sage: L = RootSystem(['B',3]).root_lattice()
sage: sorted(L.short_roots())
[-alpha[1] - alpha[2] - alpha[3],
alpha[1] + alpha[2] + alpha[3],
-alpha[2] - alpha[3],
alpha[2] + alpha[3],
-alpha[3],
alpha[3]]
"""
if not self.cartan_type().is_finite():
raise NotImplementedError("only implemented for finite Cartan types")
return [x for x in self.roots() if x.is_short_root()]
def long_roots(self):
"""
Return a list of the long roots of ``self``.
EXAMPLES::
sage: L = RootSystem(['B',3]).root_lattice()
sage: sorted(L.long_roots())
[-alpha[1], -alpha[1] - 2*alpha[2] - 2*alpha[3],
-alpha[1] - alpha[2], -alpha[1] - alpha[2] - 2*alpha[3],
alpha[1], alpha[1] + alpha[2],
alpha[1] + alpha[2] + 2*alpha[3],
alpha[1] + 2*alpha[2] + 2*alpha[3], -alpha[2],
-alpha[2] - 2*alpha[3], alpha[2], alpha[2] + 2*alpha[3]]
"""
if not self.cartan_type().is_finite():
raise NotImplementedError("only implemented for finite Cartan types")
return [x for x in self.roots() if x.is_long_root()]
@cached_method
def positive_roots(self, index_set=None):
r"""
Return the positive roots of ``self``.
If ``index_set`` is not ``None``, returns the positive roots of
the parabolic subsystem with simple roots in ``index_set``.
Algorithm for finite type: generate them from the simple roots by
applying successive reflections toward the positive chamber.
EXAMPLES::
sage: L = RootSystem(['A',3]).root_lattice()
sage: sorted(L.positive_roots())
[alpha[1], alpha[1] + alpha[2],
alpha[1] + alpha[2] + alpha[3], alpha[2],
alpha[2] + alpha[3], alpha[3]]
sage: sorted(L.positive_roots((1,2)))
[alpha[1], alpha[1] + alpha[2], alpha[2]]
sage: sorted(L.positive_roots(()))
[]
sage: L = RootSystem(['A',3,1]).root_lattice()
sage: PR = L.positive_roots(); PR
Disjoint union of Family (Positive real roots of type ['A', 3, 1],
Positive imaginary roots of type ['A', 3, 1])
sage: [PR.unrank(i) for i in range(10)]
[alpha[1],
alpha[2],
alpha[3],
alpha[1] + alpha[2],
alpha[2] + alpha[3],
alpha[1] + alpha[2] + alpha[3],
alpha[0] + 2*alpha[1] + alpha[2] + alpha[3],
alpha[0] + alpha[1] + 2*alpha[2] + alpha[3],
alpha[0] + alpha[1] + alpha[2] + 2*alpha[3],
alpha[0] + 2*alpha[1] + 2*alpha[2] + alpha[3]]
"""
if self.cartan_type().is_affine():
from sage.sets.disjoint_union_enumerated_sets \
import DisjointUnionEnumeratedSets
return DisjointUnionEnumeratedSets([self.positive_real_roots(),
self.positive_imaginary_roots()])
if not self.cartan_type().is_finite():
raise NotImplementedError("Only implemented for finite and"
" affine Cartan types")
if index_set is None:
index_set = tuple(self.cartan_type().index_set())
return RecursivelyEnumeratedSet([self.simple_root(i) for i in index_set],
attrcall('pred', index_set=index_set),
structure='graded', enumeration='breadth')
@cached_method
def nonparabolic_positive_roots(self, index_set = None):
r"""
Return the positive roots of ``self`` that are not in the
parabolic subsystem indicated by ``index_set``.
If ``index_set`` is None, as in :meth:`positive_roots`
it is assumed to be the entire Dynkin node set. Then the
parabolic subsystem consists of all positive roots and the
empty list is returned.
EXAMPLES::
sage: L = RootSystem(['A',3]).root_lattice()
sage: L.nonparabolic_positive_roots()
[]
sage: sorted(L.nonparabolic_positive_roots((1,2)))
[alpha[1] + alpha[2] + alpha[3], alpha[2] + alpha[3], alpha[3]]
sage: sorted(L.nonparabolic_positive_roots(()))
[alpha[1], alpha[1] + alpha[2], alpha[1] + alpha[2] + alpha[3], alpha[2], alpha[2] + alpha[3], alpha[3]]
"""
if not self.cartan_type().is_finite():
raise NotImplementedError("Only implemented for "
"finite Cartan type")
if index_set is None:
return []
return [x for x in self.positive_roots()
if not x in self.positive_roots(index_set)]
@cached_method
def nonparabolic_positive_root_sum(self, index_set=None):
r"""
Return the sum of positive roots not in a parabolic subsystem.
The conventions for ``index_set`` are as in :meth:`nonparabolic_positive_roots`.
EXAMPLES::
sage: Q = RootSystem(['A',3]).root_lattice()
sage: Q.nonparabolic_positive_root_sum((1,2))
alpha[1] + 2*alpha[2] + 3*alpha[3]
sage: Q.nonparabolic_positive_root_sum()
0
sage: Q.nonparabolic_positive_root_sum(())
3*alpha[1] + 4*alpha[2] + 3*alpha[3]
"""
return self.sum(self.nonparabolic_positive_roots(index_set))
def positive_real_roots(self):
"""
Return the positive real roots of ``self``.
EXAMPLES::
sage: L = RootSystem(['A',3]).root_lattice()
sage: sorted(L.positive_real_roots())
[alpha[1], alpha[1] + alpha[2], alpha[1] + alpha[2] + alpha[3],
alpha[2], alpha[2] + alpha[3], alpha[3]]
sage: L = RootSystem(['A',3,1]).root_lattice()
sage: PRR = L.positive_real_roots(); PRR
Positive real roots of type ['A', 3, 1]
sage: [PRR.unrank(i) for i in range(10)]
[alpha[1],
alpha[2],
alpha[3],
alpha[1] + alpha[2],
alpha[2] + alpha[3],
alpha[1] + alpha[2] + alpha[3],
alpha[0] + 2*alpha[1] + alpha[2] + alpha[3],
alpha[0] + alpha[1] + 2*alpha[2] + alpha[3],
alpha[0] + alpha[1] + alpha[2] + 2*alpha[3],
alpha[0] + 2*alpha[1] + 2*alpha[2] + alpha[3]]
sage: Q = RootSystem(['A',4,2]).root_lattice()
sage: PR = Q.positive_roots()
sage: [PR.unrank(i) for i in range(5)]
[alpha[1],
alpha[2],
alpha[1] + alpha[2],
2*alpha[1] + alpha[2],
alpha[0] + alpha[1] + alpha[2]]
sage: Q = RootSystem(['D',3,2]).root_lattice()
sage: PR = Q.positive_roots()
sage: [PR.unrank(i) for i in range(5)]
[alpha[1],
alpha[2],
alpha[1] + 2*alpha[2],
alpha[1] + alpha[2],
alpha[0] + alpha[1] + 2*alpha[2]]
"""
if self.cartan_type().is_finite():
return tuple(RecursivelyEnumeratedSet(self.simple_roots(),
attrcall('pred'), structure='graded',
enumeration='breadth'))
if not self.cartan_type().is_affine():
raise NotImplementedError("only implemented for finite and affine Cartan types")
from sage.categories.cartesian_product import cartesian_product
from sage.combinat.root_system.root_system import RootSystem
from sage.sets.positive_integers import PositiveIntegers
from sage.sets.disjoint_union_enumerated_sets import DisjointUnionEnumeratedSets
Q = RootSystem(self.cartan_type().classical()).root_space(self.base_ring())
# Start with the classical positive roots
alpha = self.simple_roots()
def lift(x):
"""
Lift up the classical element into ``self``.
"""
return self.sum(c*alpha[i] for i,c in x)
P = Family(Q.positive_real_roots(), lift)
# Add all of the delta shifts
delta = self.null_root()
if self.cartan_type().is_untwisted_affine():
C = cartesian_product([PositiveIntegers(), Q.roots()])
F = Family(C, lambda x: lift(x[1]) + x[0]*delta)
D = DisjointUnionEnumeratedSets([P, F])
elif self.cartan_type().type() == 'BC' or self.cartan_type().dual().type() == 'BC':
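                # Type BC (A_{2n}^{(2)} or its dual): the real roots come in three
                # families, (beta + (2k-1)*delta)/2 for beta a long classical root,
                # beta + k*delta for beta short, and beta + 2k*delta for beta long.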
Cs = cartesian_product([PositiveIntegers(), Q.short_roots()])
Cl = cartesian_product([PositiveIntegers(), Q.long_roots()])
Fs = Family(Cl, lambda x: (lift(x[1]) + (2*x[0]-1)*delta) / 2)
Fm = Family(Cs, lambda x: lift(x[1]) + x[0]*delta)
Fl = Family(Cl, lambda x: lift(x[1]) + 2*x[0]*delta)
D = DisjointUnionEnumeratedSets([P, Fs, Fm, Fl])
else: # Other twisted types
Cs = cartesian_product([PositiveIntegers(), Q.short_roots()])
Cl = cartesian_product([PositiveIntegers(), Q.long_roots()])
Fs = Family(Cs, lambda x: lift(x[1]) + x[0]*delta)
if self.cartan_type().dual() == 'G': # D_4^3
k = 3
else:
k = 2
Fl = Family(Cl, lambda x: lift(x[1]) + x[0]*k*delta)
D = DisjointUnionEnumeratedSets([P, Fs, Fl])
# Return the final union
D.rename("Positive real roots of type {}".format(self.cartan_type()))
return D
def positive_imaginary_roots(self):
"""
Return the positive imaginary roots of ``self``.
EXAMPLES::
sage: L = RootSystem(['A',3]).root_lattice()
sage: L.positive_imaginary_roots()
()
sage: L = RootSystem(['A',3,1]).root_lattice()
sage: PIR = L.positive_imaginary_roots(); PIR
Positive imaginary roots of type ['A', 3, 1]
sage: [PIR.unrank(i) for i in range(5)]
[alpha[0] + alpha[1] + alpha[2] + alpha[3],
2*alpha[0] + 2*alpha[1] + 2*alpha[2] + 2*alpha[3],
3*alpha[0] + 3*alpha[1] + 3*alpha[2] + 3*alpha[3],
4*alpha[0] + 4*alpha[1] + 4*alpha[2] + 4*alpha[3],
5*alpha[0] + 5*alpha[1] + 5*alpha[2] + 5*alpha[3]]
"""
if self.cartan_type().is_finite():
return ()
if not self.cartan_type().is_affine():
raise NotImplementedError("only implemented for finite and affine Cartan types")
from sage.sets.positive_integers import PositiveIntegers
delta = self.null_root()
F = Family(PositiveIntegers(), lambda x: x*delta)
F.rename("Positive imaginary roots of type {}".format(self.cartan_type()))
return F
@cached_method
def positive_roots_by_height(self, increasing = True):
r"""
Returns a list of positive roots in increasing order by height.
If ``increasing`` is False, returns them in decreasing order.
.. warning::
                Raises an error if the Cartan type is not finite.
EXAMPLES::
sage: L = RootSystem(['C',2]).root_lattice()
sage: L.positive_roots_by_height()
[alpha[2], alpha[1], alpha[1] + alpha[2], 2*alpha[1] + alpha[2]]
sage: L.positive_roots_by_height(increasing = False)
[2*alpha[1] + alpha[2], alpha[1] + alpha[2], alpha[2], alpha[1]]
sage: L = RootSystem(['A',2,1]).root_lattice()
sage: L.positive_roots_by_height()
Traceback (most recent call last):
...
NotImplementedError: Only implemented for finite Cartan type
"""
if not self.cartan_type().is_finite():
raise NotImplementedError("Only implemented for finite Cartan type")
ranks = self.root_poset().level_sets()
if not increasing:
ranks.reverse()
roots = []
for x in ranks:
roots += x
return [x.element for x in roots]
@cached_method
def positive_roots_parabolic(self, index_set = None):
r"""
Return the set of positive roots for the parabolic subsystem with Dynkin node set ``index_set``.
INPUT:
- ``index_set`` -- (default:None) the Dynkin node set of the parabolic subsystem. It should be a tuple. The default value implies the entire Dynkin node set
EXAMPLES::
sage: lattice = RootSystem(['A',3]).root_lattice()
sage: sorted(lattice.positive_roots_parabolic((1,3)), key=str)
[alpha[1], alpha[3]]
sage: sorted(lattice.positive_roots_parabolic((2,3)), key=str)
[alpha[2], alpha[2] + alpha[3], alpha[3]]
sage: sorted(lattice.positive_roots_parabolic(), key=str)
[alpha[1], alpha[1] + alpha[2], alpha[1] + alpha[2] + alpha[3], alpha[2], alpha[2] + alpha[3], alpha[3]]
.. WARNING::
                This raises an error if the Cartan type is not finite.
"""
if not self.cartan_type().is_finite():
raise NotImplementedError("Only implemented for finite Cartan type")
if index_set is None:
index_set = tuple(self.cartan_type().index_set())
def parabolic_covers(alpha):
return [x for x in alpha.pred() if x.is_parabolic_root(index_set)]
generators = [x for x in self.simple_roots() if x.is_parabolic_root(index_set)]
return RecursivelyEnumeratedSet(generators, parabolic_covers,
structure='graded', enumeration='breadth')
@cached_method
def positive_roots_nonparabolic(self, index_set = None):
r"""
Returns the set of positive roots outside the parabolic subsystem with Dynkin node set ``index_set``.
INPUT:
- ``index_set`` -- (default:None) the Dynkin node set of the parabolic subsystem. It should be a tuple. The default value implies the entire Dynkin node set
EXAMPLES::
sage: lattice = RootSystem(['A',3]).root_lattice()
sage: sorted(lattice.positive_roots_nonparabolic((1,3)), key=str)
[alpha[1] + alpha[2], alpha[1] + alpha[2] + alpha[3], alpha[2], alpha[2] + alpha[3]]
sage: sorted(lattice.positive_roots_nonparabolic((2,3)), key=str)
[alpha[1], alpha[1] + alpha[2], alpha[1] + alpha[2] + alpha[3]]
sage: lattice.positive_roots_nonparabolic()
[]
sage: lattice.positive_roots_nonparabolic((1,2,3))
[]
.. WARNING::
                This raises an error if the Cartan type is not finite.
"""
if not self.cartan_type().is_finite():
raise NotImplementedError("Only implemented for finite Cartan type")
if index_set is None:
index_set = tuple(self.cartan_type().index_set())
return [x for x in self.positive_roots() if not x.is_parabolic_root(index_set)]
@cached_method
def positive_roots_nonparabolic_sum(self, index_set = None):
r"""
Returns the sum of positive roots outside the parabolic subsystem with Dynkin node set ``index_set``.
INPUT:
- ``index_set`` -- (default:None) the Dynkin node set of the parabolic subsystem. It should be a tuple. The default value implies the entire Dynkin node set
EXAMPLES::
sage: lattice = RootSystem(['A',3]).root_lattice()
sage: lattice.positive_roots_nonparabolic_sum((1,3))
2*alpha[1] + 4*alpha[2] + 2*alpha[3]
sage: lattice.positive_roots_nonparabolic_sum((2,3))
3*alpha[1] + 2*alpha[2] + alpha[3]
sage: lattice.positive_roots_nonparabolic_sum(())
3*alpha[1] + 4*alpha[2] + 3*alpha[3]
sage: lattice.positive_roots_nonparabolic_sum()
0
sage: lattice.positive_roots_nonparabolic_sum((1,2,3))
0
.. WARNING::
                This raises an error if the Cartan type is not finite.
"""
if not self.cartan_type().is_finite():
raise ValueError("Cartan type %s is not finite"%(self.cartan_type()))
if index_set is None or index_set == tuple(self.cartan_type().index_set()):
return self.zero()
return sum(self.positive_roots_nonparabolic(index_set))
def root_poset(self, restricted=False, facade=False):
r"""
Returns the (restricted) root poset associated to ``self``.
The elements are given by the positive roots (resp. non-simple, positive roots), and
`\alpha \leq \beta` iff `\beta - \alpha` is a non-negative linear combination of simple roots.
INPUT:
- ``restricted`` -- (default:False) if True, only non-simple roots are considered.
- ``facade`` -- (default:False) passes facade option to the poset generator.
EXAMPLES::
sage: Phi = RootSystem(['A',1]).root_poset(); Phi
Finite poset containing 1 elements
sage: Phi.cover_relations()
[]
sage: Phi = RootSystem(['A',2]).root_poset(); Phi
Finite poset containing 3 elements
sage: sorted(Phi.cover_relations(), key=str)
[[alpha[1], alpha[1] + alpha[2]], [alpha[2], alpha[1] + alpha[2]]]
sage: Phi = RootSystem(['A',3]).root_poset(restricted=True); Phi
Finite poset containing 3 elements
sage: sorted(Phi.cover_relations(), key=str)
[[alpha[1] + alpha[2], alpha[1] + alpha[2] + alpha[3]], [alpha[2] + alpha[3], alpha[1] + alpha[2] + alpha[3]]]
sage: Phi = RootSystem(['B',2]).root_poset(); Phi
Finite poset containing 4 elements
sage: sorted(Phi.cover_relations(), key=str)
[[alpha[1] + alpha[2], alpha[1] + 2*alpha[2]],
[alpha[1], alpha[1] + alpha[2]],
[alpha[2], alpha[1] + alpha[2]]]
TESTS:
Check that :trac:`17982` is fixed::
sage: RootSystem(['A', 2]).ambient_space().root_poset()
Finite poset containing 3 elements
"""
from sage.combinat.posets.posets import Poset
rels = []
pos_roots = set(self.positive_roots())
simple_roots = self.simple_roots()
if restricted:
pos_roots = [beta for beta in pos_roots if beta not in simple_roots]
for root in pos_roots:
for simple_root in simple_roots:
root_cover = root + simple_root
if root_cover in pos_roots:
rels.append((root, root_cover))
return Poset((pos_roots, rels), cover_relations=True, facade=facade)
def nonnesting_partition_lattice(self, facade=False):
r"""
Return the lattice of nonnesting partitions
This is the lattice of order ideals of the root poset.
This has been defined by Postnikov, see Remark 2 in [Reiner97]_.
.. SEEALSO::
:meth:`generalized_nonnesting_partition_lattice`, :meth:`root_poset`
EXAMPLES::
sage: R = RootSystem(['A', 3])
sage: RS = R.root_lattice()
sage: P = RS.nonnesting_partition_lattice(); P
Finite lattice containing 14 elements
sage: P.coxeter_transformation()**10 == 1
True
sage: R = RootSystem(['B', 3])
sage: RS = R.root_lattice()
sage: P = RS.nonnesting_partition_lattice(); P
Finite lattice containing 20 elements
sage: P.coxeter_transformation()**7 == 1
True
REFERENCES:
.. [Reiner97] Victor Reiner. *Non-crossing partitions for
classical reflection groups*. Discrete Mathematics 177 (1997)
.. [Arm06] Drew Armstrong. *Generalized Noncrossing Partitions and
Combinatorics of Coxeter Groups*. :arxiv:`math/0611106`
"""
return self.root_poset(facade=facade).order_ideals_lattice(facade=facade)
def generalized_nonnesting_partition_lattice(self, m, facade=False):
r"""
Return the lattice of `m`-nonnesting partitions
This has been defined by Athanasiadis, see chapter 5 of [Arm06]_.
INPUT:
- `m` -- integer
.. SEEALSO::
:meth:`nonnesting_partition_lattice`
EXAMPLES::
sage: R = RootSystem(['A', 2])
sage: RS = R.root_lattice()
sage: P = RS.generalized_nonnesting_partition_lattice(2); P
Finite lattice containing 12 elements
sage: P.coxeter_transformation()**20 == 1
True
"""
Phi_plus = self.positive_roots()
L = self.nonnesting_partition_lattice(facade=True)
chains = [chain for chain in L.chains().list() if len(chain) <= m]
multichains = []
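            # An m-multichain is a weakly increasing sequence of m order ideals;
            # build one from each chain of length at most m by repeating its elements.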
for chain in chains:
for multilist in combinations_with_replacement(list(range(len(chain))), m):
if len(set(multilist)) == len(chain):
multichains.append(tuple([chain[i] for i in multilist]))
def is_saturated_chain(chain):
for i in range(1, m + 1):
for j in range(1, m - i + 1):
for alpha in chain[i - 1]:
for beta in chain[j - 1]:
gamma = alpha + beta
if gamma in Phi_plus and gamma not in chain[i+j-1]:
return False
cochain = [[beta for beta in Phi_plus if beta not in ideal]
for ideal in chain]
for i in range(1, m + 1):
for j in range(1, m + 1):
for alpha in cochain[i - 1]:
for beta in cochain[j - 1]:
gamma = alpha + beta
if gamma in Phi_plus and gamma not in cochain[min(m - 1, i + j - 1)]:
return False
return True
def is_componentwise_subset(chain1, chain2):
return all(chain1[i].issubset(chain2[i])
for i in range(len(chain1)))
from sage.combinat.posets.lattices import LatticePoset
saturated_chains = [multichain for multichain in multichains
if is_saturated_chain(multichain)]
return LatticePoset((saturated_chains, is_componentwise_subset),
facade=facade)
def almost_positive_roots(self):
r"""
Returns the almost positive roots of ``self``
These are the positive roots together with the simple negative roots.
.. SEEALSO:: :meth:`almost_positive_root_decomposition`, :meth:`tau_plus_minus`
EXAMPLES::
sage: L = RootSystem(['A',2]).root_lattice()
sage: L.almost_positive_roots()
[-alpha[1], alpha[1], alpha[1] + alpha[2], -alpha[2], alpha[2]]
"""
if not self.cartan_type().is_finite():
raise ValueError("%s is not a finite Cartan type"%(self.cartan_type()))
return sorted([ -beta for beta in self.simple_roots() ] + list(self.positive_roots()))
def negative_roots(self):
r"""
Returns the negative roots of self.
EXAMPLES::
sage: L = RootSystem(['A', 2]).weight_lattice()
sage: sorted(L.negative_roots())
[-2*Lambda[1] + Lambda[2], -Lambda[1] - Lambda[2], Lambda[1] - 2*Lambda[2]]
Algorithm: negate the positive roots
"""
if not self.cartan_type().is_finite():
raise ValueError("%s is not a finite Cartan type" % self.cartan_type())
return self.positive_roots().map(attrcall('__neg__'))
##########################################################################
# coroots
##########################################################################
def coroot_lattice(self):
"""
Returns the coroot lattice.
EXAMPLES::
sage: RootSystem(['A',2]).root_lattice().coroot_lattice()
Coroot lattice of the Root system of type ['A', 2]
"""
return self.root_system.coroot_lattice()
def coroot_space(self, base_ring = QQ):
r"""
Return the coroot space over ``base_ring``.
INPUT:
- ``base_ring`` -- a ring (default: `\QQ`)
EXAMPLES::
sage: RootSystem(['A',2]).root_lattice().coroot_space()
Coroot space over the Rational Field of the Root system of type ['A', 2]
sage: RootSystem(['A',2]).root_lattice().coroot_space(QQ['q'])
Coroot space over the Univariate Polynomial Ring in q over Rational Field of the Root system of type ['A', 2]
"""
return self.root_system.coroot_space(base_ring = base_ring)
def simple_coroot(self, i):
"""
Returns the `i^{th}` simple coroot.
EXAMPLES::
sage: RootSystem(['A',2]).root_lattice().simple_coroot(1)
alphacheck[1]
"""
return self.coroot_lattice().simple_root(i)
@cached_method
def simple_coroots(self):
r"""
Returns the family `( \alpha^\vee_i)_{i\in I}` of the simple coroots.
EXAMPLES::
sage: alphacheck = RootSystem(['A',3]).root_lattice().simple_coroots()
sage: [alphacheck[i] for i in [1, 2, 3]]
[alphacheck[1], alphacheck[2], alphacheck[3]]
"""
if not hasattr(self,"cache_simple_coroots"):
self.cache_simple_coroots = Family(self.index_set(), self.simple_coroot)
# Should we use rename to set a nice name for this family?
# self.cache_simple_coroots.rename("alphacheck")
            # This breaks some doctests
return self.cache_simple_coroots
def alphacheck(self):
r"""
Returns the family `( \alpha^\vee_i)_{i\in I}` of the simple
coroots, with the extra feature that, for simple irreducible
root systems, `\alpha^\vee_0` yields the coroot associated to
the opposite of the highest root (caveat: for non simply laced
root systems, this is not the opposite of the highest coroot!)
EXAMPLES::
sage: alphacheck = RootSystem(["A",2]).ambient_space().alphacheck()
sage: alphacheck
Finite family {1: (1, -1, 0), 2: (0, 1, -1)}
            Here is now `\alpha^\vee_0`::

                sage: alphacheck[0]
                (-1, 0, 1)
.. todo:: add a non simply laced example
            Finally, here is an affine example::
sage: RootSystem(["A",2,1]).weight_space().alphacheck()
Finite family {0: alphacheck[0], 1: alphacheck[1], 2: alphacheck[2]}
sage: RootSystem(["A",3]).ambient_space().alphacheck()
Finite family {1: (1, -1, 0, 0), 2: (0, 1, -1, 0), 3: (0, 0, 1, -1)}
"""
if self.root_system.is_finite() and self.root_system.is_irreducible():
return Family(self.index_set(), self.simple_coroot, \
hidden_keys = [0], hidden_function = lambda i: - self.cohighest_root())
else:
return self.simple_coroots()
@cached_method
def cohighest_root(self):
"""
Returns the associated coroot of the highest root.
.. note:: this is usually not the highest coroot.
EXAMPLES::
sage: RootSystem(['A', 3]).ambient_space().cohighest_root()
(1, 0, 0, -1)
"""
return self.highest_root().associated_coroot()
##########################################################################
# null_root
##########################################################################
@cached_method
def null_root(self):
"""
Returns the null root of self. The null root is the smallest
non trivial positive root which is orthogonal to all simple
coroots. It exists for any affine root system.
EXAMPLES::
sage: RootSystem(['C',2,1]).root_lattice().null_root()
alpha[0] + 2*alpha[1] + alpha[2]
sage: RootSystem(['D',4,1]).root_lattice().null_root()
alpha[0] + alpha[1] + 2*alpha[2] + alpha[3] + alpha[4]
sage: RootSystem(['F',4,1]).root_lattice().null_root()
alpha[0] + 2*alpha[1] + 3*alpha[2] + 4*alpha[3] + 2*alpha[4]
"""
if self.cartan_type().is_affine():
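                # delta = sum_i a_i * alpha_i, where the a_i are the marks of the
                # affine Cartan type (given by cartan_type().a())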
coef = self.cartan_type().a()
return sum(coef[k]*self.simple_roots()[k] for k in coef.keys())
##########################################################################
# null_coroot (Also called CanonicalCentralElement)
##########################################################################
@cached_method
def null_coroot(self):
"""
Returns the null coroot of self.
The null coroot is the smallest non trivial positive
coroot which is orthogonal to all simple roots. It exists
for any affine root system.
EXAMPLES::
sage: RootSystem(['C',2,1]).root_lattice().null_coroot()
alphacheck[0] + alphacheck[1] + alphacheck[2]
sage: RootSystem(['D',4,1]).root_lattice().null_coroot()
alphacheck[0] + alphacheck[1] + 2*alphacheck[2] + alphacheck[3] + alphacheck[4]
sage: RootSystem(['F',4,1]).root_lattice().null_coroot()
alphacheck[0] + 2*alphacheck[1] + 3*alphacheck[2] + 2*alphacheck[3] + alphacheck[4]
"""
if not self.cartan_type().is_affine():
raise ValueError("%s is not an affine Cartan type"%(self.cartan_type()))
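            # The null coroot is sum_i acheck_i * alphacheck_i, where the acheck_i
            # are the comarks of the affine Cartan type (cartan_type().acheck())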
coef = self.cartan_type().acheck()
return sum(coef[k]*self.simple_coroots()[k] for k in coef.keys())
##########################################################################
# fundamental weights
##########################################################################
def fundamental_weights_from_simple_roots(self):
r"""
Return the fundamental weights.
This is computed from the simple roots by using the
inverse of the Cartan matrix. This method is therefore
only valid for finite types and if this realization of the
root lattice is large enough to contain them.
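In other words, denoting by `A` the Cartan matrix of ``self``,
this method computes
.. MATH::
\Lambda_j = \sum_{i \in I} (A^{-1})_{ij} \, \alpha_i
as can be checked on the examples below.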
EXAMPLES:
In the root space, we retrieve the inverse of the Cartan matrix::
sage: L = RootSystem(["B",3]).root_space()
sage: L.fundamental_weights_from_simple_roots()
Finite family {1: alpha[1] + alpha[2] + alpha[3],
2: alpha[1] + 2*alpha[2] + 2*alpha[3],
3: 1/2*alpha[1] + alpha[2] + 3/2*alpha[3]}
sage: ~L.cartan_type().cartan_matrix()
[ 1 1 1/2]
[ 1 2 1]
[ 1 2 3/2]
In the weight lattice and the ambient space, we retrieve
the fundamental weights::
sage: L = RootSystem(["B",3]).weight_lattice()
sage: L.fundamental_weights_from_simple_roots()
Finite family {1: Lambda[1], 2: Lambda[2], 3: Lambda[3]}
sage: L = RootSystem(["B",3]).ambient_space()
sage: L.fundamental_weights()
Finite family {1: (1, 0, 0), 2: (1, 1, 0), 3: (1/2, 1/2, 1/2)}
sage: L.fundamental_weights_from_simple_roots()
Finite family {1: (1, 0, 0), 2: (1, 1, 0), 3: (1/2, 1/2, 1/2)}
However the fundamental weights do not belong to the root
lattice::
sage: L = RootSystem(["B",3]).root_lattice()
sage: L.fundamental_weights_from_simple_roots()
Traceback (most recent call last):
...
ValueError: The fundamental weights do not live in this realization of the root lattice
Beware of the usual `GL_n` vs `SL_n` catch in type `A`::
sage: L = RootSystem(["A",3]).ambient_space()
sage: L.fundamental_weights()
Finite family {1: (1, 0, 0, 0), 2: (1, 1, 0, 0), 3: (1, 1, 1, 0)}
sage: L.fundamental_weights_from_simple_roots()
Finite family {1: (3/4, -1/4, -1/4, -1/4), 2: (1/2, 1/2, -1/2, -1/2), 3: (1/4, 1/4, 1/4, -3/4)}
sage: L = RootSystem(["A",3]).ambient_lattice()
sage: L.fundamental_weights_from_simple_roots()
Traceback (most recent call last):
...
ValueError: The fundamental weights do not live in this realization of the root lattice
"""
# We first scale the inverse of the Cartan matrix to have
# integer coefficients; then the linear combination
# of the simple roots is guaranteed to live in this space,
# and we rely on the division by d to fail gracefully.
M = self.cartan_type().cartan_matrix()
d = M.det()
if not d:
raise TypeError("The Cartan matrix is not invertible")
M = d*~M
fundamental_weights = [self.linear_combination(zip(self.simple_roots(), column))
for column in M.columns()]
try:
fundamental_weights = [x/d for x in fundamental_weights]
except ValueError:
raise ValueError("The fundamental weights do not live in this realization of the root lattice")
return Family(dict(zip(self.index_set(),fundamental_weights)))
##########################################################################
# reflections
##########################################################################
def reflection(self, root, coroot=None):
"""
Returns the reflection along the root, and across the
hyperplane defined by ``coroot``, as a function from
``self`` to ``self``.
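Concretely, this is the map
.. MATH::
v \mapsto v - \langle v, \alpha^\vee \rangle \, \alpha
where `\alpha` is ``root`` and `\alpha^\vee` is ``coroot``
(by default, the coroot associated to ``root``).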
EXAMPLES::
sage: space = RootSystem(['A',2]).weight_lattice()
sage: x=space.simple_roots()[1]
sage: y=space.simple_coroots()[1]
sage: s = space.reflection(x,y)
sage: x
2*Lambda[1] - Lambda[2]
sage: s(x)
-2*Lambda[1] + Lambda[2]
sage: s(-x)
2*Lambda[1] - Lambda[2]
"""
if coroot is None:
coroot = root.associated_coroot()
return lambda v: v - v.scalar(coroot) * root
@cached_method
def simple_reflection(self, i):
"""
Returns the `i^{th}` simple reflection, as a function from
``self`` to ``self``.
INPUT:
- ``i`` -- an element of the index set of ``self``
EXAMPLES::
sage: space = RootSystem(['A',2]).ambient_lattice()
sage: s = space.simple_reflection(1)
sage: x = space.simple_roots()[1]
sage: x
(1, -1, 0)
sage: s(x)
(-1, 1, 0)
"""
return self.reflection(self.simple_root(i), self.simple_coroot(i))
@cached_method
def simple_reflections(self):
r"""
Return the family `(s_i)_{i\in I}` of the simple reflections
of this root system.
EXAMPLES::
sage: r = RootSystem(["A", 2]).root_lattice()
sage: s = r.simple_reflections()
sage: s[1]( r.simple_root(1) )
-alpha[1]
TESTS::
sage: s
simple reflections
"""
res = self.alpha().zip(self.reflection, self.alphacheck())
# Should we use rename to set a nice name for this family?
res.rename("simple reflections")
return res
s = simple_reflections
##########################################################################
# projections
##########################################################################
def projection(self, root, coroot=None, to_negative=True):
r"""
Returns the projection along the root, and across the
hyperplane defined by ``coroot``, as a function `\pi` from
``self`` to ``self``. `\pi` is a half-linear map which stabilizes
the negative half space and acts by reflection on the positive
half space.
If ``to_negative`` is ``False``, then this projects onto the
positive half space instead.
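With the default ``to_negative=True``, this is the map
.. MATH::
\pi(v) = \begin{cases} v - \langle v, \alpha^\vee \rangle \, \alpha & \text{if } \langle v, \alpha^\vee \rangle > 0, \\ v & \text{otherwise,} \end{cases}
where `\alpha` is ``root`` and `\alpha^\vee` is ``coroot``.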
EXAMPLES::
sage: space = RootSystem(['A',2]).weight_lattice()
sage: x=space.simple_roots()[1]
sage: y=space.simple_coroots()[1]
sage: pi = space.projection(x,y)
sage: x
2*Lambda[1] - Lambda[2]
sage: pi(x)
-2*Lambda[1] + Lambda[2]
sage: pi(-x)
-2*Lambda[1] + Lambda[2]
sage: pi = space.projection(x,y,False)
sage: pi(-x)
2*Lambda[1] - Lambda[2]
"""
if coroot is None:
coroot = root.associated_coroot()
return lambda v: v - v.scalar(coroot) * root if ((v.scalar(coroot) > 0) == to_negative) else v
@cached_method
def simple_projection(self, i, to_negative=True):
"""
Returns the projection along the `i^{th}` simple root, and across the
hyperplane defined by the `i^{th}` simple coroot, as a function from
``self`` to ``self``.
INPUT:
- ``i`` -- an element of the index set of ``self``
EXAMPLES::
sage: space = RootSystem(['A',2]).weight_lattice()
sage: x = space.simple_roots()[1]
sage: pi = space.simple_projection(1)
sage: x
2*Lambda[1] - Lambda[2]
sage: pi(x)
-2*Lambda[1] + Lambda[2]
sage: pi(-x)
-2*Lambda[1] + Lambda[2]
sage: pi = space.simple_projection(1,False)
sage: pi(-x)
2*Lambda[1] - Lambda[2]
"""
return self.projection(self.simple_root(i), self.simple_coroot(i), to_negative)
@cached_method
def simple_projections(self, to_negative=True):
r"""
Returns the family `(\pi_i)_{i\in I}` of the simple projections
of this root system
EXAMPLES::
sage: space = RootSystem(['A',2]).weight_lattice()
sage: pi = space.simple_projections()
sage: x = space.simple_roots()
sage: pi[1](x[2])
-Lambda[1] + 2*Lambda[2]
TESTS::
sage: pi
pi
"""
if to_negative is not True:
raise NotImplementedError("only implemented when 'to_negative' is True")
res = self.alpha().zip(self.projection, self.alphacheck())
# Should this use rename to set a nice name for this family?
res.rename("pi")
return res
##########################################################################
# Weyl group
##########################################################################
def weyl_group(self, prefix=None):
"""
Returns the Weyl group associated to self.
EXAMPLES::
sage: RootSystem(['F',4]).ambient_space().weyl_group()
Weyl Group of type ['F', 4] (as a matrix group acting on the ambient space)
sage: RootSystem(['F',4]).root_space().weyl_group()
Weyl Group of type ['F', 4] (as a matrix group acting on the root space)
"""
from sage.combinat.root_system.weyl_group import WeylGroup
return WeylGroup(self, prefix=prefix)
##########################################################################
# The piecewise linear involutive operators tau_plus and tau_minus on self,
# and the orbit decomposition of the almost positive roots
# by the associated dihedral group
##########################################################################
# TODO: find a better name; at least, this temporary one won't
# create conflicts
def tau_epsilon_operator_on_almost_positive_roots(self, J):
r"""
The `\tau_\epsilon` operator on almost positive roots
Given a subset `J` of non adjacent vertices of the Dynkin
diagram, this constructs the operator on the almost positive
roots which fixes the negative simple roots `\alpha_i` for `i`
not in `J`, and acts otherwise by:
.. MATH::
\tau_+( \beta ) = (\prod_{i \in J} s_i) (\beta)
See Equation (1.2) of [CFZ2002]_.
EXAMPLES::
sage: L = RootSystem(['A',4]).root_lattice()
sage: tau = L.tau_epsilon_operator_on_almost_positive_roots([1,3])
sage: alpha = L.simple_roots()
The action on a negative simple root not in `J`::
sage: tau(-alpha[2])
-alpha[2]
The action on a negative simple root in `J`::
sage: tau(-alpha[1])
alpha[1]
The action on all almost positive roots::
sage: for root in L.almost_positive_roots():
....: print('tau({:<41}) = {}'.format(str(root), tau(root)))
tau(-alpha[1] ) = alpha[1]
tau(alpha[1] ) = -alpha[1]
tau(alpha[1] + alpha[2] ) = alpha[2] + alpha[3]
tau(alpha[1] + alpha[2] + alpha[3] ) = alpha[2]
tau(alpha[1] + alpha[2] + alpha[3] + alpha[4]) = alpha[2] + alpha[3] + alpha[4]
tau(-alpha[2] ) = -alpha[2]
tau(alpha[2] ) = alpha[1] + alpha[2] + alpha[3]
tau(alpha[2] + alpha[3] ) = alpha[1] + alpha[2]
tau(alpha[2] + alpha[3] + alpha[4] ) = alpha[1] + alpha[2] + alpha[3] + alpha[4]
tau(-alpha[3] ) = alpha[3]
tau(alpha[3] ) = -alpha[3]
tau(alpha[3] + alpha[4] ) = alpha[4]
tau(-alpha[4] ) = -alpha[4]
tau(alpha[4] ) = alpha[3] + alpha[4]
This method works on any root lattice realization::
sage: L = RootSystem(['B',3]).ambient_space()
sage: tau = L.tau_epsilon_operator_on_almost_positive_roots([1,3])
sage: for root in L.almost_positive_roots():
....: print('tau({:<41}) = {}'.format(str(root), tau(root)))
tau((-1, 1, 0) ) = (1, -1, 0)
tau((1, 0, 0) ) = (0, 1, 0)
tau((1, -1, 0) ) = (-1, 1, 0)
tau((1, 1, 0) ) = (1, 1, 0)
tau((1, 0, -1) ) = (0, 1, 1)
tau((1, 0, 1) ) = (0, 1, -1)
tau((0, -1, 1) ) = (0, -1, 1)
tau((0, 1, 0) ) = (1, 0, 0)
tau((0, 1, -1) ) = (1, 0, 1)
tau((0, 1, 1) ) = (1, 0, -1)
tau((0, 0, -1) ) = (0, 0, 1)
tau((0, 0, 1) ) = (0, 0, -1)
.. SEEALSO:: :meth:`tau_plus_minus`
"""
W = self.weyl_group()
t = W.from_reduced_word(J)
simple_roots = self.simple_roots()
other_negative_simple_roots = set(-simple_roots[i] for i in self.index_set() if i not in J)
def tau_epsilon(alpha):
if alpha in other_negative_simple_roots:
return alpha
else:
return t.action(alpha)
return tau_epsilon
def tau_plus_minus(self):
r"""
Returns the `\tau^+` and `\tau^-` piecewise linear operators on ``self``
Those operators are induced by the bipartition `\{L,R\}` of
the simple roots of ``self``, and stabilize the almost
positive roots. Namely, `\tau_+` fixes the negative simple
roots `\alpha_i` for `i` in `R`, and acts otherwise by:
.. MATH::
\tau_+( \beta ) = (\prod_{i \in L} s_i) (\beta)
`\tau_-` acts analogously, with `L` and `R` interchanged.
Those operators are used to construct the associahedron, a
polytopal realization of the cluster complex (see
:class:`Associahedron`).
.. SEEALSO:: :meth:`tau_epsilon_operator_on_almost_positive_roots`
EXAMPLES:
We explore the example of [CFZ2002]_ Eq.(1.3)::
sage: S = RootSystem(['A',2]).root_lattice()
sage: taup, taum = S.tau_plus_minus()
sage: for beta in S.almost_positive_roots(): print("{} , {} , {}".format(beta, taup(beta), taum(beta)))
-alpha[1] , alpha[1] , -alpha[1]
alpha[1] , -alpha[1] , alpha[1] + alpha[2]
alpha[1] + alpha[2] , alpha[2] , alpha[1]
-alpha[2] , -alpha[2] , alpha[2]
alpha[2] , alpha[1] + alpha[2] , -alpha[2]
"""
ct = self.cartan_type()
L,R = ct.index_set_bipartition()
return self.tau_epsilon_operator_on_almost_positive_roots(L), self.tau_epsilon_operator_on_almost_positive_roots(R)
def almost_positive_roots_decomposition(self):
r"""
Returns the decomposition of the almost positive roots of ``self``
This is the list of the orbits of the almost positive roots
under the action of the dihedral group generated by the
operators `\tau_+` and `\tau_-`.
.. SEEALSO::
- :meth:`almost_positive_roots`
- :meth:`tau_plus_minus`
EXAMPLES::
sage: RootSystem(['A',2]).root_lattice().almost_positive_roots_decomposition()
[[-alpha[1], alpha[1], alpha[1] + alpha[2], alpha[2], -alpha[2]]]
sage: RootSystem(['B',2]).root_lattice().almost_positive_roots_decomposition()
[[-alpha[1], alpha[1], alpha[1] + 2*alpha[2]], [-alpha[2], alpha[2], alpha[1] + alpha[2]]]
sage: RootSystem(['D',4]).root_lattice().almost_positive_roots_decomposition()
[[-alpha[1], alpha[1], alpha[1] + alpha[2], alpha[2] + alpha[3] + alpha[4]],
[-alpha[2], alpha[2], alpha[1] + alpha[2] + alpha[3] + alpha[4], alpha[1] + 2*alpha[2] + alpha[3] + alpha[4]],
[-alpha[3], alpha[3], alpha[2] + alpha[3], alpha[1] + alpha[2] + alpha[4]],
[-alpha[4], alpha[4], alpha[2] + alpha[4], alpha[1] + alpha[2] + alpha[3]]]
"""
# TODO: this should use a generic function for computing
# orbits under the action of a group:
# def orbits(seeds, operators)
# INPUT:
# - seeds: a list of elements
# - operators: a list of functions
#
# Returns the orbits generated by seeds under the action of the operators
tau_plus, tau_minus = self.tau_plus_minus()
I = set(self.index_set())
Delta = self.simple_roots()
L, R = self.cartan_type().index_set_bipartition()
orbits = []
while I:
i = I.pop()
alpha = -self.simple_root(i)
orbit = [alpha]
if i in L:
plus = False
beta = tau_plus(alpha)
else:
plus = True
beta = tau_minus(alpha)
while -beta not in Delta and beta not in orbit:
orbit.append(beta)
if beta in Delta:
j = beta.leading_support()
I.discard(j)
if plus:
beta = tau_plus(beta)
else:
beta = tau_minus(beta)
plus = not plus
if -beta in Delta:
orbit.append(beta)
orbits.append(orbit)
return orbits
##########################################################################
# Methods for affine root lattice realizations
# Should eventually go in an Affine nested class
##########################################################################
@cached_method
def classical(self):
"""
Return the corresponding classical root/weight/ambient lattice/space.
EXAMPLES::
sage: RootSystem(["A",4,1]).root_lattice().classical()
Root lattice of the Root system of type ['A', 4]
sage: RootSystem(["A",4,1]).weight_lattice().classical()
Weight lattice of the Root system of type ['A', 4]
sage: RootSystem(["A",4,1]).ambient_space().classical()
Ambient space of the Root system of type ['A', 4]
"""
from .root_space import RootSpace
from .weight_space import WeightSpace
R = self.cartan_type().classical().root_system()
if isinstance(self, RootSpace):
return R.root_space(self.base_ring())
elif isinstance(self, WeightSpace):
return R.weight_space(self.base_ring())
else:
return R.ambient_space(self.base_ring())
@lazy_attribute
def _to_classical(self):
r"""
The projection onto the classical ambient space.
EXAMPLES::
sage: L = RootSystem(["A",2,1]).ambient_space()
sage: e = L.basis()
sage: L._to_classical(e["delta"])
(0, 0, 0)
sage: L._to_classical(e["deltacheck"])
(0, 0, 0)
sage: L._to_classical(e[0])
(1, 0, 0)
sage: L._to_classical(e[1])
(0, 1, 0)
sage: L._to_classical(e[2])
(0, 0, 1)
"""
return self.module_morphism(self._to_classical_on_basis, codomain = self.classical())
def _classical_alpha_0(self):
"""
Return the projection of `\alpha_0` in the classical space.
This is used e.g. to construct the projections onto the
classical space.
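Since the null root `\delta = \sum_i a_i \alpha_i` is sent to
zero by this projection, the image of `\alpha_0` is
.. MATH::
\alpha_0 \mapsto - \frac{1}{a_0} \sum_{i \neq 0} a_i \alpha_i
where the `a_i` are the marks of the affine Cartan type.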
EXAMPLES:
This is the opposite of the highest root in the untwisted case::
sage: L = RootSystem(["B",3,1]).root_space()
sage: L._classical_alpha_0()
-alpha[1] - 2*alpha[2] - 2*alpha[3]
sage: L._to_classical_on_basis(0)
-alpha[1] - 2*alpha[2] - 2*alpha[3]
sage: L.classical().highest_root()
alpha[1] + 2*alpha[2] + 2*alpha[3]
But not in the other cases::
sage: L = RootSystem(CartanType(["B",3,1]).dual()).root_space()
sage: L._to_classical_on_basis(0)
-alpha[1] - 2*alpha[2] - alpha[3]
sage: L.classical().highest_root()
2*alpha[1] + 2*alpha[2] + alpha[3]
"""
cartan_type = self.cartan_type()
special_node = cartan_type.special_node()
a = self.cartan_type().col_annihilator()
classical = self.classical()
return -classical.sum(a[i] * self.simple_root(i)
for i in self.index_set() if i != special_node) \
/ a[special_node]
######################################################################
# Root system plots
def plot(self,
roots="simple",
coroots=False,
reflection_hyperplanes="simple",
fundamental_weights=None,
fundamental_chamber=None,
alcoves=None,
alcove_labels=False,
alcove_walk=None,
**options):
r"""
Return a picture of this root lattice realization.
INPUT:
- ``roots`` -- which roots to display, if any.
Can be one of the following:
* ``"simple"`` -- The simple roots (the default)
* ``"classical"`` -- Not yet implemented
* ``"all"`` -- Only works in the finite case
* A list or tuple of roots
* ``False``
- ``coroots`` -- which coroots to display, if any.
Can be one of the following:
* ``"simple"`` -- The simple coroots (the default)
* ``"classical"`` -- Not yet implemented
* ``"all"`` -- Only works in the finite case
* A list or tuple of coroots
* ``False`` (the default)
- ``fundamental_weights`` -- a boolean or ``None`` (default: ``None``)
whether to display the fundamental weights.
If ``None``, the fundamental weights are drawn if available.
- ``reflection_hyperplanes`` -- which reflection
hyperplanes to display, if any. Can be one of the
following:
* ``"simple"`` -- The simple roots
* ``"classical"`` -- Not yet implemented
* ``"all"`` -- Only works in the finite case
* A list or tuple of roots
* ``False``
- ``fundamental_chamber`` -- whether and how to draw the
fundamental chamber. Can be one of the following:
* A boolean -- Set to ``True`` to draw the fundamental
chamber
* ``"classical"`` -- Draw the classical fundamental chamber
* ``None`` -- (the default) The fundamental chamber is
drawn except in the root lattice where this is not yet
implemented. For affine types the classical
fundamental chamber is drawn instead.
- ``alcoves`` -- one of the following (default: ``True``):
* A boolean -- Whether to display the alcoves
* A list of alcoves -- The alcoves to be drawn. Each alcove is
specified by the coordinates of its center in the root lattice
(affine type only). Otherwise the alcoves that intersect the
bounding box are drawn.
- ``alcove_labels`` -- one of the following (default: ``False``):
* A boolean -- Whether to display the elements of the Weyl group
indexing the alcoves. This currently also requires the
``alcoves`` option to be set.
* A number `l` -- The label is drawn at level `l` (affine type
only), which only makes sense if ``affine`` is ``False``.
- ``bounding_box`` -- a rational number or a list of pairs
thereof (default: 3)
Specifies a bounding box, in the coordinate system for
this plot, in which to plot alcoves and other infinite
objects. If the bounding box is a number `a`, then the
bounding box is of the form `[-a,a]` in all directions.
Beware that there can be some border effects and the
returned graphic is not necessarily strictly contained
in the bounding box.
- ``alcove_walk`` -- an alcove walk or ``None`` (default: ``None``)
The alcove walk is described by a list (or iterable) of
vertices of the Dynkin diagram which specifies which
wall is crossed at each step, starting from the
fundamental alcove.
- ``projection`` -- one of the following (default: ``True``):
* ``True`` -- The default projection for the root
lattice realization is used.
* ``False`` -- No projection is used.
* ``barycentric`` -- A barycentric projection is used.
* A function -- If a function is specified, it should implement a
linear (or affine) map taking as input an element of
this root lattice realization and returning its
desired coordinates in the plot, as a vector with
rational coordinates.
- ``color`` -- a function mapping vertices of the Dynkin
diagram to colors (default: ``"black"`` for 0,
``"blue"`` for 1, ``"red"`` for 2, ``"green"`` for 3)
This is used to set the color for the simple roots,
fundamental weights, reflection hyperplanes, alcove
facets, etc. If the color is ``None``, the object is not
drawn.
- ``labels`` -- a boolean (default: ``True``)
whether to display labels on the simple roots,
fundamental weights, etc.
EXAMPLES::
sage: L = RootSystem(["A",2,1]).ambient_space().plot() # long time
.. SEEALSO::
- :meth:`plot_parse_options`
- :meth:`plot_roots`, :meth:`plot_coroots`
- :meth:`plot_fundamental_weights`
- :meth:`plot_fundamental_chamber`
- :meth:`plot_reflection_hyperplanes`
- :meth:`plot_alcoves`
- :meth:`plot_alcove_walk`
- :meth:`plot_ls_paths`
- :meth:`plot_mv_polytope`
- :meth:`plot_crystal`
"""
plot_options = self.plot_parse_options(**options)
G = plot_options.empty()
if roots:
G += self.plot_roots(roots, plot_options=plot_options)
# if coroots is None:
# coroot_lattice = self.root_system.coroot_lattice()
# if self.has_coerce_map_from(coroot_lattice):
# coroots="simple"
# else:
# coroots=False
if coroots:
G += self.plot_coroots(coroots, plot_options=plot_options)
if fundamental_weights is None:
fundamental_weights = hasattr(self, "fundamental_weights")
if fundamental_weights:
G += self.plot_fundamental_weights(plot_options=plot_options)
if reflection_hyperplanes:
G += self.plot_reflection_hyperplanes(reflection_hyperplanes, plot_options=plot_options)
if alcoves is None:
alcoves = self.cartan_type().is_affine() and hasattr(self, "fundamental_weights")
if alcoves:
G += self.plot_alcoves(alcoves, alcove_labels=alcove_labels, plot_options=plot_options)
if fundamental_chamber is None:
if not hasattr(self, "fundamental_weights"):
fundamental_chamber = False
elif self.cartan_type().is_affine():
fundamental_chamber = "classical"
else:
fundamental_chamber = True
if fundamental_chamber:
G += self.plot_fundamental_chamber(fundamental_chamber, plot_options=plot_options)
if alcove_walk is not None:
G += self.plot_alcove_walk(alcove_walk, plot_options=plot_options)
return plot_options.finalize(G)
def plot_parse_options(self, **args):
r"""
Return an option object to be used for root system plotting.
EXAMPLES::
sage: L = RootSystem(["A",2,1]).ambient_space()
sage: options = L.plot_parse_options()
sage: options
<sage.combinat.root_system.plot.PlotOptions object at ...>
.. SEEALSO::
- :meth:`plot` for a description of the plotting options
- :ref:`sage.combinat.root_system.plot` for a tutorial
on root system plotting
"""
if len(args) == 1 and "plot_options" in args:
return args["plot_options"]
else:
return PlotOptions(self, **args)
def _plot_projection(self, x):
r"""
Implement the default projection to be used for plots.
EXAMPLES:
By default, this is just the identity::
sage: L = RootSystem(["B",3]).root_lattice()
sage: l = L.an_element(); l
2*alpha[1] + 2*alpha[2] + 3*alpha[3]
sage: L._plot_projection(l)
2*alpha[1] + 2*alpha[2] + 3*alpha[3]
In the ambient space of type `A_2`, this is the
barycentric projection. In the ambient space of affine
type this goes through the classical ambient space.
.. SEEALSO::
- :meth:`sage.combinat.root_system.type_A.AmbientSpace._plot_projection`
- :meth:`sage.combinat.root_system.type_affine.AmbientSpace._plot_projection`
- :meth:`plot` for a description of the plotting options
- :ref:`sage.combinat.root_system.plot` for a tutorial
on root system plotting
"""
return x
@cached_method
def _plot_projection_barycentric_matrix(self):
"""
A rational approximation of the matrix for the barycentric
projection.
OUTPUT:
a matrix with rational coefficients whose column sum is zero
.. SEEALSO::
- :func:`sage.combinat.root_system.plot.barycentric_projection_matrix`
- :meth:`_plot_projection_barycentric`
EXAMPLES::
sage: RootSystem(["A",0]).ambient_space()._plot_projection_barycentric_matrix()
[]
sage: m = RootSystem(["A",1]).ambient_space()._plot_projection_barycentric_matrix(); m
[ 1 -1]
sage: sum(m.columns())
(0)
sage: m = RootSystem(["A",2]).ambient_space()._plot_projection_barycentric_matrix(); m
[ 1/2 -1 1/2]
[ 989/1142 0 -989/1142]
sage: sum(m.columns())
(0, 0)
sage: m = RootSystem(["A",3]).ambient_space()._plot_projection_barycentric_matrix(); m
[ 1277/1564 -1277/1564 0 0]
[1009460/2141389 849/1801 -1121/1189 0]
[ 1/3 1/3 1/3 -1]
sage: sum(m.columns())
(0, 0, 0)
"""
from sage.symbolic.constants import pi
m = matrix(QQ, barycentric_projection_matrix(self.dimension()-1, angle=2*pi/3).n(20))
# We want to guarantee that the sum of the columns of the
# result is zero. This is close to being the case for the
# original matrix and for the current rational
# approximation. We tidy this up by replacing the
# first column by the opposite of the sum of the others.
if self.dimension()>1: # not needed in the trivial cases
m.set_column(0, -sum(m[:,1:].columns()))
m.set_immutable()
return m
def _plot_projection_barycentric(self, x):
r"""
Implement the barycentric projection to be used for plots.
It is in fact a rational approximation thereof, but the
sum of the basis vectors is guaranteed to be mapped to
zero.
EXAMPLES::
sage: L = RootSystem(["A",2]).ambient_space()
sage: e = L.basis()
sage: L._plot_projection_barycentric(e[0])
(1/2, 989/1142)
sage: L._plot_projection_barycentric(e[1])
(-1, 0)
sage: L._plot_projection_barycentric(e[2])
(1/2, -989/1142)
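In particular, the sum of the basis vectors is mapped to
zero, as guaranteed by the construction of the projection
matrix::
sage: L._plot_projection_barycentric(e[0] + e[1] + e[2])
(0, 0)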
.. SEEALSO::
- :meth:`_plot_projection`, :meth:`plot`
- :ref:`sage.combinat.root_system.plot` for a tutorial
on root system plotting
"""
return self._plot_projection_barycentric_matrix()*vector(x)
def plot_roots(self, collection="simple", **options):
r"""
Plot the (simple/classical) roots of this root lattice.
INPUT:
- ``collection`` -- which roots to display
can be one of the following:
* ``"simple"`` (the default)
* ``"classical"``
* ``"all"``
- ``**options`` -- Plotting options
.. SEEALSO::
- :meth:`plot` for a description of the plotting options
- :ref:`sage.combinat.root_system.plot` for a tutorial
on root system plotting
EXAMPLES::
sage: RootSystem(["B",3]).ambient_space().plot_roots()
Graphics3d Object
sage: RootSystem(["B",3]).ambient_space().plot_roots("all")
Graphics3d Object
TESTS::
sage: list(RootSystem(["A",2]).root_lattice().plot_roots())
[Arrow from (0.0,0.0) to (1.0,0.0),
Text '$\alpha_{1}$' at the point (1.05,0.0),
Arrow from (0.0,0.0) to (0.0,1.0),
Text '$\alpha_{2}$' at the point (0.0,1.05)]
sage: list(RootSystem(["A",2]).weight_lattice().plot_roots(labels=False))
[Arrow from (0.0,0.0) to (2.0,-1.0),
Arrow from (0.0,0.0) to (-1.0,2.0)]
sage: list(RootSystem(["A",2]).ambient_lattice().plot_roots())
[Arrow from (0.0,0.0) to (1.5,0.86...),
Text '$\alpha_{1}$' at the point (1.575...,0.90...),
Arrow from (0.0,0.0) to (-1.5,0.86...),
Text '$\alpha_{2}$' at the point (-1.575...,0.90...)]
sage: list(RootSystem(["B",2]).ambient_space().plot_roots())
[Arrow from (0.0,0.0) to (1.0,-1.0),
Text '$\alpha_{1}$' at the point (1.05,-1.05),
Arrow from (0.0,0.0) to (0.0,1.0),
Text '$\alpha_{2}$' at the point (0.0,1.05)]
sage: list(RootSystem(["A",2]).root_lattice().plot_roots("all"))
[Arrow from (0.0,0.0) to (1.0,0.0),
Text '$\alpha_{1}$' at the point (1.05,0.0),
Arrow from (0.0,0.0) to (0.0,1.0),
Text '$\alpha_{2}$' at the point (0.0,1.05),
Arrow from (0.0,0.0) to (1.0,1.0),
Text '$\alpha_{1} + \alpha_{2}$' at the point (1.05,1.05),
Arrow from (0.0,0.0) to (-1.0,0.0),
Text '$-\alpha_{1}$' at the point (-1.05,0.0),
Arrow from (0.0,0.0) to (0.0,-1.0),
Text '$-\alpha_{2}$' at the point (0.0,-1.05),
Arrow from (0.0,0.0) to (-1.0,-1.0),
Text '$-\alpha_{1} - \alpha_{2}$' at the point (-1.05,-1.05)]
"""
plot_options = self.plot_parse_options(**options)
root_lattice = self.root_system.root_lattice()
if collection == "simple":
roots = root_lattice.simple_roots()
elif collection == "classical":
if not self.cartan_type().is_affine():
raise ValueError("plotting classical roots only available in affine type")
raise NotImplementedError("classical roots")
elif collection == "all":
if not self.cartan_type().is_finite():
raise ValueError("plotting all roots only available in finite type")
roots = root_lattice.roots()
elif isinstance(collection, (list, tuple)):
roots = collection
else:
raise ValueError("Unknown value: %s"%collection)
roots = Family(roots, self)
return plot_options.family_of_vectors(roots)
def plot_coroots(self, collection="simple", **options):
r"""
Plot the (simple/classical) coroots of this root lattice.
INPUT:
- ``collection`` -- which coroots to display.
Can be one of the following:
* ``"simple"`` (the default)
* ``"classical"``
* ``"all"``
- ``**options`` -- Plotting options
.. SEEALSO::
- :meth:`plot` for a description of the plotting options
- :ref:`sage.combinat.root_system.plot` for a tutorial
on root system plotting
EXAMPLES::
sage: RootSystem(["B",3]).ambient_space().plot_coroots()
Graphics3d Object
TESTS::
sage: list(RootSystem(["B",2]).ambient_space().plot_coroots())
[Arrow from (0.0,0.0) to (1.0,-1.0),
Text '$\alpha^\vee_{1}$' at the point (1.05,-1.05),
Arrow from (0.0,0.0) to (0.0,2.0),
Text '$\alpha^\vee_{2}$' at the point (0.0,2.1)]
"""
# Functionally speaking, this is duplicated from plot_roots ...
# Can we avoid that, say by going to the dual space?
plot_options = self.plot_parse_options(**options)
coroot_lattice = self.root_system.coroot_lattice()
if not self.has_coerce_map_from(coroot_lattice):
raise ValueError("Can't plot the coroots: there is no embedding of the coroot lattice to this space")
if collection == "simple":
coroots = coroot_lattice.simple_roots()
elif collection == "classical":
if not self.cartan_type().is_affine():
raise ValueError("plotting classical coroots only available in affine type")
raise NotImplementedError("classical coroots")
elif collection == "all":
if not self.cartan_type().is_finite():
raise ValueError("plotting all coroots only available in finite type")
coroots = coroot_lattice.roots()
elif isinstance(collection, (list, tuple)):
coroots = collection
else:
raise ValueError("Unknown value: %s"%collection)
coroots = Family(coroots, self)
return plot_options.family_of_vectors(coroots)
def plot_fundamental_weights(self, **options):
r"""
Plot the fundamental weights of this root lattice.
INPUT:
- ``**options`` -- Plotting options
.. SEEALSO::
- :meth:`plot` for a description of the plotting options
- :ref:`sage.combinat.root_system.plot` for a tutorial
on root system plotting
EXAMPLES::
sage: RootSystem(["B",3]).ambient_space().plot_fundamental_weights()
Graphics3d Object
TESTS::
sage: sorted(RootSystem(["A",2]).weight_lattice().plot_fundamental_weights(), key=str)
[Arrow from (0.0,0.0) to (0.0,1.0),
Arrow from (0.0,0.0) to (1.0,0.0),
Text '$\Lambda_{1}$' at the point (1.05,0.0),
Text '$\Lambda_{2}$' at the point (0.0,1.05)]
sage: sorted(RootSystem(["A",2]).ambient_lattice().plot_fundamental_weights(), key=str)
[Arrow from (0.0,0.0) to (-0.5,0.86602451838...),
Arrow from (0.0,0.0) to (0.5,0.86602451838...),
Text '$\Lambda_{1}$' at the point (0.525,0.909325744308...),
Text '$\Lambda_{2}$' at the point (-0.525,0.909325744308...)]
"""
plot_options = self.plot_parse_options(**options)
# We build the family of fundamental weights in this space,
# indexed by the fundamental weights in the weight lattice.
#
# To this end, we don't use the embedding of the weight
# lattice into self as for the roots or coroots because
# the ambient space can define the fundamental weights
# slightly differently (the usual GL_n vs SL_n catch).
weight_lattice = self.root_system.weight_lattice()
fundamental_weights = Family(dict(zip(weight_lattice.fundamental_weights(),
self.fundamental_weights())))
return plot_options.family_of_vectors(fundamental_weights)
def plot_reflection_hyperplanes(self, collection="simple", **options):
r"""
Plot the simple reflection hyperplanes.
INPUT:
- ``collection`` -- which reflection hyperplanes to display.
Can be one of the following:
* ``"simple"`` (the default)
* ``"classical"``
* ``"all"``
- ``**options`` -- Plotting options
.. SEEALSO::
- :meth:`plot` for a description of the plotting options
- :ref:`sage.combinat.root_system.plot` for a tutorial
on root system plotting
EXAMPLES::
sage: RootSystem(["A",2,1]).ambient_space().plot_reflection_hyperplanes()
Graphics object consisting of 6 graphics primitives
sage: RootSystem(["G",2,1]).ambient_space().plot_reflection_hyperplanes()
Graphics object consisting of 6 graphics primitives
sage: RootSystem(["A",3]).weight_space().plot_reflection_hyperplanes()
Graphics3d Object
sage: RootSystem(["B",3]).ambient_space().plot_reflection_hyperplanes()
Graphics3d Object
sage: RootSystem(["A",3,1]).weight_space().plot_reflection_hyperplanes()
Graphics3d Object
sage: RootSystem(["B",3,1]).ambient_space().plot_reflection_hyperplanes()
Graphics3d Object
sage: RootSystem(["A",2,1]).weight_space().plot_reflection_hyperplanes(affine=False, level=1)
Graphics3d Object
sage: RootSystem(["A",2]).root_lattice().plot_reflection_hyperplanes()
Graphics object consisting of 4 graphics primitives
TESTS::
sage: L = RootSystem(["A",2]).ambient_space()
sage: print(L.plot_reflection_hyperplanes().description())
Text '$H_{\alpha^\vee_{1}}$' at the point (-1.81...,3.15...)
Text '$H_{\alpha^\vee_{2}}$' at the point (1.81...,3.15...)
Line defined by 2 points: [(-1.73..., 3.0), (1.73..., -3.0)]
Line defined by 2 points: [(1.73..., 3.0), (-1.73..., -3.0)]
sage: print(L.plot_reflection_hyperplanes("all").description())
Text '$H_{\alpha^\vee_{1} + \alpha^\vee_{2}}$' at the point (3.15...,0.0)
Text '$H_{\alpha^\vee_{1}}$' at the point (-1.81...,3.15...)
Text '$H_{\alpha^\vee_{2}}$' at the point (1.81...,3.15...)
Line defined by 2 points: [(-1.73..., 3.0), (1.73..., -3.0)]
Line defined by 2 points: [(1.73..., 3.0), (-1.73..., -3.0)]
Line defined by 2 points: [(3.0, 0.0), (-3.0, 0.0)]
sage: L = RootSystem(["A",2,1]).ambient_space()
sage: print(L.plot_reflection_hyperplanes().description())
Text '$H_{\alpha^\vee_{0}}$' at the point (3.15...,0.90...)
Text '$H_{\alpha^\vee_{1}}$' at the point (-1.81...,3.15...)
Text '$H_{\alpha^\vee_{2}}$' at the point (1.81...,3.15...)
Line defined by 2 points: [(-1.73..., 3.0), (1.73..., -3.0)]
Line defined by 2 points: [(1.73..., 3.0), (-1.73..., -3.0)]
Line defined by 2 points: [(3.0, 0.86...), (-3.0, 0.86...)]
.. TODO:: Provide an option for transparency?
"""
plot_options = self.plot_parse_options(**options)
coroot_lattice = self.root_system.coroot_lattice()
# Recall that the coroots are given by the roots of the coroot lattice
if collection == "simple":
coroots = coroot_lattice.simple_roots()
elif collection == "classical":
if not self.cartan_type().is_affine():
raise ValueError("plotting classical reflection hyperplanes only available in affine type")
raise NotImplementedError("classical roots")
elif collection == "all":
if not self.cartan_type().is_finite():
raise ValueError("plotting all reflection hyperplanes only available in finite type")
coroots = coroot_lattice.positive_roots()
elif isinstance(collection, (list, tuple)):
coroots = collection
else:
raise ValueError("Unknown value: %s"%collection)
G = plot_options.empty()
for coroot in coroots:
G += plot_options.reflection_hyperplane(coroot)
return plot_options.finalize(G)
def plot_hedron(self, **options):
r"""
Plot the polyhedron whose vertices are given by the orbit
of `\rho`.
In type `A`, this is the usual permutohedron.
.. SEEALSO::
- :meth:`plot` for a description of the plotting options
- :ref:`sage.combinat.root_system.plot` for a tutorial
on root system plotting
EXAMPLES::
sage: RootSystem(["A",2]).ambient_space().plot_hedron()
Graphics object consisting of 8 graphics primitives
sage: RootSystem(["A",3]).ambient_space().plot_hedron()
Graphics3d Object
sage: RootSystem(["B",3]).ambient_space().plot_hedron()
Graphics3d Object
sage: RootSystem(["C",3]).ambient_space().plot_hedron()
Graphics3d Object
sage: RootSystem(["D",3]).ambient_space().plot_hedron()
Graphics3d Object
Surprise: polyhedra of large dimension know how to
project themselves nicely::
sage: RootSystem(["F",4]).ambient_space().plot_hedron() # long time
Graphics3d Object
TESTS::
sage: L = RootSystem(["B",2]).ambient_space()
sage: print(L.plot_hedron().description())
Polygon defined by 8 points: [(1.5, 0.5), (0.5, 1.5), (-0.5, 1.5), (-1.5, 0.5), (-1.5, -0.5), (-0.5, -1.5), (0.5, -1.5), (1.5, -0.5)]
Line defined by 2 points: [(-0.5, -1.5), (0.5, -1.5)]
Line defined by 2 points: [(-0.5, 1.5), (0.5, 1.5)]
Line defined by 2 points: [(-1.5, -0.5), (-0.5, -1.5)]
Line defined by 2 points: [(-1.5, -0.5), (-1.5, 0.5)]
Line defined by 2 points: [(-1.5, 0.5), (-0.5, 1.5)]
Line defined by 2 points: [(0.5, -1.5), (1.5, -0.5)]
Line defined by 2 points: [(0.5, 1.5), (1.5, 0.5)]
Line defined by 2 points: [(1.5, -0.5), (1.5, 0.5)]
Point set defined by 8 point(s): [(-1.5, -0.5), (-1.5, 0.5), (-0.5, -1.5), (-0.5, 1.5), (0.5, -1.5), (0.5, 1.5), (1.5, -0.5), (1.5, 0.5)]
"""
from sage.geometry.polyhedron.all import Polyhedron
plot_options = self.plot_parse_options(**options)
if not self.cartan_type().is_finite():
raise ValueError("the Cartan type must be finite")
vertices = [plot_options.projection(vertex)
for vertex in self.rho().orbit()]
return Polyhedron(vertices=vertices).plot()
def plot_fundamental_chamber(self, style="normal", **options):
r"""
Plot the (classical) fundamental chamber.
INPUT:
- ``style`` -- ``"normal"`` or ``"classical"`` (default: ``"normal"``)
- ``**options`` -- Plotting options
.. SEEALSO::
- :meth:`plot` for a description of the plotting options
- :ref:`sage.combinat.root_system.plot` for a tutorial
on root system plotting
EXAMPLES:
2D plots::
sage: RootSystem(["B",2]).ambient_space().plot_fundamental_chamber()
Graphics object consisting of 1 graphics primitive
sage: RootSystem(["B",2,1]).ambient_space().plot_fundamental_chamber()
Graphics object consisting of 1 graphics primitive
sage: RootSystem(["B",2,1]).ambient_space().plot_fundamental_chamber("classical")
Graphics object consisting of 1 graphics primitive
3D plots::
sage: RootSystem(["A",3,1]).weight_space() .plot_fundamental_chamber()
Graphics3d Object
sage: RootSystem(["B",3,1]).ambient_space().plot_fundamental_chamber()
Graphics3d Object
This feature is currently not available in the root lattice/space::
sage: list(RootSystem(["A",2]).root_lattice().plot_fundamental_chamber())
Traceback (most recent call last):
...
TypeError: classical fundamental chamber not yet available in the root lattice
TESTS::
sage: L = RootSystem(["B",2,1]).ambient_space()
sage: print(L.plot_fundamental_chamber().description())
Polygon defined by 3 points: [(0.5, 0.5), (1.0, 0.0), (0.0, 0.0)]
sage: print(L.plot_fundamental_chamber(style="classical").description())
Polygon defined by 3 points: [(0.0, 0.0), (3.0, 3.0), (3.0, 0.0)]
"""
plot_options = self.plot_parse_options(**options)
if not hasattr(self, "fundamental_weights"):
raise TypeError("classical fundamental chamber not yet available in the root lattice")
Lambda = self.fundamental_weights()
cartan_type = self.cartan_type()
if style=="classical":
if not cartan_type.is_affine():
raise TypeError("classical fundamental chamber only available in affine type")
I = cartan_type.classical().index_set()
lines = [Lambda[cartan_type.special_node()]]
else:
I = cartan_type.index_set()
lines = []
return plot_options.cone(rays = [Lambda[i] for i in I],
lines=lines,
color="lightgrey",
alpha=.3)
def plot_alcoves(self, alcoves=True, alcove_labels=False, wireframe=False, **options):
r"""
Plot the alcoves and optionally their labels.
INPUT:
- ``alcoves`` -- a list of alcoves or ``True`` (default: ``True``)
- ``alcove_labels`` -- a boolean or a number specifying at
which level to put the label (default: ``False``)
- ``**options`` -- Plotting options
.. SEEALSO::
- :meth:`plot` for a description of the plotting options
- :ref:`sage.combinat.root_system.plot` for a
tutorial on root system plotting, and in particular
how the alcoves can be specified.
EXAMPLES:
2D plots::
sage: RootSystem(["B",2,1]).ambient_space().plot_alcoves() # long time (3s)
Graphics object consisting of 228 graphics primitives
3D plots::
sage: RootSystem(["A",2,1]).weight_space() .plot_alcoves(affine=False) # long time (3s)
Graphics3d Object
sage: RootSystem(["G",2,1]).ambient_space().plot_alcoves(affine=False, level=1) # long time (3s)
Graphics3d Object
Here we plot a single alcove::
sage: L = RootSystem(["A",3,1]).ambient_space()
sage: W = L.weyl_group()
sage: L.plot(alcoves=[W.one()], reflection_hyperplanes=False, bounding_box=2)
Graphics3d Object
TESTS::
sage: L = RootSystem(["A",2,1]).weight_space()
sage: p = L.plot_alcoves(alcoves=[[0,0]])
sage: print(p.description())
Line defined by 2 points: [(-1.0, 0.0), (0.0, -1.0)]
Line defined by 2 points: [(-1.0, 1.0), (-1.0, 0.0)]
Line defined by 2 points: [(-1.0, 1.0), (0.0, 0.0)]
Line defined by 2 points: [(0.0, 0.0), (-1.0, 0.0)]
Line defined by 2 points: [(0.0, 0.0), (0.0, -1.0)]
Line defined by 2 points: [(0.0, 0.0), (1.0, -1.0)]
Line defined by 2 points: [(0.0, 1.0), (-1.0, 1.0)]
Line defined by 2 points: [(0.0, 1.0), (0.0, 0.0)]
Line defined by 2 points: [(0.0, 1.0), (1.0, 0.0)]
Line defined by 2 points: [(1.0, -1.0), (0.0, -1.0)]
Line defined by 2 points: [(1.0, 0.0), (0.0, 0.0)]
Line defined by 2 points: [(1.0, 0.0), (1.0, -1.0)]
sage: sorted((line.options()['rgbcolor'], line.options()['thickness']) for line in p)
[('black', 2), ('black', 2), ('black', 2),
('black', 2), ('black', 2), ('black', 2),
('blue', 1), ('blue', 1), ('blue', 1),
('red', 1), ('red', 1), ('red', 1)]
"""
plot_options = self.plot_parse_options(**options)
if not hasattr(self, "fundamental_weights"):
raise TypeError("alcoves not yet available in the root lattice")
Lambda = self.fundamental_weights()
cartan_type = self.cartan_type()
I = cartan_type.index_set()
W = self.weyl_group()
if alcove_labels is not False:
rho = self.rho()
if alcove_labels is not True:
# The input is the desired level
rho = rho * alcove_labels / rho.level()
else:
rho = plot_options.intersection_at_level_1(rho)
# The rays of the fundamental alcove
fundamental_alcove_rays = Lambda.map(plot_options.intersection_at_level_1)
def alcove_in_bounding_box(w):
return any(plot_options.in_bounding_box(w.action(fundamental_alcove_rays[i]))
for i in I)
def alcove_facet(w, i):
# Alcove facets with degenerate intersection with the
# bounding box bring no information; we might as well
# not draw them. Besides, this avoids ugly fat points
# in dimension 2.
return plot_options.cone(rays=[w.action(fundamental_alcove_rays[j]) for j in I if j != i],
color=plot_options.color(i),
thickness=plot_options.thickness(i),
wireframe=wireframe,
draw_degenerate=False)
def alcove_label(w):
label = "$1$" if w.is_one() else "$s_{"+"".join(str(j) for j in w.reduced_word())+"}$"
position = plot_options.projection(w.action(rho))
if position in plot_options.bounding_box:
return plot_options.text(label, position)
else:
return plot_options.empty()
G = plot_options.empty()
if alcoves is not True:
alcoves = list(alcoves)
if alcoves is True or (alcoves and W.is_parent_of(alcoves[0])):
if alcoves is True:
alcoves = W.weak_order_ideal(alcove_in_bounding_box, side="right")
# We assume that the fundamental alcove lies within
# the bounding box, and explore the alcoves
# intersecting the bounding box by going up right
# order (i.e. going away from the fundamental alcove)
for w in alcoves:
for i in w.descents(side="right", positive=True):
G += alcove_facet(w, i)
if alcove_labels is not False:
G += alcove_label(w)
else:
if not cartan_type.is_affine():
raise TypeError("alcoves=list only available in affine type")
translation_factors = cartan_type.translation_factors()
simple_roots = self.simple_roots()
translation_vectors = Family({i: translation_factors[i]*simple_roots[i]
for i in cartan_type.classical().index_set()})
# The elements of the classical Weyl group, as elements of W
W0 = [W.from_reduced_word(w.reduced_word()) for w in self.weyl_group().classical()]
for alcove in alcoves:
# The translation mapping the center of the
# fundamental polygon to polygon indexed by alcove
shift = sum(x*v for x,v in zip(alcove, translation_vectors))
shift = W.from_morphism(shift.translation)
for w in W0:
for i in w.descents(side="right", positive=True):
G += alcove_facet(shift * w, i)
if alcove_labels:
G += alcove_label(w)
return plot_options.finalize(G)
# In this alternative commented-out implementation, the
# alcove picture is constructed directly in the
# projection. It only works for rank 2+1, but it is
# faster; we keep it for reference for now. With #12553
# (Cythoned PPL polytopes), the difference is likely to
# disappear. If this is confirmed, the code below should be discarded.
#
# from sage.plot.line import line
# translation_vectors = Family({i: translation_factors[i]*plot_options.projection(simple_roots[i])
# for i in cartan_type.classical().index_set()})
#
# # For each polygon P to be drawn, alcoves_shift contains the translation
# # from fundamental polygon to P in the plot coordinate system
# def immutable_vector(x):
# # Takes care of possible numerical instabilities
# x = x.numerical_approx(8)
# x.set_immutable()
# return x
#
# # Construct the fundamental polygon
# # The classical group acting on ``self``
# W0 = self.weyl_group().classical().list()
# # The coordinates of the vertices of the fundamental alcove
# fundamental_alcove_rays = Lambda.map(plot_options.intersection_at_level_1)
# # The coordinates of the vertices of the fundamental polygon
# fundamental_polygon_rays = {
# (i, w): plot_options.projection(w.action(fundamental_alcove_rays[i]))
# for w in W0
# for i in I
# }
#
# # Get the center of the polygons
# if alcoves is True:
# def neighbors(x):
# return filter(lambda y: plot_options.bounding_box.contains(plot_options.origin_projected+y),
# [immutable_vector(x+epsilon*t) for t in translation_vectors for epsilon in [-1,1]])
# alcoves_shift = list(RecursivelyEnumeratedSet([immutable_vector(plot_options.origin_projected)], neighbors))
# else:
# alcoves_shift = [sum(x*v for x,v in zip(alcove, translation_vectors))
# for alcove in alcoves]
#
# G = plot_options.empty()
# for shift in alcoves_shift:
# # for each center of polygon and each element of classical
# # parabolic subgroup, we have to draw an alcove.
# polygon_center = plot_options.origin_projected + shift
#
# for w in W0:
# for i in I:
# facet_indices = [j for j in I if j != i]
# assert len(facet_indices) == 2
# facet = [fundamental_polygon_rays[j, w] + shift for j in facet_indices]
# # This takes a bit of time; do we really want that feature?
# #if not all(bounding_box_as_polytope.contains(v) for v in facet):
# # continue
# G += line(facet,
# rgbcolor = plot_options.color(i),
# thickness = 2 if i == special_node else 1)
def plot_bounding_box(self, **options):
r"""
Plot the bounding box.
INPUT:
- ``**options`` -- Plotting options
This is mostly for testing purposes.
.. SEEALSO::
- :meth:`plot` for a description of the plotting options
- :ref:`sage.combinat.root_system.plot` for a tutorial
on root system plotting
EXAMPLES::
sage: L = RootSystem(["A",2,1]).ambient_space()
sage: L.plot_bounding_box()
Graphics object consisting of 1 graphics primitive
TESTS::
sage: list(L.plot_bounding_box())
[Polygon defined by 4 points]
"""
plot_options = self.plot_parse_options(**options)
return plot_options.bounding_box.plot(color="gray", alpha=0.5, wireframe=False)
def plot_alcove_walk(self, word, start=None, foldings=None, color="orange", **options):
r"""
Plot an alcove walk.
INPUT:
- ``word`` -- a list of elements of the index set
- ``foldings`` -- a list of booleans or ``None`` (default: ``None``)
- ``start`` -- an element of this space (default: ``None`` for `\rho`)
- ``**options`` -- plotting options
.. SEEALSO::
- :meth:`plot` for a description of the plotting options
- :ref:`sage.combinat.root_system.plot` for a tutorial
on root system plotting
EXAMPLES:
An alcove walk of type `A_2^{(1)}`::
sage: L = RootSystem(["A",2,1]).ambient_space()
sage: w1 = [0,2,1,2,0,2,1,0,2,1,2,1,2,0,2,0,1,2,0]
sage: p = L.plot_alcoves(bounding_box=5) # long time (5s)
sage: p += L.plot_alcove_walk(w1) # long time
sage: p # long time
Graphics object consisting of 375 graphics primitives
The same plot with another alcove walk::
sage: w2 = [2,1,2,0,2,0,2,1,2,0,1,2,1,2,1,0,1,2,0,2,0,1,2,0,2]
sage: p += L.plot_alcove_walk(w2, color="orange") # long time
And another with some foldings::
sage: pic = L.plot_alcoves(bounding_box=3) # long time
sage: pic += L.plot_alcove_walk([0,1,2,0,2,0,1,2,0,1], # long time (3s)
....: foldings = [False, False, True, False, False, False, True, False, True, False],
....: color="green"); pic
Graphics object consisting of 155 graphics primitives
TESTS::
sage: L = RootSystem(["A",2,1]).weight_space()
sage: p = L.plot_alcove_walk([0,1,2,0,2,0,1,2,0,1],
....: foldings = [False, False, True, False, False, False, True, False, True, False],
....: color="green",
....: start=L.rho())
sage: print(p.description())
Line defined by 2 points: [(-1.0, 8.0), (-1.5, 9.0)]
Line defined by 2 points: [(1.0, 4.0), (1.5, 4.5)]
Line defined by 2 points: [(1.0, 7.0), (1.5, 6.0)]
Arrow from (-1.0,5.0) to (-2.0,7.0)
Arrow from (-1.0,8.0) to (1.0,7.0)
Arrow from (-1.5,9.0) to (-1.0,8.0)
Arrow from (-2.0,7.0) to (-1.0,8.0)
Arrow from (1.0,1.0) to (2.0,2.0)
Arrow from (1.0,4.0) to (-1.0,5.0)
Arrow from (1.0,7.0) to (2.0,8.0)
Arrow from (1.5,4.5) to (1.0,4.0)
Arrow from (1.5,6.0) to (1.0,7.0)
Arrow from (2.0,2.0) to (1.0,4.0)
"""
from sage.plot.line import line
from sage.plot.arrow import arrow
plot_options = self.plot_parse_options(**options)
W = self.weyl_group()
s = W.simple_reflections()
if start is None:
start = plot_options.intersection_at_level_1(self.rho())
if foldings is None:
foldings = [False] * len(word)
w = W.one()
source = plot_options.projection(start)
G = plot_options.empty()
for (i, folding) in zip(word, foldings):
w = w * s[i]
target = plot_options.projection(w.action(start))
if folding:
middle = (source+target)/2
G += line ([source, middle], rgbcolor=color)
G += arrow(middle, source, rgbcolor=color, arrowsize=plot_options._arrowsize)
# reset w
w = w * s[i]
else:
G += arrow(source, target, rgbcolor=color, arrowsize=plot_options._arrowsize)
source=target
return G
@cached_method
def _maximum_root_length(self):
r"""
Return the square of the maximum of the root lengths for irreducible finite type root systems.
EXAMPLES::
sage: Q = RootSystem(['C',2]).root_lattice()
sage: Q._maximum_root_length()
4
sage: Q = RootSystem(['G',2]).root_lattice()
sage: Q._maximum_root_length()
6
sage: Q = RootSystem(['A',3]).root_lattice()
sage: Q._maximum_root_length()
2
"""
ct = self.cartan_type()
if not ct.is_irreducible():
raise NotImplementedError("Implemented only for irreducible finite root systems")
if not ct.is_finite():
raise NotImplementedError("Implemented only for irreducible finite root systems")
L = self.root_system.ambient_space() # uses peculiarities of ambient embedding
return max([root.scalar(root) for root in L.simple_roots()])
def plot_ls_paths(self, paths, plot_labels=None, colored_labels=True, **options):
r"""
Plot LS paths.
INPUT:
- ``paths`` -- a finite crystal or list of LS paths
- ``plot_labels`` -- (default: ``None``) the distance to plot
the LS labels from the endpoint of the path; set to ``None``
to not display the labels
- ``colored_labels`` -- (default: ``True``) if ``True``, then
color the labels the same color as the LS path
- ``**options`` -- plotting options
.. SEEALSO::
- :meth:`plot` for a description of the plotting options
- :ref:`sage.combinat.root_system.plot` for a tutorial
on root system plotting
EXAMPLES::
sage: B = crystals.LSPaths(['A',2], [1,1])
sage: L = RootSystem(['A',2]).ambient_space()
sage: L.plot_fundamental_weights() + L.plot_ls_paths(B)
Graphics object consisting of 14 graphics primitives
This also works in 3 dimensions::
sage: B = crystals.LSPaths(['B',3], [2,0,0])
sage: L = RootSystem(['B',3]).ambient_space()
sage: L.plot_ls_paths(B)
Graphics3d Object
"""
if not isinstance(paths, (list, tuple, set)):
from sage.combinat.crystals.littelmann_path import CrystalOfLSPaths
from sage.categories.finite_crystals import FiniteCrystals
if not isinstance(paths, CrystalOfLSPaths):
raise ValueError("the input must be LS paths")
if paths not in FiniteCrystals():
raise ValueError("the crystal must be finite")
from sage.plot.line import line
from sage.plot.colors import rainbow
plot_options = self.plot_parse_options(**options)
color = rainbow(len(paths), 'rgbtuple')
G = plot_options.empty()
for i,b in enumerate(paths):
prev = plot_options.projection(self.zero())
for x in b.value:
next = prev + plot_options.projection(self(x))
G += line([prev, next], rgbcolor=color[i])
prev = next
if plot_labels is not None:
if colored_labels:
G += plot_options.text(b, prev + prev.normalized()*plot_labels, rgbcolor=color[i])
else:
G += plot_options.text(b, prev + prev.normalized()*plot_labels)
return G
def plot_mv_polytope(self, mv_polytope, mark_endpoints=True,
circle_size=0.06, circle_thickness=1.6,
wireframe='blue', fill='green', alpha=1,
**options):
r"""
Plot an MV polytope.
INPUT:
- ``mv_polytope`` -- an MV polytope
- ``mark_endpoints`` -- (default: ``True``) mark the endpoints
of the MV polytope
- ``circle_size`` -- (default: 0.06) the size of the circles
- ``circle_thickness`` -- (default: 1.6) the thickness of the
extra rings of circles
- ``wireframe`` -- (default: ``'blue'``) color to draw the
wireframe of the polytope with
- ``fill`` -- (default: ``'green'``) color to fill the polytope with
- ``alpha`` -- (default: 1) the alpha value (opacity) of the fill
- ``**options`` -- plotting options
.. SEEALSO::
- :meth:`plot` for a description of the plotting options
- :ref:`sage.combinat.root_system.plot` for a tutorial
on root system plotting
EXAMPLES::
sage: B = crystals.infinity.MVPolytopes(['C',2])
sage: L = RootSystem(['C',2]).ambient_space()
sage: p = B.highest_weight_vector().f_string([1,2,1,2])
sage: L.plot_fundamental_weights() + L.plot_mv_polytope(p)
Graphics object consisting of 14 graphics primitives
This also works in 3 dimensions::
sage: B = crystals.infinity.MVPolytopes(['A',3])
sage: L = RootSystem(['A',3]).ambient_space()
sage: p = B.highest_weight_vector().f_string([2,1,3,2])
sage: L.plot_mv_polytope(p)
Graphics3d Object
"""
from sage.geometry.polyhedron.all import Polyhedron
plot_options = self.plot_parse_options(**options)
# Setup the shift for plotting
pbw_data = mv_polytope._pbw_datum.parent
al = self.simple_roots()
red = tuple(mv_polytope._pbw_datum.long_word)
roots = [self.sum(c*al[a] for a,c in root)
for root in pbw_data._root_list_from(red)]
datum = mv_polytope._pbw_datum.lusztig_datum
end_pt = self.sum(roots[i] * c for i,c in enumerate(datum))
shift = plot_options.projection(end_pt)
vertices = [plot_options.projection(vertex) - shift
for vertex in mv_polytope._polytope_vertices(self)]
p = Polyhedron(vertices=vertices).plot(wireframe=wireframe,
fill=fill, alpha=alpha)
if mark_endpoints:
from sage.plot.circle import circle
p += circle(plot_options.projection(self.zero()),
circle_size, fill=True,
thickness=circle_thickness, color=wireframe)
p += circle(-shift,
circle_size, fill=True,
thickness=circle_thickness, color=wireframe)
return p
def plot_crystal(self, crystal,
plot_labels=True, label_color='black',
edge_labels=False,
circle_size=0.06, circle_thickness=1.6,
**options):
r"""
Plot a finite crystal.
INPUT:
- ``crystal`` -- the finite crystal to plot
- ``plot_labels`` -- (default: ``True``) can be one of the
following:
* ``True`` - use the latex labels
* ``'circles'`` - use circles for multiplicity up to 4; if the
multiplicity is larger, then it is displayed as a number instead
* ``'multiplicities'`` - use the multiplicities
- ``label_color`` -- (default: ``'black'``) the color of the
labels
- ``edge_labels`` -- (default: ``False``) if ``True``, then draw
in the edge label
- ``circle_size`` -- (default: 0.06) the size of the circles
- ``circle_thickness`` -- (default: 1.6) the thickness of the
extra rings of circles
- ``**options`` -- plotting options
.. SEEALSO::
- :meth:`plot` for a description of the plotting options
- :ref:`sage.combinat.root_system.plot` for a tutorial
on root system plotting
EXAMPLES::
sage: L = RootSystem(['A',2]).ambient_space()
sage: C = crystals.Tableaux(['A',2], shape=[2,1])
sage: L.plot_crystal(C, plot_labels='multiplicities')
Graphics object consisting of 15 graphics primitives
sage: C = crystals.Tableaux(['A',2], shape=[8,4])
sage: p = L.plot_crystal(C, plot_labels='circles')
sage: p.show(figsize=15)
A 3-dimensional example::
sage: L = RootSystem(['B',3]).ambient_space()
sage: C = crystals.Tableaux(['B',3], shape=[2,1])
sage: L.plot_crystal(C, plot_labels='circles', edge_labels=True) # long time
Graphics3d Object
TESTS:
Check that :trac:`29548` is fixed::
sage: LS = crystals.LSPaths(['A',2], [1,1])
sage: L = RootSystem(['A',2]).ambient_space()
sage: L.plot_crystal(LS)
Graphics object consisting of 16 graphics primitives
"""
from sage.plot.arrow import arrow
from sage.plot.circle import circle
from sage.plot.colors import rgbcolor
from sage.categories.finite_crystals import FiniteCrystals
if crystal not in FiniteCrystals():
raise ValueError("only implemented for finite crystals")
plot_options = self.plot_parse_options(**options)
label_color = rgbcolor(label_color)
g = crystal.digraph()
mults = {}
for x in g.vertex_iterator():
wt = self(x.weight())
mults[wt] = mults.get(wt, []) + [x]
positions = {x: plot_options.projection(x) for x in mults.keys()}
G = plot_options.empty()
if plot_labels == 'circles':
for wt,m in mults.items():
m = len(m)
if m > 4:
G += plot_options.text(m, positions[wt], rgbcolor=label_color)
continue
if m >= 1:
G += circle(positions[wt], circle_size, fill=True,
thickness=circle_thickness,
rgbcolor=label_color)
for i in range(2,m+1):
G += circle(positions[wt], i*circle_size,
thickness=circle_thickness,
rgbcolor=label_color)
elif plot_labels == 'multiplicities':
for wt,m in mults.items():
G += plot_options.text(len(m), positions[wt], rgbcolor=label_color)
elif plot_labels:
for wt,m in mults.items():
for elt in m:
# TODO: Destack the multiple weights
G += plot_options.text(elt, positions[wt], rgbcolor=label_color)
for h,t,i in g.edges():
G += arrow(positions[self(h.weight())], positions[self(t.weight())],
zorder=1, rgbcolor=plot_options.color(i),
arrowsize=plot_options._arrowsize)
if edge_labels:
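                    # Place the edge label at the midpoint, nudged along a direction
                    # perpendicular to the edge so it does not sit on the arrow itself.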
mid = (positions[self(h.weight())] + positions[self(t.weight())]) / QQ(2)
if plot_options.dimension >= 2:
diff = (positions[self(h.weight())] - positions[self(t.weight())]).normalized()
if plot_options.dimension >= 3:
from copy import copy
diff2 = copy(diff)
diff[0], diff[1] = -diff[1], diff[0]
if abs(diff.dot_product(diff2)) > 0.9:
diff[1], diff[2] = -diff[2], diff[1]
else:
diff[0], diff[1] = -diff[1], diff[0]
mid += diff / QQ(10)
G += plot_options.text(i, mid, rgbcolor=plot_options.color(i))
return G
@cached_method
def dual_type_cospace(self):
r"""
Returns the cospace of dual type.
For example, if invoked on the root lattice of type `['B',2]`, returns the
coroot lattice of type `['C',2]`.
.. WARNING::
Not implemented for ambient spaces.
EXAMPLES::
sage: CartanType(['B',2]).root_system().root_lattice().dual_type_cospace()
Coroot lattice of the Root system of type ['C', 2]
sage: CartanType(['F',4]).root_system().coweight_lattice().dual_type_cospace()
Weight lattice of the Root system of type ['F', 4] relabelled by {1: 4, 2: 3, 3: 2, 4: 1}
"""
from .root_space import RootSpace
from .weight_space import WeightSpace
if isinstance(self, RootSpace):
if self.root_system.dual_side:
return self.cartan_type().root_system().root_space(self.base_ring())
else:
return self.cartan_type().dual().root_system().coroot_space(self.base_ring())
if isinstance(self, WeightSpace):
if self.root_system.dual_side:
return self.cartan_type().root_system().weight_space(self.base_ring())
else:
return self.cartan_type().dual().root_system().coweight_space(self.base_ring())
raise TypeError("Not implemented for %s" % self)
@abstract_method(optional=True)
def to_ambient_space_morphism(self):
r"""
Return the morphism to the ambient space.
EXAMPLES::
sage: CartanType(['B',2]).root_system().root_lattice().to_ambient_space_morphism()
Generic morphism:
From: Root lattice of the Root system of type ['B', 2]
To: Ambient space of the Root system of type ['B', 2]
sage: CartanType(['B',2]).root_system().coroot_lattice().to_ambient_space_morphism()
Generic morphism:
From: Coroot lattice of the Root system of type ['B', 2]
To: Ambient space of the Root system of type ['B', 2]
sage: CartanType(['B',2]).root_system().weight_lattice().to_ambient_space_morphism()
Generic morphism:
From: Weight lattice of the Root system of type ['B', 2]
To: Ambient space of the Root system of type ['B', 2]
"""
##########################################################################
class ElementMethods:
@abstract_method
def scalar(self, lambdacheck):
"""
Implement the natural pairing with the coroot lattice.
INPUT:
- ``self`` -- an element of a root lattice realization
- ``lambdacheck`` -- an element of the coroot lattice or coroot space
OUTPUT: the scalar product of ``self`` and ``lambdacheck``
EXAMPLES::
sage: L = RootSystem(['A',4]).root_lattice()
sage: alpha = L.simple_roots()
sage: alphacheck = L.simple_coroots()
sage: alpha[1].scalar(alphacheck[1])
2
sage: alpha[1].scalar(alphacheck[2])
-1
sage: matrix([ [ alpha[i].scalar(alphacheck[j])
....: for i in L.index_set() ]
....: for j in L.index_set() ])
[ 2 -1 0 0]
[-1 2 -1 0]
[ 0 -1 2 -1]
[ 0 0 -1 2]
TESTS::
sage: super(sage.combinat.root_system.root_space.RootSpaceElement,alpha[1]).scalar(alphacheck[1])
Traceback (most recent call last):
...
NotImplementedError: <abstract method scalar at ...>
"""
def symmetric_form(self, alpha):
r"""
Return the symmetric form of ``self`` with ``alpha``.
Consider the simple roots `\alpha_i` and let `(b_{ij})_{ij}`
denote the symmetrized Cartan matrix `(a_{ij})_{ij}`, we have
.. MATH::
(\alpha_i | \alpha_j) = b_{ij}
and extended bilinearly. See Chapter 6 in Kac, Infinite
Dimensional Lie Algebras for more details.
EXAMPLES::
sage: Q = RootSystem(['B',2,1]).root_lattice()
sage: alpha = Q.simple_roots()
sage: alpha[1].symmetric_form(alpha[0])
0
sage: alpha[1].symmetric_form(alpha[1])
4
sage: elt = alpha[0] - 3*alpha[1] + alpha[2]
sage: elt.symmetric_form(alpha[1])
-14
sage: elt.symmetric_form(alpha[0]+2*alpha[2])
14
sage: Q = RootSystem(CartanType(['A',4,2]).dual()).root_lattice()
sage: Qc = RootSystem(['A',4,2]).coroot_lattice()
sage: alpha = Q.simple_roots()
sage: alphac = Qc.simple_roots()
sage: elt = alpha[0] + 2*alpha[1] + 2*alpha[2]
sage: eltc = alphac[0] + 2*alphac[1] + 2*alphac[2]
sage: elt.symmetric_form(alpha[1])
0
sage: eltc.symmetric_form(alphac[1])
0
"""
cm = self.parent().dynkin_diagram().cartan_matrix()
sym = cm.symmetrized_matrix()
iset = self.parent().index_set()
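            # Extend (alpha_i | alpha_j) = b_{ij} bilinearly over the coefficients of ``self`` and ``alpha``.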
return sum(cl*sym[iset.index(ml),iset.index(mr)]*cr
for ml,cl in self for mr,cr in alpha)
def norm_squared(self):
"""
Return the norm squared of ``self`` with respect to the
symmetric form.
EXAMPLES::
sage: Q = RootSystem(['B',2,1]).root_lattice()
sage: alpha = Q.simple_roots()
sage: alpha[1].norm_squared()
4
sage: alpha[2].norm_squared()
2
sage: elt = alpha[0] - 3*alpha[1] + alpha[2]
sage: elt.norm_squared()
50
sage: elt = alpha[0] + alpha[1] + 2*alpha[2]
sage: elt.norm_squared()
0
sage: Q = RootSystem(CartanType(['A',4,2]).dual()).root_lattice()
sage: Qc = RootSystem(['A',4,2]).coroot_lattice()
sage: alpha = Q.simple_roots()
sage: alphac = Qc.simple_roots()
sage: elt = alpha[0] + 2*alpha[1] + 2*alpha[2]
sage: eltc = alphac[0] + 2*alphac[1] + 2*alphac[2]
sage: elt.norm_squared()
0
sage: eltc.norm_squared()
0
"""
return self.symmetric_form(self)
##########################################################################
# Action and orbits w.r.t. the Weyl group
##########################################################################
def simple_reflection(self, i):
r"""
Returns the image of ``self`` by the `i`-th simple reflection.
EXAMPLES::
sage: alpha = RootSystem(["A", 3]).root_lattice().alpha()
sage: alpha[1].simple_reflection(2)
alpha[1] + alpha[2]
sage: Q = RootSystem(['A', 3, 1]).weight_lattice(extended = True)
sage: Lambda = Q.fundamental_weights()
sage: L = Lambda[0] + Q.null_root()
sage: L.simple_reflection(0)
-Lambda[0] + Lambda[1] + Lambda[3]
"""
# Subclasses should optimize whenever possible!
return self.parent().simple_reflection(i)(self)
def simple_reflections(self):
"""
The images of self by all the simple reflections
EXAMPLES::
sage: alpha = RootSystem(["A", 3]).root_lattice().alpha()
sage: alpha[1].simple_reflections()
[-alpha[1], alpha[1] + alpha[2], alpha[1]]
"""
return [s(self) for s in self.parent().simple_reflections()]
def _orbit_iter(self):
"""
Iterate the orbit of ``self`` under the action of the Weyl group.
Call this method when the orbit just needs to be iterated over.
EXAMPLES::
sage: L = RootSystem(["A", 2]).ambient_lattice()
sage: sorted(L.rho()._orbit_iter()) # the output order is not specified
[(1, 2, 0), (1, 0, 2), (2, 1, 0),
(2, 0, 1), (0, 1, 2), (0, 2, 1)]
"""
R = RecursivelyEnumeratedSet([self], attrcall('simple_reflections'),
structure=None, enumeration='breadth')
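            # R enumerates, breadth-first, the closure of {self} under all simple
            # reflections; RecursivelyEnumeratedSet takes care of de-duplication.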
return iter(R)
def orbit(self):
r"""
The orbit of ``self`` under the action of the Weyl group.
EXAMPLES:
`\rho` is a regular element whose orbit is in bijection
with the Weyl group. In particular, it has 6 elements for
the symmetric group `S_3`::
sage: L = RootSystem(["A", 2]).ambient_lattice()
sage: sorted(L.rho().orbit()) # the output order is not specified
[(1, 2, 0), (1, 0, 2), (2, 1, 0),
(2, 0, 1), (0, 1, 2), (0, 2, 1)]
sage: L = RootSystem(["A", 3]).weight_lattice()
sage: len(L.rho().orbit())
24
sage: len(L.fundamental_weights()[1].orbit())
4
sage: len(L.fundamental_weights()[2].orbit())
6
"""
return list(self._orbit_iter())
def _dot_orbit_iter(self):
"""
Iterate the orbit of ``self`` under the dot or affine action
of the Weyl group.
Call this method when the dot orbit just needs to be
iterated over.
EXAMPLES::
sage: L = RootSystem(['A', 2]).ambient_lattice()
sage: sorted(L.rho()._dot_orbit_iter()) # the output order is not specified
[(-2, 1, 4), (-2, 3, 2), (2, -1, 2),
(2, 1, 0), (0, -1, 4), (0, 3, 0)]
sage: sorted(L.rho()._orbit_iter()) # the output order is not specified
[(1, 2, 0), (1, 0, 2), (2, 1, 0),
(2, 0, 1), (0, 1, 2), (0, 2, 1)]
"""
I = self.parent().index_set()
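            # One step of the enumeration: apply the dot action of each simple reflection.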
def apply_action(la):
return [la.dot_action([i]) for i in I]
R = RecursivelyEnumeratedSet([self], apply_action, structure=None,
enumeration='breadth')
return iter(R)
def dot_orbit(self):
r"""
The orbit of ``self`` under the dot or affine action of
the Weyl group.
EXAMPLES::
sage: L = RootSystem(['A', 2]).ambient_lattice()
sage: sorted(L.rho().dot_orbit()) # the output order is not specified
[(-2, 1, 4), (-2, 3, 2), (2, -1, 2),
(2, 1, 0), (0, -1, 4), (0, 3, 0)]
sage: L = RootSystem(['B',2]).weight_lattice()
sage: sorted(L.fundamental_weights()[1].dot_orbit()) # the output order is not specified
[-4*Lambda[1], -4*Lambda[1] + 4*Lambda[2],
-3*Lambda[1] - 2*Lambda[2], -3*Lambda[1] + 4*Lambda[2],
Lambda[1], Lambda[1] - 6*Lambda[2],
2*Lambda[1] - 6*Lambda[2], 2*Lambda[1] - 2*Lambda[2]]
We compare the dot action orbit to the regular orbit::
sage: L = RootSystem(['A', 3]).weight_lattice()
sage: len(L.rho().dot_orbit())
24
sage: len((-L.rho()).dot_orbit())
1
sage: La = L.fundamental_weights()
sage: len(La[1].dot_orbit())
24
sage: len(La[1].orbit())
4
sage: len((-L.rho() + La[1]).dot_orbit())
4
sage: len(La[2].dot_orbit())
24
sage: len(La[2].orbit())
6
sage: len((-L.rho() + La[2]).dot_orbit())
6
"""
return list(self._dot_orbit_iter())
affine_orbit = dot_orbit
##########################################################################
#
##########################################################################
@abstract_method(optional=True)
def associated_coroot(self):
"""
Returns the coroot associated to this root
EXAMPLES::
sage: alpha = RootSystem(["A", 3]).root_space().simple_roots()
sage: alpha[1].associated_coroot()
alphacheck[1]
"""
def reflection(self, root, use_coroot = False):
r"""
Reflects ``self`` across the hyperplane orthogonal to ``root``.
If ``use_coroot`` is True, ``root`` is interpreted as a coroot.
EXAMPLES::
sage: R = RootSystem(['C',4])
sage: weight_lattice = R.weight_lattice()
sage: mu = weight_lattice.from_vector(vector([0,0,1,2]))
sage: coroot_lattice = R.coroot_lattice()
sage: alphavee = coroot_lattice.from_vector(vector([0,0,1,1]))
sage: mu.reflection(alphavee, use_coroot=True)
6*Lambda[2] - 5*Lambda[3] + 2*Lambda[4]
sage: root_lattice = R.root_lattice()
sage: beta = root_lattice.from_vector(vector([0,1,1,0]))
sage: mu.reflection(beta)
Lambda[1] - Lambda[2] + 3*Lambda[4]
"""
if use_coroot:
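                # ``root`` is actually a coroot here, so its ``associated_coroot`` is the
                # underlying root; in both branches the result is x - <x, beta^vee> * beta.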
return self - self.scalar(root) * root.associated_coroot()
else:
return self - self.scalar(root.associated_coroot()) * root
##########################################################################
# Descents
##########################################################################
def has_descent(self, i, positive=False):
"""
Test if self has a descent at position `i`, that is if self is
on the strict negative side of the `i^{th}` simple reflection
hyperplane.
            If ``positive`` is True, tests if it is on the strict positive
side instead.
EXAMPLES::
sage: space=RootSystem(['A',5]).weight_space()
sage: alpha=RootSystem(['A',5]).weight_space().simple_roots()
sage: [alpha[i].has_descent(1) for i in space.index_set()]
[False, True, False, False, False]
sage: [(-alpha[i]).has_descent(1) for i in space.index_set()]
[True, False, False, False, False]
sage: [alpha[i].has_descent(1, True) for i in space.index_set()]
[True, False, False, False, False]
sage: [(-alpha[i]).has_descent(1, True) for i in space.index_set()]
[False, True, False, False, False]
sage: (alpha[1]+alpha[2]+alpha[4]).has_descent(3)
True
sage: (alpha[1]+alpha[2]+alpha[4]).has_descent(1)
False
sage: (alpha[1]+alpha[2]+alpha[4]).has_descent(1, True)
True
"""
s = self.scalar(self.parent().simple_coroots()[i])
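            # A descent at i is a strictly negative pairing with the i-th simple coroot
            # (strictly positive when ``positive`` is set).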
if positive:
return s > 0
else:
return s < 0
def first_descent(self, index_set=None, positive=False):
"""
            Returns the first descent of ``self``.
One can use the index_set option to restrict to the parabolic
subgroup indexed by index_set.
EXAMPLES::
sage: space=RootSystem(['A',5]).weight_space()
sage: alpha=space.simple_roots()
sage: (alpha[1]+alpha[2]+alpha[4]).first_descent()
3
sage: (alpha[1]+alpha[2]+alpha[4]).first_descent([1,2,5])
5
sage: (alpha[1]+alpha[2]+alpha[4]).first_descent([1,2,5,3,4])
5
"""
if index_set is None:
index_set = self.parent().index_set()
for i in index_set:
if self.has_descent(i, positive):
return i
return None
def descents(self, index_set=None, positive=False):
"""
            Returns the descents of ``self``.
EXAMPLES::
sage: space=RootSystem(['A',5]).weight_space()
sage: alpha=space.simple_roots()
sage: (alpha[1]+alpha[2]+alpha[4]).descents()
[3, 5]
"""
if index_set is None:
index_set=self.parent().index_set()
return [ i for i in index_set if self.has_descent(i, positive) ]
def to_dominant_chamber(self, index_set = None, positive = True, reduced_word = False):
r"""
Returns the unique dominant element in the Weyl group orbit of the vector ``self``.
If ``positive`` is False, returns the antidominant orbit element.
With the ``index_set`` optional parameter, this is done with
respect to the corresponding parabolic subgroup.
If ``reduced_word`` is True, returns the 2-tuple (``weight``, ``direction``)
where ``weight`` is the (anti)dominant orbit element and ``direction`` is a reduced word
for the Weyl group element sending ``weight`` to ``self``.
.. warning::
In infinite type, an orbit may not contain a dominant element.
In this case the function may go into an infinite loop.
For affine root systems, errors are generated if
the orbit does not contain the requested kind of representative.
If the input vector is of positive (resp. negative)
level, then there is a dominant (resp. antidominant) element in its orbit
but not an antidominant (resp. dominant) one. If the vector is of level zero,
then there are neither dominant nor antidominant orbit representatives, except
for multiples of the null root, which are themselves both dominant and antidominant
orbit representatives.
EXAMPLES::
sage: space=RootSystem(['A',5]).weight_space()
sage: alpha=RootSystem(['A',5]).weight_space().simple_roots()
sage: alpha[1].to_dominant_chamber()
Lambda[1] + Lambda[5]
sage: alpha[1].to_dominant_chamber([1,2])
Lambda[1] + Lambda[2] - Lambda[3]
sage: wl=RootSystem(['A',2,1]).weight_lattice(extended=True)
sage: mu=wl.from_vector(vector([1,-3,0]))
sage: mu.to_dominant_chamber(positive=False, reduced_word = True)
(-Lambda[1] - Lambda[2] - delta, [0, 2])
sage: R = RootSystem(['A',1,1])
sage: rl = R.root_lattice()
sage: nu = rl.zero()
sage: nu.to_dominant_chamber()
0
sage: nu.to_dominant_chamber(positive=False)
0
sage: mu = rl.from_vector(vector([0,1]))
sage: mu.to_dominant_chamber()
Traceback (most recent call last):
...
ValueError: alpha[1] is not in the orbit of the fundamental chamber
sage: mu.to_dominant_chamber(positive=False)
Traceback (most recent call last):
...
ValueError: alpha[1] is not in the orbit of the negative of the fundamental chamber
"""
if index_set is None:
# default index set is the entire Dynkin node set
index_set = self.parent().index_set()
cartan_type = self.parent().cartan_type()
# generate errors for infinite loop cases in affine type
if cartan_type.is_affine():
if index_set == self.parent().index_set():
# If the full affine Weyl group is being used
level = self.level()
if level > 0:
if not positive:
raise ValueError("%s is not in the orbit of the fundamental chamber"%(self))
elif level < 0:
if positive:
raise ValueError("%s is not in the orbit of the negative of the fundamental chamber"%(self))
elif not (self == self.parent().zero()):
# nonzero level zero weight
if positive:
raise ValueError("%s is not in the orbit of the fundamental chamber"%(self))
else:
raise ValueError("%s is not in the orbit of the negative of the fundamental chamber"%(self))
if reduced_word:
direction = []
while True:
# The first index where it is *not* yet on the positive side
i = self.first_descent(index_set, positive=(not positive))
if i is None:
if reduced_word:
return self, direction
else:
return self
else:
if reduced_word:
direction.append(i)
self = self.simple_reflection(i)
def reduced_word(self, index_set = None, positive = True):
r"""
Returns a reduced word for the inverse of the shortest Weyl group element that sends the vector ``self`` into the dominant chamber.
With the ``index_set`` optional parameter, this is done with
respect to the corresponding parabolic subgroup.
If ``positive`` is False, use the antidominant chamber instead.
EXAMPLES::
sage: space=RootSystem(['A',5]).weight_space()
sage: alpha=RootSystem(['A',5]).weight_space().simple_roots()
sage: alpha[1].reduced_word()
[2, 3, 4, 5]
sage: alpha[1].reduced_word([1,2])
[2]
"""
return self.to_dominant_chamber(index_set=index_set,positive=positive,reduced_word = True)[1]
def is_dominant(self, index_set = None, positive = True):
r"""
Returns whether self is dominant.
            This is done with respect to the root subsystem indicated by the subset of Dynkin nodes
            ``index_set``. If ``index_set`` is None then the entire Dynkin node set is used.
            If ``positive`` is False then the dominance condition is replaced by antidominance.
EXAMPLES::
sage: L = RootSystem(['A',2]).ambient_lattice()
sage: Lambda = L.fundamental_weights()
sage: [x.is_dominant() for x in Lambda]
[True, True]
sage: [x.is_dominant(positive=False) for x in Lambda]
[False, False]
sage: (Lambda[1]-Lambda[2]).is_dominant()
False
sage: (-Lambda[1]+Lambda[2]).is_dominant()
False
sage: (Lambda[1]-Lambda[2]).is_dominant([1])
True
sage: (Lambda[1]-Lambda[2]).is_dominant([2])
False
sage: [x.is_dominant() for x in L.roots()]
[False, True, False, False, False, False]
sage: [x.is_dominant(positive=False) for x in L.roots()]
[False, False, False, False, True, False]
"""
return self.first_descent(index_set, not positive) is None
def is_dominant_weight(self): # Or is_dominant_integral_weight?
"""
Test whether ``self`` is a dominant element of the weight lattice.
EXAMPLES::
sage: L = RootSystem(['A',2]).ambient_lattice()
sage: Lambda = L.fundamental_weights()
sage: [x.is_dominant() for x in Lambda]
[True, True]
sage: (3*Lambda[1]+Lambda[2]).is_dominant()
True
sage: (Lambda[1]-Lambda[2]).is_dominant()
False
sage: (-Lambda[1]+Lambda[2]).is_dominant()
False
Tests that the scalar products with the coroots are all
nonnegative integers. For example, if `x` is the sum of a
dominant element of the weight lattice plus some other element
orthogonal to all coroots, then the implementation correctly
reports `x` to be a dominant weight::
sage: x = Lambda[1] + L([-1,-1,-1])
sage: x.is_dominant_weight()
True
"""
alphacheck = self.parent().simple_coroots()
from sage.rings.semirings.non_negative_integer_semiring import NN
return all(self.inner_product(alphacheck[i]) in NN
for i in self.parent().index_set())
##########################################################################
# weak order
##########################################################################
def succ(self, index_set=None):
r"""
Return the immediate successors of ``self`` for the weak order.
INPUT:
- ``index_set`` - a subset (as a list or iterable) of the
nodes of the Dynkin diagram; (default: ``None`` for all of them)
If ``index_set`` is specified, the successors for the
corresponding parabolic subsystem are returned.
EXAMPLES::
sage: L = RootSystem(['A',3]).weight_lattice()
sage: Lambda = L.fundamental_weights()
sage: Lambda[1].succ()
[-Lambda[1] + Lambda[2]]
sage: L.rho().succ()
[-Lambda[1] + 2*Lambda[2] + Lambda[3], 2*Lambda[1] - Lambda[2] + 2*Lambda[3], Lambda[1] + 2*Lambda[2] - Lambda[3]]
sage: (-L.rho()).succ()
[]
sage: L.rho().succ(index_set=[1])
[-Lambda[1] + 2*Lambda[2] + Lambda[3]]
sage: L.rho().succ(index_set=[2])
[2*Lambda[1] - Lambda[2] + 2*Lambda[3]]
"""
return [ self.simple_reflection(i) for i in self.descents(index_set=index_set, positive=True) ]
def pred(self, index_set=None):
r"""
Return the immediate predecessors of ``self`` for the weak order.
INPUT:
- ``index_set`` - a subset (as a list or iterable) of the
nodes of the Dynkin diagram; (default: ``None`` for all of them)
            If ``index_set`` is specified, the predecessors for the
corresponding parabolic subsystem are returned.
EXAMPLES::
sage: L = RootSystem(['A',3]).weight_lattice()
sage: Lambda = L.fundamental_weights()
sage: Lambda[1].pred()
[]
sage: L.rho().pred()
[]
sage: (-L.rho()).pred()
[Lambda[1] - 2*Lambda[2] - Lambda[3], -2*Lambda[1] + Lambda[2] - 2*Lambda[3], -Lambda[1] - 2*Lambda[2] + Lambda[3]]
sage: (-L.rho()).pred(index_set=[1])
[Lambda[1] - 2*Lambda[2] - Lambda[3]]
"""
return [ self.simple_reflection(i) for i in self.descents(index_set) ]
def greater(self):
r"""
Returns the elements in the orbit of self which are
greater than self in the weak order.
EXAMPLES::
sage: L = RootSystem(['A',3]).ambient_lattice()
sage: e = L.basis()
sage: e[2].greater()
[(0, 0, 1, 0), (0, 0, 0, 1)]
sage: len(L.rho().greater())
24
sage: len((-L.rho()).greater())
1
sage: sorted([len(x.greater()) for x in L.rho().orbit()])
[1, 2, 2, 2, 3, 3, 3, 3, 4, 4, 4, 5, 5, 6, 6, 6, 8, 8, 8, 8, 12, 12, 12, 24]
"""
R = RecursivelyEnumeratedSet([self], attrcall('succ'), structure=None)
return list(R.naive_search_iterator())
def smaller(self):
r"""
Returns the elements in the orbit of self which are
smaller than self in the weak order.
EXAMPLES::
sage: L = RootSystem(['A',3]).ambient_lattice()
sage: e = L.basis()
sage: e[2].smaller()
[(0, 0, 1, 0), (0, 1, 0, 0), (1, 0, 0, 0)]
sage: len(L.rho().smaller())
1
sage: len((-L.rho()).smaller())
24
sage: sorted([len(x.smaller()) for x in L.rho().orbit()])
[1, 2, 2, 2, 3, 3, 3, 3, 4, 4, 4, 5, 5, 6, 6, 6, 8, 8, 8, 8, 12, 12, 12, 24]
"""
R = RecursivelyEnumeratedSet([self], attrcall('pred'), structure=None)
return list(R.naive_search_iterator())
def extraspecial_pair(self):
r"""
Return the extraspecial pair of ``self`` under the ordering
defined by
:meth:`~sage.combinat.root_system.root_lattice_realizations.RootLatticeRealizations.ParentMethods.positive_roots_by_height`.
The *extraspecial pair* of a positive root `\gamma` with some total
ordering `<` of the root lattice that respects height is the pair
of positive roots `(\alpha, \beta)` such that `\gamma = \alpha +
\beta` and `\alpha` is as small as possible.
EXAMPLES::
sage: Q = RootSystem(['G', 2]).root_lattice()
sage: Q.highest_root().extraspecial_pair()
(alpha[2], 3*alpha[1] + alpha[2])
"""
if self.is_positive_root():
r = self
else:
r = -self
p_roots = self.parent().positive_roots_by_height()
# We won't need any roots higher than us
p_roots = p_roots[:p_roots.index(r)]
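            # p_roots is ordered consistently with height, so the first pair (a, b)
            # found with a + b == r has the smallest possible a, as required.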
for i, a in enumerate(p_roots):
for b in p_roots[i + 1:]:
if a + b == r:
return (a, b)
raise ValueError("Unable to find an extraspecial pair")
def height(self):
r"""
Return the height of ``self``.
The height of a root `\alpha = \sum_i a_i \alpha_i` is defined
to be `h(\alpha) := \sum_i a_i`.
EXAMPLES::
sage: Q = RootSystem(['G', 2]).root_lattice()
sage: Q.highest_root().height()
5
"""
return sum(self.coefficients())
##########################################################################
# Level
##########################################################################
def level(self):
"""
EXAMPLES::
sage: L = RootSystem(['A',2,1]).weight_lattice()
sage: L.rho().level()
3
"""
if not self.parent().cartan_type().is_affine():
raise ValueError("%s does not belong to a lattice of affine Cartan type"%self)
return self.scalar(self.parent().null_coroot())
@cached_in_parent_method
def to_simple_root(self, reduced_word=False):
r"""
Return (the index of) a simple root in the orbit of the positive root ``self``.
INPUT:
- ``self`` -- a positive root
- ``reduced_word`` -- a boolean (default: ``False``)
OUTPUT:
- The index `i` of a simple root `\alpha_i`.
If ``reduced_word`` is True, this returns instead a pair
``(i, word)``, where word is a sequence of reflections
mapping `\alpha_i` up the root poset to ``self``.
EXAMPLES::
sage: L = RootSystem(["A",3]).root_lattice()
sage: positive_roots = L.positive_roots()
sage: for alpha in sorted(positive_roots):
....: print("{} {}".format(alpha, alpha.to_simple_root()))
alpha[1] 1
alpha[1] + alpha[2] 2
alpha[1] + alpha[2] + alpha[3] 3
alpha[2] 2
alpha[2] + alpha[3] 3
alpha[3] 3
sage: for alpha in sorted(positive_roots):
....: print("{} {}".format(alpha, alpha.to_simple_root(reduced_word=True)))
alpha[1] (1, ())
alpha[1] + alpha[2] (2, (1,))
alpha[1] + alpha[2] + alpha[3] (3, (1, 2))
alpha[2] (2, ())
alpha[2] + alpha[3] (3, (2,))
alpha[3] (3, ())
ALGORITHM:
This method walks from ``self`` down to the antidominant
chamber by applying successively the simple reflection
given by the first descent. Since ``self`` is a positive
root, each step goes down the root poset, and one must
eventually cross a simple root `\alpha_i`.
.. SEEALSO::
- :meth:`first_descent`
- :meth:`to_dominant_chamber`
.. WARNING::
The behavior is not specified if the input is not a
positive root. For a finite root system, this is
                currently caught (albeit with an imperfect message)::
sage: alpha = L.simple_roots()
sage: (2*alpha[1]).to_simple_root()
Traceback (most recent call last):
...
ValueError: -2*alpha[1] - 2*alpha[2] - 2*alpha[3] is not a positive root
For an infinite root system, this method may run into
an infinite recursion if the input is not a positive
root.
"""
F = self.parent().simple_roots().inverse_family()
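            # F maps each simple root back to its index, so whether ``self`` is already
            # simple can be tested by a direct lookup.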
try:
j = F[self]
if reduced_word:
return (j, ())
else:
return j
except KeyError:
pass
j = self.first_descent(positive=True)
if j is None:
raise ValueError("%s is not a positive root"%self)
result = self.simple_reflection(j).to_simple_root(reduced_word=reduced_word)
if reduced_word:
return (result[0], (j,) + result[1])
else:
return result
@cached_in_parent_method
def associated_reflection(self):
r"""
Given a positive root ``self``, returns a reduced word for the reflection orthogonal to ``self``.
Since the answer is cached, it is a tuple instead of a list.
EXAMPLES::
sage: RootSystem(['C',3]).root_lattice().simple_root(3).weyl_action([1,2]).associated_reflection()
(1, 2, 3, 2, 1)
sage: RootSystem(['C',3]).root_lattice().simple_root(2).associated_reflection()
(2,)
"""
i, reduced_word = self.to_simple_root(reduced_word=True)
return reduced_word + (i,) + tuple(reversed(reduced_word))
def translation(self, x):
"""
INPUT:
- ``self`` - an element `t` at level `0`
- ``x`` - an element of the same space
Returns `x` translated by `t`, that is `x+level(x) t`
EXAMPLES::
sage: L = RootSystem(['A',2,1]).weight_lattice()
sage: alpha = L.simple_roots()
sage: Lambda = L.fundamental_weights()
sage: t = alpha[2]
Let us look at the translation of an element of level `1`::
sage: Lambda[1].level()
1
sage: t.translation(Lambda[1])
-Lambda[0] + 2*Lambda[2]
sage: Lambda[1] + t
-Lambda[0] + 2*Lambda[2]
and of an element of level `0`::
            sage: alpha[1].level()
0
            sage: t.translation(alpha[1])
-Lambda[0] + 2*Lambda[1] - Lambda[2]
sage: alpha[1] + 0*t
-Lambda[0] + 2*Lambda[1] - Lambda[2]
The arguments are given in this seemingly unnatural order to
make it easy to construct the translation function::
sage: f = t.translation
sage: f(Lambda[1])
-Lambda[0] + 2*Lambda[2]
"""
if not self.level().is_zero():
raise ValueError("%s is not of level zero"%(self))
return x + x.level() * self
def weyl_action(self, element, inverse=False):
r"""
Act on ``self`` by an element of the Coxeter or Weyl group.
INPUT:
- ``element`` -- an element of a Coxeter or Weyl group
of the same Cartan type, or a tuple or a list (such as a
reduced word) of elements from the index set
- ``inverse`` -- a boolean (default: ``False``); whether to
act by the inverse element
EXAMPLES::
sage: wl = RootSystem(['A',3]).weight_lattice()
sage: mu = wl.from_vector(vector([1,0,-2]))
sage: mu
Lambda[1] - 2*Lambda[3]
sage: mudom, rw = mu.to_dominant_chamber(positive=False, reduced_word = True)
sage: mudom, rw
(-Lambda[2] - Lambda[3], [1, 2])
Acting by a (reduced) word::
sage: mudom.weyl_action(rw)
Lambda[1] - 2*Lambda[3]
sage: mu.weyl_action(rw, inverse = True)
-Lambda[2] - Lambda[3]
Acting by an element of the Coxeter or Weyl group on a vector in its own
lattice of definition (implemented by matrix multiplication on a vector)::
sage: w = wl.weyl_group().from_reduced_word([1, 2])
sage: mudom.weyl_action(w)
Lambda[1] - 2*Lambda[3]
Acting by an element of an isomorphic Coxeter or Weyl group (implemented by the
action of a corresponding reduced word)::
sage: W = WeylGroup(['A',3], prefix="s")
sage: w = W.from_reduced_word([1, 2])
sage: wl.weyl_group() == W
False
sage: mudom.weyl_action(w)
Lambda[1] - 2*Lambda[3]
"""
# TODO, some day: accept an iterator
if isinstance(element, (tuple, list, range)):
# Action by a (reduced) word
the_word = [x for x in element]
I = self.parent().index_set()
if not all(i in I for i in the_word):
raise ValueError("Not all members of %s are in the index set of the %s"%(element, self.parent()))
else:
if not isinstance(element, Element):
raise TypeError("%s should be an element of a Coxeter group"%(element))
W = element.parent()
if W is self.parent().weyl_group():
# Action by an element of the Coxeter or Weyl group of ``self``
if inverse is True:
element = element.inverse()
return element.action(self)
else:
# Action by an element of an isomorphic Coxeter or Weyl group
if not (W in CoxeterGroups() and W.cartan_type() == self.parent().cartan_type()):
raise TypeError("%s should be an element of a Coxeter group of type %s"%(element, self.parent().cartan_type()))
the_word = element.reduced_word()
if inverse is False:
the_word.reverse()
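            # Act by the word, applying one simple reflection at a time.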
for i in the_word:
self = self.simple_reflection(i)
return self
def weyl_stabilizer(self, index_set=None):
r"""
Returns the subset of Dynkin nodes whose reflections fix ``self``.
If ``index_set`` is not None, only consider nodes in this set.
Note that if ``self`` is dominant or antidominant, then its stabilizer is the
parabolic subgroup defined by the returned node set.
EXAMPLES::
sage: wl = RootSystem(['A',2,1]).weight_lattice(extended = True)
sage: al = wl.null_root()
sage: al.weyl_stabilizer()
[0, 1, 2]
sage: wl = RootSystem(['A',4]).weight_lattice()
sage: mu = wl.from_vector(vector([1,1,0,0]))
sage: mu.weyl_stabilizer()
[3, 4]
sage: mu.weyl_stabilizer(index_set = [1,2,3])
[3]
"""
if index_set is None:
index_set = self.parent().cartan_type().index_set()
alphavee = self.parent().coroot_lattice().basis()
return [i for i in index_set if self.scalar(alphavee[i]) == 0]
def dot_action(self, w, inverse=False):
r"""
Act on ``self`` by ``w`` using the dot or affine action.
Let `w` be an element of the Weyl group. The *dot action*
or *affine action* is given by:
.. MATH::
w \bullet \lambda = w (\lambda + \rho) - \rho,
where `\rho` is the sum of the fundamental weights.
INPUT:
- ``w`` -- an element of a Coxeter or Weyl group of
the same Cartan type, or a tuple or a list (such
as a reduced word) of elements from the index set
- ``inverse`` -- a boolean (default: ``False``); whether
to act by the inverse element
EXAMPLES::
sage: P = RootSystem(['B',3]).weight_lattice()
sage: La = P.fundamental_weights()
sage: mu = La[1] + 2*La[2] - 3*La[3]
sage: mu.dot_action([1])
-3*Lambda[1] + 4*Lambda[2] - 3*Lambda[3]
sage: mu.dot_action([3])
Lambda[1] + Lambda[3]
sage: mu.dot_action([1,2,3])
-4*Lambda[1] + Lambda[2] + 3*Lambda[3]
We check that the origin of this action is at `-\rho`::
sage: all((-P.rho()).dot_action([i]) == -P.rho()
....: for i in P.index_set())
True
REFERENCES:
- :wikipedia:`Affine_action`
"""
rho = self.parent().rho()
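            # Dot action: w . lambda = w(lambda + rho) - rho.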
return (self + rho).weyl_action(w, inverse=inverse) - rho
def is_parabolic_root(self, index_set):
r"""
Supposing that ``self`` is a root, is it in the parabolic subsystem with Dynkin nodes ``index_set``?
INPUT:
- ``index_set`` -- the Dynkin node set of the parabolic subsystem.
.. TODO:: This implementation is only valid in the root or weight lattice
EXAMPLES::
sage: alpha = RootSystem(['A',3]).root_lattice().from_vector(vector([1,1,0]))
sage: alpha.is_parabolic_root([1,3])
False
sage: alpha.is_parabolic_root([1,2])
True
sage: alpha.is_parabolic_root([2])
False
"""
for i in self.support():
if i not in index_set:
return False
return True
def is_short_root(self):
r"""
Return ``True`` if ``self`` is a short (real) root.
Returns False unless the parent is an irreducible root system of finite type
having two root lengths and ``self`` is of the shorter length.
There is no check of whether ``self`` is actually a root.
EXAMPLES::
sage: Q = RootSystem(['C',2]).root_lattice()
sage: al = Q.simple_root(1).weyl_action([1,2]); al
alpha[1] + alpha[2]
sage: al.is_short_root()
True
sage: bt = Q.simple_root(2).weyl_action([2,1,2]); bt
-2*alpha[1] - alpha[2]
sage: bt.is_short_root()
False
sage: RootSystem(['A',2]).root_lattice().simple_root(1).is_short_root()
False
An example in affine type::
sage: Q = RootSystem(['B',2,1]).root_lattice()
sage: alpha = Q.simple_roots()
sage: alpha[0].is_short_root()
False
sage: alpha[1].is_short_root()
False
sage: alpha[2].is_short_root()
True
"""
ct = self.parent().cartan_type()
if not ct.is_irreducible():
raise ValueError("Cartan type needs to be irreducible!")
if not ct.is_finite():
return self.norm_squared() == min(alpha.norm_squared()
for alpha in self.parent().simple_roots())
L = self.parent().root_system.ambient_space() # uses peculiarities of ambient embedding
ls = L(self)
return ls.scalar(ls) < L._maximum_root_length()
#Alternative implementation
#if ct.is_simply_laced():
# return False
#L = self.parent().root_system.ambient_space() # uses peculiarities of ambient embedding
#ls = L(self)
#lensq = ls.scalar(ls)
#if lensq > 2:
# return False
#if lensq == 1:
# return True
## now only types BCFG remain and the square length is 2
#if ct.type() == 'C' or ct.type() == 'G':
# return True
#return False
def to_dual_type_cospace(self):
r"""
Map ``self`` to the dual type cospace.
For example, if ``self`` is in the root lattice of type `['B',2]`, send it to
the coroot lattice of type `['C',2]`.
EXAMPLES::
sage: v = CartanType(['C',3]).root_system().weight_lattice().an_element(); v
2*Lambda[1] + 2*Lambda[2] + 3*Lambda[3]
sage: w = v.to_dual_type_cospace(); w
2*Lambdacheck[1] + 2*Lambdacheck[2] + 3*Lambdacheck[3]
sage: w.parent()
Coweight lattice of the Root system of type ['B', 3]
"""
return self.parent().dual_type_cospace().from_vector(self.to_vector())
def to_classical(self):
r"""
Map ``self`` to the classical lattice/space.
Only makes sense for affine type.
EXAMPLES::
sage: R = CartanType(['A',3,1]).root_system()
sage: alpha = R.root_lattice().an_element(); alpha
2*alpha[0] + 2*alpha[1] + 3*alpha[2]
sage: alb = alpha.to_classical(); alb
alpha[2] - 2*alpha[3]
sage: alb.parent()
Root lattice of the Root system of type ['A', 3]
sage: v = R.ambient_space().an_element(); v
2*e[0] + 2*e[1] + 3*e[2]
sage: v.to_classical()
(2, 2, 3, 0)
"""
return self.parent().classical()(self)
@abstract_method(optional=True)
def to_ambient(self):
r"""
Map ``self`` to the ambient space.
EXAMPLES::
sage: alpha = CartanType(['B',4]).root_system().root_lattice().an_element(); alpha
2*alpha[1] + 2*alpha[2] + 3*alpha[3]
sage: alpha.to_ambient()
(2, 0, 1, -3)
sage: mu = CartanType(['B',4]).root_system().weight_lattice().an_element(); mu
2*Lambda[1] + 2*Lambda[2] + 3*Lambda[3]
sage: mu.to_ambient()
(7, 5, 3, 0)
sage: v = CartanType(['B',4]).root_system().ambient_space().an_element(); v
(2, 2, 3, 0)
sage: v.to_ambient()
(2, 2, 3, 0)
sage: alphavee = CartanType(['B',4]).root_system().coroot_lattice().an_element(); alphavee
2*alphacheck[1] + 2*alphacheck[2] + 3*alphacheck[3]
sage: alphavee.to_ambient()
(2, 0, 1, -3)
"""
def is_long_root(self):
"""
Return ``True`` if ``self`` is a long (real) root.
EXAMPLES::
sage: Q = RootSystem(['B',2,1]).root_lattice()
sage: alpha = Q.simple_roots()
sage: alpha[0].is_long_root()
True
sage: alpha[1].is_long_root()
True
sage: alpha[2].is_long_root()
False
"""
alpha = self.parent().simple_roots()
norm_sq = self.norm_squared()
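            # Long roots have the maximal squared length among the simple roots; in addition,
            # each coefficient rescaled by the length ratio alpha_i^2 / |self|^2 must be an integer.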
return max(sroot.norm_squared() for sroot in alpha) == norm_sq \
and all(c * alpha[i].norm_squared() / norm_sq in ZZ for i,c in self)
def is_imaginary_root(self):
r"""
Return ``True`` if ``self`` is an imaginary root.
A root `\alpha` is imaginary if it is not `W` conjugate
to a simple root where `W` is the corresponding Weyl group.
EXAMPLES::
sage: Q = RootSystem(['B',2,1]).root_lattice()
sage: alpha = Q.simple_roots()
sage: alpha[0].is_imaginary_root()
False
sage: elt = alpha[0] + alpha[1] + 2*alpha[2]
sage: elt.is_imaginary_root()
True
"""
return self.norm_squared() <= 0
def is_real_root(self):
r"""
Return ``True`` if ``self`` is a real root.
A root `\alpha` is real if it is `W` conjugate to a simple
root where `W` is the corresponding Weyl group.
EXAMPLES::
sage: Q = RootSystem(['B',2,1]).root_lattice()
sage: alpha = Q.simple_roots()
sage: alpha[0].is_real_root()
True
sage: elt = alpha[0] + alpha[1] + 2*alpha[2]
sage: elt.is_real_root()
False
"""
return self.norm_squared() > 0
| 42.161269 | 189 | 0.501307 |
4a1ec84ec011ea3a0f463a55930a1810edfcf081 | 6,810 | py | Python | bindings/python/ensmallen_graph/datasets/string/phascolarctobacteriumspcag266.py | caufieldjh/ensmallen_graph | 14e98b1cdbc73193a84a913d7d4f2b2b3eb2c43a | [
"MIT"
] | null | null | null | bindings/python/ensmallen_graph/datasets/string/phascolarctobacteriumspcag266.py | caufieldjh/ensmallen_graph | 14e98b1cdbc73193a84a913d7d4f2b2b3eb2c43a | [
"MIT"
] | null | null | null | bindings/python/ensmallen_graph/datasets/string/phascolarctobacteriumspcag266.py | caufieldjh/ensmallen_graph | 14e98b1cdbc73193a84a913d7d4f2b2b3eb2c43a | [
"MIT"
] | null | null | null | """
This file offers the methods to automatically retrieve the graph Phascolarctobacterium sp. CAG266.
The graph is automatically retrieved from the STRING repository.
Report
---------------------
At the time of rendering these methods (please see datetime below), the graph
had the following characteristics:
Datetime: 2021-02-02 23:35:36.250853
The undirected graph Phascolarctobacterium sp. CAG266 has 1752 nodes and
104800 weighted edges, of which none are self-loops. The graph is dense
as it has a density of 0.06832 and has 7 connected components, where the
component with most nodes has 1735 nodes and the component with the least
nodes has 2 nodes. The graph median node degree is 91, the mean node degree
is 119.63, and the node degree mode is 6. The top 5 most central nodes
are 1262915.BN574_01027 (degree 819), 1262915.BN574_01580 (degree 691),
1262915.BN574_00668 (degree 680), 1262915.BN574_01017 (degree 679) and
1262915.BN574_01333 (degree 616).
References
---------------------
Please cite the following if you use the data:
@article{szklarczyk2019string,
title={STRING v11: protein--protein association networks with increased coverage, supporting functional discovery in genome-wide experimental datasets},
author={Szklarczyk, Damian and Gable, Annika L and Lyon, David and Junge, Alexander and Wyder, Stefan and Huerta-Cepas, Jaime and Simonovic, Milan and Doncheva, Nadezhda T and Morris, John H and Bork, Peer and others},
journal={Nucleic acids research},
volume={47},
number={D1},
pages={D607--D613},
year={2019},
publisher={Oxford University Press}
}
Usage example
----------------------
The usage of this graph is relatively straightforward:
.. code:: python
# First import the function to retrieve the graph from the datasets
from ensmallen_graph.datasets.string import PhascolarctobacteriumSpCag266
# Then load the graph
graph = PhascolarctobacteriumSpCag266()
# Finally, you can do anything with it, for instance, compute its report:
print(graph)
# If you need to run a link prediction task with validation,
# you can split the graph using a connected holdout as follows:
train_graph, validation_graph = graph.connected_holdout(
        # You can use an 80/20 split for the holdout, for example.
train_size=0.8,
# The random state is used to reproduce the holdout.
random_state=42,
        # Whether to show a loading bar.
verbose=True
)
# Remember that, if you need, you can enable the memory-time trade-offs:
train_graph.enable(
vector_sources=True,
vector_destinations=True,
vector_outbounds=True
)
# Consider using the methods made available in the Embiggen package
# to run graph embedding or link prediction tasks.
"""
from typing import Dict
from ..automatic_graph_retrieval import AutomaticallyRetrievedGraph
from ...ensmallen_graph import EnsmallenGraph # pylint: disable=import-error
def PhascolarctobacteriumSpCag266(
directed: bool = False,
verbose: int = 2,
cache_path: str = "graphs/string",
**additional_graph_kwargs: Dict
) -> EnsmallenGraph:
"""Return new instance of the Phascolarctobacterium sp. CAG266 graph.
The graph is automatically retrieved from the STRING repository.
Parameters
-------------------
directed: bool = False,
        Whether to load the graph as directed or undirected.
By default false.
verbose: int = 2,
        Whether to show loading bars during the retrieval and building
of the graph.
cache_path: str = "graphs",
Where to store the downloaded graphs.
additional_graph_kwargs: Dict,
Additional graph kwargs.
Returns
-----------------------
    Instance of Phascolarctobacterium sp. CAG266 graph.
Report
---------------------
At the time of rendering these methods (please see datetime below), the graph
had the following characteristics:
Datetime: 2021-02-02 23:35:36.250853
The undirected graph Phascolarctobacterium sp. CAG266 has 1752 nodes and
104800 weighted edges, of which none are self-loops. The graph is dense
as it has a density of 0.06832 and has 7 connected components, where the
component with most nodes has 1735 nodes and the component with the least
nodes has 2 nodes. The graph median node degree is 91, the mean node degree
is 119.63, and the node degree mode is 6. The top 5 most central nodes
are 1262915.BN574_01027 (degree 819), 1262915.BN574_01580 (degree 691),
1262915.BN574_00668 (degree 680), 1262915.BN574_01017 (degree 679) and
1262915.BN574_01333 (degree 616).
References
---------------------
Please cite the following if you use the data:
@article{szklarczyk2019string,
title={STRING v11: protein--protein association networks with increased coverage, supporting functional discovery in genome-wide experimental datasets},
author={Szklarczyk, Damian and Gable, Annika L and Lyon, David and Junge, Alexander and Wyder, Stefan and Huerta-Cepas, Jaime and Simonovic, Milan and Doncheva, Nadezhda T and Morris, John H and Bork, Peer and others},
journal={Nucleic acids research},
volume={47},
number={D1},
pages={D607--D613},
year={2019},
publisher={Oxford University Press}
}
Usage example
----------------------
The usage of this graph is relatively straightforward:
.. code:: python
# First import the function to retrieve the graph from the datasets
from ensmallen_graph.datasets.string import PhascolarctobacteriumSpCag266
# Then load the graph
graph = PhascolarctobacteriumSpCag266()
# Finally, you can do anything with it, for instance, compute its report:
print(graph)
# If you need to run a link prediction task with validation,
# you can split the graph using a connected holdout as follows:
train_graph, validation_graph = graph.connected_holdout(
# You can use an 80/20 split the holdout, for example.
train_size=0.8,
# The random state is used to reproduce the holdout.
random_state=42,
# Wether to show a loading bar.
verbose=True
)
# Remember that, if you need, you can enable the memory-time trade-offs:
train_graph.enable(
vector_sources=True,
vector_destinations=True,
vector_outbounds=True
)
# Consider using the methods made available in the Embiggen package
# to run graph embedding or link prediction tasks.
"""
return AutomaticallyRetrievedGraph(
graph_name="PhascolarctobacteriumSpCag266",
dataset="string",
directed=directed,
verbose=verbose,
cache_path=cache_path,
additional_graph_kwargs=additional_graph_kwargs
)()
| 35.65445 | 223 | 0.707489 |
4a1ec8d3630fb3ffe44f50119be80c6f2cbcd40c | 5,092 | py | Python | luoyangc/settings.example.py | luoyangC/luoyangc | a1e0d84e463d661fbd5e0333a4770d28048172ba | [
"MIT"
] | 1 | 2019-07-07T10:40:27.000Z | 2019-07-07T10:40:27.000Z | luoyangc/settings.example.py | luoyangC/luoyangc_django | a1e0d84e463d661fbd5e0333a4770d28048172ba | [
"MIT"
] | null | null | null | luoyangc/settings.example.py | luoyangC/luoyangc_django | a1e0d84e463d661fbd5e0333a4770d28048172ba | [
"MIT"
] | null | null | null | """
Django settings for luoyangc project.
Generated by 'django-admin startproject' using Django 2.0.
For more information on this file, see
https://docs.djangoproject.com/en/2.0/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/2.0/ref/settings/
"""
# The parts marked with * below must be replaced with your own configuration
import os
import sys
import datetime
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
sys.path.insert(0, BASE_DIR)
sys.path.insert(0, os.path.join(BASE_DIR, 'apps'))
sys.path.insert(0, os.path.join(BASE_DIR, 'extra_apps'))
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/2.0/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = '*'
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
ALLOWED_HOSTS = ['*']
# Replace Django's built-in user model
AUTH_USER_MODEL = 'users.UserProfile'
# Application definition
INSTALLED_APPS = [
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
'apps.users.apps.UserConfig',
'apps.articles.apps.ArticlesConfig',
'apps.home.apps.HomeConfig',
'apps.operation.apps.OperationConfig',
'rest_framework',
'crispy_forms',
'django_filters',
'reversion',
'xadmin',
'mdeditor',
'corsheaders'
]
MIDDLEWARE = [
'corsheaders.middleware.CorsMiddleware',
'django.middleware.security.SecurityMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
]
CORS_ORIGIN_ALLOW_ALL = True
ROOT_URLCONF = 'luoyangc.urls'
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [os.path.join(BASE_DIR, 'templates')],
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.template.context_processors.debug',
'django.template.context_processors.request',
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
],
},
},
]
WSGI_APPLICATION = 'luoyangc.wsgi.application'
# Database
# https://docs.djangoproject.com/en/2.0/ref/settings/#databases
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.mysql',
'NAME': '*',
'USER': '*',
'PASSWORD': '*',
'HOST': '*',
'PORT': '*',
'OPTIONS': {'init_command': 'SET default_storage_engine=INNODB;'}
}
}
# Data cache
CACHES = {
"default": {
"BACKEND": "django_redis.cache.RedisCache",
"LOCATION": "redis://*:*",
"OPTIONS": {
"CLIENT_CLASS": "django_redis.client.DefaultClient",
}
}
}
# Password validation
# https://docs.djangoproject.com/en/2.0/ref/settings/#auth-password-validators
AUTH_PASSWORD_VALIDATORS = [
{
'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
},
]
# Internationalization
# https://docs.djangoproject.com/en/2.0/topics/i18n/
LANGUAGE_CODE = 'zh-Hans'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = False
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/2.0/howto/static-files/
STATIC_URL = '/static/'
STATICFILES_DIRS = (
os.path.join(BASE_DIR, "static"),
)
MEDIA_URL = '/media/'
MEDIA_ROOT = os.path.join(BASE_DIR, 'media')
# Django email settings
EMAIL_BACKEND = 'django.core.mail.backends.smtp.EmailBackend'
EMAIL_HOST = 'smtp.sina.com'
EMAIL_PORT = 465
EMAIL_USE_SSL = True
EMAIL_HOST_USER = '*'
EMAIL_FROM = '*'
EMAIL_HOST_PASSWORD = '*'
CONFIRM_DAYS = 7
REST_FRAMEWORK = {
'DEFAULT_FILTER_BACKENDS': ('django_filters.rest_framework.DjangoFilterBackend',)
}
REST_FRAMEWORK_EXTENSIONS = {
    # Cache timeout
'DEFAULT_CACHE_RESPONSE_TIMEOUT': 60 * 60,
    # Cache backend
'DEFAULT_USE_CACHE': 'default',
}
# JWT settings
JWT_AUTH = {
    # JWT lifetime
'JWT_EXPIRATION_DELTA': datetime.timedelta(days=7),
'JWT_AUTH_HEADER_PREFIX': 'JWT',
}
# Production host URL
HOST_URL = '*'
# Turing Robot API
API_KEY = '*'
# Aliyun OSS
OSS_URL = 'https://luoyangc.oss-cn-shanghai.aliyuncs.com'
| 24.363636 | 92 | 0.651218 |
4a1eca1cc1987907cfcf8c73840c1a932a9fcdba | 1,085 | py | Python | tests/test_lists.py | wxnacy/wpy | 575def7e0653e92658df1aaeea71e381ac5de533 | [
"MIT"
] | null | null | null | tests/test_lists.py | wxnacy/wpy | 575def7e0653e92658df1aaeea71e381ac5de533 | [
"MIT"
] | null | null | null | tests/test_lists.py | wxnacy/wpy | 575def7e0653e92658df1aaeea71e381ac5de533 | [
"MIT"
] | null | null | null | #!/usr/bin/env python
# -*- coding:utf-8 -*-
# Author: [email protected]
"""
Random methods
"""
from wpy.lists import sorted_plus
from wpy.lists import search
def test_sortd_plus():
"""排序增强"""
arr = [3, 5, 2, 4, 1]
sorted_plus(arr)
assert arr == [1, 2, 3, 4, 5]
arr = [{"age": 5, "id": 2}, {"age": 5, "id": 5}, {"age": 3, "id": 4}]
sorted_plus(arr, [('age', 1), ('id', -1)])
assert arr == [{"age": 3, "id": 4},{"age": 5, "id": 5}, {"age": 5, "id": 2}]
arr = [{"age": 5, "id": 2}, {"id": 5}, {"age": 3}]
sorted_plus(arr, [('age', -1)])
assert arr == [{"age": 5, "id": 2},{"age": 3}, {"id": 5}]
def test_search():
datas = [
"123wxn",
"wxn",
"test",
]
res = search(datas, 'wxn')
assert res == ['wxn', '123wxn']
datas = ['wxnacy', 'wen', 'testwxn', 'wxnsss']
res = search(datas, 'x')
assert res == ['wxnacy', 'wxnsss', 'testwxn']
res = search(datas, 'wxn')
assert res == ['wxnacy', 'wxnsss', 'testwxn']
res = search(['--space', '--param'], 'pa')
assert res == ['--param', '--space']
| 24.659091 | 80 | 0.474654 |
4a1ecb514ab2b196e96b6f9b1e39884d869e2944 | 17,983 | py | Python | tests/test_config_reader.py | tarkatronic/sceptre | 520deb27253866100234fd0cb334de38c65c2a89 | [
"Apache-2.0"
] | null | null | null | tests/test_config_reader.py | tarkatronic/sceptre | 520deb27253866100234fd0cb334de38c65c2a89 | [
"Apache-2.0"
] | null | null | null | tests/test_config_reader.py | tarkatronic/sceptre | 520deb27253866100234fd0cb334de38c65c2a89 | [
"Apache-2.0"
] | null | null | null | # -*- coding: utf-8 -*-
import os
from mock import patch, sentinel, MagicMock
import pytest
import yaml
import errno
from sceptre.context import SceptreContext
from sceptre.exceptions import DependencyDoesNotExistError
from sceptre.exceptions import VersionIncompatibleError
from sceptre.exceptions import ConfigFileNotFoundError
from sceptre.exceptions import InvalidSceptreDirectoryError
from sceptre.exceptions import InvalidConfigFileError
from freezegun import freeze_time
from click.testing import CliRunner
from sceptre.config.reader import ConfigReader
class TestConfigReader(object):
@patch("sceptre.config.reader.ConfigReader._check_valid_project_path")
def setup_method(self, test_method, mock_check_valid_project_path):
self.runner = CliRunner()
self.test_project_path = os.path.join(
os.getcwd(), "tests", "fixtures"
)
self.context = SceptreContext(
project_path=self.test_project_path,
command_path="A"
)
def test_config_reader_correctly_initialised(self):
config_reader = ConfigReader(self.context)
assert config_reader.context == self.context
def test_config_reader_with_invalid_path(self):
with pytest.raises(InvalidSceptreDirectoryError):
ConfigReader(SceptreContext("/path/does/not/exist", "example"))
def create_project(self):
"""
Creates a new random temporary directory with a config subdirectory
"""
with self.runner.isolated_filesystem():
project_path = os.path.abspath('./example')
config_dir = os.path.join(project_path, "config")
os.makedirs(config_dir)
return (project_path, config_dir)
def write_config(self, abs_path, config):
"""
Writes a configuration dict to the specified path as YAML
"""
if abs_path.endswith(".yaml"):
dir_path = os.path.split(abs_path)[0]
if not os.path.exists(dir_path):
try:
os.makedirs(dir_path)
except OSError as exc:
if exc.errno != errno.EEXIST:
raise
with open(abs_path, 'w') as config_file:
yaml.safe_dump(
config, stream=config_file, default_flow_style=False
)
@pytest.mark.parametrize("filepaths,target", [
(
["A/1.yaml"], "A/1.yaml"
),
(
["A/1.yaml", "A/B/1.yaml"], "A/B/1.yaml"
),
(
["A/1.yaml", "A/B/1.yaml", "A/B/C/1.yaml"], "A/B/C/1.yaml"
)
])
def test_read_reads_config_file(self, filepaths, target):
project_path, config_dir = self.create_project()
for rel_path in filepaths:
config = {"filepath": rel_path}
abs_path = os.path.join(config_dir, rel_path)
self.write_config(abs_path, config)
self.context.project_path = project_path
config = ConfigReader(self.context).read(target)
assert config == {
"project_path": project_path,
"stack_group_path": os.path.split(target)[0],
"filepath": target
}
def test_read_nested_configs(self):
with self.runner.isolated_filesystem():
project_path = os.path.abspath('./example')
config_dir = os.path.join(project_path, "config")
stack_group_dir_a = os.path.join(config_dir, "A")
stack_group_dir_b = os.path.join(stack_group_dir_a, "B")
stack_group_dir_c = os.path.join(stack_group_dir_b, "C")
os.makedirs(stack_group_dir_c)
config_filename = "config.yaml"
config_a = {"keyA": "A", "shared": "A"}
with open(os.path.join(stack_group_dir_a, config_filename), 'w') as\
config_file:
yaml.safe_dump(
config_a, stream=config_file, default_flow_style=False
)
config_b = {"keyB": "B", "parent": "{{ keyA }}", "shared": "B"}
with open(os.path.join(stack_group_dir_b, config_filename), 'w') as\
config_file:
yaml.safe_dump(
config_b, stream=config_file, default_flow_style=False
)
config_c = {"keyC": "C", "parent": "{{ keyB }}", "shared": "C"}
with open(os.path.join(stack_group_dir_c, config_filename), 'w') as\
config_file:
yaml.safe_dump(
config_c, stream=config_file, default_flow_style=False
)
self.context.project_path = project_path
reader = ConfigReader(self.context)
config_a = reader.read("A/config.yaml")
assert config_a == {
"project_path": project_path,
"stack_group_path": "A",
"keyA": "A",
"shared": "A"
}
config_b = reader.read("A/B/config.yaml")
assert config_b == {
"project_path": project_path,
"stack_group_path": "A/B",
"keyA": "A",
"keyB": "B",
"shared": "B",
"parent": "A"
}
config_c = reader.read(
"A/B/C/config.yaml"
)
assert config_c == {
"project_path": project_path,
"stack_group_path": "A/B/C",
"keyA": "A",
"keyB": "B",
"keyC": "C",
"shared": "C",
"parent": "B"
}
def test_read_reads_config_file_with_base_config(self):
with self.runner.isolated_filesystem():
project_path = os.path.abspath('./example')
config_dir = os.path.join(project_path, "config")
stack_group_dir = os.path.join(config_dir, "A")
os.makedirs(stack_group_dir)
config = {"config": "config"}
with open(os.path.join(stack_group_dir, "stack.yaml"), 'w') as\
config_file:
yaml.safe_dump(
config, stream=config_file, default_flow_style=False
)
base_config = {
"base_config": "base_config"
}
self.context.project_path = project_path
config = ConfigReader(self.context).read(
"A/stack.yaml", base_config
)
assert config == {
"project_path": project_path,
"stack_group_path": "A",
"config": "config",
"base_config": "base_config"
}
def test_read_with_nonexistant_filepath(self):
project_path, config_dir = self.create_project()
self.context.project_path = project_path
with pytest.raises(ConfigFileNotFoundError):
ConfigReader(self.context).read("stack.yaml")
def test_read_with_empty_config_file(self):
config_reader = ConfigReader(self.context)
config = config_reader.read(
"account/stack-group/region/subnets.yaml"
)
assert config == {
"project_path": self.test_project_path,
"stack_group_path": "account/stack-group/region"
}
def test_read_with_templated_config_file(self):
self.context.user_variables = {"variable_key": "user_variable_value"}
config_reader = ConfigReader(self.context)
config_reader.templating_vars["stack_group_config"] = {
"region": "region_region",
"project_code": "account_project_code",
"required_version": "'>1.0'",
"template_bucket_name": "stack_group_template_bucket_name"
}
os.environ["TEST_ENV_VAR"] = "environment_variable_value"
config = config_reader.read(
"account/stack-group/region/security_groups.yaml"
)
assert config == {
'project_path': self.context.project_path,
"stack_group_path": "account/stack-group/region",
"parameters": {
"param1": "user_variable_value",
"param2": "environment_variable_value",
"param3": "region_region",
"param4": "account_project_code",
"param5": ">1.0",
"param6": "stack_group_template_bucket_name"
}
}
def test_aborts_on_incompatible_version_requirement(self):
config = {
'required_version': '<0'
}
with pytest.raises(VersionIncompatibleError):
ConfigReader(self.context)._check_version(config)
@freeze_time("2012-01-01")
@pytest.mark.parametrize("stack_name,config,expected", [
(
"name",
{
"template_bucket_name": "bucket-name",
"template_key_prefix": "prefix",
"region": "eu-west-1"
},
{
"bucket_name": "bucket-name",
"bucket_key": "prefix/name/2012-01-01-00-00-00-000000Z.json"
}
),
(
"name",
{
"template_bucket_name": "bucket-name",
"region": "eu-west-1"
},
{
"bucket_name": "bucket-name",
"bucket_key": "name/2012-01-01-00-00-00-000000Z.json"
}
),
(
"name",
{
"template_bucket_name": "bucket-name",
},
{
"bucket_name": "bucket-name",
"bucket_key": "name/2012-01-01-00-00-00-000000Z.json"
}
),
(
"name", {}, None
)
]
)
def test_collect_s3_details(self, stack_name, config, expected):
details = ConfigReader._collect_s3_details(stack_name, config)
assert details == expected
@patch("sceptre.config.reader.ConfigReader._collect_s3_details")
@patch("sceptre.config.reader.Stack")
def test_construct_stacks_constructs_stack(
self, mock_Stack, mock_collect_s3_details
):
mock_Stack.return_value = sentinel.stack
sentinel.stack.dependencies = []
mock_collect_s3_details.return_value = sentinel.s3_details
self.context.project_path = os.path.abspath("tests/fixtures-vpc")
self.context.command_path = "account/stack-group/region/vpc.yaml"
stacks = ConfigReader(self.context).construct_stacks()
mock_Stack.assert_any_call(
name="account/stack-group/region/vpc",
project_code="account_project_code",
template_path=os.path.join(
self.context.project_path, "templates/path/to/template"
),
region="region_region",
profile="account_profile",
parameters={"param1": "val1"},
sceptre_user_data={},
hooks={},
s3_details=sentinel.s3_details,
dependencies=["child/level", "top/level"],
iam_role=None,
role_arn=None,
protected=False,
tags={},
external_name=None,
notifications=None,
on_failure=None,
stack_timeout=0,
required_version='>1.0',
template_bucket_name='stack_group_template_bucket_name',
template_key_prefix=None,
stack_group_config={
"custom_key": "custom_value"
}
)
assert stacks == ({sentinel.stack}, {sentinel.stack})
@pytest.mark.parametrize("command_path,filepaths,expected_stacks,expected_command_stacks,full_scan", [
(
"",
["A/1.yaml"],
{"A/1"},
{"A/1"},
False
),
(
"",
["A/1.yaml", "A/2.yaml", "A/3.yaml"],
{"A/3", "A/2", "A/1"},
{"A/3", "A/2", "A/1"},
False
),
(
"",
["A/1.yaml", "A/A/1.yaml"],
{"A/1", "A/A/1"},
{"A/1", "A/A/1"},
False
),
(
"",
["A/1.yaml", "A/A/1.yaml", "A/A/2.yaml"],
{"A/1", "A/A/1", "A/A/2"},
{"A/1", "A/A/1", "A/A/2"},
False
),
(
"",
["A/A/1.yaml", "A/B/1.yaml"],
{"A/A/1", "A/B/1"},
{"A/A/1", "A/B/1"},
False
),
(
"Abd",
["Abc/1.yaml", "Abd/1.yaml"],
{"Abd/1"},
{"Abd/1"},
False
),
(
"Abd",
["Abc/1.yaml", "Abd/Abc/1.yaml", "Abd/2.yaml"],
{"Abd/2", "Abd/Abc/1"},
{"Abd/2", "Abd/Abc/1"},
False
),
(
"Abd/Abc",
["Abc/1.yaml", "Abd/Abc/1.yaml", "Abd/2.yaml"],
{"Abd/Abc/1"},
{"Abd/Abc/1"},
False
),
(
"Ab",
["Abc/1.yaml", "Abd/1.yaml"],
set(),
set(),
False
),
(
"Abd/Abc",
["Abc/1.yaml", "Abd/Abc/1.yaml", "Abd/2.yaml"],
{"Abc/1", "Abd/Abc/1", "Abd/2"},
{"Abd/Abc/1"},
True
),
])
def test_construct_stacks_with_valid_config(
self, command_path, filepaths, expected_stacks, expected_command_stacks, full_scan
):
project_path, config_dir = self.create_project()
for rel_path in filepaths:
config = {
"region": "region",
"project_code": "project_code",
"template_path": rel_path
}
abs_path = os.path.join(config_dir, rel_path)
self.write_config(abs_path, config)
self.context.project_path = project_path
self.context.command_path = command_path
self.context.full_scan = full_scan
config_reader = ConfigReader(self.context)
all_stacks, command_stacks = config_reader.construct_stacks()
assert {str(stack) for stack in all_stacks} == expected_stacks
assert {str(stack) for stack in command_stacks} == expected_command_stacks
@pytest.mark.parametrize("filepaths, del_key", [
(["A/1.yaml"], "project_code"),
(["A/1.yaml"], "region"),
(["A/1.yaml"], "template_path"),
])
def test_missing_attr(
self, filepaths, del_key
):
project_path, config_dir = self.create_project()
for rel_path in filepaths:
config = {
"project_code": "project_code",
"region": "region",
"template_path": rel_path
}
# Delete the mandatory key to be tested.
del config[del_key]
abs_path = os.path.join(config_dir, rel_path)
self.write_config(abs_path, config)
self.context.project_path = project_path
try:
config_reader = ConfigReader(self.context)
all_stacks, command_stacks = config_reader.construct_stacks()
except InvalidConfigFileError as e:
# Test that the missing key is reported.
assert del_key in str(e)
except Exception:
raise
else:
assert False
@pytest.mark.parametrize("filepaths, dependency", [
(["A/1.yaml", "B/1.yaml", "B/2.yaml"], "A/1.yaml"),
(["A/1.yaml", "B/1.yaml", "B/2.yaml"], "B/1.yaml"),
])
def test_existing_dependency(
self, filepaths, dependency
):
project_path, config_dir = self.create_project()
for rel_path in filepaths:
# Set up config with reference to an existing stack
config = {
"project_code": "project_code",
"region": "region",
"template_path": rel_path,
"dependencies": [dependency]
}
abs_path = os.path.join(config_dir, rel_path)
self.write_config(abs_path, config)
self.context.project_path = project_path
try:
config_reader = ConfigReader(self.context)
all_stacks, command_stacks = config_reader.construct_stacks()
except Exception:
raise
else:
assert True
@pytest.mark.parametrize("filepaths, dependency", [
(["A/1.yaml", "B/1.yaml", "B/2.yaml"], "A/2.yaml"),
(["A/1.yaml", "B/1.yaml", "B/2.yaml"], "1.yaml"),
])
def test_missing_dependency(
self, filepaths, dependency
):
project_path, config_dir = self.create_project()
for rel_path in filepaths:
# Set up config with reference to non-existing stack
config = {
"project_code": "project_code",
"region": "region",
"template_path": rel_path,
"dependencies": [dependency]
}
abs_path = os.path.join(config_dir, rel_path)
self.write_config(abs_path, config)
self.context.project_path = project_path
try:
config_reader = ConfigReader(self.context)
all_stacks, command_stacks = config_reader.construct_stacks()
except DependencyDoesNotExistError as e:
# Test that the missing dependency is reported.
assert dependency in str(e)
except Exception:
raise
else:
assert False
def test_resolve_node_tag(self):
mock_loader = MagicMock(yaml.Loader)
mock_loader.resolve.return_value = "new_tag"
mock_node = MagicMock(yaml.Node)
mock_node.tag = "old_tag"
mock_node.value = "String"
config_reader = ConfigReader(self.context)
new_node = config_reader.resolve_node_tag(mock_loader, mock_node)
assert new_node.tag == 'new_tag'
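# --- Illustrative usage sketch (added; not part of the original test module) ---
# Shows how the ConfigReader exercised above is typically driven outside the
# tests. The SceptreContext import location and both paths are assumptions /
# placeholders, not values taken from this file.
def _example_construct_stacks(project_path="/path/to/sceptre/project",
                              command_path="account/stack-group/region/vpc.yaml"):
    from sceptre.context import SceptreContext  # assumed import path
    context = SceptreContext(project_path=project_path, command_path=command_path)
    all_stacks, command_stacks = ConfigReader(context).construct_stacks()
    return all_stacks, command_stacks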
| 33.301852 | 106 | 0.537063 |
4a1ecb8bb07cc8a6078a29fc7d8ab735447434d9 | 5,059 | py | Python | services/dy-csv-table/server/input-retriever.py | GitHK/osparc-services-forked | a8ab08ff7c32de8f1abde015c1515e8cf61426c0 | [
"MIT"
] | 2 | 2019-08-16T16:54:48.000Z | 2020-06-10T05:50:35.000Z | services/dy-csv-table/server/input-retriever.py | GitHK/osparc-services-forked | a8ab08ff7c32de8f1abde015c1515e8cf61426c0 | [
"MIT"
] | 63 | 2019-07-04T07:03:42.000Z | 2022-02-09T18:03:55.000Z | services/dy-csv-table/server/input-retriever.py | GitHK/osparc-services-forked | a8ab08ff7c32de8f1abde015c1515e8cf61426c0 | [
"MIT"
] | 9 | 2019-04-17T07:11:10.000Z | 2020-06-03T13:42:58.000Z | import asyncio
import logging
import os
import shutil
import sys
import tarfile
import tempfile
import zipfile
from pathlib import Path
from simcore_sdk import node_ports
logger = logging.getLogger(__name__)
_INPUTS_FOLDER = Path(os.environ.get("CSVTABLE_INPUT_PATH"))
_OUTPUTS_FOLDER = Path(os.environ.get("CSVTABLE_OUTPUT_PATH"))
_FILE_TYPE_PREFIX = "data:"
_KEY_VALUE_FILE_NAME = "key_values.json"
# clean the directory
shutil.rmtree(str(_INPUTS_FOLDER), ignore_errors=True)
if not _INPUTS_FOLDER.exists():
_INPUTS_FOLDER.mkdir()
logger.debug("Created input folder at %s", _INPUTS_FOLDER)
if not _OUTPUTS_FOLDER.exists():
_OUTPUTS_FOLDER.mkdir()
logger.debug("Created output folder at %s", _OUTPUTS_FOLDER)
def _no_relative_path_tar(members: tarfile.TarFile):
for tarinfo in members:
path = Path(tarinfo.name)
if path.is_absolute():
# absolute path are not allowed
continue
if path.match("/../"):
# relative paths are not allowed
continue
yield tarinfo
def _no_relative_path_zip(members: zipfile.ZipFile):
for zipinfo in members.infolist():
path = Path(zipinfo.filename)
if path.is_absolute():
# absolute path are not allowed
continue
if path.match("/../"):
# relative paths are not allowed
continue
yield zipinfo.filename
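# --- Illustrative sketch (added; not part of the original service code) ---
# _no_relative_path_tar above is the tar counterpart of the zip filter used in
# download_data(); a path-safe extraction with it would look like this (the
# archive and destination paths are placeholders).
def _example_safe_untar(archive_path: Path, dest_path: Path) -> None:
    with tarfile.open(archive_path) as tar_file:
        tar_file.extractall(dest_path, members=_no_relative_path_tar(tar_file))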
async def download_data():
logger.info("retrieving data from simcore...")
print("retrieving data from simcore...")
# get all files in the local system and copy them to the input folder
PORTS = await node_ports.ports()
for port in await PORTS.inputs:
if not port or port.value is None:
continue
local_path = await port.get()
dest_path = _INPUTS_FOLDER / port.key
dest_path.mkdir(exist_ok=True, parents=True)
# clean up destination directory
for path in dest_path.iterdir():
if path.is_file():
path.unlink()
elif path.is_dir():
shutil.rmtree(path)
# check if local_path is a compressed file
if zipfile.is_zipfile(local_path):
with zipfile.ZipFile(local_path) as zip_file:
zip_file.extractall(
dest_path, members=_no_relative_path_zip(zip_file))
else:
dest_path_name = _INPUTS_FOLDER / \
(port.key + ":" + Path(local_path).name)
shutil.move(local_path, dest_path_name)
shutil.rmtree(Path(local_path).parents[0])
async def copy_data():
logger.info("copying input data to right directory...")
origin_list = os.listdir(_INPUTS_FOLDER)
print("/inputs list", origin_list)
if len(origin_list) == 0:
return
filename = "input.csv"
origin = os.path.join(_INPUTS_FOLDER, ("input_1:"+filename))
if os.path.exists(origin):
destination_in = os.path.join("../csv-to-html-table/data", filename)
shutil.copy(origin, destination_in)
logger.info("data copied from %s to %s", origin, destination_in)
dest_folder = _OUTPUTS_FOLDER / "output_1"
dest_folder.mkdir(exist_ok=True, parents=True)
destination_out = os.path.join(dest_folder, filename)
shutil.copy(origin, destination_out)
logger.info("data copied from %s to %s", origin, destination_out)
else:
logger.info("input file not found %s", origin)
async def upload_data():
logger.info("uploading data to simcore...")
PORTS = await node_ports.ports()
outputs_path = Path(_OUTPUTS_FOLDER).expanduser()
for port in await PORTS.outputs:
logger.debug(
"uploading data to port '%s' with value '%s'...", port.key, port.value)
src_folder = outputs_path / port.key
list_files = list(src_folder.glob("*"))
if len(list_files) == 1:
# special case, direct upload
await port.set(list_files[0])
continue
# generic case let's create an archive
if len(list_files) > 1:
temp_file = tempfile.NamedTemporaryFile(suffix=".tgz")
temp_file.close()
            # build the archive once, adding every file in the output folder
            # (the original wrapped this in a redundant outer loop that
            # rewrote the same archive once per file)
            with tarfile.open(temp_file.name, mode='w:gz') as tar_ptr:
                for file_path in list_files:
                    tar_ptr.add(
                        file_path, arcname=file_path.name, recursive=False)
try:
await port.set(temp_file.name)
finally:
# clean up
Path(temp_file.name).unlink()
logger.info("all data uploaded to simcore")
async def sync_data():
try:
await download_data()
await copy_data()
await upload_data()
except node_ports.exceptions.NodeportsException as exc:
logger.error("error when syncing '%s'", str(exc))
sys.exit(1)
finally:
logger.info("download and upload finished")
asyncio.get_event_loop().run_until_complete(sync_data())
| 32.850649 | 83 | 0.630955 |
4a1ecbab425d827997a92cf348fa63f83a09443a | 1,337 | py | Python | src/utils/graph2.py | nikmedoed/pybinar | 1452fc123ac45d8b712ecd81010f0dc9f63c35fe | [
"Apache-2.0"
] | null | null | null | src/utils/graph2.py | nikmedoed/pybinar | 1452fc123ac45d8b712ecd81010f0dc9f63c35fe | [
"Apache-2.0"
] | null | null | null | src/utils/graph2.py | nikmedoed/pybinar | 1452fc123ac45d8b712ecd81010f0dc9f63c35fe | [
"Apache-2.0"
] | null | null | null | # Copyright 2019 Nikita Muromtsev (nikmedoed)
# Licensed under the Apache License, Version 2.0 (the «License»)
from mpl_toolkits.mplot3d import Axes3D
import matplotlib.pyplot as plt
import numpy
import random
def randomColor():
return (random.uniform(0, 1), random.uniform(0, 1), random.uniform(0, 1))
def bulidXYZ (x, y, z, text="", ax=None, color=None):
if not ax:
fig = plt.figure()
ax = fig.add_subplot(111, projection='3d')
if not color:
color = randomColor()
ax.scatter(x, y, z, c=color, s=100)
for i in range(len(text)):
ax.text(x[i], y[i], z[i], text[i], size=12, zorder=1, color='k')
return ax
def buildCell(cell, ax=None):
atoms = cell.atoms
atrans = list(zip(*atoms))
names = list(map(lambda x: x.name, cell.atoms))
    if ax:
        return bulidXYZ(*atrans, names, ax)
    else:
        # no Axes supplied: draw the cell on a fresh figure, then show it
        bulidXYZ(*atrans, names)
        plt.show()
def buildCells(cells):
fig = plt.figure()
ax = fig.add_subplot(111, projection='3d')
for i in cells:
ax = buildCell(i, ax)
plt.show()
if __name__ == "__main__":
bulidXYZ(
[0.7378, -0.7378, 1.1125, 1.1126, 1.1125, -1.0718, -1.0717, -1.0717],
[0.0000, 0.0000, -0.3153, -0.6256, 0.9409, -1.0210, 0.3422, 0.6788],
[0.0000, 0.0000, -0.9044, 0.7252, 0.1791, -0.1943, 0.9814, -0.7870]
)
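    # Illustrative addition (not in the original script): bulidXYZ returns the
    # Axes it drew on, so passing it back in overlays a second point set on
    # the same figure; the coordinates and labels below are placeholder values.
    shared_ax = bulidXYZ(
        [0.0, 0.5, 1.0], [0.0, 0.5, 1.0], [0.0, 0.5, 1.0],
        text=["p1", "p2", "p3"]
    )
    bulidXYZ(
        [1.0, 0.5, 0.0], [0.0, 0.5, 1.0], [1.0, 0.5, 0.0],
        text=["q1", "q2", "q3"], ax=shared_ax
    )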
plt.show() | 29.711111 | 77 | 0.600598 |
4a1ecccf4dd44a86768ab6bdefbeb816ef41ffab | 1,679 | py | Python | examples/get_data_from_tags.py | Arequ/robin_stocks | 66f3d9c8dd38e39546a7ee8e4f6115529314a78c | [
"MIT"
] | 1 | 2021-02-04T13:30:53.000Z | 2021-02-04T13:30:53.000Z | examples/get_data_from_tags.py | Arequ/robin_stocks | 66f3d9c8dd38e39546a7ee8e4f6115529314a78c | [
"MIT"
] | null | null | null | examples/get_data_from_tags.py | Arequ/robin_stocks | 66f3d9c8dd38e39546a7ee8e4f6115529314a78c | [
"MIT"
] | 1 | 2022-03-17T18:45:03.000Z | 2022-03-17T18:45:03.000Z | import os
import pyotp
import robin_stocks as r
from dotenv import load_dotenv
'''
This is an example script that will get all stocks that are part
of the "technology" tag.
NOTE: View the two_factor_log_in.py script to see how automatic
two-factor logging in works.
'''
# load environment variables.
load_dotenv()
# Login using two-factor code.
totp = pyotp.TOTP(os.environ['robin_mfa']).now()
login = r.login(os.environ['robin_username'],
os.environ['robin_password'], store_session=True, mfa_code=totp)
# Get 500 technology stocks data.
stocks = r.request_get(
"https://api.robinhood.com/midlands/tags/tag/technology/")
print(
f"\nthere are a total of {stocks['membership_count']} technology stocks, currently viewing {len(stocks['instruments'])}")
# Turn the raw dictionary into a list of strings using the filter_data function.
# This list of strings are the urls for the quote data of each stock.
# The quote data can be retrieved using get_instrument_by_url() or
# by using request_get to query the url directly.
data = r.filter_data(stocks, 'instruments')
first = data[0]
first_data = r.request_get(first)
print("\n======the quote data for the first entry is=====\n")
print(first_data)
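# Added illustration: the helper named in the comment block above fetches the
# same record without building the request URL by hand.
first_data_alt = r.get_instrument_by_url(first)
print(first_data_alt)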
print("\ngetting the rest of the quote data now. This may take a minute....")
full_quote_data = [r.request_get(x) for x in data]
print("Now I am getting the filter data...")
#I can also filter the data
margin_quote_data = []
for entry in data:
quote_data = r.request_get(entry)
if float(quote_data['margin_initial_ratio']) > 0.5:
margin_quote_data.append(quote_data)
print(f"There are {len(margin_quote_data)} entries that fit the criteria.")
| 39.046512 | 125 | 0.740322 |
4a1ecd5e55e48f0818ea09c58a391c3b2c8376eb | 1,303 | py | Python | downloadGoogle.py | student-t/samples | 1eb47637389f5e38ab764b8a14597233b5071e0b | [
"Apache-2.0"
] | 13 | 2018-05-24T04:31:32.000Z | 2020-04-18T13:34:50.000Z | downloadGoogle.py | student-t/samples | 1eb47637389f5e38ab764b8a14597233b5071e0b | [
"Apache-2.0"
] | 1 | 2018-06-16T01:37:46.000Z | 2018-09-21T14:22:19.000Z | downloadGoogle.py | student-t/samples | 1eb47637389f5e38ab764b8a14597233b5071e0b | [
"Apache-2.0"
] | 5 | 2018-10-23T10:39:22.000Z | 2020-01-08T04:01:10.000Z | import requests
def download_file_from_google_drive(id, destination):
def get_confirm_token(response):
for key, value in response.cookies.items():
if key.startswith('download_warning'):
return value
return None
def save_response_content(response, destination):
CHUNK_SIZE = 32768
with open(destination, "wb") as f:
for chunk in response.iter_content(CHUNK_SIZE):
if chunk: # filter out keep-alive new chunks
f.write(chunk)
URL = "https://docs.google.com/uc?export=download"
session = requests.Session()
response = session.get(URL, params = { 'id' : id }, stream = True)
token = get_confirm_token(response)
if token:
params = { 'id' : id, 'confirm' : token }
response = session.get(URL, params = params, stream = True)
save_response_content(response, destination)
if __name__ == "__main__":
import sys
    if len(sys.argv) != 3:
        print("Usage: python google_drive.py drive_file_id destination_file_path")
else:
# TAKE ID FROM SHAREABLE LINK
file_id = sys.argv[1]
# DESTINATION FILE ON YOUR DISK
destination = sys.argv[2]
download_file_from_google_drive(file_id, destination)
| 30.302326 | 81 | 0.634689 |
4a1ece3e764496ee102cd1771bccf1b70fb3ba69 | 3,754 | py | Python | python/generate_recipes.py | satyamedh/craftassist | d97cbc14bc25149d3ef41737231ab9f3cb7e392a | [
"MIT"
] | 669 | 2020-11-21T01:20:20.000Z | 2021-09-13T13:25:16.000Z | python/generate_recipes.py | satyamedh/craftassist | d97cbc14bc25149d3ef41737231ab9f3cb7e392a | [
"MIT"
] | 324 | 2020-12-07T18:20:34.000Z | 2021-09-14T17:17:18.000Z | python/generate_recipes.py | satyamedh/craftassist | d97cbc14bc25149d3ef41737231ab9f3cb7e392a | [
"MIT"
] | 89 | 2019-07-19T15:07:39.000Z | 2022-02-15T18:44:24.000Z | """
Copyright (c) Facebook, Inc. and its affiliates.
This file generates a craft_recipes.h file that is used by the C++ client.
"""
import argparse
parser = argparse.ArgumentParser()
parser.add_argument("items_ini")
parser.add_argument("crafting_txt")
parser.add_argument(
"-o", "--out-file", required=True, help="path to craft_recipes.h file to write"
)
args = parser.parse_args()
# read items.ini
name2idm = {}
with open(args.items_ini, "r") as f:
lines = f.readlines()
for line in reversed(lines): # reverse so lower ids are used for duplicate names
try:
name, val = line.split("=")
except ValueError:
continue
idm = tuple(map(int, val.split(":"))) if ":" in val else (int(val), 0)
name2idm[name] = idm
# read crafting.txt
def fill_recipe(recipe, idm, xs, ys):
for x in xs:
for y in ys:
idx = (y - 1) * 3 + (x - 1)
if recipe[idx] is None:
recipe[idx] = (idm, 1)
return
old_idm, count = recipe[idx]
if old_idm == idm:
recipe[idx] = (idm, count + 1)
return
raise Exception("Failed", recipe, idm, xs, ys)
idm2recipe = {} # idm -> (recipe, count)
with open(args.crafting_txt, "r") as f:
lines = f.readlines()
for line in lines:
# strip comments, empty lines
line = line.split("#")[0]
if line.strip() == "":
continue
assert "=" in line
result, ingredients = line.split("=")
# parse result, count
if "," in result:
# has count
result_name, count = tuple(map(str.strip, result.split(",")))
count = int(count)
else:
# has no count (default=1)
result_name = result.strip()
count = 1
try:
result_idm = name2idm[result_name.lower()]
except KeyError:
print("Ignoring:", line.strip())
continue
# parse ingredients, fill recipe array
recipe = [None] * 9 # array of (idm, count)
ingredients = tuple(map(str.strip, ingredients.split("|")))
for ingredient in ingredients:
# get ingredient idm
name, *locs = ingredient.replace(" ", "").split(",")
name = name.split("^-1")[0]
idm = name2idm[name.lower()]
# get crafting table locations
try:
for loc in locs:
if loc == "*":
fill_recipe(recipe, idm, (1, 2, 3), (1, 2, 3))
continue
x, y = loc.split(":")
if x == "*":
fill_recipe(recipe, idm, (1, 2, 3), (int(y),))
elif y == "*":
fill_recipe(recipe, idm, (int(x),), (1, 2, 3))
else:
fill_recipe(recipe, idm, (int(x),), (int(y),))
except:
print("Failed ing", ingredient.strip())
idm2recipe[result_idm] = (recipe, count)
# print header file
def format_recipe(idm, recipe, count):
key = (idm[0] << 8) | idm[1]
recipe = [((0, 0), 0) if r is None else r for r in recipe]
ingredients = ",".join(["{{{},{},{}}}".format(d, m, c) for ((d, m), c) in recipe])
val = "{{{{{{{}}}}},{}}}".format(ingredients, count)
return "{{{},{}}}".format(key, val)
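# --- Illustrative sketch (added): the generated C++ map key packs (id, meta)
# into a single integer; decoding is simply the reverse of the expression in
# format_recipe() above.
def _example_unpack_key(key):
    return key >> 8, key & 0xFF  # -> (id, meta)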
HEADER = """
// Generated by python/craft_recipes.py
#pragma once
#include <array>
#include <unordered_map>
struct Ingredient {
uint16_t id;
uint8_t meta;
uint8_t count;
};
struct Recipe {
std::array<Ingredient, 9> ingredients;
uint8_t count;
};
// Map key = (id << 8) | meta
const std::unordered_map<uint32_t, Recipe> RECIPES{
"""
HEADER += ",".join([format_recipe(idm, *recipe) for (idm, recipe) in idm2recipe.items()])
HEADER += "};\n"
with open(args.out_file, "w") as f:
f.write(HEADER)
| 25.194631 | 89 | 0.550613 |
4a1ece9163f9aaa176a8d7724e2e0e55f114815d | 2,138 | py | Python | app/notifiers/discord_client.py | x1n5h3n/crypto-signal | 64011a43fcbb58affb2af2a2c62909c881d8140f | [
"MIT"
] | null | null | null | app/notifiers/discord_client.py | x1n5h3n/crypto-signal | 64011a43fcbb58affb2af2a2c62909c881d8140f | [
"MIT"
] | null | null | null | app/notifiers/discord_client.py | x1n5h3n/crypto-signal | 64011a43fcbb58affb2af2a2c62909c881d8140f | [
"MIT"
] | null | null | null | """Notify a user via discord
"""
import structlog
from discord_webhook import DiscordWebhook as Webhook
from notifiers.utils import NotifierUtils
__max_message_size__ = 2000
class DiscordNotifier(NotifierUtils):
"""Class for handling Discord notifications
"""
def __init__(self, webhook, username, avatar=None):
"""Initialize DiscordNotifier class
Args:
webhook (str): Discord web hook to allow message sending.
username (str): Display name for the discord bot.
avatar (str, optional): Defaults to None. Url of an image to use as an avatar.
"""
self.logger = structlog.get_logger()
self.discord_username = username
self.discord_client = Webhook(
url=webhook, username=username, avatar_url=avatar, rate_limit_retry=True)
def notify(self, message: str):
"""Sends the message.
Args:
message (str): The message to send.
"""
message_chunks = self.chunk_message(
message=message, max_message_size=__max_message_size__)
for message_chunk in message_chunks:
try:
self.discord_client.set_content(message_chunk)
self.discord_client.execute()
except Exception as e:
self.logger.info('Unable to send message using Discord !')
self.logger.debug(e)
def send_chart_messages(self, photo_url: str, messages=[]):
"""Send image chart
Args:
photo_url (str): The photo url to send.
"""
try:
self.discord_client.set_content('')
with open(photo_url, 'rb') as f:
self.discord_client.add_file(file=f.read(), filename=f.name)
self.discord_client.execute(remove_files=True)
except Exception as e:
self.logger.info('Unable to send chart messages using Discord !')
self.logger.debug(e)
self.send_messages(messages)
def send_messages(self, messages=[]):
if messages:
for message in messages:
self.notify(message)
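# --- Illustrative usage sketch (added; not part of the original module) ---
# The webhook URL and the message below are placeholders; a real Discord
# webhook URL is required for the call to actually deliver anything.
if __name__ == "__main__":
    example_notifier = DiscordNotifier(
        webhook="https://discord.com/api/webhooks/<id>/<token>",
        username="crypto-signal-bot",
    )
    example_notifier.notify("BTC/USDT: buy signal triggered (example message).")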
| 33.40625 | 90 | 0.61927 |
4a1ecee48dacdd3bdf656617f7cb9bfa304c9d97 | 3,930 | py | Python | engine/data_sources/minio/minio_table.py | delftdata/valentine-system | 07624fed956b89cf6c8be4d982b9b553624e1ade | [
"Apache-2.0"
] | 3 | 2021-08-28T16:06:50.000Z | 2022-03-25T22:33:34.000Z | engine/data_sources/minio/minio_table.py | delftdata/valentine-system | 07624fed956b89cf6c8be4d982b9b553624e1ade | [
"Apache-2.0"
] | 6 | 2021-06-08T09:49:48.000Z | 2021-08-10T17:06:57.000Z | engine/data_sources/minio/minio_table.py | delftdata/valentine-system | 07624fed956b89cf6c8be4d982b9b553624e1ade | [
"Apache-2.0"
] | null | null | null | from typing import List, Dict
import pandas as pd
from minio import Minio
from .minio_column import MinioColumn
from .minio_utils import get_columns_from_minio_csv_file, get_pandas_df_from_minio_csv_file, correct_file_ending
from ..base_column import BaseColumn
from ..base_table import BaseTable
from ...utils.utils import is_date
class MinioTable(BaseTable):
def __init__(self, minio_client: Minio, table_name: str, db_name: str, load_data: bool):
self.minio_client = minio_client
self.__table_name = table_name # file name
self.__db_name = db_name # bucket name
self.__columns = dict()
self.__column_names = self.__get_column_names()
if load_data:
self.__get_columns_from_local_minio_tmp_copy()
def __str__(self):
__str: str = "\tTable: " + self.name + " | " + str(self.unique_identifier) + "\n"
for column in self.get_columns():
__str = __str + str(column.__str__())
return __str
@property
def unique_identifier(self) -> str:
return f'{self.__db_name}:{self.__table_name}'
@property
def db_belongs_uid(self) -> object:
return self.__db_name
@property
def name(self) -> str:
return correct_file_ending(self.__table_name).replace('/', '_').split('.')[0]
def get_columns(self) -> List[BaseColumn]:
if not self.__columns:
self.__get_columns_from_local_minio_tmp_copy()
return list(self.__columns.values())
def get_tables(self, load_data: bool = True) -> Dict[str, BaseTable]:
if not self.__columns:
if load_data:
self.__get_columns_from_local_minio_tmp_copy()
else:
column_names: List[str] = self.__get_column_names()
self.__columns = {column_name: MinioColumn(column_name, [], 'NULL', self.unique_identifier)
for column_name in column_names}
return {self.name: self}
def get_table_str_guids(self) -> List[str]:
return [str(self.unique_identifier)]
def remove_table(self, guid: object) -> BaseTable:
pass # Since its a single table we cannot delete it (overridden from BaseDB)
def add_table(self, table: BaseTable) -> None:
pass # Since its a single table we cannot add another table to it (overridden from BaseDB)
@property
def is_empty(self) -> bool:
return len(self.__column_names) == 0
def get_table_guids(self) -> List[object]:
return [self.unique_identifier]
def __get_column_names(self) -> List[str]:
return get_columns_from_minio_csv_file(self.minio_client, self.__db_name, self.__table_name)
def __get_columns_from_local_minio_tmp_copy(self):
table_df: pd.DataFrame = get_pandas_df_from_minio_csv_file(self.minio_client, self.__db_name, self.__table_name)
for (column_name, column_data) in table_df.iteritems():
d_type = str(column_data.dtype)
data = list(column_data.dropna().values)
if len(data) != 0:
d_type = self.__get_true_data_type(d_type, data)
self.__columns[column_name] = MinioColumn(column_name, data, d_type, self.unique_identifier)
else:
if d_type == "object":
self.__columns[column_name] = MinioColumn(column_name, data, "varchar", self.unique_identifier)
else:
self.__columns[column_name] = MinioColumn(column_name, data, d_type, self.unique_identifier)
@staticmethod
def __get_true_data_type(d_type, data):
if d_type == "object":
if is_date(data[0]):
d_type = "date"
else:
d_type = "varchar"
elif d_type.startswith("int"):
d_type = "int"
elif d_type.startswith("float"):
d_type = "float"
return d_type
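# --- Illustrative usage sketch (added; not part of the original module) ---
# The endpoint, credentials, bucket and object names are placeholders; a
# reachable MinIO server holding that CSV object is assumed.
if __name__ == "__main__":
    example_client = Minio(
        "localhost:9000", access_key="minioadmin", secret_key="minioadmin", secure=False
    )
    example_table = MinioTable(
        example_client, table_name="datasets/example.csv",
        db_name="example-bucket", load_data=True
    )
    print(example_table)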
| 38.910891 | 120 | 0.644275 |
4a1ecefe8bd68477e5ac23e4b68078d342ae40df | 1,188 | py | Python | profiles_api/serializers.py | ishelke8177/profiles-rest-api | 7a56946dec56e44ae552d91c6dcf49bd76d8c46a | [
"MIT"
] | 1 | 2019-10-22T02:57:13.000Z | 2019-10-22T02:57:13.000Z | profiles_api/serializers.py | ishelke8177/profiles-rest-api | 7a56946dec56e44ae552d91c6dcf49bd76d8c46a | [
"MIT"
] | 9 | 2019-12-04T23:10:53.000Z | 2022-02-10T12:55:20.000Z | profiles_api/serializers.py | ishelke8177/profiles-rest-api | 7a56946dec56e44ae552d91c6dcf49bd76d8c46a | [
"MIT"
] | null | null | null | from rest_framework import serializers
from profiles_api import models
class HelloSerializer(serializers.Serializer):
"""Serializes a name field for testing out APIView"""
name = serializers.CharField(max_length=10)
class UserProfileSerializer(serializers.ModelSerializer):
"""Serializes a user profile object"""
class Meta:
model = models.UserProfile
fields = ('id', 'email', 'name', 'password')
extra_kwargs = {
'password': {
'write_only': True,
'style': {'input_type': 'password'}
}
}
def create(self, validated_data):
"""Create and return a new user"""
user = models.UserProfile.objects.create_user(
email=validated_data['email'],
name=validated_data['name'],
password=validated_data['password']
)
return user
class ProfileFeedItemSerializer(serializers.ModelSerializer):
"""Serializes profile feed items"""
class Meta:
model = models.ProfileFeedItem
fields = ('id', 'user_profile', 'status_text', 'created_on')
extra_kwargs = {'user_profile': {'read_only': True}}
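# --- Illustrative usage sketch (added; not part of the original module) ---
# The field values are placeholders; running this needs a configured Django
# environment and database (e.g. from `manage.py shell` or a test case).
def _example_create_user_profile():
    serializer = UserProfileSerializer(data={
        'email': '[email protected]',
        'name': 'Example User',
        'password': 'a-strong-example-password',
    })
    serializer.is_valid(raise_exception=True)
    return serializer.save()  # routes through create() and create_user() above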
| 28.285714 | 68 | 0.625421 |
4a1ecf1f4f1ac073290607716b40c89cba5cb88e | 7,844 | py | Python | pyrobot/stock_frame.py | farleyrules/python-trading-robot | fefd96edd7b66ad6cf282f892f764c6d60028db7 | [
"MIT"
] | 1 | 2021-01-28T01:52:41.000Z | 2021-01-28T01:52:41.000Z | pyrobot/stock_frame.py | charleswwilson/python-trading-robot | cedc71b8bf5710cbfbdfa4256f1e745203ae2a9b | [
"MIT"
] | null | null | null | pyrobot/stock_frame.py | charleswwilson/python-trading-robot | cedc71b8bf5710cbfbdfa4256f1e745203ae2a9b | [
"MIT"
] | null | null | null | import numpy as np
import pandas as pd
from datetime import time
from datetime import datetime
from datetime import timezone
from typing import List
from typing import Dict
from typing import Union
from pandas.core.groupby import DataFrameGroupBy
from pandas.core.window import RollingGroupby
from pandas.core.window import Window
class StockFrame():
    def __init__(self, data: List[Dict]) -> None:
        """Initializes the Stock Data Frame Object.
Arguments:
----
data {List[Dict]} -- The data to convert to a frame. Normally, this is
returned from the historical prices endpoint.
"""
self._data = data
self._frame: pd.DataFrame = self.create_frame()
self._symbol_groups = None
self._symbol_rolling_groups = None
@property
def frame(self) -> pd.DataFrame:
"""The frame object.
Returns:
----
pd.DataFrame -- A pandas data frame with the price data.
"""
return self._frame
@property
def symbol_groups(self) -> DataFrameGroupBy:
"""Returns the Groups in the StockFrame.
Overview:
----
        Often we will want to apply operations to each symbol. The
`symbols_groups` property will return the dataframe grouped by
each symbol.
Returns:
----
{DataFrameGroupBy} -- A `pandas.core.groupby.GroupBy` object with each symbol.
"""
# Group by Symbol.
self._symbol_groups: DataFrameGroupBy = self._frame.groupby(
by='symbol',
as_index=False,
sort=True
)
return self._symbol_groups
def symbol_rolling_groups(self, size: int) -> RollingGroupby:
"""Grabs the windows for each group.
Arguments:
----
size {int} -- The size of the window.
Returns:
----
{RollingGroupby} -- A `pandas.core.window.RollingGroupby` object.
"""
        # If we don't have a symbols group yet, then create it.
if not self._symbol_groups:
self.symbol_groups
self._symbol_rolling_groups: RollingGroupby = self._symbol_groups.rolling(size)
return self._symbol_rolling_groups
def create_frame(self) -> pd.DataFrame:
"""Creates a new data frame with the data passed through.
Returns:
----
{pd.DataFrame} -- A pandas dataframe.
"""
# Make a data frame.
price_df = pd.DataFrame(data=self._data)
price_df = self._parse_datetime_column(price_df=price_df)
price_df = self._set_multi_index(price_df=price_df)
return price_df
def _parse_datetime_column(self, price_df: pd.DataFrame) -> pd.DataFrame:
"""Parses the datetime column passed through.
Arguments:
----
price_df {pd.DataFrame} -- The price data frame with a
datetime column.
Returns:
----
{pd.DataFrame} -- A pandas dataframe.
"""
price_df['datetime'] = pd.to_datetime(price_df['datetime'], unit='ms', origin='unix')
return price_df
def _set_multi_index(self, price_df: pd.DataFrame) -> pd.DataFrame:
"""Converts the dataframe to a multi-index data frame.
Arguments:
----
price_df {pd.DataFrame} -- The price data frame.
Returns:
----
pd.DataFrame -- A pandas dataframe.
"""
price_df = price_df.set_index(keys=['symbol','datetime'])
return price_df
def add_rows(self, data: Dict) -> None:
"""Adds a new row to our StockFrame.
Arguments:
----
data {Dict} -- A list of quotes.
Usage:
----
"""
column_names = ['open', 'close', 'high', 'low', 'volume']
for quote in data:
# Parse the Timestamp.
time_stamp = pd.to_datetime(
quote['datetime'],
unit='ms',
origin='unix'
)
# Define the Index Tuple.
row_id = (quote['symbol'], time_stamp)
# Define the values.
row_values = [
quote['open'],
quote['close'],
quote['high'],
quote['low'],
quote['volume']
]
# Create a new row.
new_row = pd.Series(data=row_values)
# Add the row.
self.frame.loc[row_id, column_names] = new_row.values
self.frame.sort_index(inplace=True)
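    # Illustrative note (added): `add_rows` above expects each quote to carry
    # the same fields as the constructor input, e.g. (placeholder values):
    #     {"symbol": "MSFT", "datetime": 1609459200000, "open": 222.5,
    #      "close": 223.0, "high": 223.6, "low": 221.9, "volume": 1000000}
    # so StockFrame(data=[...]) and add_rows(data=[...]) can share one shape.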
def do_indicator_exist(self, column_names: List[str]) -> bool:
"""Checks to see if the indicator columns specified exist.
Overview:
----
The user can add multiple indicator columns to their StockFrame object
and in some cases we will need to modify those columns before making trades.
In those situations, this method, will help us check if those columns exist
before proceeding on in the code.
Arguments:
----
column_names {List[str]} -- A list of column names that will be checked.
Raises:
----
KeyError: If a column is not found in the StockFrame, a KeyError will be raised.
Returns:
----
bool -- `True` if all the columns exist.
"""
if set(column_names).issubset(self._frame.columns):
return True
else:
raise KeyError("The following indicator columns are missing from the StockFrame: {missing_columns}".format(
missing_columns=set(column_names).difference(self._frame.columns)
))
    def _check_signals(self, indicators: dict) -> Union[List[tuple], None]:
"""Returns the last row of the StockFrame if conditions are met.
Overview:
----
Before a trade is executed, we must check to make sure if the
conditions that warrant a `buy` or `sell` signal are met. This
method will take last row for each symbol in the StockFrame and
compare the indicator column values with the conditions specified
by the user.
If the conditions are met the row will be returned back to the user.
Arguments:
----
indicators {dict} -- A dictionary containing all the indicators to be checked
along with their buy and sell criteria.
        Returns:
        ----
        {Union[List[tuple], None]} -- A list of ('buys', pandas.Series) and ('sells', pandas.Series)
        pairs, one pair per indicator, holding the last-row values that satisfied each condition.
"""
# Grab the last rows.
last_rows = self._symbol_groups.tail(1)
conditions = []
# Check to see if all the columns exist.
if self.do_indicator_exist(column_names=indicators.keys()):
for indicator in indicators:
column = last_rows[indicator]
buy_condition_target = indicators[indicator]['buy']
sell_condition_target = indicators[indicator]['sell']
buy_condition_operator = indicators[indicator]['buy_operator']
sell_condition_operator = indicators[indicator]['sell_operator']
condition_1: pd.Series = buy_condition_operator(column, buy_condition_target)
condition_2: pd.Series = sell_condition_operator(column, sell_condition_target)
condition_1 = condition_1.where(lambda x : x == True).dropna()
condition_2 = condition_2.where(lambda x : x == True).dropna()
conditions.append(('buys', condition_1))
conditions.append(('sells', condition_2))
return conditions | 30.169231 | 119 | 0.583886 |
4a1ecf2bfba6f67af3ad8c3160717c05e6ecaa9f | 12,236 | py | Python | intersight/model/vnic_plogi_settings.py | CiscoDevNet/intersight-python | 04b721f37c3044646a91c185c7259edfb991557a | [
"Apache-2.0"
] | 5 | 2021-12-16T15:13:32.000Z | 2022-03-29T16:09:54.000Z | intersight/model/vnic_plogi_settings.py | CiscoDevNet/intersight-python | 04b721f37c3044646a91c185c7259edfb991557a | [
"Apache-2.0"
] | 4 | 2022-01-25T19:05:51.000Z | 2022-03-29T20:18:37.000Z | intersight/model/vnic_plogi_settings.py | CiscoDevNet/intersight-python | 04b721f37c3044646a91c185c7259edfb991557a | [
"Apache-2.0"
] | 2 | 2020-07-07T15:01:08.000Z | 2022-01-31T04:27:35.000Z | """
Cisco Intersight
Cisco Intersight is a management platform delivered as a service with embedded analytics for your Cisco and 3rd party IT infrastructure. This platform offers an intelligent level of management that enables IT organizations to analyze, simplify, and automate their environments in more advanced ways than the prior generations of tools. Cisco Intersight provides an integrated and intuitive management experience for resources in the traditional data center as well as at the edge. With flexible deployment options to address complex security needs, getting started with Intersight is quick and easy. Cisco Intersight has deep integration with Cisco UCS and HyperFlex systems allowing for remote deployment, configuration, and ongoing maintenance. The model-based deployment works for a single system in a remote location or hundreds of systems in a data center and enables rapid, standardized configuration and deployment. It also streamlines maintaining those systems whether you are working with small or very large configurations. The Intersight OpenAPI document defines the complete set of properties that are returned in the HTTP response. From that perspective, a client can expect that no additional properties are returned, unless these properties are explicitly defined in the OpenAPI document. However, when a client uses an older version of the Intersight OpenAPI document, the server may send additional properties because the software is more recent than the client. In that case, the client may receive properties that it does not know about. Some generated SDKs perform a strict validation of the HTTP response body against the OpenAPI document. # noqa: E501
The version of the OpenAPI document: 1.0.9-4950
Contact: [email protected]
Generated by: https://openapi-generator.tech
"""
import re # noqa: F401
import sys # noqa: F401
from intersight.model_utils import ( # noqa: F401
ApiTypeError,
ModelComposed,
ModelNormal,
ModelSimple,
cached_property,
change_keys_js_to_python,
convert_js_args_to_python_args,
date,
datetime,
file_type,
none_type,
validate_get_composed_info,
)
def lazy_import():
from intersight.model.mo_base_complex_type import MoBaseComplexType
from intersight.model.vnic_plogi_settings_all_of import VnicPlogiSettingsAllOf
globals()['MoBaseComplexType'] = MoBaseComplexType
globals()['VnicPlogiSettingsAllOf'] = VnicPlogiSettingsAllOf
class VnicPlogiSettings(ModelComposed):
"""NOTE: This class is auto generated by OpenAPI Generator.
Ref: https://openapi-generator.tech
Do not edit the class manually.
Attributes:
allowed_values (dict): The key is the tuple path to the attribute
and the for var_name this is (var_name,). The value is a dict
with a capitalized key describing the allowed value and an allowed
value. These dicts store the allowed enum values.
attribute_map (dict): The key is attribute name
and the value is json key in definition.
discriminator_value_class_map (dict): A dict to go from the discriminator
variable value to the discriminator class name.
validations (dict): The key is the tuple path to the attribute
and the for var_name this is (var_name,). The value is a dict
that stores validations for max_length, min_length, max_items,
min_items, exclusive_maximum, inclusive_maximum, exclusive_minimum,
inclusive_minimum, and regex.
additional_properties_type (tuple): A tuple of classes accepted
as additional properties values.
"""
allowed_values = {
('class_id',): {
'VNIC.PLOGISETTINGS': "vnic.PlogiSettings",
},
('object_type',): {
'VNIC.PLOGISETTINGS': "vnic.PlogiSettings",
},
}
validations = {
('retries',): {
'inclusive_maximum': 255,
'inclusive_minimum': 0,
},
('timeout',): {
'inclusive_maximum': 255000,
'inclusive_minimum': 1000,
},
}
@cached_property
def additional_properties_type():
"""
This must be a method because a model may have properties that are
of type self, this must run after the class is loaded
"""
lazy_import()
return (bool, date, datetime, dict, float, int, list, str, none_type,) # noqa: E501
_nullable = True
@cached_property
def openapi_types():
"""
This must be a method because a model may have properties that are
of type self, this must run after the class is loaded
Returns
openapi_types (dict): The key is attribute name
and the value is attribute type.
"""
lazy_import()
return {
'class_id': (str,), # noqa: E501
'object_type': (str,), # noqa: E501
'retries': (int,), # noqa: E501
'timeout': (int,), # noqa: E501
}
@cached_property
def discriminator():
val = {
}
if not val:
return None
return {'class_id': val}
attribute_map = {
'class_id': 'ClassId', # noqa: E501
'object_type': 'ObjectType', # noqa: E501
'retries': 'Retries', # noqa: E501
'timeout': 'Timeout', # noqa: E501
}
required_properties = set([
'_data_store',
'_check_type',
'_spec_property_naming',
'_path_to_item',
'_configuration',
'_visited_composed_classes',
'_composed_instances',
'_var_name_to_model_instances',
'_additional_properties_model_instances',
])
@convert_js_args_to_python_args
def __init__(self, *args, **kwargs): # noqa: E501
"""VnicPlogiSettings - a model defined in OpenAPI
Args:
Keyword Args:
class_id (str): The fully-qualified name of the instantiated, concrete type. This property is used as a discriminator to identify the type of the payload when marshaling and unmarshaling data.. defaults to "vnic.PlogiSettings", must be one of ["vnic.PlogiSettings", ] # noqa: E501
object_type (str): The fully-qualified name of the instantiated, concrete type. The value should be the same as the 'ClassId' property.. defaults to "vnic.PlogiSettings", must be one of ["vnic.PlogiSettings", ] # noqa: E501
_check_type (bool): if True, values for parameters in openapi_types
will be type checked and a TypeError will be
raised if the wrong type is input.
Defaults to True
_path_to_item (tuple/list): This is a list of keys or values to
drill down to the model in received_data
when deserializing a response
_spec_property_naming (bool): True if the variable names in the input data
are serialized names, as specified in the OpenAPI document.
False if the variable names in the input data
are pythonic names, e.g. snake case (default)
_configuration (Configuration): the instance to use when
deserializing a file_type parameter.
If passed, type conversion is attempted
If omitted no type conversion is done.
_visited_composed_classes (tuple): This stores a tuple of
classes that we have traveled through so that
if we see that class again we will not use its
discriminator again.
When traveling through a discriminator, the
composed schema that is
is traveled through is added to this set.
For example if Animal has a discriminator
petType and we pass in "Dog", and the class Dog
allOf includes Animal, we move through Animal
once using the discriminator, and pick Dog.
Then in Dog, we will make an instance of the
Animal class but this time we won't travel
through its discriminator because we passed in
_visited_composed_classes = (Animal,)
retries (int): The number of times that the system tries to log in to a port after the first failure.. [optional] if omitted the server will use the default value of 8 # noqa: E501
timeout (int): The number of milliseconds that the system waits before it tries to log in again.. [optional] if omitted the server will use the default value of 20000 # noqa: E501
"""
class_id = kwargs.get('class_id', "vnic.PlogiSettings")
object_type = kwargs.get('object_type', "vnic.PlogiSettings")
_check_type = kwargs.pop('_check_type', True)
_spec_property_naming = kwargs.pop('_spec_property_naming', False)
_path_to_item = kwargs.pop('_path_to_item', ())
_configuration = kwargs.pop('_configuration', None)
_visited_composed_classes = kwargs.pop('_visited_composed_classes', ())
if args:
raise ApiTypeError(
"Invalid positional arguments=%s passed to %s. Remove those invalid positional arguments." % (
args,
self.__class__.__name__,
),
path_to_item=_path_to_item,
valid_classes=(self.__class__,),
)
self._data_store = {}
self._check_type = _check_type
self._spec_property_naming = _spec_property_naming
self._path_to_item = _path_to_item
self._configuration = _configuration
self._visited_composed_classes = _visited_composed_classes + (self.__class__,)
constant_args = {
'_check_type': _check_type,
'_path_to_item': _path_to_item,
'_spec_property_naming': _spec_property_naming,
'_configuration': _configuration,
'_visited_composed_classes': self._visited_composed_classes,
}
required_args = {
'class_id': class_id,
'object_type': object_type,
}
model_args = {}
model_args.update(required_args)
model_args.update(kwargs)
composed_info = validate_get_composed_info(
constant_args, model_args, self)
self._composed_instances = composed_info[0]
self._var_name_to_model_instances = composed_info[1]
self._additional_properties_model_instances = composed_info[2]
unused_args = composed_info[3]
for var_name, var_value in required_args.items():
setattr(self, var_name, var_value)
for var_name, var_value in kwargs.items():
if var_name in unused_args and \
self._configuration is not None and \
self._configuration.discard_unknown_keys and \
not self._additional_properties_model_instances:
# discard variable.
continue
setattr(self, var_name, var_value)
@cached_property
def _composed_schemas():
# we need this here to make our import statements work
# we must store _composed_schemas in here so the code is only run
# when we invoke this method. If we kept this at the class
# level we would get an error beause the class level
# code would be run when this module is imported, and these composed
# classes don't exist yet because their module has not finished
# loading
lazy_import()
return {
'anyOf': [
],
'allOf': [
MoBaseComplexType,
VnicPlogiSettingsAllOf,
],
'oneOf': [
],
}
| 47.610895 | 1,678 | 0.629618 |
4a1ecfc6e0db60190ed83d2bdfb53f87dd681b51 | 1,733 | py | Python | src/blight/actions/cc_for_cxx.py | trailofbits/blight | d4a148742db007ef3fcdbc5911fb013d67bbc9a1 | [
"Apache-2.0"
] | 51 | 2020-07-30T17:08:21.000Z | 2022-03-23T17:15:21.000Z | src/blight/actions/cc_for_cxx.py | trailofbits/blight | d4a148742db007ef3fcdbc5911fb013d67bbc9a1 | [
"Apache-2.0"
] | 31,392 | 2020-08-18T18:43:30.000Z | 2022-03-29T15:00:00.000Z | src/blight/actions/cc_for_cxx.py | trailofbits/canker | bb5808d143550b98c4e9a6d04ad644a1256bb6d1 | [
"Apache-2.0"
] | 4 | 2020-11-11T15:58:47.000Z | 2022-03-16T22:48:41.000Z | """
The `CCForCXX` action.
"""
from blight.action import CCAction
from blight.tool import CC
class CCForCXX(CCAction):
"""
An action for detecting whether the C compiler is being used as if it's
a C++ compiler, and correcting the build when so.
This action is used to fix a particular kind of misconfigured C++ build,
where the C++ compiler is referred to as if it were a C compiler.
For example, in Make:
```make
CC := clang++
CFLAGS := -std=c++17
all:
$(CC) $(CFLAGS) -o whatever foo.cpp bar.cpp
```
Whereas the correct use would be:
```make
CXX := clang++
CXXFLAGS := -std=c++17
all:
$(CXX) $(CXXFLAGS) -o whatever foo.cpp bar.cpp
```
This action fixes these builds by checking whether `CC` is being used
as a C++ compiler. If it is, it explicitly injects additional flags
to force the compiler into C++ mode.
"""
# NOTE(ww): type ignore here because mypy thinks this is a Liskov
# substitution principle violation -- it can't see that `CompilerAction`
# is safely specialized for `CompilerTool`.
def before_run(self, tool: CC) -> None: # type: ignore
# NOTE(ww): Currently, the only way we check whether CC is being used
# as a C++ compiler is by checking whether one of the `-std=c++XX`
# flags has been passed. This won't catch all cases; someone could use
# CC as a C++ compiler with the default C++ standard.
# Other options for detecting this:
# * Check for common C++-only linkages, like -lstdc++fs
# * Check whether tool.inputs contains files that look like C++
if tool.std.is_cxxstd():
tool.args[:0] = ["-x", "c++"]
| 31.509091 | 78 | 0.630698 |
4a1ed0139cf06c9521ae8d97f03b65ec3c9e51c8 | 15,040 | py | Python | simple_salesforce/metadata.py | ryanrmcintyre/simple-salesforce | a867721706b6370443bcafcd485e2703f5034d0e | [
"Apache-2.0"
] | null | null | null | simple_salesforce/metadata.py | ryanrmcintyre/simple-salesforce | a867721706b6370443bcafcd485e2703f5034d0e | [
"Apache-2.0"
] | null | null | null | simple_salesforce/metadata.py | ryanrmcintyre/simple-salesforce | a867721706b6370443bcafcd485e2703f5034d0e | [
"Apache-2.0"
] | null | null | null | """ Class to work with Salesforce Metadata API """
from base64 import b64encode, b64decode
from xml.etree import ElementTree as ET
from .util import call_salesforce
from .messages import DEPLOY_MSG,CHECK_DEPLOY_STATUS_MSG,\
CHECK_RETRIEVE_STATUS_MSG,RETRIEVE_MSG
class SfdcMetadataApi:
# pylint: disable=too-many-instance-attributes
""" Class to work with Salesforce Metadata API """
_METADATA_API_BASE_URI = "/services/Soap/m/{version}"
_XML_NAMESPACES = {
'soapenv': 'http://schemas.xmlsoap.org/soap/envelope/',
'mt': 'http://soap.sforce.com/2006/04/metadata'
}
# pylint: disable=R0913
def __init__(self, session, session_id, instance, sandbox, metadata_url,
headers, api_version):
""" Initialize and check session """
self.session = session
self._session_id = session_id
self._instance = instance
self._sandbox = sandbox
self.metadata_url = metadata_url
self.headers = headers
self._api_version = api_version
self._deploy_zip = None
# pylint: disable=R0914
# pylint: disable-msg=C0103
def deploy(self, zipfile, **kwargs):
""" Kicks off async deployment, returns deployment id
:param zipfile:
:type zipfile:
:param kwargs:
:type kwargs:
:return:
:rtype:
"""
client = kwargs.get('client', 'simple_salesforce_metahelper')
checkOnly = kwargs.get('checkOnly', False)
testLevel = kwargs.get('testLevel')
tests = kwargs.get('tests')
ignoreWarnings = kwargs.get('ignoreWarnings', False)
allowMissingFiles = kwargs.get('allowMissingFiles', False)
autoUpdatePackage = kwargs.get('autoUpdatePackage', False)
performRetrieve = kwargs.get('performRetrieve', False)
purgeOnDelete = kwargs.get('purgeOnDelete', False)
rollbackOnError = kwargs.get('rollbackOnError', False)
singlePackage = True
attributes = {
'client': client,
'checkOnly': checkOnly,
'sessionId': self._session_id,
'ZipFile': self._read_deploy_zip(zipfile),
'testLevel': testLevel,
'tests': tests,
'ignoreWarnings': ignoreWarnings,
'allowMissingFiles': allowMissingFiles,
'autoUpdatePackage': autoUpdatePackage,
'performRetrieve': performRetrieve,
'purgeOnDelete': purgeOnDelete,
'rollbackOnError': rollbackOnError,
'singlePackage': singlePackage,
}
if not self._sandbox:
attributes['allowMissingFiles'] = False
attributes['rollbackOnError'] = True
if testLevel:
test_level = "<met:testLevel>%s</met:testLevel>" % testLevel
attributes['testLevel'] = test_level
tests_tag = ''
if tests and \
str(tests).lower() == 'runspecifiedtests':
for test in tests:
tests_tag += '<met:runTests>%s</met:runTests>\n' % test
attributes['tests'] = tests_tag
request = DEPLOY_MSG.format(**attributes)
headers = {'Content-Type': 'text/xml', 'SOAPAction': 'deploy'}
result = call_salesforce(url=self.metadata_url + 'deployRequest',
method='POST',
session=self.session,
headers=self.headers,
additional_headers=headers,
data=request)
async_process_id = ET.fromstring(result.text).find(
'soapenv:Body/mt:deployResponse/mt:result/mt:id',
self._XML_NAMESPACES).text
state = ET.fromstring(result.text).find(
'soapenv:Body/mt:deployResponse/mt:result/mt:state',
self._XML_NAMESPACES).text
return async_process_id, state
@staticmethod
def _read_deploy_zip(zipfile):
"""
:param zipfile:
:type zipfile:
:return:
:rtype:
"""
if hasattr(zipfile, 'read'):
file = zipfile
file.seek(0)
should_close = False
else:
file = open(zipfile, 'rb')
should_close = True
raw = file.read()
if should_close:
file.close()
return b64encode(raw).decode("utf-8")
def _retrieve_deploy_result(self, async_process_id, **kwargs):
""" Retrieves status for specified deployment id
:param async_process_id:
:type async_process_id:
:param kwargs:
:type kwargs:
:return:
:rtype:
"""
client = kwargs.get('client', 'simple_salesforce_metahelper')
attributes = {
'client': client,
'sessionId': self._session_id,
'asyncProcessId': async_process_id,
'includeDetails': 'true'
}
mt_request = CHECK_DEPLOY_STATUS_MSG.format(**attributes)
headers = {
'Content-type': 'text/xml', 'SOAPAction': 'checkDeployStatus'
}
res = call_salesforce(
url=self.metadata_url + 'deployRequest/' + async_process_id,
method='POST',
session=self.session,
headers=self.headers,
additional_headers=headers,
data=mt_request)
root = ET.fromstring(res.text)
result = root.find(
'soapenv:Body/mt:checkDeployStatusResponse/mt:result',
self._XML_NAMESPACES)
if result is None:
raise Exception("Result node could not be found: %s" % res.text)
return result
@staticmethod
def get_component_error_count(value):
"""Get component error counts"""
try:
return int(value)
except ValueError:
return 0
def check_deploy_status(self, async_process_id, **kwargs):
"""
Checks whether deployment succeeded
:param async_process_id:
:type async_process_id:
:param kwargs:
:type kwargs:
:return:
:rtype:
"""
result = self._retrieve_deploy_result(async_process_id, **kwargs)
state = result.find('mt:status', self._XML_NAMESPACES).text
state_detail = result.find('mt:stateDetail', self._XML_NAMESPACES)
if state_detail is not None:
state_detail = state_detail.text
unit_test_errors = []
deployment_errors = []
failed_count = self.get_component_error_count(
result.find('mt:numberComponentErrors', self._XML_NAMESPACES).text)
if state == 'Failed' or failed_count > 0:
# Deployment failures
failures = result.findall('mt:details/mt:componentFailures',
self._XML_NAMESPACES)
for failure in failures:
deployment_errors.append({
'type': failure.find('mt:componentType',
self._XML_NAMESPACES).text,
'file': failure.find('mt:fileName',
self._XML_NAMESPACES).text,
'status': failure.find('mt:problemType',
self._XML_NAMESPACES).text,
'message': failure.find('mt:problem',
self._XML_NAMESPACES).text
})
# Unit test failures
failures = result.findall(
'mt:details/mt:runTestResult/mt:failures',
self._XML_NAMESPACES)
for failure in failures:
unit_test_errors.append({
'class': failure.find('mt:name', self._XML_NAMESPACES).text,
'method': failure.find('mt:methodName',
self._XML_NAMESPACES).text,
'message': failure.find('mt:message',
self._XML_NAMESPACES).text,
'stack_trace': failure.find('mt:stackTrace',
self._XML_NAMESPACES).text
})
deployment_detail = {
'total_count': result.find('mt:numberComponentsTotal',
self._XML_NAMESPACES).text,
'failed_count': result.find('mt:numberComponentErrors',
self._XML_NAMESPACES).text,
'deployed_count': result.find('mt:numberComponentsDeployed',
self._XML_NAMESPACES).text,
'errors': deployment_errors
}
unit_test_detail = {
'total_count': result.find('mt:numberTestsTotal',
self._XML_NAMESPACES).text,
'failed_count': result.find('mt:numberTestErrors',
self._XML_NAMESPACES).text,
'completed_count': result.find('mt:numberTestsCompleted',
self._XML_NAMESPACES).text,
'errors': unit_test_errors
}
return state, state_detail, deployment_detail, unit_test_detail
def download_unit_test_logs(self, async_process_id):
""" Downloads Apex logs for unit tests executed during specified
deployment """
result = self._retrieve_deploy_result(async_process_id)
print("Results: %s" % ET.tostring(result, encoding="us-ascii",
method="xml"))
def retrieve(self, async_process_id, **kwargs):
""" Submits retrieve request """
# Compose unpackaged XML
client = kwargs.get('client', 'simple_salesforce_metahelper')
single_package = kwargs.get('single_package', True)
if not isinstance(single_package, bool):
raise TypeError('single_package must be bool')
unpackaged = ''
if kwargs.get('unpackaged'):
for metadata_type in kwargs.get('unpackaged'):
if isinstance(metadata_type, dict):
members = kwargs.get('unpackaged')[metadata_type]
unpackaged += '<types>'
for member in members:
unpackaged += '<members>{member}</members>'.format(
member=member)
unpackaged += '<name>{metadata_type}</name></types>'.format(
metadata_type=metadata_type)
else:
raise TypeError('unpackaged metadata types must be a dict')
# Compose retrieve request XML
attributes = {
'client': client,
'sessionId': self._session_id,
'apiVersion': self._api_version,
'singlePackage': single_package,
'unpackaged': unpackaged
}
request = RETRIEVE_MSG.format(**attributes)
# Submit request
headers = {'Content-type': 'text/xml', 'SOAPAction': 'retrieve'}
res = call_salesforce(
url=self.metadata_url + 'deployRequest/' + async_process_id,
method='POST',
session=self.session,
headers=self.headers,
additional_headers=headers,
data=request)
# Parse results to get async Id and status
async_process_id = ET.fromstring(res.text).find(
'soapenv:Body/mt:retrieveResponse/mt:result/mt:id',
self._XML_NAMESPACES).text
state = ET.fromstring(res.text).find(
'soapenv:Body/mt:retrieveResponse/mt:result/mt:state',
self._XML_NAMESPACES).text
return async_process_id, state
    def _retrieve_retrieve_result(self, async_process_id, include_zip, **kwargs):
""" Retrieves status for specified retrieval id """
client = kwargs.get('client', 'simple_salesforce_metahelper')
attributes = {
'client': client,
'sessionId': self._session_id,
'asyncProcessId': async_process_id,
'includeZip': include_zip
}
mt_request = CHECK_RETRIEVE_STATUS_MSG.format(**attributes)
headers = {
'Content-type': 'text/xml', 'SOAPAction': 'checkRetrieveStatus'
}
res = call_salesforce(
url=self.metadata_url + 'deployRequest/' + async_process_id,
method='POST',
session=self.session,
headers=self.headers,
additional_headers=headers,
data=mt_request)
root = ET.fromstring(res.text)
result = root.find(
'soapenv:Body/mt:checkRetrieveStatusResponse/mt:result',
self._XML_NAMESPACES)
if result is None:
raise Exception("Result node could not be found: %s" % res.text)
return result
def retrieve_zip(self, async_process_id, **kwargs):
""" Retrieves ZIP file """
result = self._retrieve_retrieve_result(async_process_id, 'true',
**kwargs)
state = result.find('mt:status', self._XML_NAMESPACES).text
error_message = result.find('mt:errorMessage', self._XML_NAMESPACES)
if error_message is not None:
error_message = error_message.text
# Check if there are any messages
messages = []
message_list = result.findall('mt:details/mt:messages',
self._XML_NAMESPACES)
for message in message_list:
messages.append({
'file': message.find('mt:fileName', self._XML_NAMESPACES).text,
'message': message.find('mt:problem', self._XML_NAMESPACES).text
})
# Retrieve base64 encoded ZIP file
zipfile_base64 = result.find('mt:zipFile', self._XML_NAMESPACES).text
zipfile = b64decode(zipfile_base64)
return state, error_message, messages, zipfile
def check_retrieve_status(self, async_process_id, **kwargs):
""" Checks whether retrieval succeeded """
result = self._retrieve_retrieve_result(async_process_id, 'false',
**kwargs)
state = result.find('mt:status', self._XML_NAMESPACES).text
error_message = result.find('mt:errorMessage', self._XML_NAMESPACES)
if error_message is not None:
error_message = error_message.text
# Check if there are any messages
messages = []
message_list = result.findall('mt:details/mt:messages',
self._XML_NAMESPACES)
for message in message_list:
messages.append({
'file': message.find('mt:fileName', self._XML_NAMESPACES).text,
'message': message.find('mt:problem', self._XML_NAMESPACES).text
})
return state, error_message, messages
# Source file boundary: zerver/lib/actions.py from ossd-sp22/zulip
# (commit ceb9dd5854186832d55066fa52378aec6160433c, Apache-2.0 license)
import datetime
import hashlib
import itertools
import logging
import os
import time
from collections import defaultdict
from dataclasses import asdict, dataclass, field
from operator import itemgetter
from typing import (
IO,
AbstractSet,
Any,
Callable,
Collection,
Dict,
Iterable,
List,
Mapping,
Optional,
Sequence,
Set,
Tuple,
Union,
)
import django.db.utils
import orjson
from django.conf import settings
from django.contrib.contenttypes.models import ContentType
from django.core.exceptions import ValidationError
from django.db import IntegrityError, connection, transaction
from django.db.models import Count, Exists, F, OuterRef, Q, Sum
from django.db.models.query import QuerySet
from django.utils.html import escape
from django.utils.timezone import now as timezone_now
from django.utils.translation import gettext as _
from django.utils.translation import gettext_lazy
from django.utils.translation import override as override_language
from psycopg2.extras import execute_values
from psycopg2.sql import SQL
from typing_extensions import TypedDict
from analytics.lib.counts import COUNT_STATS, do_increment_logging_stat
from analytics.models import RealmCount
from confirmation import settings as confirmation_settings
from confirmation.models import (
Confirmation,
confirmation_url,
create_confirmation_link,
generate_key,
)
from zerver.decorator import statsd_increment
from zerver.lib import retention as retention
from zerver.lib.addressee import Addressee
from zerver.lib.alert_words import (
add_user_alert_words,
get_alert_word_automaton,
remove_user_alert_words,
)
from zerver.lib.avatar import avatar_url, avatar_url_from_dict
from zerver.lib.bot_config import ConfigError, get_bot_config, get_bot_configs, set_bot_config
from zerver.lib.bulk_create import bulk_create_users
from zerver.lib.cache import (
bot_dict_fields,
cache_delete,
cache_delete_many,
cache_set,
cache_set_many,
cache_with_key,
delete_user_profile_caches,
display_recipient_cache_key,
flush_user_profile,
get_stream_cache_key,
to_dict_cache_key_id,
user_profile_by_api_key_cache_key,
user_profile_delivery_email_cache_key,
)
from zerver.lib.create_user import create_user, get_display_email_address
from zerver.lib.email_mirror_helpers import encode_email_address
from zerver.lib.email_notifications import enqueue_welcome_emails
from zerver.lib.email_validation import (
email_reserved_for_system_bots_error,
get_existing_user_errors,
get_realm_email_validator,
validate_email_is_valid,
)
from zerver.lib.emoji import check_emoji_request, emoji_name_to_emoji_code, get_emoji_file_name
from zerver.lib.exceptions import (
InvitationError,
JsonableError,
MarkdownRenderingException,
StreamDoesNotExistError,
StreamWithIDDoesNotExistError,
ZephyrMessageAlreadySentException,
)
from zerver.lib.export import get_realm_exports_serialized
from zerver.lib.external_accounts import DEFAULT_EXTERNAL_ACCOUNTS
from zerver.lib.hotspots import get_next_hotspots
from zerver.lib.i18n import get_language_name
from zerver.lib.markdown import MessageRenderingResult, topic_links
from zerver.lib.markdown import version as markdown_version
from zerver.lib.mention import MentionBackend, MentionData, silent_mention_syntax_for_user
from zerver.lib.message import (
MessageDict,
SendMessageRequest,
access_message,
bulk_access_messages,
format_unread_message_details,
get_last_message_id,
get_raw_unread_data,
normalize_body,
render_markdown,
truncate_topic,
update_first_visible_message_id,
wildcard_mention_allowed,
)
from zerver.lib.notification_data import UserMessageNotificationsData, get_user_group_mentions_data
from zerver.lib.pysa import mark_sanitized
from zerver.lib.queue import queue_json_publish
from zerver.lib.realm_icon import realm_icon_url
from zerver.lib.realm_logo import get_realm_logo_data
from zerver.lib.retention import move_messages_to_archive
from zerver.lib.send_email import (
FromAddress,
clear_scheduled_emails,
clear_scheduled_invitation_emails,
send_email,
send_email_to_admins,
)
from zerver.lib.server_initialization import create_internal_realm, server_initialized
from zerver.lib.sessions import delete_user_sessions
from zerver.lib.storage import static_path
from zerver.lib.stream_color import pick_colors
from zerver.lib.stream_subscription import (
SubInfo,
bulk_get_private_peers,
bulk_get_subscriber_peer_info,
get_active_subscriptions_for_stream_id,
get_bulk_stream_subscriber_info,
get_stream_subscriptions_for_user,
get_subscribed_stream_ids_for_user,
get_subscriptions_for_send_message,
get_used_colors_for_user_ids,
get_user_ids_for_streams,
num_subscribers_for_stream_id,
subscriber_ids_with_stream_history_access,
)
from zerver.lib.stream_topic import StreamTopicTarget
from zerver.lib.stream_traffic import get_average_weekly_stream_traffic, get_streams_traffic
from zerver.lib.streams import (
access_stream_by_id,
access_stream_for_send_message,
can_access_stream_user_ids,
check_stream_access_based_on_stream_post_policy,
create_stream_if_needed,
get_default_value_for_history_public_to_subscribers,
get_stream_permission_policy_name,
get_web_public_streams_queryset,
render_stream_description,
send_stream_creation_event,
subscribed_to_stream,
)
from zerver.lib.string_validation import check_stream_name, check_stream_topic
from zerver.lib.subscription_info import build_stream_dict_for_never_sub, build_stream_dict_for_sub
from zerver.lib.timestamp import datetime_to_timestamp, timestamp_to_datetime
from zerver.lib.timezone import canonicalize_timezone
from zerver.lib.topic import (
ORIG_TOPIC,
RESOLVED_TOPIC_PREFIX,
TOPIC_LINKS,
TOPIC_NAME,
filter_by_exact_message_topic,
filter_by_topic_name_via_message,
messages_for_topic,
save_message_for_edit_use_case,
update_edit_history,
update_messages_for_topic_edit,
)
from zerver.lib.types import (
EditHistoryEvent,
NeverSubscribedStreamDict,
ProfileDataElementValue,
ProfileFieldData,
RawStreamDict,
RawSubscriptionDict,
RealmPlaygroundDict,
SubscriptionInfo,
SubscriptionStreamDict,
UnspecifiedValue,
)
from zerver.lib.upload import (
claim_attachment,
delete_avatar_image,
delete_export_tarball,
delete_message_image,
upload_emoji_image,
)
from zerver.lib.user_groups import (
access_user_group_by_id,
create_system_user_groups_for_realm,
create_user_group,
get_system_user_group_for_user,
)
from zerver.lib.user_mutes import add_user_mute, get_muting_users, get_user_mutes
from zerver.lib.user_status import update_user_status
from zerver.lib.user_topics import (
add_topic_mute,
get_topic_mutes,
get_users_muting_topic,
remove_topic_mute,
)
from zerver.lib.users import (
check_bot_name_available,
check_full_name,
format_user_row,
get_api_key,
user_profile_to_user_row,
)
from zerver.lib.utils import generate_api_key, log_statsd_event
from zerver.lib.validator import check_widget_content
from zerver.lib.widget import do_widget_post_save_actions, is_widget_message
from zerver.models import (
Attachment,
Client,
CustomProfileField,
CustomProfileFieldValue,
DefaultStream,
DefaultStreamGroup,
Draft,
EmailChangeStatus,
Message,
MultiuseInvite,
MutedUser,
PreregistrationUser,
Reaction,
Realm,
RealmAuditLog,
RealmDomain,
RealmEmoji,
RealmFilter,
RealmPlayground,
RealmUserDefault,
Recipient,
ScheduledEmail,
ScheduledMessage,
ScheduledMessageNotificationEmail,
Service,
Stream,
SubMessage,
Subscription,
UserActivity,
UserActivityInterval,
UserGroup,
UserGroupMembership,
UserHotspot,
UserMessage,
UserPresence,
UserProfile,
UserStatus,
UserTopic,
active_non_guest_user_ids,
active_user_ids,
custom_profile_fields_for_realm,
filter_to_valid_prereg_users,
get_active_streams,
get_bot_dicts_in_realm,
get_bot_services,
get_client,
get_default_stream_groups,
get_fake_email_domain,
get_huddle_recipient,
get_huddle_user_ids,
get_old_unclaimed_attachments,
get_realm,
get_realm_domains,
get_realm_playgrounds,
get_stream,
get_stream_by_id_in_realm,
get_system_bot,
get_user_by_delivery_email,
get_user_by_id_in_realm_including_cross_realm,
get_user_profile_by_id,
is_cross_realm_bot_email,
linkifiers_for_realm,
query_for_ids,
realm_filters_for_realm,
validate_attachment_request,
)
from zerver.tornado.django_api import send_event
if settings.BILLING_ENABLED:
from corporate.lib.stripe import (
downgrade_now_without_creating_additional_invoices,
update_license_ledger_if_needed,
)
ONBOARDING_TOTAL_MESSAGES = 1000
ONBOARDING_UNREAD_MESSAGES = 20
ONBOARDING_RECENT_TIMEDELTA = datetime.timedelta(weeks=1)
def create_historical_user_messages(*, user_id: int, message_ids: List[int]) -> None:
# Users can see and interact with messages sent to streams with
# public history for which they do not have a UserMessage because
# they were not a subscriber at the time the message was sent.
# In order to add emoji reactions or mutate message flags for
# those messages, we create UserMessage objects for those messages;
# these have the special historical flag which keeps track of the
# fact that the user did not receive the message at the time it was sent.
for message_id in message_ids:
UserMessage.objects.create(
user_profile_id=user_id,
message_id=message_id,
flags=UserMessage.flags.historical | UserMessage.flags.read,
)
def subscriber_info(user_id: int) -> Dict[str, Any]:
return {"id": user_id, "flags": ["read"]}
def bot_owner_user_ids(user_profile: UserProfile) -> Set[int]:
is_private_bot = (
user_profile.default_sending_stream
and user_profile.default_sending_stream.invite_only
or user_profile.default_events_register_stream
and user_profile.default_events_register_stream.invite_only
)
if is_private_bot:
return {user_profile.bot_owner_id}
else:
users = {user.id for user in user_profile.realm.get_human_admin_users()}
users.add(user_profile.bot_owner_id)
return users
def realm_user_count(realm: Realm) -> int:
return UserProfile.objects.filter(realm=realm, is_active=True, is_bot=False).count()
def realm_user_count_by_role(realm: Realm) -> Dict[str, Any]:
human_counts = {
str(UserProfile.ROLE_REALM_ADMINISTRATOR): 0,
str(UserProfile.ROLE_REALM_OWNER): 0,
str(UserProfile.ROLE_MODERATOR): 0,
str(UserProfile.ROLE_MEMBER): 0,
str(UserProfile.ROLE_GUEST): 0,
}
for value_dict in list(
UserProfile.objects.filter(realm=realm, is_bot=False, is_active=True)
.values("role")
.annotate(Count("role"))
):
human_counts[str(value_dict["role"])] = value_dict["role__count"]
bot_count = UserProfile.objects.filter(realm=realm, is_bot=True, is_active=True).count()
return {
RealmAuditLog.ROLE_COUNT_HUMANS: human_counts,
RealmAuditLog.ROLE_COUNT_BOTS: bot_count,
}
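# Illustrative note (not part of the original module): realm_user_count_by_role()
# returns a dict shaped like
#     {RealmAuditLog.ROLE_COUNT_HUMANS: {str(UserProfile.ROLE_MEMBER): 5, ...},
#      RealmAuditLog.ROLE_COUNT_BOTS: 3}
# i.e. human counts keyed by stringified role values, plus a flat count of
# active bots.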
def get_signups_stream(realm: Realm) -> Stream:
# This one-liner helps us work around a lint rule.
return get_stream("signups", realm)
def send_message_to_signup_notification_stream(
sender: UserProfile, realm: Realm, message: str, topic_name: str = _("signups")
) -> None:
signup_notifications_stream = realm.get_signup_notifications_stream()
if signup_notifications_stream is None:
return
with override_language(realm.default_language):
internal_send_stream_message(sender, signup_notifications_stream, topic_name, message)
def notify_new_user(user_profile: UserProfile) -> None:
user_count = realm_user_count(user_profile.realm)
sender_email = settings.NOTIFICATION_BOT
sender = get_system_bot(sender_email, user_profile.realm_id)
is_first_user = user_count == 1
if not is_first_user:
message = _("{user} just signed up for Zulip. (total: {user_count})").format(
user=silent_mention_syntax_for_user(user_profile), user_count=user_count
)
if settings.BILLING_ENABLED:
from corporate.lib.registration import generate_licenses_low_warning_message_if_required
licenses_low_warning_message = generate_licenses_low_warning_message_if_required(
user_profile.realm
)
if licenses_low_warning_message is not None:
message += "\n"
message += licenses_low_warning_message
send_message_to_signup_notification_stream(sender, user_profile.realm, message)
# We also send a notification to the Zulip administrative realm
admin_realm = get_realm(settings.SYSTEM_BOT_REALM)
admin_realm_sender = get_system_bot(sender_email, admin_realm.id)
try:
# Check whether the stream exists
signups_stream = get_signups_stream(admin_realm)
# We intentionally use the same strings as above to avoid translation burden.
message = _("{user} just signed up for Zulip. (total: {user_count})").format(
user=f"{user_profile.full_name} <`{user_profile.email}`>", user_count=user_count
)
internal_send_stream_message(
admin_realm_sender, signups_stream, user_profile.realm.display_subdomain, message
)
except Stream.DoesNotExist:
# If the signups stream hasn't been created in the admin
# realm, don't auto-create it to send to it; just do nothing.
pass
def notify_invites_changed(realm: Realm) -> None:
event = dict(type="invites_changed")
admin_ids = [user.id for user in realm.get_admin_users_and_bots()]
send_event(realm, event, admin_ids)
def add_new_user_history(user_profile: UserProfile, streams: Iterable[Stream]) -> None:
"""Give you the last ONBOARDING_TOTAL_MESSAGES messages on your public
streams, so you have something to look at in your home view once
you finish the tutorial. The most recent ONBOARDING_UNREAD_MESSAGES
are marked unread.
"""
one_week_ago = timezone_now() - ONBOARDING_RECENT_TIMEDELTA
recipient_ids = [stream.recipient_id for stream in streams if not stream.invite_only]
recent_messages = Message.objects.filter(
recipient_id__in=recipient_ids, date_sent__gt=one_week_ago
).order_by("-id")
message_ids_to_use = list(
reversed(recent_messages.values_list("id", flat=True)[0:ONBOARDING_TOTAL_MESSAGES])
)
if len(message_ids_to_use) == 0:
return
# Handle the race condition where a message arrives between
# bulk_add_subscriptions above and the Message query just above
already_ids = set(
UserMessage.objects.filter(
message_id__in=message_ids_to_use, user_profile=user_profile
).values_list("message_id", flat=True)
)
# Mark the newest ONBOARDING_UNREAD_MESSAGES as unread.
marked_unread = 0
ums_to_create = []
for message_id in reversed(message_ids_to_use):
if message_id in already_ids:
continue
um = UserMessage(user_profile=user_profile, message_id=message_id)
if marked_unread < ONBOARDING_UNREAD_MESSAGES:
marked_unread += 1
else:
um.flags = UserMessage.flags.read
ums_to_create.append(um)
UserMessage.objects.bulk_create(reversed(ums_to_create))
# Does the processing for a new user account:
# * Subscribes to default/invitation streams
# * Fills in some recent historical messages
# * Notifies other users in realm and Zulip about the signup
# * Deactivates PreregistrationUser objects
def process_new_human_user(
user_profile: UserProfile,
prereg_user: Optional[PreregistrationUser] = None,
default_stream_groups: Sequence[DefaultStreamGroup] = [],
realm_creation: bool = False,
) -> None:
realm = user_profile.realm
mit_beta_user = realm.is_zephyr_mirror_realm
if prereg_user is not None:
streams: List[Stream] = list(prereg_user.streams.all())
acting_user: Optional[UserProfile] = prereg_user.referred_by
else:
streams = []
acting_user = None
# If the user's invitation didn't explicitly list some streams, we
# add the default streams
if len(streams) == 0:
streams = get_default_subs(user_profile)
for default_stream_group in default_stream_groups:
default_stream_group_streams = default_stream_group.streams.all()
for stream in default_stream_group_streams:
if stream not in streams:
streams.append(stream)
bulk_add_subscriptions(
realm,
streams,
[user_profile],
from_user_creation=True,
acting_user=acting_user,
)
add_new_user_history(user_profile, streams)
# mit_beta_users don't have a referred_by field
if (
not mit_beta_user
and prereg_user is not None
and prereg_user.referred_by is not None
and prereg_user.referred_by.is_active
):
# This is a cross-realm private message.
with override_language(prereg_user.referred_by.default_language):
internal_send_private_message(
get_system_bot(settings.NOTIFICATION_BOT, prereg_user.referred_by.realm_id),
prereg_user.referred_by,
_("{user} accepted your invitation to join Zulip!").format(
user=f"{user_profile.full_name} <`{user_profile.email}`>"
),
)
revoke_preregistration_users(user_profile, prereg_user, realm_creation)
if not realm_creation and prereg_user is not None and prereg_user.referred_by is not None:
notify_invites_changed(user_profile.realm)
notify_new_user(user_profile)
# Clear any scheduled invitation emails to prevent them
# from being sent after the user is created.
clear_scheduled_invitation_emails(user_profile.delivery_email)
if realm.send_welcome_emails:
enqueue_welcome_emails(user_profile, realm_creation)
# We have an import loop here; it's intentional, because we want
# to keep all the onboarding code in zerver/lib/onboarding.py.
from zerver.lib.onboarding import send_initial_pms
send_initial_pms(user_profile)
def revoke_preregistration_users(
created_user_profile: UserProfile,
used_preregistration_user: Optional[PreregistrationUser],
realm_creation: bool,
) -> None:
if used_preregistration_user is None:
assert not realm_creation, "realm_creation should only happen with a PreregistrationUser"
if used_preregistration_user is not None:
used_preregistration_user.status = confirmation_settings.STATUS_ACTIVE
used_preregistration_user.save(update_fields=["status"])
# In the special case of realm creation, there can be no additional PreregistrationUser
# for us to want to modify - because other realm_creation PreregistrationUsers should be
# left usable for creating different realms.
if realm_creation:
return
# Mark any other PreregistrationUsers in the realm that are STATUS_ACTIVE as
# revoked so we can keep track of the PreregistrationUser we
# actually used for analytics.
if used_preregistration_user is not None:
PreregistrationUser.objects.filter(
email__iexact=created_user_profile.delivery_email, realm=created_user_profile.realm
).exclude(id=used_preregistration_user.id).update(
status=confirmation_settings.STATUS_REVOKED
)
else:
PreregistrationUser.objects.filter(
email__iexact=created_user_profile.delivery_email, realm=created_user_profile.realm
).update(status=confirmation_settings.STATUS_REVOKED)
def notify_created_user(user_profile: UserProfile) -> None:
user_row = user_profile_to_user_row(user_profile)
person = format_user_row(
user_profile.realm,
user_profile,
user_row,
# Since we don't know what the client
# supports at this point in the code, we
# just assume client_gravatar and
# user_avatar_url_field_optional = False :(
client_gravatar=False,
user_avatar_url_field_optional=False,
# We assume there's no custom profile
# field data for a new user; initial
# values are expected to be added in a
# later event.
custom_profile_field_data={},
)
event: Dict[str, Any] = dict(type="realm_user", op="add", person=person)
send_event(user_profile.realm, event, active_user_ids(user_profile.realm_id))
def created_bot_event(user_profile: UserProfile) -> Dict[str, Any]:
def stream_name(stream: Optional[Stream]) -> Optional[str]:
if not stream:
return None
return stream.name
default_sending_stream_name = stream_name(user_profile.default_sending_stream)
default_events_register_stream_name = stream_name(user_profile.default_events_register_stream)
bot = dict(
email=user_profile.email,
user_id=user_profile.id,
full_name=user_profile.full_name,
bot_type=user_profile.bot_type,
is_active=user_profile.is_active,
api_key=get_api_key(user_profile),
default_sending_stream=default_sending_stream_name,
default_events_register_stream=default_events_register_stream_name,
default_all_public_streams=user_profile.default_all_public_streams,
avatar_url=avatar_url(user_profile),
services=get_service_dicts_for_bot(user_profile.id),
)
# Set the owner key only when the bot has an owner.
# The default bots don't have an owner. So don't
# set the owner key while reactivating them.
if user_profile.bot_owner is not None:
bot["owner_id"] = user_profile.bot_owner.id
return dict(type="realm_bot", op="add", bot=bot)
def notify_created_bot(user_profile: UserProfile) -> None:
event = created_bot_event(user_profile)
send_event(user_profile.realm, event, bot_owner_user_ids(user_profile))
def create_users(
realm: Realm, name_list: Iterable[Tuple[str, str]], bot_type: Optional[int] = None
) -> None:
user_set = set()
for full_name, email in name_list:
user_set.add((email, full_name, True))
bulk_create_users(realm, user_set, bot_type)
def do_create_user(
email: str,
password: Optional[str],
realm: Realm,
full_name: str,
bot_type: Optional[int] = None,
role: Optional[int] = None,
bot_owner: Optional[UserProfile] = None,
tos_version: Optional[str] = None,
timezone: str = "",
avatar_source: str = UserProfile.AVATAR_FROM_GRAVATAR,
default_sending_stream: Optional[Stream] = None,
default_events_register_stream: Optional[Stream] = None,
default_all_public_streams: Optional[bool] = None,
prereg_user: Optional[PreregistrationUser] = None,
default_stream_groups: Sequence[DefaultStreamGroup] = [],
source_profile: Optional[UserProfile] = None,
realm_creation: bool = False,
*,
acting_user: Optional[UserProfile],
enable_marketing_emails: bool = True,
) -> UserProfile:
with transaction.atomic():
user_profile = create_user(
email=email,
password=password,
realm=realm,
full_name=full_name,
role=role,
bot_type=bot_type,
bot_owner=bot_owner,
tos_version=tos_version,
timezone=timezone,
avatar_source=avatar_source,
default_sending_stream=default_sending_stream,
default_events_register_stream=default_events_register_stream,
default_all_public_streams=default_all_public_streams,
source_profile=source_profile,
enable_marketing_emails=enable_marketing_emails,
)
event_time = user_profile.date_joined
if not acting_user:
acting_user = user_profile
RealmAuditLog.objects.create(
realm=user_profile.realm,
acting_user=acting_user,
modified_user=user_profile,
event_type=RealmAuditLog.USER_CREATED,
event_time=event_time,
extra_data=orjson.dumps(
{
RealmAuditLog.ROLE_COUNT: realm_user_count_by_role(user_profile.realm),
}
).decode(),
)
if realm_creation:
# If this user just created a realm, make sure they are
# properly tagged as the creator of the realm.
realm_creation_audit_log = (
RealmAuditLog.objects.filter(event_type=RealmAuditLog.REALM_CREATED, realm=realm)
.order_by("id")
.last()
)
assert realm_creation_audit_log is not None
realm_creation_audit_log.acting_user = user_profile
realm_creation_audit_log.save(update_fields=["acting_user"])
do_increment_logging_stat(
user_profile.realm,
COUNT_STATS["active_users_log:is_bot:day"],
user_profile.is_bot,
event_time,
)
if settings.BILLING_ENABLED:
update_license_ledger_if_needed(user_profile.realm, event_time)
system_user_group = get_system_user_group_for_user(user_profile)
UserGroupMembership.objects.create(user_profile=user_profile, user_group=system_user_group)
if user_profile.role == UserProfile.ROLE_MEMBER and not user_profile.is_provisional_member:
full_members_system_group = UserGroup.objects.get(
name="@role:fullmembers", realm=user_profile.realm, is_system_group=True
)
UserGroupMembership.objects.create(
user_profile=user_profile, user_group=full_members_system_group
)
# Note that for bots, the caller will send an additional event
# with bot-specific info like services.
notify_created_user(user_profile)
do_send_user_group_members_update_event("add_members", system_user_group, [user_profile.id])
if user_profile.role == UserProfile.ROLE_MEMBER and not user_profile.is_provisional_member:
do_send_user_group_members_update_event(
"add_members", full_members_system_group, [user_profile.id]
)
if bot_type is None:
process_new_human_user(
user_profile,
prereg_user=prereg_user,
default_stream_groups=default_stream_groups,
realm_creation=realm_creation,
)
if realm_creation:
assert realm.signup_notifications_stream is not None
bulk_add_subscriptions(
realm, [realm.signup_notifications_stream], [user_profile], acting_user=None
)
from zerver.lib.onboarding import send_initial_realm_messages
send_initial_realm_messages(realm)
return user_profile
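# Illustrative sketch (not part of the original module): a minimal caller of
# do_create_user(). The subdomain, email and name below are hypothetical values.
def _example_do_create_user_sketch() -> UserProfile:  # pragma: no cover
    realm = get_realm("example-subdomain")
    return do_create_user(
        email="[email protected]",
        password=None,  # e.g. when the user will authenticate via an external backend
        realm=realm,
        full_name="New Person",
        acting_user=None,
    )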
def do_activate_mirror_dummy_user(
user_profile: UserProfile, *, acting_user: Optional[UserProfile]
) -> None:
"""Called to have a user "take over" a "mirror dummy" user
(i.e. is_mirror_dummy=True) account when they sign up with the
same email address.
Essentially, the result should be as though we had created the
UserProfile just now with do_create_user, except that the mirror
dummy user may appear as the recipient or sender of messages from
before their account was fully created.
TODO: This function likely has bugs resulting from this being a
parallel code path to do_create_user; e.g. it likely does not
handle preferences or default streams properly.
"""
with transaction.atomic():
change_user_is_active(user_profile, True)
user_profile.is_mirror_dummy = False
user_profile.set_unusable_password()
user_profile.date_joined = timezone_now()
user_profile.tos_version = settings.TERMS_OF_SERVICE_VERSION
user_profile.save(
update_fields=["date_joined", "password", "is_mirror_dummy", "tos_version"]
)
event_time = user_profile.date_joined
RealmAuditLog.objects.create(
realm=user_profile.realm,
modified_user=user_profile,
acting_user=acting_user,
event_type=RealmAuditLog.USER_ACTIVATED,
event_time=event_time,
extra_data=orjson.dumps(
{
RealmAuditLog.ROLE_COUNT: realm_user_count_by_role(user_profile.realm),
}
).decode(),
)
do_increment_logging_stat(
user_profile.realm,
COUNT_STATS["active_users_log:is_bot:day"],
user_profile.is_bot,
event_time,
)
if settings.BILLING_ENABLED:
update_license_ledger_if_needed(user_profile.realm, event_time)
notify_created_user(user_profile)
def do_reactivate_user(user_profile: UserProfile, *, acting_user: Optional[UserProfile]) -> None:
"""Reactivate a user that had previously been deactivated"""
with transaction.atomic():
change_user_is_active(user_profile, True)
event_time = timezone_now()
RealmAuditLog.objects.create(
realm=user_profile.realm,
modified_user=user_profile,
acting_user=acting_user,
event_type=RealmAuditLog.USER_REACTIVATED,
event_time=event_time,
extra_data=orjson.dumps(
{
RealmAuditLog.ROLE_COUNT: realm_user_count_by_role(user_profile.realm),
}
).decode(),
)
do_increment_logging_stat(
user_profile.realm,
COUNT_STATS["active_users_log:is_bot:day"],
user_profile.is_bot,
event_time,
)
if settings.BILLING_ENABLED:
update_license_ledger_if_needed(user_profile.realm, event_time)
notify_created_user(user_profile)
if user_profile.is_bot:
notify_created_bot(user_profile)
subscribed_recipient_ids = Subscription.objects.filter(
user_profile_id=user_profile.id, active=True, recipient__type=Recipient.STREAM
).values_list("recipient__type_id", flat=True)
subscribed_streams = Stream.objects.filter(id__in=subscribed_recipient_ids, deactivated=False)
subscriber_peer_info = bulk_get_subscriber_peer_info(
realm=user_profile.realm,
streams=subscribed_streams,
)
altered_user_dict: Dict[int, Set[int]] = defaultdict(set)
for stream in subscribed_streams:
altered_user_dict[stream.id] = {user_profile.id}
stream_dict = {stream.id: stream for stream in subscribed_streams}
send_peer_subscriber_events(
op="peer_add",
realm=user_profile.realm,
altered_user_dict=altered_user_dict,
stream_dict=stream_dict,
private_peer_dict=subscriber_peer_info.private_peer_dict,
)
def active_humans_in_realm(realm: Realm) -> Sequence[UserProfile]:
return UserProfile.objects.filter(realm=realm, is_active=True, is_bot=False)
@transaction.atomic(savepoint=False)
def update_users_in_full_members_system_group(
realm: Realm, affected_user_ids: Sequence[int] = []
) -> None:
full_members_system_group = UserGroup.objects.get(
realm=realm, name="@role:fullmembers", is_system_group=True
)
members_system_group = UserGroup.objects.get(
realm=realm, name="@role:members", is_system_group=True
)
full_member_group_users: List[Dict[str, Union[int, datetime.datetime]]] = list()
member_group_users: List[Dict[str, Union[int, datetime.datetime]]] = list()
if affected_user_ids:
full_member_group_users = list(
full_members_system_group.direct_members.filter(id__in=affected_user_ids).values(
"id", "role", "date_joined"
)
)
member_group_users = list(
members_system_group.direct_members.filter(id__in=affected_user_ids).values(
"id", "role", "date_joined"
)
)
else:
full_member_group_users = list(
full_members_system_group.direct_members.all().values("id", "role", "date_joined")
)
member_group_users = list(
members_system_group.direct_members.all().values("id", "role", "date_joined")
)
def is_provisional_member(user: Dict[str, Union[int, datetime.datetime]]) -> bool:
diff = (timezone_now() - user["date_joined"]).days
if diff < realm.waiting_period_threshold:
return True
return False
old_full_members = [
user
for user in full_member_group_users
if is_provisional_member(user) or user["role"] != UserProfile.ROLE_MEMBER
]
full_member_group_user_ids = [user["id"] for user in full_member_group_users]
members_excluding_full_members = [
user for user in member_group_users if user["id"] not in full_member_group_user_ids
]
new_full_members = [
user for user in members_excluding_full_members if not is_provisional_member(user)
]
old_full_member_ids = [user["id"] for user in old_full_members]
new_full_member_ids = [user["id"] for user in new_full_members]
if len(old_full_members) > 0:
remove_members_from_user_group(full_members_system_group, old_full_member_ids)
if len(new_full_members) > 0:
bulk_add_members_to_user_group(full_members_system_group, new_full_member_ids)
def promote_new_full_members() -> None:
for realm in Realm.objects.filter(deactivated=False).exclude(waiting_period_threshold=0):
update_users_in_full_members_system_group(realm)
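# Illustrative note (not part of the original module): with
# realm.waiting_period_threshold = 10, a member who joined 3 days ago is still
# "provisional" and is kept out of the @role:fullmembers system group; once 10
# or more days have passed, the next promote_new_full_members() run (or an
# explicit update_users_in_full_members_system_group() call) adds them to it.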
@transaction.atomic(savepoint=False)
def do_set_realm_property(
realm: Realm, name: str, value: Any, *, acting_user: Optional[UserProfile]
) -> None:
"""Takes in a realm object, the name of an attribute to update, the
value to update, and the user who initiated the update.
"""
property_type = Realm.property_types[name]
assert isinstance(
value, property_type
), f"Cannot update {name}: {value} is not an instance of {property_type}"
old_value = getattr(realm, name)
setattr(realm, name, value)
realm.save(update_fields=[name])
event = dict(
type="realm",
op="update",
property=name,
value=value,
)
transaction.on_commit(lambda: send_event(realm, event, active_user_ids(realm.id)))
event_time = timezone_now()
RealmAuditLog.objects.create(
realm=realm,
event_type=RealmAuditLog.REALM_PROPERTY_CHANGED,
event_time=event_time,
acting_user=acting_user,
extra_data=orjson.dumps(
{
RealmAuditLog.OLD_VALUE: old_value,
RealmAuditLog.NEW_VALUE: value,
"property": name,
}
).decode(),
)
if name == "email_address_visibility":
if Realm.EMAIL_ADDRESS_VISIBILITY_EVERYONE not in [old_value, value]:
# We use real email addresses on UserProfile.email only if
# EMAIL_ADDRESS_VISIBILITY_EVERYONE is configured, so changes
# between the other visibility values don't require updating
# that field, and we can save the work and return here.
return
user_profiles = UserProfile.objects.filter(realm=realm, is_bot=False)
for user_profile in user_profiles:
user_profile.email = get_display_email_address(user_profile)
UserProfile.objects.bulk_update(user_profiles, ["email"])
for user_profile in user_profiles:
transaction.on_commit(
lambda: flush_user_profile(sender=UserProfile, instance=user_profile)
)
# TODO: Design a bulk event for this or force-reload all clients
send_user_email_update_event(user_profile)
if name == "waiting_period_threshold":
update_users_in_full_members_system_group(realm)
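# Illustrative sketch (not part of the original module): updating a single realm
# property. "waiting_period_threshold" is used here because the function above
# special-cases it, so it is known to be a valid property name.
def _example_do_set_realm_property_sketch(realm: Realm) -> None:  # pragma: no cover
    do_set_realm_property(realm, "waiting_period_threshold", 10, acting_user=None)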
def do_set_realm_authentication_methods(
realm: Realm, authentication_methods: Dict[str, bool], *, acting_user: Optional[UserProfile]
) -> None:
old_value = realm.authentication_methods_dict()
with transaction.atomic():
for key, value in list(authentication_methods.items()):
index = getattr(realm.authentication_methods, key).number
realm.authentication_methods.set_bit(index, int(value))
realm.save(update_fields=["authentication_methods"])
updated_value = realm.authentication_methods_dict()
RealmAuditLog.objects.create(
realm=realm,
event_type=RealmAuditLog.REALM_PROPERTY_CHANGED,
event_time=timezone_now(),
acting_user=acting_user,
extra_data=orjson.dumps(
{
RealmAuditLog.OLD_VALUE: old_value,
RealmAuditLog.NEW_VALUE: updated_value,
"property": "authentication_methods",
}
).decode(),
)
event = dict(
type="realm",
op="update_dict",
property="default",
data=dict(authentication_methods=updated_value),
)
send_event(realm, event, active_user_ids(realm.id))
def do_set_realm_message_editing(
realm: Realm,
allow_message_editing: bool,
message_content_edit_limit_seconds: int,
edit_topic_policy: int,
*,
acting_user: Optional[UserProfile],
) -> None:
old_values = dict(
allow_message_editing=realm.allow_message_editing,
message_content_edit_limit_seconds=realm.message_content_edit_limit_seconds,
edit_topic_policy=realm.edit_topic_policy,
)
realm.allow_message_editing = allow_message_editing
realm.message_content_edit_limit_seconds = message_content_edit_limit_seconds
realm.edit_topic_policy = edit_topic_policy
event_time = timezone_now()
updated_properties = dict(
allow_message_editing=allow_message_editing,
message_content_edit_limit_seconds=message_content_edit_limit_seconds,
edit_topic_policy=edit_topic_policy,
)
with transaction.atomic():
for updated_property, updated_value in updated_properties.items():
if updated_value == old_values[updated_property]:
continue
RealmAuditLog.objects.create(
realm=realm,
event_type=RealmAuditLog.REALM_PROPERTY_CHANGED,
event_time=event_time,
acting_user=acting_user,
extra_data=orjson.dumps(
{
RealmAuditLog.OLD_VALUE: old_values[updated_property],
RealmAuditLog.NEW_VALUE: updated_value,
"property": updated_property,
}
).decode(),
)
realm.save(update_fields=list(updated_properties.keys()))
event = dict(
type="realm",
op="update_dict",
property="default",
data=updated_properties,
)
send_event(realm, event, active_user_ids(realm.id))
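# Illustrative sketch (not part of the original module): enabling message editing
# with a 10-minute content-edit window while leaving the topic-edit policy as-is.
def _example_do_set_realm_message_editing_sketch(realm: Realm) -> None:  # pragma: no cover
    do_set_realm_message_editing(
        realm,
        True,  # allow_message_editing
        600,  # message_content_edit_limit_seconds (10 minutes)
        realm.edit_topic_policy,  # keep the current topic-edit policy
        acting_user=None,
    )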
def do_set_realm_notifications_stream(
realm: Realm, stream: Optional[Stream], stream_id: int, *, acting_user: Optional[UserProfile]
) -> None:
old_value = realm.notifications_stream_id
realm.notifications_stream = stream
with transaction.atomic():
realm.save(update_fields=["notifications_stream"])
event_time = timezone_now()
RealmAuditLog.objects.create(
realm=realm,
event_type=RealmAuditLog.REALM_PROPERTY_CHANGED,
event_time=event_time,
acting_user=acting_user,
extra_data=orjson.dumps(
{
RealmAuditLog.OLD_VALUE: old_value,
RealmAuditLog.NEW_VALUE: stream_id,
"property": "notifications_stream",
}
).decode(),
)
event = dict(
type="realm",
op="update",
property="notifications_stream_id",
value=stream_id,
)
send_event(realm, event, active_user_ids(realm.id))
def do_set_realm_signup_notifications_stream(
realm: Realm, stream: Optional[Stream], stream_id: int, *, acting_user: Optional[UserProfile]
) -> None:
old_value = realm.signup_notifications_stream_id
realm.signup_notifications_stream = stream
with transaction.atomic():
realm.save(update_fields=["signup_notifications_stream"])
event_time = timezone_now()
RealmAuditLog.objects.create(
realm=realm,
event_type=RealmAuditLog.REALM_PROPERTY_CHANGED,
event_time=event_time,
acting_user=acting_user,
extra_data=orjson.dumps(
{
RealmAuditLog.OLD_VALUE: old_value,
RealmAuditLog.NEW_VALUE: stream_id,
"property": "signup_notifications_stream",
}
).decode(),
)
event = dict(
type="realm",
op="update",
property="signup_notifications_stream_id",
value=stream_id,
)
send_event(realm, event, active_user_ids(realm.id))
def do_set_realm_user_default_setting(
realm_user_default: RealmUserDefault,
name: str,
value: Any,
*,
acting_user: Optional[UserProfile],
) -> None:
old_value = getattr(realm_user_default, name)
realm = realm_user_default.realm
event_time = timezone_now()
with transaction.atomic(savepoint=False):
setattr(realm_user_default, name, value)
realm_user_default.save(update_fields=[name])
RealmAuditLog.objects.create(
realm=realm,
event_type=RealmAuditLog.REALM_DEFAULT_USER_SETTINGS_CHANGED,
event_time=event_time,
acting_user=acting_user,
extra_data=orjson.dumps(
{
RealmAuditLog.OLD_VALUE: old_value,
RealmAuditLog.NEW_VALUE: value,
"property": name,
}
).decode(),
)
event = dict(
type="realm_user_settings_defaults",
op="update",
property=name,
value=value,
)
send_event(realm, event, active_user_ids(realm.id))
def do_deactivate_realm(realm: Realm, *, acting_user: Optional[UserProfile]) -> None:
"""
Deactivate this realm. Do NOT deactivate the users -- we need to be able to
tell the difference between users that were intentionally deactivated,
e.g. by a realm admin, and users who can't currently use Zulip because their
realm has been deactivated.
"""
if realm.deactivated:
return
realm.deactivated = True
realm.save(update_fields=["deactivated"])
if settings.BILLING_ENABLED:
downgrade_now_without_creating_additional_invoices(realm)
event_time = timezone_now()
RealmAuditLog.objects.create(
realm=realm,
event_type=RealmAuditLog.REALM_DEACTIVATED,
event_time=event_time,
acting_user=acting_user,
extra_data=orjson.dumps(
{
RealmAuditLog.ROLE_COUNT: realm_user_count_by_role(realm),
}
).decode(),
)
ScheduledEmail.objects.filter(realm=realm).delete()
for user in active_humans_in_realm(realm):
# Don't deactivate the users, but do delete their sessions so they get
# bumped to the login screen, where they'll get a realm deactivation
# notice when they try to log in.
delete_user_sessions(user)
# This event will only ever be received by clients with an active
# longpoll connection, because by this point clients will be
# unable to authenticate again to their event queue (triggering an
# immediate reload into the page explaining the realm was
# deactivated). So the purpose of sending this is to flush all
# active longpoll connections for the realm.
event = dict(type="realm", op="deactivated", realm_id=realm.id)
send_event(realm, event, active_user_ids(realm.id))
def do_reactivate_realm(realm: Realm) -> None:
realm.deactivated = False
with transaction.atomic():
realm.save(update_fields=["deactivated"])
event_time = timezone_now()
RealmAuditLog.objects.create(
realm=realm,
event_type=RealmAuditLog.REALM_REACTIVATED,
event_time=event_time,
extra_data=orjson.dumps(
{
RealmAuditLog.ROLE_COUNT: realm_user_count_by_role(realm),
}
).decode(),
)
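# Illustrative sketch (not part of the original module): deactivating a realm and
# later reactivating it; the subdomain below is hypothetical.
def _example_realm_deactivation_sketch() -> None:  # pragma: no cover
    realm = get_realm("example-subdomain")
    do_deactivate_realm(realm, acting_user=None)
    # ... later, to undo:
    do_reactivate_realm(realm)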
def do_change_realm_subdomain(
realm: Realm, new_subdomain: str, *, acting_user: Optional[UserProfile]
) -> None:
"""Changing a realm's subdomain is a highly disruptive operation,
because all existing clients will need to be updated to point to
the new URL. Further, requests to fetch data from existing event
queues will fail with an authentication error when this change
happens (because the old subdomain is no longer associated with
the realm), making it hard for us to provide a graceful update
experience for clients.
"""
old_subdomain = realm.subdomain
old_uri = realm.uri
# If the realm had been a demo organization scheduled for
# deleting, clear that state.
realm.demo_organization_scheduled_deletion_date = None
realm.string_id = new_subdomain
with transaction.atomic():
realm.save(update_fields=["string_id", "demo_organization_scheduled_deletion_date"])
RealmAuditLog.objects.create(
realm=realm,
event_type=RealmAuditLog.REALM_SUBDOMAIN_CHANGED,
event_time=timezone_now(),
acting_user=acting_user,
extra_data={"old_subdomain": old_subdomain, "new_subdomain": new_subdomain},
)
# If a realm is being renamed multiple times, we should find all the placeholder
# realms and reset their deactivated_redirect field to point to the new realm uri
placeholder_realms = Realm.objects.filter(deactivated_redirect=old_uri, deactivated=True)
for placeholder_realm in placeholder_realms:
do_add_deactivated_redirect(placeholder_realm, realm.uri)
# The below block isn't executed in a transaction with the earlier code due to
# the functions called below being complex and potentially sending events,
# which we don't want to do in atomic blocks.
# When we change a realm's subdomain, the realm with the old subdomain is
# effectively deactivated. We create a deactivated realm using the old
# subdomain and set its deactivated redirect to new_subdomain so that we can
# tell users that the realm has been moved to a new subdomain.
placeholder_realm = do_create_realm(old_subdomain, realm.name)
do_deactivate_realm(placeholder_realm, acting_user=None)
do_add_deactivated_redirect(placeholder_realm, realm.uri)
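# Illustrative sketch (not part of the original module): renaming a realm's
# subdomain; both subdomains below are hypothetical.
def _example_change_subdomain_sketch() -> None:  # pragma: no cover
    realm = get_realm("old-subdomain")
    do_change_realm_subdomain(realm, "new-subdomain", acting_user=None)
    # Afterwards, a placeholder realm exists at "old-subdomain" whose
    # deactivated_redirect points at the new realm's URI.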
def do_add_deactivated_redirect(realm: Realm, redirect_url: str) -> None:
realm.deactivated_redirect = redirect_url
realm.save(update_fields=["deactivated_redirect"])
def do_scrub_realm(realm: Realm, *, acting_user: Optional[UserProfile]) -> None:
if settings.BILLING_ENABLED:
downgrade_now_without_creating_additional_invoices(realm)
users = UserProfile.objects.filter(realm=realm)
for user in users:
do_delete_messages_by_sender(user)
do_delete_avatar_image(user, acting_user=acting_user)
user.full_name = f"Scrubbed {generate_key()[:15]}"
scrubbed_email = f"scrubbed-{generate_key()[:15]}@{realm.host}"
user.email = scrubbed_email
user.delivery_email = scrubbed_email
user.save(update_fields=["full_name", "email", "delivery_email"])
do_remove_realm_custom_profile_fields(realm)
Attachment.objects.filter(realm=realm).delete()
RealmAuditLog.objects.create(
realm=realm,
event_time=timezone_now(),
acting_user=acting_user,
event_type=RealmAuditLog.REALM_SCRUBBED,
)
def do_delete_user(user_profile: UserProfile) -> None:
if user_profile.realm.is_zephyr_mirror_realm:
raise AssertionError("Deleting zephyr mirror users is not supported")
do_deactivate_user(user_profile, acting_user=None)
subscribed_huddle_recipient_ids = set(
Subscription.objects.filter(
user_profile=user_profile, recipient__type=Recipient.HUDDLE
).values_list("recipient_id", flat=True)
)
user_id = user_profile.id
realm = user_profile.realm
date_joined = user_profile.date_joined
personal_recipient = user_profile.recipient
with transaction.atomic():
user_profile.delete()
# Recipient objects don't get deleted through CASCADE, so we need to handle
# the user's personal recipient manually. This will also delete all Messages pointing
# to this recipient (all private messages sent to the user).
assert personal_recipient is not None
personal_recipient.delete()
replacement_user = create_user(
force_id=user_id,
email=f"deleteduser{user_id}@{get_fake_email_domain(realm)}",
password=None,
realm=realm,
full_name=f"Deleted User {user_id}",
active=False,
is_mirror_dummy=True,
force_date_joined=date_joined,
)
subs_to_recreate = [
Subscription(
user_profile=replacement_user,
recipient=recipient,
is_user_active=replacement_user.is_active,
)
for recipient in Recipient.objects.filter(id__in=subscribed_huddle_recipient_ids)
]
Subscription.objects.bulk_create(subs_to_recreate)
RealmAuditLog.objects.create(
realm=replacement_user.realm,
modified_user=replacement_user,
acting_user=None,
event_type=RealmAuditLog.USER_DELETED,
event_time=timezone_now(),
)
def change_user_is_active(user_profile: UserProfile, value: bool) -> None:
"""
Helper function for changing the .is_active field. Not meant as a standalone function
in production code as properly activating/deactivating users requires more steps.
This changes the is_active value and saves it, while ensuring
Subscription.is_user_active values are updated in the same db transaction.
"""
with transaction.atomic(savepoint=False):
user_profile.is_active = value
user_profile.save(update_fields=["is_active"])
Subscription.objects.filter(user_profile=user_profile).update(is_user_active=value)
def get_active_bots_owned_by_user(user_profile: UserProfile) -> QuerySet:
return UserProfile.objects.filter(is_bot=True, is_active=True, bot_owner=user_profile)
def do_deactivate_user(
user_profile: UserProfile, _cascade: bool = True, *, acting_user: Optional[UserProfile]
) -> None:
if not user_profile.is_active:
return
if _cascade:
# We need to deactivate bots before the target user, to ensure
# that a failure partway through this function cannot result
# in only the user being deactivated.
bot_profiles = get_active_bots_owned_by_user(user_profile)
for profile in bot_profiles:
do_deactivate_user(profile, _cascade=False, acting_user=acting_user)
with transaction.atomic():
if user_profile.realm.is_zephyr_mirror_realm: # nocoverage
# For zephyr mirror users, we need to make them a mirror dummy
# again; otherwise, other users won't get the correct behavior
# when trying to send messages to this person inside Zulip.
#
# Ideally, we need to also ensure their zephyr mirroring bot
# isn't running, but that's a separate issue.
user_profile.is_mirror_dummy = True
user_profile.save(update_fields=["is_mirror_dummy"])
change_user_is_active(user_profile, False)
clear_scheduled_emails(user_profile.id)
revoke_invites_generated_by_user(user_profile)
event_time = timezone_now()
RealmAuditLog.objects.create(
realm=user_profile.realm,
modified_user=user_profile,
acting_user=acting_user,
event_type=RealmAuditLog.USER_DEACTIVATED,
event_time=event_time,
extra_data=orjson.dumps(
{
RealmAuditLog.ROLE_COUNT: realm_user_count_by_role(user_profile.realm),
}
).decode(),
)
do_increment_logging_stat(
user_profile.realm,
COUNT_STATS["active_users_log:is_bot:day"],
user_profile.is_bot,
event_time,
increment=-1,
)
if settings.BILLING_ENABLED:
update_license_ledger_if_needed(user_profile.realm, event_time)
delete_user_sessions(user_profile)
event = dict(
type="realm_user",
op="remove",
person=dict(user_id=user_profile.id, full_name=user_profile.full_name),
)
send_event(user_profile.realm, event, active_user_ids(user_profile.realm_id))
if user_profile.is_bot:
event = dict(
type="realm_bot",
op="remove",
bot=dict(user_id=user_profile.id, full_name=user_profile.full_name),
)
send_event(user_profile.realm, event, bot_owner_user_ids(user_profile))
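# Illustrative sketch (not part of the original module): deactivating a user
# looked up by delivery email and reactivating them later; the subdomain and
# email below are hypothetical.
def _example_user_deactivation_sketch() -> None:  # pragma: no cover
    realm = get_realm("example-subdomain")
    user = get_user_by_delivery_email("[email protected]", realm)
    do_deactivate_user(user, acting_user=None)
    # ... later:
    do_reactivate_user(user, acting_user=None)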
@transaction.atomic(savepoint=False)
def do_deactivate_stream(
stream: Stream, log: bool = True, *, acting_user: Optional[UserProfile]
) -> None:
# We want to mark all messages in the to-be-deactivated stream as
# read for all users; otherwise they will pollute queries like
# "Get the user's first unread message". Since this can be an
# expensive operation, we do it via the deferred_work queue
# processor.
deferred_work_event = {
"type": "mark_stream_messages_as_read_for_everyone",
"stream_recipient_id": stream.recipient_id,
}
transaction.on_commit(lambda: queue_json_publish("deferred_work", deferred_work_event))
# Get the affected user ids *before* we deactivate everybody.
affected_user_ids = can_access_stream_user_ids(stream)
get_active_subscriptions_for_stream_id(stream.id, include_deactivated_users=True).update(
active=False
)
was_invite_only = stream.invite_only
stream.deactivated = True
stream.invite_only = True
# Preserve as much as possible the original stream name while giving it a
# special prefix that both indicates that the stream is deactivated and
# frees up the original name for reuse.
old_name = stream.name
# Prepend a substring of the hashed stream ID to the new stream name
streamID = str(stream.id)
stream_id_hash_object = hashlib.sha512(streamID.encode())
hashed_stream_id = stream_id_hash_object.hexdigest()[0:7]
new_name = (hashed_stream_id + "!DEACTIVATED:" + old_name)[: Stream.MAX_NAME_LENGTH]
stream.name = new_name[: Stream.MAX_NAME_LENGTH]
stream.save(update_fields=["name", "deactivated", "invite_only"])
# If this is a default stream, remove it, properly sending a
# notification to browser clients.
if DefaultStream.objects.filter(realm_id=stream.realm_id, stream_id=stream.id).exists():
do_remove_default_stream(stream)
default_stream_groups_for_stream = DefaultStreamGroup.objects.filter(streams__id=stream.id)
for group in default_stream_groups_for_stream:
do_remove_streams_from_default_stream_group(stream.realm, group, [stream])
# Remove the old stream information from remote cache.
old_cache_key = get_stream_cache_key(old_name, stream.realm_id)
cache_delete(old_cache_key)
stream_dict = stream.to_dict()
stream_dict.update(dict(name=old_name, invite_only=was_invite_only))
event = dict(type="stream", op="delete", streams=[stream_dict])
transaction.on_commit(lambda: send_event(stream.realm, event, affected_user_ids))
event_time = timezone_now()
RealmAuditLog.objects.create(
realm=stream.realm,
acting_user=acting_user,
modified_stream=stream,
event_type=RealmAuditLog.STREAM_DEACTIVATED,
event_time=event_time,
)
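# Illustrative sketch (not part of the original module): deactivating a stream
# looked up by name; the stream name is hypothetical.
def _example_stream_deactivation_sketch(realm: Realm) -> None:  # pragma: no cover
    stream = get_stream("old-project", realm)
    do_deactivate_stream(stream, acting_user=None)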
def send_user_email_update_event(user_profile: UserProfile) -> None:
payload = dict(user_id=user_profile.id, new_email=user_profile.email)
event = dict(type="realm_user", op="update", person=payload)
transaction.on_commit(
lambda: send_event(
user_profile.realm,
event,
active_user_ids(user_profile.realm_id),
)
)
@transaction.atomic(savepoint=False)
def do_change_user_delivery_email(user_profile: UserProfile, new_email: str) -> None:
delete_user_profile_caches([user_profile])
user_profile.delivery_email = new_email
if user_profile.email_address_is_realm_public():
user_profile.email = new_email
user_profile.save(update_fields=["email", "delivery_email"])
else:
user_profile.save(update_fields=["delivery_email"])
# We notify just the target user (and eventually org admins, only
# when email_address_visibility=EMAIL_ADDRESS_VISIBILITY_ADMINS)
# about their new delivery email, since that field is private.
payload = dict(user_id=user_profile.id, delivery_email=new_email)
event = dict(type="realm_user", op="update", person=payload)
transaction.on_commit(lambda: send_event(user_profile.realm, event, [user_profile.id]))
if user_profile.avatar_source == UserProfile.AVATAR_FROM_GRAVATAR:
# If the user is using Gravatar to manage their email address,
# their Gravatar just changed, and we need to notify other
# clients.
notify_avatar_url_change(user_profile)
if user_profile.email_address_is_realm_public():
# Additionally, if we're also changing the publicly visible
# email, we send a new_email event as well.
send_user_email_update_event(user_profile)
event_time = timezone_now()
RealmAuditLog.objects.create(
realm=user_profile.realm,
acting_user=user_profile,
modified_user=user_profile,
event_type=RealmAuditLog.USER_EMAIL_CHANGED,
event_time=event_time,
)
def do_start_email_change_process(user_profile: UserProfile, new_email: str) -> None:
old_email = user_profile.delivery_email
obj = EmailChangeStatus.objects.create(
new_email=new_email,
old_email=old_email,
user_profile=user_profile,
realm=user_profile.realm,
)
activation_url = create_confirmation_link(obj, Confirmation.EMAIL_CHANGE)
from zerver.context_processors import common_context
context = common_context(user_profile)
context.update(
old_email=old_email,
new_email=new_email,
activate_url=activation_url,
)
language = user_profile.default_language
send_email(
"zerver/emails/confirm_new_email",
to_emails=[new_email],
from_name=FromAddress.security_email_from_name(language=language),
from_address=FromAddress.tokenized_no_reply_address(),
language=language,
context=context,
realm=user_profile.realm,
)
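# Illustrative note and sketch (not part of the original module): changing an
# email address is a two-step flow. do_start_email_change_process() only records
# an EmailChangeStatus row and emails a confirmation link to the new address;
# do_change_user_delivery_email() is what actually switches the address once the
# link is confirmed. The address below is hypothetical.
def _example_email_change_sketch(user_profile: UserProfile) -> None:  # pragma: no cover
    do_start_email_change_process(user_profile, "[email protected]")
    # ... after the user confirms the link, the confirmation flow would end up
    # calling:
    do_change_user_delivery_email(user_profile, "[email protected]")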
def compute_irc_user_fullname(email: str) -> str:
return email.split("@")[0] + " (IRC)"
def compute_jabber_user_fullname(email: str) -> str:
return email.split("@")[0] + " (XMPP)"
@cache_with_key(
lambda realm, email, f: user_profile_delivery_email_cache_key(email, realm),
timeout=3600 * 24 * 7,
)
def create_mirror_user_if_needed(
realm: Realm, email: str, email_to_fullname: Callable[[str], str]
) -> UserProfile:
try:
return get_user_by_delivery_email(email, realm)
except UserProfile.DoesNotExist:
try:
# Forge a user for this person
return create_user(
email=email,
password=None,
realm=realm,
full_name=email_to_fullname(email),
active=False,
is_mirror_dummy=True,
)
except IntegrityError:
return get_user_by_delivery_email(email, realm)
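# Illustrative sketch (not part of the original module): how a mirroring bridge
# might lazily create a dummy account for an IRC sender; the email below is
# hypothetical.
def _example_mirror_user_sketch(realm: Realm) -> UserProfile:  # pragma: no cover
    return create_mirror_user_if_needed(realm, "[email protected]", compute_irc_user_fullname)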
def render_incoming_message(
message: Message,
content: str,
user_ids: Set[int],
realm: Realm,
mention_data: Optional[MentionData] = None,
email_gateway: bool = False,
) -> MessageRenderingResult:
realm_alert_words_automaton = get_alert_word_automaton(realm)
try:
rendering_result = render_markdown(
message=message,
content=content,
realm=realm,
realm_alert_words_automaton=realm_alert_words_automaton,
mention_data=mention_data,
email_gateway=email_gateway,
)
except MarkdownRenderingException:
raise JsonableError(_("Unable to render message"))
return rendering_result
class RecipientInfoResult(TypedDict):
active_user_ids: Set[int]
online_push_user_ids: Set[int]
pm_mention_email_disabled_user_ids: Set[int]
pm_mention_push_disabled_user_ids: Set[int]
stream_email_user_ids: Set[int]
stream_push_user_ids: Set[int]
wildcard_mention_user_ids: Set[int]
muted_sender_user_ids: Set[int]
um_eligible_user_ids: Set[int]
long_term_idle_user_ids: Set[int]
default_bot_user_ids: Set[int]
service_bot_tuples: List[Tuple[int, int]]
all_bot_user_ids: Set[int]
def get_recipient_info(
*,
realm_id: int,
recipient: Recipient,
sender_id: int,
stream_topic: Optional[StreamTopicTarget],
possibly_mentioned_user_ids: AbstractSet[int] = set(),
possible_wildcard_mention: bool = True,
) -> RecipientInfoResult:
stream_push_user_ids: Set[int] = set()
stream_email_user_ids: Set[int] = set()
wildcard_mention_user_ids: Set[int] = set()
muted_sender_user_ids: Set[int] = get_muting_users(sender_id)
if recipient.type == Recipient.PERSONAL:
# The sender and recipient may be the same id, so
# de-duplicate using a set.
message_to_user_ids = list({recipient.type_id, sender_id})
assert len(message_to_user_ids) in [1, 2]
elif recipient.type == Recipient.STREAM:
# Anybody calling us w/r/t a stream message needs to supply
# stream_topic. We may eventually want to have different versions
# of this function for different message types.
assert stream_topic is not None
user_ids_muting_topic = stream_topic.user_ids_muting_topic()
subscription_rows = (
get_subscriptions_for_send_message(
realm_id=realm_id,
stream_id=stream_topic.stream_id,
possible_wildcard_mention=possible_wildcard_mention,
possibly_mentioned_user_ids=possibly_mentioned_user_ids,
)
.annotate(
user_profile_email_notifications=F(
"user_profile__enable_stream_email_notifications"
),
user_profile_push_notifications=F("user_profile__enable_stream_push_notifications"),
user_profile_wildcard_mentions_notify=F("user_profile__wildcard_mentions_notify"),
)
.values(
"user_profile_id",
"push_notifications",
"email_notifications",
"wildcard_mentions_notify",
"user_profile_email_notifications",
"user_profile_push_notifications",
"user_profile_wildcard_mentions_notify",
"is_muted",
)
.order_by("user_profile_id")
)
message_to_user_ids = [row["user_profile_id"] for row in subscription_rows]
def should_send(setting: str, row: Dict[str, Any]) -> bool:
# This implements the structure that the UserProfile stream notification settings
# are defaults, which can be overridden by the stream-level settings (if those
# values are not null).
if row["is_muted"]:
return False
if row["user_profile_id"] in user_ids_muting_topic:
return False
if row[setting] is not None:
return row[setting]
return row["user_profile_" + setting]
stream_push_user_ids = {
row["user_profile_id"]
for row in subscription_rows
# Note: muting a stream overrides stream_push_notify
if should_send("push_notifications", row)
}
stream_email_user_ids = {
row["user_profile_id"]
for row in subscription_rows
# Note: muting a stream overrides stream_email_notify
if should_send("email_notifications", row)
}
if possible_wildcard_mention:
# If there's a possible wildcard mention, we need to
# determine the set of users who have enabled the
# "wildcard_mentions_notify" setting (that is, the set of
# users for whom wildcard mentions should be treated like
# personal mentions for notifications). This setting
# applies to both email and push notifications.
wildcard_mention_user_ids = {
row["user_profile_id"]
for row in subscription_rows
if should_send("wildcard_mentions_notify", row)
}
elif recipient.type == Recipient.HUDDLE:
message_to_user_ids = get_huddle_user_ids(recipient)
else:
raise ValueError("Bad recipient type")
message_to_user_id_set = set(message_to_user_ids)
user_ids = set(message_to_user_id_set)
# Important note: Because we haven't rendered Markdown yet, we
# don't yet know which of these possibly-mentioned users was
# actually mentioned in the message (in other words, the
# mention syntax might have been in a code block or otherwise
# escaped). `get_ids_for` will filter these extra user rows
# for our data structures not related to bots
user_ids |= possibly_mentioned_user_ids
if user_ids:
query = UserProfile.objects.filter(is_active=True).values(
"id",
"enable_online_push_notifications",
"enable_offline_email_notifications",
"enable_offline_push_notifications",
"is_bot",
"bot_type",
"long_term_idle",
)
        # query_for_ids is fast and highly optimized for large queries, and we
# need this codepath to be fast (it's part of sending messages)
query = query_for_ids(
query=query,
user_ids=sorted(user_ids),
field="id",
)
rows = list(query)
else:
# TODO: We should always have at least one user_id as a recipient
# of any message we send. Right now the exception to this
# rule is `notify_new_user`, which, at least in a possibly
# contrived test scenario, can attempt to send messages
# to an inactive bot. When we plug that hole, we can avoid
# this `else` clause and just `assert(user_ids)`.
#
# UPDATE: It's February 2020 (and a couple years after the above
# comment was written). We have simplified notify_new_user
# so that it should be a little easier to reason about.
# There is currently some cleanup to how we handle cross
# realm bots that is still under development. Once that
# effort is complete, we should be able to address this
# to-do.
rows = []
def get_ids_for(f: Callable[[Dict[str, Any]], bool]) -> Set[int]:
"""Only includes users on the explicit message to line"""
return {row["id"] for row in rows if f(row)} & message_to_user_id_set
def is_service_bot(row: Dict[str, Any]) -> bool:
return row["is_bot"] and (row["bot_type"] in UserProfile.SERVICE_BOT_TYPES)
active_user_ids = get_ids_for(lambda r: True)
online_push_user_ids = get_ids_for(
lambda r: r["enable_online_push_notifications"],
)
    # We deal only with the users who have disabled this setting, since that
    # will usually be a much smaller set than those who have enabled it (which
    # is the default).
pm_mention_email_disabled_user_ids = get_ids_for(
lambda r: not r["enable_offline_email_notifications"]
)
pm_mention_push_disabled_user_ids = get_ids_for(
lambda r: not r["enable_offline_push_notifications"]
)
# Service bots don't get UserMessage rows.
um_eligible_user_ids = get_ids_for(
lambda r: not is_service_bot(r),
)
long_term_idle_user_ids = get_ids_for(
lambda r: r["long_term_idle"],
)
# These three bot data structures need to filter from the full set
# of users who either are receiving the message or might have been
# mentioned in it, and so can't use get_ids_for.
#
# Further in the do_send_messages code path, once
# `mentioned_user_ids` has been computed via Markdown, we'll filter
# these data structures for just those users who are either a
# direct recipient or were mentioned; for now, we're just making
# sure we have the data we need for that without extra database
# queries.
default_bot_user_ids = {
row["id"] for row in rows if row["is_bot"] and row["bot_type"] == UserProfile.DEFAULT_BOT
}
service_bot_tuples = [(row["id"], row["bot_type"]) for row in rows if is_service_bot(row)]
# We also need the user IDs of all bots, to avoid trying to send push/email
# notifications to them. This set will be directly sent to the event queue code
# where we determine notifiability of the message for users.
all_bot_user_ids = {row["id"] for row in rows if row["is_bot"]}
info: RecipientInfoResult = dict(
active_user_ids=active_user_ids,
online_push_user_ids=online_push_user_ids,
pm_mention_email_disabled_user_ids=pm_mention_email_disabled_user_ids,
pm_mention_push_disabled_user_ids=pm_mention_push_disabled_user_ids,
stream_push_user_ids=stream_push_user_ids,
stream_email_user_ids=stream_email_user_ids,
wildcard_mention_user_ids=wildcard_mention_user_ids,
muted_sender_user_ids=muted_sender_user_ids,
um_eligible_user_ids=um_eligible_user_ids,
long_term_idle_user_ids=long_term_idle_user_ids,
default_bot_user_ids=default_bot_user_ids,
service_bot_tuples=service_bot_tuples,
all_bot_user_ids=all_bot_user_ids,
)
return info
def get_service_bot_events(
sender: UserProfile,
service_bot_tuples: List[Tuple[int, int]],
mentioned_user_ids: Set[int],
active_user_ids: Set[int],
recipient_type: int,
) -> Dict[str, List[Dict[str, Any]]]:
event_dict: Dict[str, List[Dict[str, Any]]] = defaultdict(list)
# Avoid infinite loops by preventing messages sent by bots from generating
# Service events.
if sender.is_bot:
return event_dict
def maybe_add_event(user_profile_id: int, bot_type: int) -> None:
if bot_type == UserProfile.OUTGOING_WEBHOOK_BOT:
queue_name = "outgoing_webhooks"
elif bot_type == UserProfile.EMBEDDED_BOT:
queue_name = "embedded_bots"
else:
logging.error(
"Unexpected bot_type for Service bot id=%s: %s",
user_profile_id,
bot_type,
)
return
is_stream = recipient_type == Recipient.STREAM
# Important note: service_bot_tuples may contain service bots
# who were not actually mentioned in the message (e.g. if
# mention syntax for that bot appeared in a code block).
# Thus, it is important to filter any users who aren't part of
# either mentioned_user_ids (the actual mentioned users) or
# active_user_ids (the actual recipients).
#
# So even though this is implied by the logic below, we filter
# these not-actually-mentioned users here, to help keep this
# function future-proof.
if user_profile_id not in mentioned_user_ids and user_profile_id not in active_user_ids:
return
# Mention triggers, for stream messages
if is_stream and user_profile_id in mentioned_user_ids:
trigger = "mention"
# PM triggers for personal and huddle messages
elif (not is_stream) and (user_profile_id in active_user_ids):
trigger = "private_message"
else:
return
event_dict[queue_name].append(
{
"trigger": trigger,
"user_profile_id": user_profile_id,
}
)
for user_profile_id, bot_type in service_bot_tuples:
maybe_add_event(
user_profile_id=user_profile_id,
bot_type=bot_type,
)
return event_dict
def do_schedule_messages(send_message_requests: Sequence[SendMessageRequest]) -> List[int]:
scheduled_messages: List[ScheduledMessage] = []
for send_request in send_message_requests:
scheduled_message = ScheduledMessage()
scheduled_message.sender = send_request.message.sender
scheduled_message.recipient = send_request.message.recipient
topic_name = send_request.message.topic_name()
scheduled_message.set_topic_name(topic_name=topic_name)
scheduled_message.content = send_request.message.content
scheduled_message.sending_client = send_request.message.sending_client
scheduled_message.stream = send_request.stream
scheduled_message.realm = send_request.realm
assert send_request.deliver_at is not None
scheduled_message.scheduled_timestamp = send_request.deliver_at
if send_request.delivery_type == "send_later":
scheduled_message.delivery_type = ScheduledMessage.SEND_LATER
elif send_request.delivery_type == "remind":
scheduled_message.delivery_type = ScheduledMessage.REMIND
scheduled_messages.append(scheduled_message)
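    # Insert all the scheduled messages with a single bulk query.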
ScheduledMessage.objects.bulk_create(scheduled_messages)
return [scheduled_message.id for scheduled_message in scheduled_messages]
def build_message_send_dict(
message: Message,
stream: Optional[Stream] = None,
local_id: Optional[str] = None,
sender_queue_id: Optional[str] = None,
realm: Optional[Realm] = None,
widget_content_dict: Optional[Dict[str, Any]] = None,
email_gateway: bool = False,
mention_backend: Optional[MentionBackend] = None,
limit_unread_user_ids: Optional[Set[int]] = None,
) -> SendMessageRequest:
    """Returns a SendMessageRequest that can be passed into do_send_messages. In
    production, this is always called by check_message, but some
    testing code paths call it directly.
    """
if realm is None:
realm = message.sender.realm
if mention_backend is None:
mention_backend = MentionBackend(realm.id)
mention_data = MentionData(
mention_backend=mention_backend,
content=message.content,
)
if message.is_stream_message():
stream_id = message.recipient.type_id
stream_topic: Optional[StreamTopicTarget] = StreamTopicTarget(
stream_id=stream_id,
topic_name=message.topic_name(),
)
else:
stream_topic = None
info = get_recipient_info(
realm_id=realm.id,
recipient=message.recipient,
sender_id=message.sender_id,
stream_topic=stream_topic,
possibly_mentioned_user_ids=mention_data.get_user_ids(),
possible_wildcard_mention=mention_data.message_has_wildcards(),
)
# Render our message_dicts.
assert message.rendered_content is None
rendering_result = render_incoming_message(
message,
message.content,
info["active_user_ids"],
realm,
mention_data=mention_data,
email_gateway=email_gateway,
)
message.rendered_content = rendering_result.rendered_content
message.rendered_content_version = markdown_version
links_for_embed = rendering_result.links_for_preview
mentioned_user_groups_map = get_user_group_mentions_data(
mentioned_user_ids=rendering_result.mentions_user_ids,
mentioned_user_group_ids=list(rendering_result.mentions_user_group_ids),
mention_data=mention_data,
)
# For single user as well as user group mentions, we set the `mentioned`
# flag on `UserMessage`
for group_id in rendering_result.mentions_user_group_ids:
members = mention_data.get_group_members(group_id)
rendering_result.mentions_user_ids.update(members)
# Only send data to Tornado about wildcard mentions if message
# rendering determined the message had an actual wildcard
# mention in it (and not e.g. wildcard mention syntax inside a
# code block).
if rendering_result.mentions_wildcard:
wildcard_mention_user_ids = info["wildcard_mention_user_ids"]
else:
wildcard_mention_user_ids = set()
"""
Once we have the actual list of mentioned ids from message
rendering, we can patch in "default bots" (aka normal bots)
who were directly mentioned in this message as eligible to
get UserMessage rows.
"""
mentioned_user_ids = rendering_result.mentions_user_ids
default_bot_user_ids = info["default_bot_user_ids"]
mentioned_bot_user_ids = default_bot_user_ids & mentioned_user_ids
info["um_eligible_user_ids"] |= mentioned_bot_user_ids
message_send_dict = SendMessageRequest(
stream=stream,
local_id=local_id,
sender_queue_id=sender_queue_id,
realm=realm,
mention_data=mention_data,
mentioned_user_groups_map=mentioned_user_groups_map,
message=message,
rendering_result=rendering_result,
active_user_ids=info["active_user_ids"],
online_push_user_ids=info["online_push_user_ids"],
pm_mention_email_disabled_user_ids=info["pm_mention_email_disabled_user_ids"],
pm_mention_push_disabled_user_ids=info["pm_mention_push_disabled_user_ids"],
stream_push_user_ids=info["stream_push_user_ids"],
stream_email_user_ids=info["stream_email_user_ids"],
muted_sender_user_ids=info["muted_sender_user_ids"],
um_eligible_user_ids=info["um_eligible_user_ids"],
long_term_idle_user_ids=info["long_term_idle_user_ids"],
default_bot_user_ids=info["default_bot_user_ids"],
service_bot_tuples=info["service_bot_tuples"],
all_bot_user_ids=info["all_bot_user_ids"],
wildcard_mention_user_ids=wildcard_mention_user_ids,
links_for_embed=links_for_embed,
widget_content=widget_content_dict,
limit_unread_user_ids=limit_unread_user_ids,
)
return message_send_dict
def do_send_messages(
send_message_requests_maybe_none: Sequence[Optional[SendMessageRequest]],
email_gateway: bool = False,
mark_as_read: Sequence[int] = [],
) -> List[int]:
"""See
https://zulip.readthedocs.io/en/latest/subsystems/sending-messages.html
for high-level documentation on this subsystem.
"""
# Filter out messages which didn't pass internal_prep_message properly
send_message_requests = [
send_request
for send_request in send_message_requests_maybe_none
if send_request is not None
]
# Save the message receipts in the database
user_message_flags: Dict[int, Dict[int, List[str]]] = defaultdict(dict)
with transaction.atomic():
Message.objects.bulk_create(send_request.message for send_request in send_message_requests)
# Claim attachments in message
for send_request in send_message_requests:
if do_claim_attachments(
send_request.message, send_request.rendering_result.potential_attachment_path_ids
):
send_request.message.has_attachment = True
send_request.message.save(update_fields=["has_attachment"])
ums: List[UserMessageLite] = []
for send_request in send_message_requests:
# Service bots (outgoing webhook bots and embedded bots) don't store UserMessage rows;
# they will be processed later.
mentioned_user_ids = send_request.rendering_result.mentions_user_ids
# Extend the set with users who have muted the sender.
mark_as_read_user_ids = send_request.muted_sender_user_ids
mark_as_read_user_ids.update(mark_as_read)
user_messages = create_user_messages(
message=send_request.message,
rendering_result=send_request.rendering_result,
um_eligible_user_ids=send_request.um_eligible_user_ids,
long_term_idle_user_ids=send_request.long_term_idle_user_ids,
stream_push_user_ids=send_request.stream_push_user_ids,
stream_email_user_ids=send_request.stream_email_user_ids,
mentioned_user_ids=mentioned_user_ids,
mark_as_read_user_ids=mark_as_read_user_ids,
limit_unread_user_ids=send_request.limit_unread_user_ids,
)
for um in user_messages:
user_message_flags[send_request.message.id][um.user_profile_id] = um.flags_list()
ums.extend(user_messages)
send_request.message.service_queue_events = get_service_bot_events(
sender=send_request.message.sender,
service_bot_tuples=send_request.service_bot_tuples,
mentioned_user_ids=mentioned_user_ids,
active_user_ids=send_request.active_user_ids,
recipient_type=send_request.message.recipient.type,
)
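        # Insert all the UserMessage rows with a single bulk SQL statement.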
bulk_insert_ums(ums)
for send_request in send_message_requests:
do_widget_post_save_actions(send_request)
# This next loop is responsible for notifying other parts of the
# Zulip system about the messages we just committed to the database:
# * Notifying clients via send_event
# * Triggering outgoing webhooks via the service event queue.
# * Updating the `first_message_id` field for streams without any message history.
# * Implementing the Welcome Bot reply hack
# * Adding links to the embed_links queue for open graph processing.
for send_request in send_message_requests:
realm_id: Optional[int] = None
if send_request.message.is_stream_message():
if send_request.stream is None:
stream_id = send_request.message.recipient.type_id
send_request.stream = Stream.objects.select_related().get(id=stream_id)
# assert needed because stubs for django are missing
assert send_request.stream is not None
realm_id = send_request.stream.realm_id
        # Deliver events to the real-time push system, and enqueue
        # any additional processing triggered by the message.
wide_message_dict = MessageDict.wide_dict(send_request.message, realm_id)
user_flags = user_message_flags.get(send_request.message.id, {})
"""
TODO: We may want to limit user_ids to only those users who have
UserMessage rows, if only for minor performance reasons.
For now we queue events for all subscribers/sendees of the
message, since downstream code may still do notifications
that don't require UserMessage rows.
Our automated tests have gotten better on this codepath,
but we may have coverage gaps, so we should be careful
about changing the next line.
"""
user_ids = send_request.active_user_ids | set(user_flags.keys())
sender_id = send_request.message.sender_id
# We make sure the sender is listed first in the `users` list;
# this results in the sender receiving the message first if
# there are thousands of recipients, decreasing perceived latency.
if sender_id in user_ids:
user_list = [sender_id] + list(user_ids - {sender_id})
else:
user_list = list(user_ids)
class UserData(TypedDict):
id: int
flags: List[str]
mentioned_user_group_id: Optional[int]
users: List[UserData] = []
for user_id in user_list:
flags = user_flags.get(user_id, [])
user_data: UserData = dict(id=user_id, flags=flags, mentioned_user_group_id=None)
if user_id in send_request.mentioned_user_groups_map:
user_data["mentioned_user_group_id"] = send_request.mentioned_user_groups_map[
user_id
]
users.append(user_data)
sender = send_request.message.sender
message_type = wide_message_dict["type"]
active_users_data = [
ActivePresenceIdleUserData(
alerted="has_alert_word" in user_flags.get(user_id, []),
notifications_data=UserMessageNotificationsData.from_user_id_sets(
user_id=user_id,
flags=user_flags.get(user_id, []),
private_message=(message_type == "private"),
online_push_user_ids=send_request.online_push_user_ids,
pm_mention_push_disabled_user_ids=send_request.pm_mention_push_disabled_user_ids,
pm_mention_email_disabled_user_ids=send_request.pm_mention_email_disabled_user_ids,
stream_push_user_ids=send_request.stream_push_user_ids,
stream_email_user_ids=send_request.stream_email_user_ids,
wildcard_mention_user_ids=send_request.wildcard_mention_user_ids,
muted_sender_user_ids=send_request.muted_sender_user_ids,
all_bot_user_ids=send_request.all_bot_user_ids,
),
)
for user_id in send_request.active_user_ids
]
presence_idle_user_ids = get_active_presence_idle_user_ids(
realm=sender.realm,
sender_id=sender.id,
active_users_data=active_users_data,
)
event = dict(
type="message",
message=send_request.message.id,
message_dict=wide_message_dict,
presence_idle_user_ids=presence_idle_user_ids,
online_push_user_ids=list(send_request.online_push_user_ids),
pm_mention_push_disabled_user_ids=list(send_request.pm_mention_push_disabled_user_ids),
pm_mention_email_disabled_user_ids=list(
send_request.pm_mention_email_disabled_user_ids
),
stream_push_user_ids=list(send_request.stream_push_user_ids),
stream_email_user_ids=list(send_request.stream_email_user_ids),
wildcard_mention_user_ids=list(send_request.wildcard_mention_user_ids),
muted_sender_user_ids=list(send_request.muted_sender_user_ids),
all_bot_user_ids=list(send_request.all_bot_user_ids),
)
if send_request.message.is_stream_message():
# Note: This is where authorization for single-stream
# get_updates happens! We only attach stream data to the
# notify new_message request if it's a public stream,
# ensuring that in the tornado server, non-public stream
# messages are only associated to their subscribed users.
# assert needed because stubs for django are missing
assert send_request.stream is not None
if send_request.stream.is_public():
event["realm_id"] = send_request.stream.realm_id
event["stream_name"] = send_request.stream.name
if send_request.stream.invite_only:
event["invite_only"] = True
if send_request.stream.first_message_id is None:
send_request.stream.first_message_id = send_request.message.id
send_request.stream.save(update_fields=["first_message_id"])
if send_request.local_id is not None:
event["local_id"] = send_request.local_id
if send_request.sender_queue_id is not None:
event["sender_queue_id"] = send_request.sender_queue_id
send_event(send_request.realm, event, users)
if send_request.links_for_embed:
event_data = {
"message_id": send_request.message.id,
"message_content": send_request.message.content,
"message_realm_id": send_request.realm.id,
"urls": list(send_request.links_for_embed),
}
queue_json_publish("embed_links", event_data)
if send_request.message.recipient.type == Recipient.PERSONAL:
welcome_bot_id = get_system_bot(
settings.WELCOME_BOT, send_request.message.sender.realm_id
).id
if (
welcome_bot_id in send_request.active_user_ids
and welcome_bot_id != send_request.message.sender_id
):
from zerver.lib.onboarding import send_welcome_bot_response
send_welcome_bot_response(send_request)
for queue_name, events in send_request.message.service_queue_events.items():
for event in events:
queue_json_publish(
queue_name,
{
"message": wide_message_dict,
"trigger": event["trigger"],
"user_profile_id": event["user_profile_id"],
},
)
return [send_request.message.id for send_request in send_message_requests]
class UserMessageLite:
"""
The Django ORM is too slow for bulk operations. This class
is optimized for the simple use case of inserting a bunch of
rows into zerver_usermessage.
"""
def __init__(self, user_profile_id: int, message_id: int, flags: int) -> None:
self.user_profile_id = user_profile_id
self.message_id = message_id
self.flags = flags
def flags_list(self) -> List[str]:
return UserMessage.flags_list_for_flags(self.flags)
def create_user_messages(
message: Message,
rendering_result: MessageRenderingResult,
um_eligible_user_ids: AbstractSet[int],
long_term_idle_user_ids: AbstractSet[int],
stream_push_user_ids: AbstractSet[int],
stream_email_user_ids: AbstractSet[int],
mentioned_user_ids: AbstractSet[int],
mark_as_read_user_ids: Set[int],
limit_unread_user_ids: Optional[Set[int]],
) -> List[UserMessageLite]:
# These properties on the Message are set via
# render_markdown by code in the Markdown inline patterns
ids_with_alert_words = rendering_result.user_ids_with_alert_words
sender_id = message.sender.id
is_stream_message = message.is_stream_message()
base_flags = 0
if rendering_result.mentions_wildcard:
base_flags |= UserMessage.flags.wildcard_mentioned
if message.recipient.type in [Recipient.HUDDLE, Recipient.PERSONAL]:
base_flags |= UserMessage.flags.is_private
# For long_term_idle (aka soft-deactivated) users, we are allowed
# to optimize by lazily not creating UserMessage rows that would
# have the default 0 flag set (since the soft-reactivation logic
# knows how to create those when the user comes back). We need to
# create the UserMessage rows for these long_term_idle users
# non-lazily in a few cases:
#
# * There are nonzero flags (e.g. the user was mentioned), since
# that case is rare and this saves a lot of complexity in
# soft-reactivation.
#
# * If the user is going to be notified (e.g. they get push/email
# notifications for every message on a stream), since in that
# case the notifications code will call `access_message` on the
# message to re-verify permissions, and for private streams,
# will get an error if the UserMessage row doesn't exist yet.
#
# See https://zulip.readthedocs.io/en/latest/subsystems/sending-messages.html#soft-deactivation
# for details on this system.
user_messages = []
for user_profile_id in um_eligible_user_ids:
flags = base_flags
if (
(user_profile_id == sender_id and message.sent_by_human())
or user_profile_id in mark_as_read_user_ids
or (limit_unread_user_ids is not None and user_profile_id not in limit_unread_user_ids)
):
flags |= UserMessage.flags.read
if user_profile_id in mentioned_user_ids:
flags |= UserMessage.flags.mentioned
if user_profile_id in ids_with_alert_words:
flags |= UserMessage.flags.has_alert_word
if (
user_profile_id in long_term_idle_user_ids
and user_profile_id not in stream_push_user_ids
and user_profile_id not in stream_email_user_ids
and is_stream_message
and int(flags) == 0
):
continue
um = UserMessageLite(
user_profile_id=user_profile_id,
message_id=message.id,
flags=flags,
)
user_messages.append(um)
return user_messages
def bulk_insert_ums(ums: List[UserMessageLite]) -> None:
"""
Doing bulk inserts this way is much faster than using Django,
since we don't have any ORM overhead. Profiling with 1000
users shows a speedup of 0.436 -> 0.027 seconds, so we're
    talking about roughly a 16x speedup.
"""
if not ums:
return
vals = [(um.user_profile_id, um.message_id, um.flags) for um in ums]
query = SQL(
"""
INSERT into
zerver_usermessage (user_profile_id, message_id, flags)
VALUES %s
"""
)
with connection.cursor() as cursor:
execute_values(cursor.cursor, query, vals)
def verify_submessage_sender(
*,
message_id: int,
message_sender_id: int,
submessage_sender_id: int,
) -> None:
"""Even though our submessage architecture is geared toward
collaboration among all message readers, we still enforce
    that the first person to attach a submessage to the message
must be the original sender of the message.
"""
if message_sender_id == submessage_sender_id:
return
if SubMessage.objects.filter(
message_id=message_id,
sender_id=message_sender_id,
).exists():
return
raise JsonableError(_("You cannot attach a submessage to this message."))
def do_add_submessage(
realm: Realm,
sender_id: int,
message_id: int,
msg_type: str,
content: str,
) -> None:
"""Should be called while holding a SELECT FOR UPDATE lock
(e.g. via access_message(..., lock_message=True)) on the
Message row, to prevent race conditions.
"""
submessage = SubMessage(
sender_id=sender_id,
message_id=message_id,
msg_type=msg_type,
content=content,
)
submessage.save()
event = dict(
type="submessage",
msg_type=msg_type,
message_id=message_id,
submessage_id=submessage.id,
sender_id=sender_id,
content=content,
)
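    # Deliver the submessage event to everyone who has a UserMessage row for this message.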
ums = UserMessage.objects.filter(message_id=message_id)
target_user_ids = [um.user_profile_id for um in ums]
transaction.on_commit(lambda: send_event(realm, event, target_user_ids))
def notify_reaction_update(
user_profile: UserProfile, message: Message, reaction: Reaction, op: str
) -> None:
user_dict = {
"user_id": user_profile.id,
"email": user_profile.email,
"full_name": user_profile.full_name,
}
event: Dict[str, Any] = {
"type": "reaction",
"op": op,
"user_id": user_profile.id,
# TODO: We plan to remove this redundant user_dict object once
        # clients are updated to support accessing user_id. See
# https://github.com/zulip/zulip/pull/14711 for details.
"user": user_dict,
"message_id": message.id,
"emoji_name": reaction.emoji_name,
"emoji_code": reaction.emoji_code,
"reaction_type": reaction.reaction_type,
}
# Update the cached message since new reaction is added.
update_to_dict_cache([message])
# Recipients for message update events, including reactions, are
# everyone who got the original message, plus subscribers of
# streams with the access to stream's full history.
#
# This means reactions won't live-update in preview narrows for a
# stream the user isn't yet subscribed to; this is the right
# performance tradeoff to avoid sending every reaction to public
# stream messages to all users.
#
# To ensure that reactions do live-update for any user who has
    # actually participated in reacting to a message, we add a
    # "historical" UserMessage row for any user who reacts to a message,
# subscribing them to future notifications, even if they are not
# subscribed to the stream.
user_ids = set(
UserMessage.objects.filter(message=message.id).values_list("user_profile_id", flat=True)
)
if message.recipient.type == Recipient.STREAM:
stream_id = message.recipient.type_id
stream = Stream.objects.get(id=stream_id)
user_ids |= subscriber_ids_with_stream_history_access(stream)
transaction.on_commit(lambda: send_event(user_profile.realm, event, list(user_ids)))
def do_add_reaction(
user_profile: UserProfile,
message: Message,
emoji_name: str,
emoji_code: str,
reaction_type: str,
) -> None:
"""Should be called while holding a SELECT FOR UPDATE lock
(e.g. via access_message(..., lock_message=True)) on the
Message row, to prevent race conditions.
"""
reaction = Reaction(
user_profile=user_profile,
message=message,
emoji_name=emoji_name,
emoji_code=emoji_code,
reaction_type=reaction_type,
)
reaction.save()
notify_reaction_update(user_profile, message, reaction, "add")
def check_add_reaction(
user_profile: UserProfile,
message_id: int,
emoji_name: str,
emoji_code: Optional[str],
reaction_type: Optional[str],
) -> None:
message, user_message = access_message(user_profile, message_id, lock_message=True)
if emoji_code is None:
# The emoji_code argument is only required for rare corner
# cases discussed in the long block comment below. For simple
# API clients, we allow specifying just the name, and just
# look up the code using the current name->code mapping.
emoji_code = emoji_name_to_emoji_code(message.sender.realm, emoji_name)[0]
if reaction_type is None:
reaction_type = emoji_name_to_emoji_code(message.sender.realm, emoji_name)[1]
if Reaction.objects.filter(
user_profile=user_profile,
message=message,
emoji_code=emoji_code,
reaction_type=reaction_type,
).exists():
raise JsonableError(_("Reaction already exists."))
query = Reaction.objects.filter(
message=message, emoji_code=emoji_code, reaction_type=reaction_type
)
if query.exists():
# If another user has already reacted to this message with
# same emoji code, we treat the new reaction as a vote for the
# existing reaction. So the emoji name used by that earlier
# reaction takes precedence over whatever was passed in this
# request. This is necessary to avoid a message having 2
# "different" emoji reactions with the same emoji code (and
# thus same image) on the same message, which looks ugly.
#
# In this "voting for an existing reaction" case, we shouldn't
# check whether the emoji code and emoji name match, since
# it's possible that the (emoji_type, emoji_name, emoji_code)
# triple for this existing reaction may not pass validation
# now (e.g. because it is for a realm emoji that has been
        # since deactivated). We still want to allow users to add a
        # vote for any old reaction they see in the UI, even if that is a
# deactivated custom emoji, so we just use the emoji name from
# the existing reaction with no further validation.
reaction = query.first()
assert reaction is not None
emoji_name = reaction.emoji_name
else:
# Otherwise, use the name provided in this request, but verify
# it is valid in the user's realm (e.g. not a deactivated
# realm emoji).
check_emoji_request(user_profile.realm, emoji_name, emoji_code, reaction_type)
if user_message is None:
# See called function for more context.
create_historical_user_messages(user_id=user_profile.id, message_ids=[message.id])
do_add_reaction(user_profile, message, emoji_name, emoji_code, reaction_type)
def do_remove_reaction(
user_profile: UserProfile, message: Message, emoji_code: str, reaction_type: str
) -> None:
"""Should be called while holding a SELECT FOR UPDATE lock
(e.g. via access_message(..., lock_message=True)) on the
Message row, to prevent race conditions.
"""
reaction = Reaction.objects.filter(
user_profile=user_profile,
message=message,
emoji_code=emoji_code,
reaction_type=reaction_type,
).get()
reaction.delete()
notify_reaction_update(user_profile, message, reaction, "remove")
def do_send_typing_notification(
realm: Realm, sender: UserProfile, recipient_user_profiles: List[UserProfile], operator: str
) -> None:
sender_dict = {"user_id": sender.id, "email": sender.email}
# Include a list of recipients in the event body to help identify where the typing is happening
recipient_dicts = [
{"user_id": profile.id, "email": profile.email} for profile in recipient_user_profiles
]
event = dict(
type="typing",
message_type="private",
op=operator,
sender=sender_dict,
recipients=recipient_dicts,
)
# Only deliver the notification to active user recipients
user_ids_to_notify = [user.id for user in recipient_user_profiles if user.is_active]
send_event(realm, event, user_ids_to_notify)
# check_send_typing_notification:
# Checks the typing notification and sends it
def check_send_typing_notification(sender: UserProfile, user_ids: List[int], operator: str) -> None:
realm = sender.realm
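    # Make sure the sender is always included in the recipient list.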
if sender.id not in user_ids:
user_ids.append(sender.id)
# If any of the user_ids being sent in are invalid, we will
# just reject the whole request, since a partial list of user_ids
# can create confusion related to huddles. Plus it's a good
# sign that a client is confused (or possibly even malicious) if
# we get bad user_ids.
user_profiles = []
for user_id in user_ids:
try:
            # We include cross-realm bots as possible recipients,
# so that clients can know which huddle conversation
# is relevant here.
user_profile = get_user_by_id_in_realm_including_cross_realm(user_id, sender.realm)
except UserProfile.DoesNotExist:
raise JsonableError(_("Invalid user ID {}").format(user_id))
user_profiles.append(user_profile)
do_send_typing_notification(
realm=realm,
sender=sender,
recipient_user_profiles=user_profiles,
operator=operator,
)
def do_send_stream_typing_notification(
sender: UserProfile, operator: str, stream: Stream, topic: str
) -> None:
sender_dict = {"user_id": sender.id, "email": sender.email}
event = dict(
type="typing",
message_type="stream",
op=operator,
sender=sender_dict,
stream_id=stream.id,
topic=topic,
)
user_ids_to_notify = get_user_ids_for_streams({stream.id})[stream.id]
send_event(sender.realm, event, user_ids_to_notify)
def ensure_stream(
realm: Realm,
stream_name: str,
invite_only: bool = False,
stream_description: str = "",
*,
acting_user: Optional[UserProfile],
) -> Stream:
return create_stream_if_needed(
realm,
stream_name,
invite_only=invite_only,
stream_description=stream_description,
acting_user=acting_user,
)[0]
def get_recipient_from_user_profiles(
recipient_profiles: Sequence[UserProfile],
forwarded_mirror_message: bool,
forwarder_user_profile: Optional[UserProfile],
sender: UserProfile,
) -> Recipient:
# Avoid mutating the passed in list of recipient_profiles.
recipient_profiles_map = {user_profile.id: user_profile for user_profile in recipient_profiles}
if forwarded_mirror_message:
# In our mirroring integrations with some third-party
# protocols, bots subscribed to the third-party protocol
# forward to Zulip messages that they received in the
# third-party service. The permissions model for that
# forwarding is that users can only submit to Zulip private
# messages they personally received, and here we do the check
# for whether forwarder_user_profile is among the private
# message recipients of the message.
assert forwarder_user_profile is not None
if forwarder_user_profile.id not in recipient_profiles_map:
raise ValidationError(_("User not authorized for this query"))
# If the private message is just between the sender and
# another person, force it to be a personal internally
if len(recipient_profiles_map) == 2 and sender.id in recipient_profiles_map:
del recipient_profiles_map[sender.id]
assert recipient_profiles_map
if len(recipient_profiles_map) == 1:
[user_profile] = recipient_profiles_map.values()
return Recipient(
id=user_profile.recipient_id,
type=Recipient.PERSONAL,
type_id=user_profile.id,
)
# Otherwise, we need a huddle. Make sure the sender is included in huddle messages
recipient_profiles_map[sender.id] = sender
user_ids = set(recipient_profiles_map)
return get_huddle_recipient(user_ids)
def validate_recipient_user_profiles(
user_profiles: Sequence[UserProfile], sender: UserProfile, allow_deactivated: bool = False
) -> Sequence[UserProfile]:
recipient_profiles_map: Dict[int, UserProfile] = {}
# We exempt cross-realm bots from the check that all the recipients
# are in the same realm.
realms = set()
if not is_cross_realm_bot_email(sender.email):
realms.add(sender.realm_id)
for user_profile in user_profiles:
if (
not user_profile.is_active
and not user_profile.is_mirror_dummy
and not allow_deactivated
) or user_profile.realm.deactivated:
raise ValidationError(
_("'{email}' is no longer using Zulip.").format(email=user_profile.email)
)
recipient_profiles_map[user_profile.id] = user_profile
if not is_cross_realm_bot_email(user_profile.email):
realms.add(user_profile.realm_id)
if len(realms) > 1:
raise ValidationError(_("You can't send private messages outside of your organization."))
return list(recipient_profiles_map.values())
def recipient_for_user_profiles(
user_profiles: Sequence[UserProfile],
forwarded_mirror_message: bool,
forwarder_user_profile: Optional[UserProfile],
sender: UserProfile,
allow_deactivated: bool = False,
) -> Recipient:
recipient_profiles = validate_recipient_user_profiles(
user_profiles, sender, allow_deactivated=allow_deactivated
)
return get_recipient_from_user_profiles(
recipient_profiles, forwarded_mirror_message, forwarder_user_profile, sender
)
def already_sent_mirrored_message_id(message: Message) -> Optional[int]:
if message.recipient.type == Recipient.HUDDLE:
# For huddle messages, we use a 10-second window because the
# timestamps aren't guaranteed to actually match between two
# copies of the same message.
time_window = datetime.timedelta(seconds=10)
else:
time_window = datetime.timedelta(seconds=0)
query = Message.objects.filter(
sender=message.sender,
recipient=message.recipient,
content=message.content,
sending_client=message.sending_client,
date_sent__gte=message.date_sent - time_window,
date_sent__lte=message.date_sent + time_window,
)
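    # Narrow further to messages with exactly the same topic.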
messages = filter_by_exact_message_topic(
query=query,
message=message,
)
if messages.exists():
return messages[0].id
return None
def extract_stream_indicator(s: str) -> Union[str, int]:
# Users can pass stream name as either an id or a name,
# and if they choose to pass a name, they may JSON encode
# it for legacy reasons.
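    # Illustrative examples (not exhaustive): '"social"' and 'social'
    # both yield the stream name "social", while '123' yields the
    # stream id 123.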
try:
data = orjson.loads(s)
except orjson.JSONDecodeError:
# If there was no JSON encoding, then we just
# have a raw stream name.
return s
# We should stop supporting this odd use case
# once we improve our documentation.
if isinstance(data, list):
if len(data) != 1: # nocoverage
raise JsonableError(_("Expected exactly one stream"))
data = data[0]
if isinstance(data, str):
# We had a JSON-encoded stream name.
return data
if isinstance(data, int):
# We had a stream id.
return data
raise JsonableError(_("Invalid data type for stream"))
def extract_private_recipients(s: str) -> Union[List[str], List[int]]:
# We try to accept multiple incoming formats for recipients.
# See test_extract_recipients() for examples of what we allow.
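    # For instance, a comma-separated string of emails yields a
    # deduplicated list of those emails, while a JSON list of integers
    # yields a deduplicated list of user IDs.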
try:
data = orjson.loads(s)
except orjson.JSONDecodeError:
data = s
if isinstance(data, str):
data = data.split(",")
if not isinstance(data, list):
raise JsonableError(_("Invalid data type for recipients"))
if not data:
# We don't complain about empty message recipients here
return data
if isinstance(data[0], str):
return get_validated_emails(data)
if not isinstance(data[0], int):
raise JsonableError(_("Invalid data type for recipients"))
return get_validated_user_ids(data)
def get_validated_user_ids(user_ids: Collection[int]) -> List[int]:
for user_id in user_ids:
if not isinstance(user_id, int):
raise JsonableError(_("Recipient lists may contain emails or user IDs, but not both."))
return list(set(user_ids))
def get_validated_emails(emails: Collection[str]) -> List[str]:
for email in emails:
if not isinstance(email, str):
raise JsonableError(_("Recipient lists may contain emails or user IDs, but not both."))
return list(filter(bool, {email.strip() for email in emails}))
def check_send_stream_message(
sender: UserProfile,
client: Client,
stream_name: str,
topic: str,
body: str,
realm: Optional[Realm] = None,
) -> int:
addressee = Addressee.for_stream_name(stream_name, topic)
message = check_message(sender, client, addressee, body, realm)
return do_send_messages([message])[0]
def check_send_stream_message_by_id(
sender: UserProfile,
client: Client,
stream_id: int,
topic: str,
body: str,
realm: Optional[Realm] = None,
) -> int:
addressee = Addressee.for_stream_id(stream_id, topic)
message = check_message(sender, client, addressee, body, realm)
return do_send_messages([message])[0]
def check_send_private_message(
sender: UserProfile, client: Client, receiving_user: UserProfile, body: str
) -> int:
addressee = Addressee.for_user_profile(receiving_user)
message = check_message(sender, client, addressee, body)
return do_send_messages([message])[0]
# check_send_message:
# Returns the id of the sent message. Has same argspec as check_message.
def check_send_message(
sender: UserProfile,
client: Client,
message_type_name: str,
message_to: Union[Sequence[int], Sequence[str]],
topic_name: Optional[str],
message_content: str,
realm: Optional[Realm] = None,
forged: bool = False,
forged_timestamp: Optional[float] = None,
forwarder_user_profile: Optional[UserProfile] = None,
local_id: Optional[str] = None,
sender_queue_id: Optional[str] = None,
widget_content: Optional[str] = None,
*,
skip_stream_access_check: bool = False,
) -> int:
addressee = Addressee.legacy_build(sender, message_type_name, message_to, topic_name)
try:
message = check_message(
sender,
client,
addressee,
message_content,
realm,
forged,
forged_timestamp,
forwarder_user_profile,
local_id,
sender_queue_id,
widget_content,
skip_stream_access_check=skip_stream_access_check,
)
except ZephyrMessageAlreadySentException as e:
return e.message_id
return do_send_messages([message])[0]
def check_schedule_message(
sender: UserProfile,
client: Client,
message_type_name: str,
message_to: Union[Sequence[str], Sequence[int]],
topic_name: Optional[str],
message_content: str,
delivery_type: str,
deliver_at: datetime.datetime,
realm: Optional[Realm] = None,
forwarder_user_profile: Optional[UserProfile] = None,
) -> int:
addressee = Addressee.legacy_build(sender, message_type_name, message_to, topic_name)
send_request = check_message(
sender,
client,
addressee,
message_content,
realm=realm,
forwarder_user_profile=forwarder_user_profile,
)
send_request.deliver_at = deliver_at
send_request.delivery_type = delivery_type
recipient = send_request.message.recipient
if delivery_type == "remind" and (
recipient.type != Recipient.STREAM and recipient.type_id != sender.id
):
raise JsonableError(_("Reminders can only be set for streams."))
return do_schedule_messages([send_request])[0]
def validate_message_edit_payload(
message: Message,
stream_id: Optional[int],
topic_name: Optional[str],
propagate_mode: Optional[str],
content: Optional[str],
) -> None:
"""
Checks that the data sent is well-formed. Does not handle editability, permissions etc.
"""
if topic_name is None and content is None and stream_id is None:
raise JsonableError(_("Nothing to change"))
if not message.is_stream_message():
if stream_id is not None:
raise JsonableError(_("Private messages cannot be moved to streams."))
if topic_name is not None:
raise JsonableError(_("Private messages cannot have topics."))
if propagate_mode != "change_one" and topic_name is None and stream_id is None:
raise JsonableError(_("Invalid propagate_mode without topic edit"))
if topic_name is not None:
check_stream_topic(topic_name)
if stream_id is not None and content is not None:
raise JsonableError(_("Cannot change message content while changing stream"))
# Right now, we prevent users from editing widgets.
if content is not None and is_widget_message(message):
raise JsonableError(_("Widgets cannot be edited."))
def can_edit_content_or_topic(
message: Message,
user_profile: UserProfile,
is_no_topic_msg: bool,
content: Optional[str] = None,
topic_name: Optional[str] = None,
) -> bool:
# You have permission to edit the message (both content and topic) if you sent it.
if message.sender_id == user_profile.id:
return True
# You cannot edit the content of message sent by someone else.
if content is not None:
return False
assert topic_name is not None
# The following cases are the various reasons a user might be
# allowed to edit topics.
    # We allow anyone to edit messages with the "(no topic)" topic, to help tend them.
if is_no_topic_msg:
return True
# The can_edit_topic_of_any_message helper returns whether the user can edit the topic
# or not based on edit_topic_policy setting and the user's role.
if user_profile.can_edit_topic_of_any_message():
return True
return False
def check_update_message(
user_profile: UserProfile,
message_id: int,
stream_id: Optional[int] = None,
topic_name: Optional[str] = None,
propagate_mode: str = "change_one",
send_notification_to_old_thread: bool = True,
send_notification_to_new_thread: bool = True,
content: Optional[str] = None,
) -> int:
"""This will update a message given the message id and user profile.
It checks whether the user profile has the permission to edit the message
and raises a JsonableError if otherwise.
    It returns the number of messages changed.
"""
message, ignored_user_message = access_message(user_profile, message_id)
if not user_profile.realm.allow_message_editing:
raise JsonableError(_("Your organization has turned off message editing"))
# The zerver/views/message_edit.py call point already strips this
# via REQ_topic; so we can delete this line if we arrange a
    # contract where future callers in the embedded bots system
# use REQ_topic as well (or otherwise are guaranteed to strip input).
if topic_name is not None:
topic_name = topic_name.strip()
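        # An unchanged topic is treated as not being a topic edit at all.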
if topic_name == message.topic_name():
topic_name = None
validate_message_edit_payload(message, stream_id, topic_name, propagate_mode, content)
is_no_topic_msg = message.topic_name() == "(no topic)"
if content is not None or topic_name is not None:
if not can_edit_content_or_topic(
message, user_profile, is_no_topic_msg, content, topic_name
):
raise JsonableError(_("You don't have permission to edit this message"))
# If there is a change to the content, check that it hasn't been too long
# Allow an extra 20 seconds since we potentially allow editing 15 seconds
# past the limit, and in case there are network issues, etc. The 15 comes
# from (min_seconds_to_edit + seconds_left_buffer) in message_edit.js; if
# you change this value also change those two parameters in message_edit.js.
edit_limit_buffer = 20
if content is not None and user_profile.realm.message_content_edit_limit_seconds > 0:
deadline_seconds = user_profile.realm.message_content_edit_limit_seconds + edit_limit_buffer
if (timezone_now() - message.date_sent) > datetime.timedelta(seconds=deadline_seconds):
raise JsonableError(_("The time limit for editing this message has passed"))
# If there is a change to the topic, check that the user is allowed to
    # edit it and that it has not been too long. If the editor did not send the
    # message, is not an admin or moderator, and the time limit for editing
    # topics has passed, raise an error.
if (
topic_name is not None
and message.sender != user_profile
and not user_profile.is_realm_admin
and not user_profile.is_moderator
and not is_no_topic_msg
):
deadline_seconds = Realm.DEFAULT_COMMUNITY_TOPIC_EDITING_LIMIT_SECONDS + edit_limit_buffer
if (timezone_now() - message.date_sent) > datetime.timedelta(seconds=deadline_seconds):
raise JsonableError(_("The time limit for editing this message's topic has passed"))
rendering_result = None
links_for_embed: Set[str] = set()
prior_mention_user_ids: Set[int] = set()
mention_data: Optional[MentionData] = None
if content is not None:
if content.rstrip() == "":
content = "(deleted)"
content = normalize_body(content)
mention_backend = MentionBackend(user_profile.realm_id)
mention_data = MentionData(
mention_backend=mention_backend,
content=content,
)
user_info = get_user_info_for_message_updates(message.id)
prior_mention_user_ids = user_info["mention_user_ids"]
# We render the message using the current user's realm; since
# the cross-realm bots never edit messages, this should be
# always correct.
# Note: If rendering fails, the called code will raise a JsonableError.
rendering_result = render_incoming_message(
message,
content,
user_info["message_user_ids"],
user_profile.realm,
mention_data=mention_data,
)
links_for_embed |= rendering_result.links_for_preview
if message.is_stream_message() and rendering_result.mentions_wildcard:
stream = access_stream_by_id(user_profile, message.recipient.type_id)[0]
if not wildcard_mention_allowed(message.sender, stream):
raise JsonableError(
_("You do not have permission to use wildcard mentions in this stream.")
)
new_stream = None
number_changed = 0
if stream_id is not None:
assert message.is_stream_message()
if not user_profile.can_move_messages_between_streams():
raise JsonableError(_("You don't have permission to move this message"))
try:
access_stream_by_id(user_profile, message.recipient.type_id)
except JsonableError:
raise JsonableError(
_(
"You don't have permission to move this message due to missing access to its stream"
)
)
new_stream = access_stream_by_id(user_profile, stream_id, require_active=True)[0]
check_stream_access_based_on_stream_post_policy(user_profile, new_stream)
number_changed = do_update_message(
user_profile,
message,
new_stream,
topic_name,
propagate_mode,
send_notification_to_old_thread,
send_notification_to_new_thread,
content,
rendering_result,
prior_mention_user_ids,
mention_data,
)
if links_for_embed:
event_data = {
"message_id": message.id,
"message_content": message.content,
# The choice of `user_profile.realm_id` rather than
# `sender.realm_id` must match the decision made in the
# `render_incoming_message` call earlier in this function.
"message_realm_id": user_profile.realm_id,
"urls": list(links_for_embed),
}
queue_json_publish("embed_links", event_data)
return number_changed
def check_default_stream_group_name(group_name: str) -> None:
if group_name.strip() == "":
raise JsonableError(_("Invalid default stream group name '{}'").format(group_name))
if len(group_name) > DefaultStreamGroup.MAX_NAME_LENGTH:
raise JsonableError(
_("Default stream group name too long (limit: {} characters)").format(
DefaultStreamGroup.MAX_NAME_LENGTH,
)
)
for i in group_name:
if ord(i) == 0:
raise JsonableError(
_("Default stream group name '{}' contains NULL (0x00) characters.").format(
group_name,
)
)
def send_rate_limited_pm_notification_to_bot_owner(
sender: UserProfile, realm: Realm, content: str
) -> None:
"""
Sends a PM error notification to a bot's owner if one hasn't already
been sent in the last 5 minutes.
"""
if sender.realm.is_zephyr_mirror_realm or sender.realm.deactivated:
return
if not sender.is_bot or sender.bot_owner is None:
return
# Don't send these notifications for cross-realm bot messages
# (e.g. from EMAIL_GATEWAY_BOT) since the owner for
# EMAIL_GATEWAY_BOT is probably the server administrator, not
# the owner of the bot who could potentially fix the problem.
if sender.realm != realm:
return
# We warn the user once every 5 minutes to avoid a flood of
# PMs on a misconfigured integration, re-using the
# UserProfile.last_reminder field, which is not used for bots.
last_reminder = sender.last_reminder
waitperiod = datetime.timedelta(minutes=UserProfile.BOT_OWNER_STREAM_ALERT_WAITPERIOD)
if last_reminder and timezone_now() - last_reminder <= waitperiod:
return
internal_send_private_message(
get_system_bot(settings.NOTIFICATION_BOT, sender.bot_owner.realm_id),
sender.bot_owner,
content,
)
sender.last_reminder = timezone_now()
sender.save(update_fields=["last_reminder"])
def send_pm_if_empty_stream(
stream: Optional[Stream],
realm: Realm,
sender: UserProfile,
stream_name: Optional[str] = None,
stream_id: Optional[int] = None,
) -> None:
"""If a bot sends a message to a stream that doesn't exist or has no
subscribers, sends a notification to the bot owner (if not a
cross-realm bot) so that the owner can correct the issue."""
if not sender.is_bot or sender.bot_owner is None:
return
arg_dict = {
"bot_identity": f"`{sender.delivery_email}`",
"stream_id": stream_id,
"stream_name": f"#**{stream_name}**",
"new_stream_link": "#streams/new",
}
if sender.bot_owner is not None:
with override_language(sender.bot_owner.default_language):
if stream is None:
if stream_id is not None:
content = _(
"Your bot {bot_identity} tried to send a message to stream ID "
"{stream_id}, but there is no stream with that ID."
).format(**arg_dict)
else:
assert stream_name is not None
content = _(
"Your bot {bot_identity} tried to send a message to stream "
"{stream_name}, but that stream does not exist. "
"Click [here]({new_stream_link}) to create it."
).format(**arg_dict)
else:
if num_subscribers_for_stream_id(stream.id) > 0:
return
content = _(
"Your bot {bot_identity} tried to send a message to "
"stream {stream_name}. The stream exists but "
"does not have any subscribers."
).format(**arg_dict)
send_rate_limited_pm_notification_to_bot_owner(sender, realm, content)
def validate_stream_name_with_pm_notification(
stream_name: str, realm: Realm, sender: UserProfile
) -> Stream:
stream_name = stream_name.strip()
check_stream_name(stream_name)
try:
stream = get_stream(stream_name, realm)
send_pm_if_empty_stream(stream, realm, sender)
except Stream.DoesNotExist:
send_pm_if_empty_stream(None, realm, sender, stream_name=stream_name)
raise StreamDoesNotExistError(escape(stream_name))
return stream
def validate_stream_id_with_pm_notification(
stream_id: int, realm: Realm, sender: UserProfile
) -> Stream:
try:
stream = get_stream_by_id_in_realm(stream_id, realm)
send_pm_if_empty_stream(stream, realm, sender)
except Stream.DoesNotExist:
send_pm_if_empty_stream(None, realm, sender, stream_id=stream_id)
raise StreamWithIDDoesNotExistError(stream_id)
return stream
def check_private_message_policy(
realm: Realm, sender: UserProfile, user_profiles: Sequence[UserProfile]
) -> None:
if realm.private_message_policy == Realm.PRIVATE_MESSAGE_POLICY_DISABLED:
if sender.is_bot or (len(user_profiles) == 1 and user_profiles[0].is_bot):
# We allow PMs only between users and bots, to avoid
# breaking the tutorial as well as automated
# notifications from system bots to users.
return
raise JsonableError(_("Private messages are disabled in this organization."))
# check_message:
# Returns a SendMessageRequest ready for sending via do_send_messages on success;
# raises an error (e.g. JsonableError) otherwise.
def check_message(
sender: UserProfile,
client: Client,
addressee: Addressee,
message_content_raw: str,
realm: Optional[Realm] = None,
forged: bool = False,
forged_timestamp: Optional[float] = None,
forwarder_user_profile: Optional[UserProfile] = None,
local_id: Optional[str] = None,
sender_queue_id: Optional[str] = None,
widget_content: Optional[str] = None,
email_gateway: bool = False,
*,
skip_stream_access_check: bool = False,
mention_backend: Optional[MentionBackend] = None,
limit_unread_user_ids: Optional[Set[int]] = None,
) -> SendMessageRequest:
"""See
https://zulip.readthedocs.io/en/latest/subsystems/sending-messages.html
for high-level documentation on this subsystem.
"""
stream = None
message_content = normalize_body(message_content_raw)
if realm is None:
realm = sender.realm
if addressee.is_stream():
topic_name = addressee.topic()
topic_name = truncate_topic(topic_name)
stream_name = addressee.stream_name()
stream_id = addressee.stream_id()
if stream_name is not None:
stream = validate_stream_name_with_pm_notification(stream_name, realm, sender)
elif stream_id is not None:
stream = validate_stream_id_with_pm_notification(stream_id, realm, sender)
else:
stream = addressee.stream()
assert stream is not None
# To save a database round trip, we construct the Recipient
# object for the Stream rather than fetching it from the
# database using the stream.recipient foreign key.
#
# This is simpler than ensuring that code paths that fetch a
# Stream that will be used for sending a message have a
        # `select_related("recipient")`, which would also needlessly
# expand Stream objects in memory (all the fields of Recipient
# are already known given the Stream object).
recipient = Recipient(
id=stream.recipient_id,
type_id=stream.id,
type=Recipient.STREAM,
)
if not skip_stream_access_check:
access_stream_for_send_message(
sender=sender, stream=stream, forwarder_user_profile=forwarder_user_profile
)
else:
# Defensive assertion - the only currently supported use case
# for this option is for outgoing webhook bots and since this
# is security-sensitive code, it's beneficial to ensure nothing
# else can sneak past the access check.
assert sender.bot_type == sender.OUTGOING_WEBHOOK_BOT
if realm.mandatory_topics and topic_name == "(no topic)":
raise JsonableError(_("Topics are required in this organization"))
elif addressee.is_private():
user_profiles = addressee.user_profiles()
mirror_message = client and client.name in [
"zephyr_mirror",
"irc_mirror",
"jabber_mirror",
"JabberMirror",
]
check_private_message_policy(realm, sender, user_profiles)
# API super-users who set the `forged` flag are allowed to
# forge messages sent by any user, so we disable the
# `forwarded_mirror_message` security check in that case.
forwarded_mirror_message = mirror_message and not forged
try:
recipient = recipient_for_user_profiles(
user_profiles, forwarded_mirror_message, forwarder_user_profile, sender
)
except ValidationError as e:
assert isinstance(e.messages[0], str)
raise JsonableError(e.messages[0])
else:
# This is defensive code--Addressee already validates
# the message type.
raise AssertionError("Invalid message type")
message = Message()
message.sender = sender
message.content = message_content
message.recipient = recipient
if addressee.is_stream():
message.set_topic_name(topic_name)
if forged and forged_timestamp is not None:
# Forged messages come with a timestamp
message.date_sent = timestamp_to_datetime(forged_timestamp)
else:
message.date_sent = timezone_now()
message.sending_client = client
# We render messages later in the process.
assert message.rendered_content is None
if client.name == "zephyr_mirror":
id = already_sent_mirrored_message_id(message)
if id is not None:
raise ZephyrMessageAlreadySentException(id)
widget_content_dict = None
if widget_content is not None:
try:
widget_content_dict = orjson.loads(widget_content)
except orjson.JSONDecodeError:
raise JsonableError(_("Widgets: API programmer sent invalid JSON content"))
try:
check_widget_content(widget_content_dict)
except ValidationError as error:
raise JsonableError(
_("Widgets: {error_msg}").format(
error_msg=error.message,
)
)
message_send_dict = build_message_send_dict(
message=message,
stream=stream,
local_id=local_id,
sender_queue_id=sender_queue_id,
realm=realm,
widget_content_dict=widget_content_dict,
email_gateway=email_gateway,
mention_backend=mention_backend,
limit_unread_user_ids=limit_unread_user_ids,
)
if stream is not None and message_send_dict.rendering_result.mentions_wildcard:
if not wildcard_mention_allowed(sender, stream):
raise JsonableError(
_("You do not have permission to use wildcard mentions in this stream.")
)
return message_send_dict
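# A minimal usage sketch for check_message above. `sender`, `stream`, and the
# content are hypothetical placeholders (not defined in this module); the
# Addressee constructor, get_client("Internal"), and do_send_messages call
# mirror how the internal_send_* helpers below drive this same pipeline.
#
#     addressee = Addressee.for_stream(stream, "daily standup")
#     send_request = check_message(
#         sender=sender,
#         client=get_client("Internal"),
#         addressee=addressee,
#         message_content_raw="Reminder: standup at 10am.",
#     )
#     [message_id] = do_send_messages([send_request])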
def _internal_prep_message(
realm: Realm,
sender: UserProfile,
addressee: Addressee,
content: str,
email_gateway: bool = False,
mention_backend: Optional[MentionBackend] = None,
limit_unread_user_ids: Optional[Set[int]] = None,
) -> Optional[SendMessageRequest]:
"""
    Creates a message object and checks it, but doesn't send it or save it to the database.
The internal function that calls this can therefore batch send a bunch of created
messages together as one database query.
Call do_send_messages with a list of the return values of this method.
"""
    # Truncate the content if it is too long to send.
if len(content) > settings.MAX_MESSAGE_LENGTH:
content = content[0:3900] + "\n\n[message was too long and has been truncated]"
# If we have a stream name, and the stream doesn't exist, we
# create it here (though this code path should probably be removed
# eventually, moving that responsibility to the caller). If
# addressee.stream_name() is None (i.e. we're sending to a stream
# by ID), we skip this, as the stream object must already exist.
if addressee.is_stream():
stream_name = addressee.stream_name()
if stream_name is not None:
ensure_stream(realm, stream_name, acting_user=sender)
try:
return check_message(
sender,
get_client("Internal"),
addressee,
content,
realm=realm,
email_gateway=email_gateway,
mention_backend=mention_backend,
limit_unread_user_ids=limit_unread_user_ids,
)
except JsonableError as e:
logging.exception(
"Error queueing internal message by %s: %s",
sender.delivery_email,
e.msg,
stack_info=True,
)
return None
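# A sketch of the batching pattern the docstring above describes, assuming the
# caller already holds a hypothetical list of (addressee, content) pairs to
# deliver in one pass; None results (failed checks) are skipped, and the
# surviving requests go to do_send_messages in a single call.
#
#     requests = [
#         _internal_prep_message(realm, sender, addressee, content)
#         for addressee, content in pending_notifications  # hypothetical list
#     ]
#     do_send_messages([request for request in requests if request is not None])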
def internal_prep_stream_message(
sender: UserProfile,
stream: Stream,
topic: str,
content: str,
email_gateway: bool = False,
limit_unread_user_ids: Optional[Set[int]] = None,
) -> Optional[SendMessageRequest]:
"""
See _internal_prep_message for details of how this works.
"""
realm = stream.realm
addressee = Addressee.for_stream(stream, topic)
return _internal_prep_message(
realm=realm,
sender=sender,
addressee=addressee,
content=content,
email_gateway=email_gateway,
limit_unread_user_ids=limit_unread_user_ids,
)
def internal_prep_stream_message_by_name(
realm: Realm,
sender: UserProfile,
stream_name: str,
topic: str,
content: str,
) -> Optional[SendMessageRequest]:
"""
See _internal_prep_message for details of how this works.
"""
addressee = Addressee.for_stream_name(stream_name, topic)
return _internal_prep_message(
realm=realm,
sender=sender,
addressee=addressee,
content=content,
)
def internal_prep_private_message(
realm: Realm,
sender: UserProfile,
recipient_user: UserProfile,
content: str,
mention_backend: Optional[MentionBackend] = None,
) -> Optional[SendMessageRequest]:
"""
See _internal_prep_message for details of how this works.
"""
addressee = Addressee.for_user_profile(recipient_user)
return _internal_prep_message(
realm=realm,
sender=sender,
addressee=addressee,
content=content,
mention_backend=mention_backend,
)
def internal_send_private_message(
sender: UserProfile, recipient_user: UserProfile, content: str
) -> Optional[int]:
realm = recipient_user.realm
message = internal_prep_private_message(realm, sender, recipient_user, content)
if message is None:
return None
message_ids = do_send_messages([message])
return message_ids[0]
def internal_send_stream_message(
sender: UserProfile,
stream: Stream,
topic: str,
content: str,
email_gateway: bool = False,
limit_unread_user_ids: Optional[Set[int]] = None,
) -> Optional[int]:
message = internal_prep_stream_message(
sender, stream, topic, content, email_gateway, limit_unread_user_ids=limit_unread_user_ids
)
if message is None:
return None
message_ids = do_send_messages([message])
return message_ids[0]
def internal_send_stream_message_by_name(
realm: Realm,
sender: UserProfile,
stream_name: str,
topic: str,
content: str,
) -> Optional[int]:
message = internal_prep_stream_message_by_name(
realm,
sender,
stream_name,
topic,
content,
)
if message is None:
return None
message_ids = do_send_messages([message])
return message_ids[0]
def internal_send_huddle_message(
realm: Realm, sender: UserProfile, emails: List[str], content: str
) -> Optional[int]:
addressee = Addressee.for_private(emails, realm)
message = _internal_prep_message(
realm=realm,
sender=sender,
addressee=addressee,
content=content,
)
if message is None:
return None
message_ids = do_send_messages([message])
return message_ids[0]
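# Minimal usage sketches for the internal_send_* wrappers above; the bot,
# stream, user, and email values are placeholders (e.g. a system bot fetched
# via get_system_bot). Each call returns the new message id, or None if the
# message could not be prepared.
#
#     internal_send_stream_message(notification_bot, stream, "topic", "content")
#     internal_send_private_message(notification_bot, user, "content")
#     internal_send_huddle_message(realm, notification_bot, ["[email protected]", "[email protected]"], "content")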
def validate_user_access_to_subscribers(
user_profile: Optional[UserProfile], stream: Stream
) -> None:
"""Validates whether the user can view the subscribers of a stream. Raises a JsonableError if:
* The user and the stream are in different realms
* The realm is MIT and the stream is not invite only.
* The stream is invite only, requesting_user is passed, and that user
does not subscribe to the stream.
"""
validate_user_access_to_subscribers_helper(
user_profile,
{
"realm_id": stream.realm_id,
"is_web_public": stream.is_web_public,
"invite_only": stream.invite_only,
},
# We use a lambda here so that we only compute whether the
# user is subscribed if we have to
lambda user_profile: subscribed_to_stream(user_profile, stream.id),
)
def validate_user_access_to_subscribers_helper(
user_profile: Optional[UserProfile],
stream_dict: Mapping[str, Any],
check_user_subscribed: Callable[[UserProfile], bool],
) -> None:
"""Helper for validate_user_access_to_subscribers that doesn't require
a full stream object. This function is a bit hard to read,
because it is carefully optimized for performance in the two code
paths we call it from:
* In `bulk_get_subscriber_user_ids`, we already know whether the
      user was subscribed via `subscribed_stream_ids`, and so we want to avoid a
database query at all (especially since it calls this in a loop);
* In `validate_user_access_to_subscribers`, we want to only check
if the user is subscribed when we absolutely have to, since it
costs a database query.
The `check_user_subscribed` argument is a function that reports
whether the user is subscribed to the stream.
Note also that we raise a ValidationError in cases where the
caller is doing the wrong thing (maybe these should be
AssertionErrors), and JsonableError for 400 type errors.
"""
if user_profile is None:
raise ValidationError("Missing user to validate access for")
if user_profile.realm_id != stream_dict["realm_id"]:
raise ValidationError("Requesting user not in given realm")
# Even guest users can access subscribers to web-public streams,
# since they can freely become subscribers to these streams.
if stream_dict["is_web_public"]:
return
# With the exception of web-public streams, a guest must
# be subscribed to a stream (even a public one) in order
# to see subscribers.
if user_profile.is_guest:
if check_user_subscribed(user_profile):
return
# We could explicitly handle the case where guests aren't
# subscribed here in an `else` statement or we can fall
# through to the subsequent logic. Tim prefers the latter.
# Adding an `else` would ensure better code coverage.
if not user_profile.can_access_public_streams() and not stream_dict["invite_only"]:
raise JsonableError(_("Subscriber data is not available for this stream"))
# Organization administrators can view subscribers for all streams.
if user_profile.is_realm_admin:
return
if stream_dict["invite_only"] and not check_user_subscribed(user_profile):
raise JsonableError(_("Unable to retrieve subscribers for private stream"))
def bulk_get_subscriber_user_ids(
stream_dicts: Collection[Mapping[str, Any]],
user_profile: UserProfile,
subscribed_stream_ids: Set[int],
) -> Dict[int, List[int]]:
"""sub_dict maps stream_id => whether the user is subscribed to that stream."""
target_stream_dicts = []
for stream_dict in stream_dicts:
stream_id = stream_dict["id"]
is_subscribed = stream_id in subscribed_stream_ids
try:
validate_user_access_to_subscribers_helper(
user_profile,
stream_dict,
lambda user_profile: is_subscribed,
)
except JsonableError:
continue
target_stream_dicts.append(stream_dict)
recip_to_stream_id = {stream["recipient_id"]: stream["id"] for stream in target_stream_dicts}
recipient_ids = sorted(stream["recipient_id"] for stream in target_stream_dicts)
result: Dict[int, List[int]] = {stream["id"]: [] for stream in stream_dicts}
if not recipient_ids:
return result
"""
The raw SQL below leads to more than a 2x speedup when tested with
20k+ total subscribers. (For large realms with lots of default
streams, this function deals with LOTS of data, so it is important
to optimize.)
"""
query = SQL(
"""
SELECT
zerver_subscription.recipient_id,
zerver_subscription.user_profile_id
FROM
zerver_subscription
WHERE
zerver_subscription.recipient_id in %(recipient_ids)s AND
zerver_subscription.active AND
zerver_subscription.is_user_active
ORDER BY
zerver_subscription.recipient_id,
zerver_subscription.user_profile_id
"""
)
cursor = connection.cursor()
cursor.execute(query, {"recipient_ids": tuple(recipient_ids)})
rows = cursor.fetchall()
cursor.close()
"""
Using groupby/itemgetter here is important for performance, at scale.
It makes it so that all interpreter overhead is just O(N) in nature.
"""
for recip_id, recip_rows in itertools.groupby(rows, itemgetter(0)):
user_profile_ids = [r[1] for r in recip_rows]
stream_id = recip_to_stream_id[recip_id]
result[stream_id] = list(user_profile_ids)
return result
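# A small illustration of the groupby/itemgetter step above, assuming rows
# arrive already ordered by recipient_id (which the ORDER BY guarantees):
#
#     rows = [(7, 101), (7, 102), (9, 103)]  # (recipient_id, user_profile_id)
#     for recip_id, recip_rows in itertools.groupby(rows, itemgetter(0)):
#         print(recip_id, [r[1] for r in recip_rows])
#     # 7 [101, 102]
#     # 9 [103]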
def get_subscribers_query(stream: Stream, requesting_user: Optional[UserProfile]) -> QuerySet:
# TODO: Make a generic stub for QuerySet
"""Build a query to get the subscribers list for a stream, raising a JsonableError if:
'realm' is optional in stream.
The caller can refine this query with select_related(), values(), etc. depending
on whether it wants objects or just certain fields
"""
validate_user_access_to_subscribers(requesting_user, stream)
return get_active_subscriptions_for_stream_id(stream.id, include_deactivated_users=False)
def get_subscriber_ids(stream: Stream, requesting_user: Optional[UserProfile] = None) -> List[int]:
subscriptions_query = get_subscribers_query(stream, requesting_user)
return subscriptions_query.values_list("user_profile_id", flat=True)
@dataclass
class StreamInfo:
email_address: str
stream_weekly_traffic: Optional[int]
subscribers: List[int]
def send_subscription_add_events(
realm: Realm,
sub_info_list: List[SubInfo],
subscriber_dict: Dict[int, Set[int]],
) -> None:
info_by_user: Dict[int, List[SubInfo]] = defaultdict(list)
for sub_info in sub_info_list:
info_by_user[sub_info.user.id].append(sub_info)
stream_ids = {sub_info.stream.id for sub_info in sub_info_list}
recent_traffic = get_streams_traffic(stream_ids=stream_ids)
# We generally only have a few streams, so we compute stream
# data in its own loop.
stream_info_dict: Dict[int, StreamInfo] = {}
for sub_info in sub_info_list:
stream = sub_info.stream
if stream.id not in stream_info_dict:
email_address = encode_email_address(stream, show_sender=True)
stream_weekly_traffic = get_average_weekly_stream_traffic(
stream.id, stream.date_created, recent_traffic
)
if stream.is_in_zephyr_realm and not stream.invite_only:
subscribers = []
else:
subscribers = list(subscriber_dict[stream.id])
stream_info_dict[stream.id] = StreamInfo(
email_address=email_address,
stream_weekly_traffic=stream_weekly_traffic,
subscribers=subscribers,
)
for user_id, sub_infos in info_by_user.items():
sub_dicts = []
for sub_info in sub_infos:
stream = sub_info.stream
stream_info = stream_info_dict[stream.id]
subscription = sub_info.sub
sub_dict = stream.to_dict()
for field_name in Subscription.API_FIELDS:
sub_dict[field_name] = getattr(subscription, field_name)
sub_dict["in_home_view"] = not subscription.is_muted
sub_dict["email_address"] = stream_info.email_address
sub_dict["stream_weekly_traffic"] = stream_info.stream_weekly_traffic
sub_dict["subscribers"] = stream_info.subscribers
sub_dicts.append(sub_dict)
# Send a notification to the user who subscribed.
event = dict(type="subscription", op="add", subscriptions=sub_dicts)
send_event(realm, event, [user_id])
SubT = Tuple[List[SubInfo], List[SubInfo]]
def bulk_add_subscriptions(
realm: Realm,
streams: Collection[Stream],
users: Iterable[UserProfile],
color_map: Mapping[str, str] = {},
from_user_creation: bool = False,
*,
acting_user: Optional[UserProfile],
) -> SubT:
users = list(users)
user_ids = [user.id for user in users]
    # Sanity check our callers
for stream in streams:
assert stream.realm_id == realm.id
for user in users:
assert user.realm_id == realm.id
recipient_ids = [stream.recipient_id for stream in streams]
recipient_id_to_stream = {stream.recipient_id: stream for stream in streams}
recipient_color_map = {}
for stream in streams:
color: Optional[str] = color_map.get(stream.name, None)
if color is not None:
recipient_color_map[stream.recipient_id] = color
used_colors_for_user_ids: Dict[int, Set[str]] = get_used_colors_for_user_ids(user_ids)
existing_subs = Subscription.objects.filter(
user_profile_id__in=user_ids,
recipient__type=Recipient.STREAM,
recipient_id__in=recipient_ids,
)
subs_by_user: Dict[int, List[Subscription]] = defaultdict(list)
for sub in existing_subs:
subs_by_user[sub.user_profile_id].append(sub)
already_subscribed: List[SubInfo] = []
subs_to_activate: List[SubInfo] = []
subs_to_add: List[SubInfo] = []
for user_profile in users:
my_subs = subs_by_user[user_profile.id]
# Make a fresh set of all new recipient ids, and then we will
# remove any for which our user already has a subscription
# (and we'll re-activate any subscriptions as needed).
new_recipient_ids: Set[int] = {stream.recipient_id for stream in streams}
for sub in my_subs:
if sub.recipient_id in new_recipient_ids:
new_recipient_ids.remove(sub.recipient_id)
stream = recipient_id_to_stream[sub.recipient_id]
sub_info = SubInfo(user_profile, sub, stream)
if sub.active:
already_subscribed.append(sub_info)
else:
subs_to_activate.append(sub_info)
used_colors = used_colors_for_user_ids.get(user_profile.id, set())
user_color_map = pick_colors(used_colors, recipient_color_map, list(new_recipient_ids))
for recipient_id in new_recipient_ids:
stream = recipient_id_to_stream[recipient_id]
color = user_color_map[recipient_id]
sub = Subscription(
user_profile=user_profile,
is_user_active=user_profile.is_active,
active=True,
color=color,
recipient_id=recipient_id,
)
sub_info = SubInfo(user_profile, sub, stream)
subs_to_add.append(sub_info)
bulk_add_subs_to_db_with_logging(
realm=realm,
acting_user=acting_user,
subs_to_add=subs_to_add,
subs_to_activate=subs_to_activate,
)
altered_user_dict: Dict[int, Set[int]] = defaultdict(set)
for sub_info in subs_to_add + subs_to_activate:
altered_user_dict[sub_info.stream.id].add(sub_info.user.id)
stream_dict = {stream.id: stream for stream in streams}
new_streams = [stream_dict[stream_id] for stream_id in altered_user_dict]
subscriber_peer_info = bulk_get_subscriber_peer_info(
realm=realm,
streams=new_streams,
)
# We now send several types of events to notify browsers. The
# first batches of notifications are sent only to the user(s)
# being subscribed; we can skip these notifications when this is
# being called from the new user creation flow.
if not from_user_creation:
send_stream_creation_events_for_private_streams(
realm=realm,
stream_dict=stream_dict,
altered_user_dict=altered_user_dict,
)
send_subscription_add_events(
realm=realm,
sub_info_list=subs_to_add + subs_to_activate,
subscriber_dict=subscriber_peer_info.subscribed_ids,
)
send_peer_subscriber_events(
op="peer_add",
realm=realm,
altered_user_dict=altered_user_dict,
stream_dict=stream_dict,
private_peer_dict=subscriber_peer_info.private_peer_dict,
)
return (
subs_to_add + subs_to_activate,
already_subscribed,
)
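# A hedged usage sketch for bulk_add_subscriptions above; `realm`, `streams`,
# and `users` are placeholders the caller would already have fetched. The
# return value separates newly effective subscriptions from ones that already
# existed.
#
#     added, already_subscribed = bulk_add_subscriptions(
#         realm, streams, users, acting_user=None
#     )
#     for sub_info in added:
#         print(sub_info.user.id, sub_info.stream.name)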
# This function contains all the database changes as part of
# subscribing users to streams; we use a transaction to ensure that
# the RealmAuditLog entries are created atomically with the
# Subscription object creation (and updates).
@transaction.atomic(savepoint=False)
def bulk_add_subs_to_db_with_logging(
realm: Realm,
acting_user: Optional[UserProfile],
subs_to_add: List[SubInfo],
subs_to_activate: List[SubInfo],
) -> None:
Subscription.objects.bulk_create(info.sub for info in subs_to_add)
sub_ids = [info.sub.id for info in subs_to_activate]
Subscription.objects.filter(id__in=sub_ids).update(active=True)
# Log subscription activities in RealmAuditLog
event_time = timezone_now()
event_last_message_id = get_last_message_id()
    all_subscription_logs: List[RealmAuditLog] = []
for sub_info in subs_to_add:
all_subscription_logs.append(
RealmAuditLog(
realm=realm,
acting_user=acting_user,
modified_user=sub_info.user,
modified_stream=sub_info.stream,
event_last_message_id=event_last_message_id,
event_type=RealmAuditLog.SUBSCRIPTION_CREATED,
event_time=event_time,
)
)
for sub_info in subs_to_activate:
all_subscription_logs.append(
RealmAuditLog(
realm=realm,
acting_user=acting_user,
modified_user=sub_info.user,
modified_stream=sub_info.stream,
event_last_message_id=event_last_message_id,
event_type=RealmAuditLog.SUBSCRIPTION_ACTIVATED,
event_time=event_time,
)
)
    # Now that we have all the log objects generated, we can do a bulk insert
RealmAuditLog.objects.bulk_create(all_subscription_logs)
def send_stream_creation_events_for_private_streams(
realm: Realm,
stream_dict: Dict[int, Stream],
altered_user_dict: Dict[int, Set[int]],
) -> None:
for stream_id, stream_users_ids in altered_user_dict.items():
stream = stream_dict[stream_id]
if not stream.is_public():
            # Users newly added to invite-only streams need a
            # `create` notification, both because they need the
            # stream to exist before they get the "subscribe"
            # notification and so that they can manage the new stream.
# Realm admins already have all created private streams.
realm_admin_ids = {user.id for user in realm.get_admin_users_and_bots()}
notify_user_ids = list(stream_users_ids - realm_admin_ids)
if notify_user_ids:
send_stream_creation_event(stream, notify_user_ids)
def send_peer_subscriber_events(
op: str,
realm: Realm,
stream_dict: Dict[int, Stream],
altered_user_dict: Dict[int, Set[int]],
private_peer_dict: Dict[int, Set[int]],
) -> None:
# Send peer_add/peer_remove events to other users who are tracking the
# subscribers lists of streams in their browser; everyone for
# public streams and only existing subscribers for private streams.
assert op in ["peer_add", "peer_remove"]
private_stream_ids = [
stream_id for stream_id in altered_user_dict if stream_dict[stream_id].invite_only
]
for stream_id in private_stream_ids:
altered_user_ids = altered_user_dict[stream_id]
peer_user_ids = private_peer_dict[stream_id] - altered_user_ids
if peer_user_ids and altered_user_ids:
event = dict(
type="subscription",
op=op,
stream_ids=[stream_id],
user_ids=sorted(list(altered_user_ids)),
)
send_event(realm, event, peer_user_ids)
public_stream_ids = [
stream_id
for stream_id in altered_user_dict
if not stream_dict[stream_id].invite_only and not stream_dict[stream_id].is_in_zephyr_realm
]
if public_stream_ids:
user_streams: Dict[int, Set[int]] = defaultdict(set)
public_peer_ids = set(active_non_guest_user_ids(realm.id))
for stream_id in public_stream_ids:
altered_user_ids = altered_user_dict[stream_id]
peer_user_ids = public_peer_ids - altered_user_ids
if peer_user_ids and altered_user_ids:
if len(altered_user_ids) == 1:
# If we only have one user, we will try to
# find other streams they have (un)subscribed to
# (where it's just them). This optimization
# typically works when a single user is subscribed
# to multiple default public streams during
# new-user registration.
#
# This optimization depends on all public streams
# having the same peers for any single user, which
# isn't the case for private streams.
altered_user_id = list(altered_user_ids)[0]
user_streams[altered_user_id].add(stream_id)
else:
event = dict(
type="subscription",
op=op,
stream_ids=[stream_id],
user_ids=sorted(list(altered_user_ids)),
)
send_event(realm, event, peer_user_ids)
for user_id, stream_ids in user_streams.items():
peer_user_ids = public_peer_ids - {user_id}
event = dict(
type="subscription",
op=op,
stream_ids=sorted(list(stream_ids)),
user_ids=[user_id],
)
send_event(realm, event, peer_user_ids)
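# Roughly, the two event shapes produced above look like this (ids invented
# for illustration): the per-stream form carries many user_ids for one stream,
# while the batched form inverts that for a single user across many public
# streams.
#
#     {"type": "subscription", "op": "peer_add", "stream_ids": [42], "user_ids": [1, 2, 3]}
#     {"type": "subscription", "op": "peer_add", "stream_ids": [42, 43, 44], "user_ids": [1]}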
def send_peer_remove_events(
realm: Realm,
streams: List[Stream],
altered_user_dict: Dict[int, Set[int]],
) -> None:
private_streams = [stream for stream in streams if stream.invite_only]
private_peer_dict = bulk_get_private_peers(
realm=realm,
private_streams=private_streams,
)
stream_dict = {stream.id: stream for stream in streams}
send_peer_subscriber_events(
op="peer_remove",
realm=realm,
stream_dict=stream_dict,
altered_user_dict=altered_user_dict,
private_peer_dict=private_peer_dict,
)
def get_available_notification_sounds() -> List[str]:
notification_sounds_path = static_path("audio/notification_sounds")
available_notification_sounds = []
for file_name in os.listdir(notification_sounds_path):
root, ext = os.path.splitext(file_name)
if "." in root: # nocoverage
# Exclude e.g. zulip.abcd1234.ogg (generated by production hash-naming)
# to avoid spurious duplicates.
continue
if ext == ".ogg":
available_notification_sounds.append(root)
return sorted(available_notification_sounds)
def notify_subscriptions_removed(
realm: Realm, user_profile: UserProfile, streams: Iterable[Stream]
) -> None:
payload = [dict(name=stream.name, stream_id=stream.id) for stream in streams]
event = dict(type="subscription", op="remove", subscriptions=payload)
send_event(realm, event, [user_profile.id])
SubAndRemovedT = Tuple[List[Tuple[UserProfile, Stream]], List[Tuple[UserProfile, Stream]]]
def bulk_remove_subscriptions(
realm: Realm,
users: Iterable[UserProfile],
streams: Iterable[Stream],
*,
acting_user: Optional[UserProfile],
) -> SubAndRemovedT:
users = list(users)
streams = list(streams)
# Sanity check our callers
for stream in streams:
assert stream.realm_id == realm.id
for user in users:
assert user.realm_id == realm.id
stream_dict = {stream.id: stream for stream in streams}
existing_subs_by_user = get_bulk_stream_subscriber_info(users, streams)
def get_non_subscribed_subs() -> List[Tuple[UserProfile, Stream]]:
stream_ids = {stream.id for stream in streams}
not_subscribed: List[Tuple[UserProfile, Stream]] = []
for user_profile in users:
user_sub_stream_info = existing_subs_by_user[user_profile.id]
subscribed_stream_ids = {sub_info.stream.id for sub_info in user_sub_stream_info}
not_subscribed_stream_ids = stream_ids - subscribed_stream_ids
for stream_id in not_subscribed_stream_ids:
stream = stream_dict[stream_id]
not_subscribed.append((user_profile, stream))
return not_subscribed
not_subscribed = get_non_subscribed_subs()
subs_to_deactivate: List[SubInfo] = []
sub_ids_to_deactivate: List[int] = []
# This loop just flattens out our data into big lists for
# bulk operations.
for sub_infos in existing_subs_by_user.values():
for sub_info in sub_infos:
subs_to_deactivate.append(sub_info)
sub_ids_to_deactivate.append(sub_info.sub.id)
# We do all the database changes in a transaction to ensure
# RealmAuditLog entries are atomically created when making changes.
with transaction.atomic():
occupied_streams_before = list(get_occupied_streams(realm))
Subscription.objects.filter(
id__in=sub_ids_to_deactivate,
).update(active=False)
occupied_streams_after = list(get_occupied_streams(realm))
# Log subscription activities in RealmAuditLog
event_time = timezone_now()
event_last_message_id = get_last_message_id()
all_subscription_logs = [
RealmAuditLog(
realm=sub_info.user.realm,
acting_user=acting_user,
modified_user=sub_info.user,
modified_stream=sub_info.stream,
event_last_message_id=event_last_message_id,
event_type=RealmAuditLog.SUBSCRIPTION_DEACTIVATED,
event_time=event_time,
)
for sub_info in subs_to_deactivate
]
        # Now that we have all the log objects generated, we can do a bulk insert
RealmAuditLog.objects.bulk_create(all_subscription_logs)
altered_user_dict: Dict[int, Set[int]] = defaultdict(set)
streams_by_user: Dict[int, List[Stream]] = defaultdict(list)
for sub_info in subs_to_deactivate:
stream = sub_info.stream
streams_by_user[sub_info.user.id].append(stream)
altered_user_dict[stream.id].add(sub_info.user.id)
for user_profile in users:
if len(streams_by_user[user_profile.id]) == 0:
continue
notify_subscriptions_removed(realm, user_profile, streams_by_user[user_profile.id])
event = {
"type": "mark_stream_messages_as_read",
"user_profile_id": user_profile.id,
"stream_recipient_ids": [stream.recipient_id for stream in streams],
}
queue_json_publish("deferred_work", event)
send_peer_remove_events(
realm=realm,
streams=streams,
altered_user_dict=altered_user_dict,
)
new_vacant_streams = set(occupied_streams_before) - set(occupied_streams_after)
new_vacant_private_streams = [stream for stream in new_vacant_streams if stream.invite_only]
if new_vacant_private_streams:
# Deactivate any newly-vacant private streams
for stream in new_vacant_private_streams:
do_deactivate_stream(stream, acting_user=acting_user)
return (
[(sub_info.user, sub_info.stream) for sub_info in subs_to_deactivate],
not_subscribed,
)
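# A minimal usage sketch for bulk_remove_subscriptions above, with placeholder
# objects; the first element of the result lists the (user, stream) pairs that
# were actually unsubscribed, the second those that were not subscribed to
# begin with.
#
#     removed, not_subscribed = bulk_remove_subscriptions(
#         realm, users, streams, acting_user=None
#     )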
def do_change_subscription_property(
user_profile: UserProfile,
sub: Subscription,
stream: Stream,
property_name: str,
value: Any,
*,
acting_user: Optional[UserProfile],
) -> None:
database_property_name = property_name
event_property_name = property_name
database_value = value
event_value = value
# For this property, is_muted is used in the database, but
# in_home_view in the API, since we haven't migrated the events
# API to the new name yet.
if property_name == "in_home_view":
database_property_name = "is_muted"
database_value = not value
if property_name == "is_muted":
event_property_name = "in_home_view"
event_value = not value
old_value = getattr(sub, database_property_name)
setattr(sub, database_property_name, database_value)
sub.save(update_fields=[database_property_name])
event_time = timezone_now()
RealmAuditLog.objects.create(
realm=user_profile.realm,
event_type=RealmAuditLog.SUBSCRIPTION_PROPERTY_CHANGED,
event_time=event_time,
modified_user=user_profile,
acting_user=acting_user,
modified_stream=stream,
extra_data=orjson.dumps(
{
RealmAuditLog.OLD_VALUE: old_value,
RealmAuditLog.NEW_VALUE: database_value,
"property": database_property_name,
}
).decode(),
)
event = dict(
type="subscription",
op="update",
property=event_property_name,
value=event_value,
stream_id=stream.id,
)
send_event(user_profile.realm, event, [user_profile.id])
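# A sketch of the legacy-name translation handled above (placeholder objects):
# a client updating the API property "in_home_view" to False causes
# is_muted=True to be stored, while the event echoed to clients keeps the
# legacy name and the original value.
#
#     do_change_subscription_property(
#         user_profile, sub, stream, "in_home_view", False, acting_user=user_profile
#     )
#     # Database:   sub.is_muted == True
#     # Event sent: {"property": "in_home_view", "value": False, ...}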
def do_change_password(user_profile: UserProfile, password: str, commit: bool = True) -> None:
user_profile.set_password(password)
if commit:
user_profile.save(update_fields=["password"])
event_time = timezone_now()
RealmAuditLog.objects.create(
realm=user_profile.realm,
acting_user=user_profile,
modified_user=user_profile,
event_type=RealmAuditLog.USER_PASSWORD_CHANGED,
event_time=event_time,
)
def do_change_full_name(
user_profile: UserProfile, full_name: str, acting_user: Optional[UserProfile]
) -> None:
old_name = user_profile.full_name
user_profile.full_name = full_name
user_profile.save(update_fields=["full_name"])
event_time = timezone_now()
RealmAuditLog.objects.create(
realm=user_profile.realm,
acting_user=acting_user,
modified_user=user_profile,
event_type=RealmAuditLog.USER_FULL_NAME_CHANGED,
event_time=event_time,
extra_data=old_name,
)
payload = dict(user_id=user_profile.id, full_name=user_profile.full_name)
send_event(
user_profile.realm,
dict(type="realm_user", op="update", person=payload),
active_user_ids(user_profile.realm_id),
)
if user_profile.is_bot:
send_event(
user_profile.realm,
dict(type="realm_bot", op="update", bot=payload),
bot_owner_user_ids(user_profile),
)
def check_change_full_name(
user_profile: UserProfile, full_name_raw: str, acting_user: Optional[UserProfile]
) -> str:
"""Verifies that the user's proposed full name is valid. The caller
    is responsible for checking permissions. Returns the new
full name, which may differ from what was passed in (because this
function strips whitespace)."""
new_full_name = check_full_name(full_name_raw)
do_change_full_name(user_profile, new_full_name, acting_user)
return new_full_name
def check_change_bot_full_name(
user_profile: UserProfile, full_name_raw: str, acting_user: UserProfile
) -> None:
new_full_name = check_full_name(full_name_raw)
if new_full_name == user_profile.full_name:
# Our web app will try to patch full_name even if the user didn't
# modify the name in the form. We just silently ignore those
# situations.
return
check_bot_name_available(
realm_id=user_profile.realm_id,
full_name=new_full_name,
)
do_change_full_name(user_profile, new_full_name, acting_user)
@transaction.atomic(durable=True)
def do_change_bot_owner(
user_profile: UserProfile, bot_owner: UserProfile, acting_user: UserProfile
) -> None:
previous_owner = user_profile.bot_owner
user_profile.bot_owner = bot_owner
user_profile.save() # Can't use update_fields because of how the foreign key works.
event_time = timezone_now()
RealmAuditLog.objects.create(
realm=user_profile.realm,
acting_user=acting_user,
modified_user=user_profile,
event_type=RealmAuditLog.USER_BOT_OWNER_CHANGED,
event_time=event_time,
)
update_users = bot_owner_user_ids(user_profile)
    # For admins, an update event is sent instead of delete/add
    # events, since an admin's bot_data already contains all the
    # bots and none of them should be removed (or added again).
# Delete the bot from previous owner's bot data.
if previous_owner and not previous_owner.is_realm_admin:
delete_event = dict(
type="realm_bot",
op="delete",
bot=dict(
user_id=user_profile.id,
),
)
transaction.on_commit(
lambda: send_event(
user_profile.realm,
delete_event,
{previous_owner.id},
)
)
# Do not send update event for previous bot owner.
update_users = update_users - {previous_owner.id}
# Notify the new owner that the bot has been added.
if not bot_owner.is_realm_admin:
add_event = created_bot_event(user_profile)
transaction.on_commit(lambda: send_event(user_profile.realm, add_event, {bot_owner.id}))
# Do not send update event for bot_owner.
update_users = update_users - {bot_owner.id}
bot_event = dict(
type="realm_bot",
op="update",
bot=dict(
user_id=user_profile.id,
owner_id=user_profile.bot_owner.id,
),
)
transaction.on_commit(
lambda: send_event(
user_profile.realm,
bot_event,
update_users,
)
)
# Since `bot_owner_id` is included in the user profile dict we need
# to update the users dict with the new bot owner id
event = dict(
type="realm_user",
op="update",
person=dict(
user_id=user_profile.id,
bot_owner_id=user_profile.bot_owner.id,
),
)
transaction.on_commit(
lambda: send_event(user_profile.realm, event, active_user_ids(user_profile.realm_id))
)
@transaction.atomic(durable=True)
def do_change_tos_version(user_profile: UserProfile, tos_version: str) -> None:
user_profile.tos_version = tos_version
user_profile.save(update_fields=["tos_version"])
event_time = timezone_now()
RealmAuditLog.objects.create(
realm=user_profile.realm,
acting_user=user_profile,
modified_user=user_profile,
event_type=RealmAuditLog.USER_TERMS_OF_SERVICE_VERSION_CHANGED,
event_time=event_time,
)
def do_regenerate_api_key(user_profile: UserProfile, acting_user: UserProfile) -> str:
old_api_key = user_profile.api_key
new_api_key = generate_api_key()
user_profile.api_key = new_api_key
user_profile.save(update_fields=["api_key"])
# We need to explicitly delete the old API key from our caches,
# because the on-save handler for flushing the UserProfile object
# in zerver/lib/cache.py only has access to the new API key.
cache_delete(user_profile_by_api_key_cache_key(old_api_key))
event_time = timezone_now()
RealmAuditLog.objects.create(
realm=user_profile.realm,
acting_user=acting_user,
modified_user=user_profile,
event_type=RealmAuditLog.USER_API_KEY_CHANGED,
event_time=event_time,
)
if user_profile.is_bot:
send_event(
user_profile.realm,
dict(
type="realm_bot",
op="update",
bot=dict(
user_id=user_profile.id,
api_key=new_api_key,
),
),
bot_owner_user_ids(user_profile),
)
event = {"type": "clear_push_device_tokens", "user_profile_id": user_profile.id}
queue_json_publish("deferred_work", event)
return new_api_key
def notify_avatar_url_change(user_profile: UserProfile) -> None:
if user_profile.is_bot:
bot_event = dict(
type="realm_bot",
op="update",
bot=dict(
user_id=user_profile.id,
avatar_url=avatar_url(user_profile),
),
)
transaction.on_commit(
lambda: send_event(
user_profile.realm,
bot_event,
bot_owner_user_ids(user_profile),
)
)
payload = dict(
avatar_source=user_profile.avatar_source,
avatar_url=avatar_url(user_profile),
avatar_url_medium=avatar_url(user_profile, medium=True),
avatar_version=user_profile.avatar_version,
# Even clients using client_gravatar don't need the email,
# since we're sending the URL anyway.
user_id=user_profile.id,
)
event = dict(type="realm_user", op="update", person=payload)
transaction.on_commit(
lambda: send_event(
user_profile.realm,
event,
active_user_ids(user_profile.realm_id),
)
)
@transaction.atomic(savepoint=False)
def do_change_avatar_fields(
user_profile: UserProfile,
avatar_source: str,
skip_notify: bool = False,
*,
acting_user: Optional[UserProfile],
) -> None:
user_profile.avatar_source = avatar_source
user_profile.avatar_version += 1
user_profile.save(update_fields=["avatar_source", "avatar_version"])
event_time = timezone_now()
RealmAuditLog.objects.create(
realm=user_profile.realm,
modified_user=user_profile,
event_type=RealmAuditLog.USER_AVATAR_SOURCE_CHANGED,
extra_data={"avatar_source": avatar_source},
event_time=event_time,
acting_user=acting_user,
)
if not skip_notify:
notify_avatar_url_change(user_profile)
def do_delete_avatar_image(user: UserProfile, *, acting_user: Optional[UserProfile]) -> None:
do_change_avatar_fields(user, UserProfile.AVATAR_FROM_GRAVATAR, acting_user=acting_user)
delete_avatar_image(user)
@transaction.atomic(durable=True)
def do_change_icon_source(
realm: Realm, icon_source: str, *, acting_user: Optional[UserProfile]
) -> None:
realm.icon_source = icon_source
realm.icon_version += 1
realm.save(update_fields=["icon_source", "icon_version"])
event_time = timezone_now()
RealmAuditLog.objects.create(
realm=realm,
event_type=RealmAuditLog.REALM_ICON_SOURCE_CHANGED,
extra_data={"icon_source": icon_source, "icon_version": realm.icon_version},
event_time=event_time,
acting_user=acting_user,
)
event = dict(
type="realm",
op="update_dict",
property="icon",
data=dict(icon_source=realm.icon_source, icon_url=realm_icon_url(realm)),
)
transaction.on_commit(
lambda: send_event(
realm,
event,
active_user_ids(realm.id),
)
)
@transaction.atomic(durable=True)
def do_change_logo_source(
realm: Realm, logo_source: str, night: bool, *, acting_user: Optional[UserProfile]
) -> None:
if not night:
realm.logo_source = logo_source
realm.logo_version += 1
realm.save(update_fields=["logo_source", "logo_version"])
else:
realm.night_logo_source = logo_source
realm.night_logo_version += 1
realm.save(update_fields=["night_logo_source", "night_logo_version"])
RealmAuditLog.objects.create(
event_type=RealmAuditLog.REALM_LOGO_CHANGED,
realm=realm,
event_time=timezone_now(),
acting_user=acting_user,
)
event = dict(
type="realm",
op="update_dict",
property="night_logo" if night else "logo",
data=get_realm_logo_data(realm, night),
)
transaction.on_commit(lambda: send_event(realm, event, active_user_ids(realm.id)))
@transaction.atomic(durable=True)
def do_change_realm_org_type(
realm: Realm,
org_type: int,
acting_user: Optional[UserProfile],
) -> None:
old_value = realm.org_type
realm.org_type = org_type
realm.save(update_fields=["org_type"])
RealmAuditLog.objects.create(
event_type=RealmAuditLog.REALM_ORG_TYPE_CHANGED,
realm=realm,
event_time=timezone_now(),
acting_user=acting_user,
extra_data={"old_value": old_value, "new_value": org_type},
)
@transaction.atomic(savepoint=False)
def do_change_realm_plan_type(
realm: Realm, plan_type: int, *, acting_user: Optional[UserProfile]
) -> None:
old_value = realm.plan_type
realm.plan_type = plan_type
realm.save(update_fields=["plan_type"])
RealmAuditLog.objects.create(
event_type=RealmAuditLog.REALM_PLAN_TYPE_CHANGED,
realm=realm,
event_time=timezone_now(),
acting_user=acting_user,
extra_data={"old_value": old_value, "new_value": plan_type},
)
if plan_type == Realm.PLAN_TYPE_PLUS:
realm.max_invites = Realm.INVITES_STANDARD_REALM_DAILY_MAX
realm.message_visibility_limit = None
realm.upload_quota_gb = Realm.UPLOAD_QUOTA_STANDARD
elif plan_type == Realm.PLAN_TYPE_STANDARD:
realm.max_invites = Realm.INVITES_STANDARD_REALM_DAILY_MAX
realm.message_visibility_limit = None
realm.upload_quota_gb = Realm.UPLOAD_QUOTA_STANDARD
elif plan_type == Realm.PLAN_TYPE_SELF_HOSTED:
realm.max_invites = None # type: ignore[assignment] # Apparent mypy bug with Optional[int] setter.
realm.message_visibility_limit = None
realm.upload_quota_gb = None
elif plan_type == Realm.PLAN_TYPE_STANDARD_FREE:
realm.max_invites = Realm.INVITES_STANDARD_REALM_DAILY_MAX
realm.message_visibility_limit = None
realm.upload_quota_gb = Realm.UPLOAD_QUOTA_STANDARD
elif plan_type == Realm.PLAN_TYPE_LIMITED:
realm.max_invites = settings.INVITES_DEFAULT_REALM_DAILY_MAX
realm.message_visibility_limit = Realm.MESSAGE_VISIBILITY_LIMITED
realm.upload_quota_gb = Realm.UPLOAD_QUOTA_LIMITED
else:
raise AssertionError("Invalid plan type")
update_first_visible_message_id(realm)
realm.save(update_fields=["_max_invites", "message_visibility_limit", "upload_quota_gb"])
event = {
"type": "realm",
"op": "update",
"property": "plan_type",
"value": plan_type,
"extra_data": {"upload_quota": realm.upload_quota_bytes()},
}
transaction.on_commit(lambda: send_event(realm, event, active_user_ids(realm.id)))
@transaction.atomic(durable=True)
def do_change_default_sending_stream(
user_profile: UserProfile, stream: Optional[Stream], *, acting_user: Optional[UserProfile]
) -> None:
old_value = user_profile.default_sending_stream_id
user_profile.default_sending_stream = stream
user_profile.save(update_fields=["default_sending_stream"])
event_time = timezone_now()
RealmAuditLog.objects.create(
realm=user_profile.realm,
event_type=RealmAuditLog.USER_DEFAULT_SENDING_STREAM_CHANGED,
event_time=event_time,
modified_user=user_profile,
acting_user=acting_user,
extra_data=orjson.dumps(
{
RealmAuditLog.OLD_VALUE: old_value,
RealmAuditLog.NEW_VALUE: None if stream is None else stream.id,
}
).decode(),
)
if user_profile.is_bot:
if stream:
stream_name: Optional[str] = stream.name
else:
stream_name = None
event = dict(
type="realm_bot",
op="update",
bot=dict(
user_id=user_profile.id,
default_sending_stream=stream_name,
),
)
transaction.on_commit(
lambda: send_event(
user_profile.realm,
event,
bot_owner_user_ids(user_profile),
)
)
@transaction.atomic(durable=True)
def do_change_default_events_register_stream(
user_profile: UserProfile, stream: Optional[Stream], *, acting_user: Optional[UserProfile]
) -> None:
old_value = user_profile.default_events_register_stream_id
user_profile.default_events_register_stream = stream
user_profile.save(update_fields=["default_events_register_stream"])
event_time = timezone_now()
RealmAuditLog.objects.create(
realm=user_profile.realm,
event_type=RealmAuditLog.USER_DEFAULT_REGISTER_STREAM_CHANGED,
event_time=event_time,
modified_user=user_profile,
acting_user=acting_user,
extra_data=orjson.dumps(
{
RealmAuditLog.OLD_VALUE: old_value,
RealmAuditLog.NEW_VALUE: None if stream is None else stream.id,
}
).decode(),
)
if user_profile.is_bot:
if stream:
stream_name: Optional[str] = stream.name
else:
stream_name = None
event = dict(
type="realm_bot",
op="update",
bot=dict(
user_id=user_profile.id,
default_events_register_stream=stream_name,
),
)
transaction.on_commit(
lambda: send_event(
user_profile.realm,
event,
bot_owner_user_ids(user_profile),
)
)
@transaction.atomic(durable=True)
def do_change_default_all_public_streams(
user_profile: UserProfile, value: bool, *, acting_user: Optional[UserProfile]
) -> None:
old_value = user_profile.default_all_public_streams
user_profile.default_all_public_streams = value
user_profile.save(update_fields=["default_all_public_streams"])
event_time = timezone_now()
RealmAuditLog.objects.create(
realm=user_profile.realm,
event_type=RealmAuditLog.USER_DEFAULT_ALL_PUBLIC_STREAMS_CHANGED,
event_time=event_time,
modified_user=user_profile,
acting_user=acting_user,
extra_data=orjson.dumps(
{
RealmAuditLog.OLD_VALUE: old_value,
RealmAuditLog.NEW_VALUE: value,
}
).decode(),
)
if user_profile.is_bot:
event = dict(
type="realm_bot",
op="update",
bot=dict(
user_id=user_profile.id,
default_all_public_streams=user_profile.default_all_public_streams,
),
)
transaction.on_commit(
lambda: send_event(
user_profile.realm,
event,
bot_owner_user_ids(user_profile),
)
)
@transaction.atomic(durable=True)
def do_change_user_role(
user_profile: UserProfile, value: int, *, acting_user: Optional[UserProfile]
) -> None:
old_value = user_profile.role
old_system_group = get_system_user_group_for_user(user_profile)
user_profile.role = value
user_profile.save(update_fields=["role"])
RealmAuditLog.objects.create(
realm=user_profile.realm,
modified_user=user_profile,
acting_user=acting_user,
event_type=RealmAuditLog.USER_ROLE_CHANGED,
event_time=timezone_now(),
extra_data=orjson.dumps(
{
RealmAuditLog.OLD_VALUE: old_value,
RealmAuditLog.NEW_VALUE: value,
RealmAuditLog.ROLE_COUNT: realm_user_count_by_role(user_profile.realm),
}
).decode(),
)
event = dict(
type="realm_user", op="update", person=dict(user_id=user_profile.id, role=user_profile.role)
)
transaction.on_commit(
lambda: send_event(user_profile.realm, event, active_user_ids(user_profile.realm_id))
)
UserGroupMembership.objects.filter(
user_profile=user_profile, user_group=old_system_group
).delete()
system_group = get_system_user_group_for_user(user_profile)
UserGroupMembership.objects.create(user_profile=user_profile, user_group=system_group)
do_send_user_group_members_update_event("remove_members", old_system_group, [user_profile.id])
do_send_user_group_members_update_event("add_members", system_group, [user_profile.id])
if UserProfile.ROLE_MEMBER in [old_value, value]:
update_users_in_full_members_system_group(user_profile.realm, [user_profile.id])
def do_make_user_billing_admin(user_profile: UserProfile) -> None:
user_profile.is_billing_admin = True
user_profile.save(update_fields=["is_billing_admin"])
event = dict(
type="realm_user", op="update", person=dict(user_id=user_profile.id, is_billing_admin=True)
)
send_event(user_profile.realm, event, active_user_ids(user_profile.realm_id))
def do_change_can_forge_sender(user_profile: UserProfile, value: bool) -> None:
user_profile.can_forge_sender = value
user_profile.save(update_fields=["can_forge_sender"])
def do_change_can_create_users(user_profile: UserProfile, value: bool) -> None:
user_profile.can_create_users = value
user_profile.save(update_fields=["can_create_users"])
def send_change_stream_permission_notification(
stream: Stream,
*,
old_policy_name: str,
new_policy_name: str,
acting_user: UserProfile,
) -> None:
sender = get_system_bot(settings.NOTIFICATION_BOT, acting_user.realm_id)
user_mention = silent_mention_syntax_for_user(acting_user)
with override_language(stream.realm.default_language):
notification_string = _(
"{user} changed the [access permissions](/help/stream-permissions) "
"for this stream from **{old_policy}** to **{new_policy}**."
)
notification_string = notification_string.format(
user=user_mention,
old_policy=old_policy_name,
new_policy=new_policy_name,
)
internal_send_stream_message(
sender, stream, Realm.STREAM_EVENTS_NOTIFICATION_TOPIC, notification_string
)
def do_change_stream_permission(
stream: Stream,
*,
invite_only: Optional[bool] = None,
history_public_to_subscribers: Optional[bool] = None,
is_web_public: Optional[bool] = None,
acting_user: UserProfile,
) -> None:
old_invite_only_value = stream.invite_only
old_history_public_to_subscribers_value = stream.history_public_to_subscribers
old_is_web_public_value = stream.is_web_public
# A note on these assertions: It's possible we'd be better off
# making all callers of this function pass the full set of
# parameters, rather than having default values. Doing so would
# allow us to remove the messy logic below, where we sometimes
# ignore the passed parameters.
#
# But absent such a refactoring, it's important to assert that
    # we're not requesting an unsupported configuration.
if is_web_public:
assert history_public_to_subscribers is not False
assert invite_only is not True
stream.is_web_public = True
stream.invite_only = False
stream.history_public_to_subscribers = True
else:
assert invite_only is not None
# is_web_public is falsey
history_public_to_subscribers = get_default_value_for_history_public_to_subscribers(
stream.realm,
invite_only,
history_public_to_subscribers,
)
stream.invite_only = invite_only
stream.history_public_to_subscribers = history_public_to_subscribers
stream.is_web_public = False
with transaction.atomic():
stream.save(update_fields=["invite_only", "history_public_to_subscribers", "is_web_public"])
event_time = timezone_now()
if old_invite_only_value != stream.invite_only:
RealmAuditLog.objects.create(
realm=stream.realm,
acting_user=acting_user,
modified_stream=stream,
event_type=RealmAuditLog.STREAM_PROPERTY_CHANGED,
event_time=event_time,
extra_data=orjson.dumps(
{
RealmAuditLog.OLD_VALUE: old_invite_only_value,
RealmAuditLog.NEW_VALUE: stream.invite_only,
"property": "invite_only",
}
).decode(),
)
if old_history_public_to_subscribers_value != stream.history_public_to_subscribers:
RealmAuditLog.objects.create(
realm=stream.realm,
acting_user=acting_user,
modified_stream=stream,
event_type=RealmAuditLog.STREAM_PROPERTY_CHANGED,
event_time=event_time,
extra_data=orjson.dumps(
{
RealmAuditLog.OLD_VALUE: old_history_public_to_subscribers_value,
RealmAuditLog.NEW_VALUE: stream.history_public_to_subscribers,
"property": "history_public_to_subscribers",
}
).decode(),
)
if old_is_web_public_value != stream.is_web_public:
RealmAuditLog.objects.create(
realm=stream.realm,
acting_user=acting_user,
modified_stream=stream,
event_type=RealmAuditLog.STREAM_PROPERTY_CHANGED,
event_time=event_time,
extra_data=orjson.dumps(
{
RealmAuditLog.OLD_VALUE: old_is_web_public_value,
RealmAuditLog.NEW_VALUE: stream.is_web_public,
"property": "is_web_public",
}
).decode(),
)
event = dict(
op="update",
type="stream",
property="invite_only",
value=stream.invite_only,
history_public_to_subscribers=stream.history_public_to_subscribers,
is_web_public=stream.is_web_public,
stream_id=stream.id,
name=stream.name,
)
send_event(stream.realm, event, can_access_stream_user_ids(stream))
old_policy_name = get_stream_permission_policy_name(
invite_only=old_invite_only_value,
history_public_to_subscribers=old_history_public_to_subscribers_value,
is_web_public=old_is_web_public_value,
)
new_policy_name = get_stream_permission_policy_name(
invite_only=stream.invite_only,
history_public_to_subscribers=stream.history_public_to_subscribers,
is_web_public=stream.is_web_public,
)
send_change_stream_permission_notification(
stream,
old_policy_name=old_policy_name,
new_policy_name=new_policy_name,
acting_user=acting_user,
)
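# A sketch of permission combinations accepted by the assertions in
# do_change_stream_permission above (stream and acting_user are placeholders):
#
#     # Make a stream web-public (implies public, with public history):
#     do_change_stream_permission(stream, is_web_public=True, acting_user=acting_user)
#     # Make a stream private with history visible to subscribers:
#     do_change_stream_permission(
#         stream,
#         invite_only=True,
#         history_public_to_subscribers=True,
#         is_web_public=False,
#         acting_user=acting_user,
#     )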
def send_change_stream_post_policy_notification(
stream: Stream, *, old_post_policy: int, new_post_policy: int, acting_user: UserProfile
) -> None:
sender = get_system_bot(settings.NOTIFICATION_BOT, acting_user.realm_id)
user_mention = silent_mention_syntax_for_user(acting_user)
with override_language(stream.realm.default_language):
notification_string = _(
"{user} changed the [posting permissions](/help/stream-sending-policy) "
"for this stream:\n\n"
"* **Old permissions**: {old_policy}.\n"
"* **New permissions**: {new_policy}.\n"
)
notification_string = notification_string.format(
user=user_mention,
old_policy=Stream.POST_POLICIES[old_post_policy],
new_policy=Stream.POST_POLICIES[new_post_policy],
)
internal_send_stream_message(
sender, stream, Realm.STREAM_EVENTS_NOTIFICATION_TOPIC, notification_string
)
def do_change_stream_post_policy(
stream: Stream, stream_post_policy: int, *, acting_user: UserProfile
) -> None:
old_post_policy = stream.stream_post_policy
with transaction.atomic():
stream.stream_post_policy = stream_post_policy
stream.save(update_fields=["stream_post_policy"])
RealmAuditLog.objects.create(
realm=stream.realm,
acting_user=acting_user,
modified_stream=stream,
event_type=RealmAuditLog.STREAM_PROPERTY_CHANGED,
event_time=timezone_now(),
extra_data=orjson.dumps(
{
RealmAuditLog.OLD_VALUE: old_post_policy,
RealmAuditLog.NEW_VALUE: stream_post_policy,
"property": "stream_post_policy",
}
).decode(),
)
event = dict(
op="update",
type="stream",
property="stream_post_policy",
value=stream_post_policy,
stream_id=stream.id,
name=stream.name,
)
send_event(stream.realm, event, can_access_stream_user_ids(stream))
# Backwards-compatibility code: We removed the
# is_announcement_only property in early 2020, but we send a
# duplicate event for legacy mobile clients that might want the
# data.
event = dict(
op="update",
type="stream",
property="is_announcement_only",
value=stream.stream_post_policy == Stream.STREAM_POST_POLICY_ADMINS,
stream_id=stream.id,
name=stream.name,
)
send_event(stream.realm, event, can_access_stream_user_ids(stream))
send_change_stream_post_policy_notification(
stream,
old_post_policy=old_post_policy,
new_post_policy=stream_post_policy,
acting_user=acting_user,
)
def do_rename_stream(stream: Stream, new_name: str, user_profile: UserProfile) -> Dict[str, str]:
old_name = stream.name
stream.name = new_name
stream.save(update_fields=["name"])
RealmAuditLog.objects.create(
realm=stream.realm,
acting_user=user_profile,
modified_stream=stream,
event_type=RealmAuditLog.STREAM_NAME_CHANGED,
event_time=timezone_now(),
extra_data=orjson.dumps(
{
RealmAuditLog.OLD_VALUE: old_name,
RealmAuditLog.NEW_VALUE: new_name,
}
).decode(),
)
recipient_id = stream.recipient_id
messages = Message.objects.filter(recipient_id=recipient_id).only("id")
# Update the display recipient and stream, which are easy single
# items to set.
old_cache_key = get_stream_cache_key(old_name, stream.realm_id)
new_cache_key = get_stream_cache_key(stream.name, stream.realm_id)
if old_cache_key != new_cache_key:
cache_delete(old_cache_key)
cache_set(new_cache_key, stream)
cache_set(display_recipient_cache_key(recipient_id), stream.name)
# Delete cache entries for everything else, which is cheaper and
# clearer than trying to set them. display_recipient is the out of
# date field in all cases.
cache_delete_many(to_dict_cache_key_id(message.id) for message in messages)
new_email = encode_email_address(stream, show_sender=True)
# We will tell our users to essentially
# update stream.name = new_name where name = old_name
# and update stream.email = new_email where name = old_name.
    # We could optimize this by trying to send a single event, but the
# client code really wants one property update at a time, and
# updating stream names is a pretty infrequent operation.
# More importantly, we want to key these updates by id, not name,
# since id is the immutable primary key, and obviously name is not.
data_updates = [
["email_address", new_email],
["name", new_name],
]
for property, value in data_updates:
event = dict(
op="update",
type="stream",
property=property,
value=value,
stream_id=stream.id,
name=old_name,
)
send_event(stream.realm, event, can_access_stream_user_ids(stream))
sender = get_system_bot(settings.NOTIFICATION_BOT, stream.realm_id)
with override_language(stream.realm.default_language):
internal_send_stream_message(
sender,
stream,
Realm.STREAM_EVENTS_NOTIFICATION_TOPIC,
_("{user_name} renamed stream {old_stream_name} to {new_stream_name}.").format(
user_name=silent_mention_syntax_for_user(user_profile),
old_stream_name=f"**{old_name}**",
new_stream_name=f"**{new_name}**",
),
)
# Even though the token doesn't change, the web client needs to update the
# email forwarding address to display the correctly-escaped new name.
return {"email_address": new_email}
def send_change_stream_description_notification(
stream: Stream, *, old_description: str, new_description: str, acting_user: UserProfile
) -> None:
sender = get_system_bot(settings.NOTIFICATION_BOT, acting_user.realm_id)
user_mention = silent_mention_syntax_for_user(acting_user)
with override_language(stream.realm.default_language):
notification_string = (
_("{user} changed the description for this stream.").format(user=user_mention)
+ "\n\n* **"
+ _("Old description")
+ ":**"
+ f"\n```` quote\n{old_description}\n````\n"
+ "* **"
+ _("New description")
+ ":**"
+ f"\n```` quote\n{new_description}\n````"
)
internal_send_stream_message(
sender, stream, Realm.STREAM_EVENTS_NOTIFICATION_TOPIC, notification_string
)
def do_change_stream_description(
stream: Stream, new_description: str, *, acting_user: UserProfile
) -> None:
old_description = stream.description
with transaction.atomic():
stream.description = new_description
stream.rendered_description = render_stream_description(new_description)
stream.save(update_fields=["description", "rendered_description"])
RealmAuditLog.objects.create(
realm=stream.realm,
acting_user=acting_user,
modified_stream=stream,
event_type=RealmAuditLog.STREAM_PROPERTY_CHANGED,
event_time=timezone_now(),
extra_data=orjson.dumps(
{
RealmAuditLog.OLD_VALUE: old_description,
RealmAuditLog.NEW_VALUE: new_description,
"property": "description",
}
).decode(),
)
event = dict(
type="stream",
op="update",
property="description",
name=stream.name,
stream_id=stream.id,
value=new_description,
rendered_description=stream.rendered_description,
)
send_event(stream.realm, event, can_access_stream_user_ids(stream))
send_change_stream_description_notification(
stream,
old_description=old_description,
new_description=new_description,
acting_user=acting_user,
)
def send_change_stream_message_retention_days_notification(
user_profile: UserProfile, stream: Stream, old_value: Optional[int], new_value: Optional[int]
) -> None:
sender = get_system_bot(settings.NOTIFICATION_BOT, user_profile.realm_id)
user_mention = silent_mention_syntax_for_user(user_profile)
# If switching from or to the organization's default retention policy,
# we want to take the realm's default into account.
if old_value is None:
old_value = stream.realm.message_retention_days
if new_value is None:
new_value = stream.realm.message_retention_days
with override_language(stream.realm.default_language):
if old_value == Stream.MESSAGE_RETENTION_SPECIAL_VALUES_MAP["unlimited"]:
old_retention_period = _("Forever")
new_retention_period = f"{new_value} days"
summary_line = f"Messages in this stream will now be automatically deleted {new_value} days after they are sent."
elif new_value == Stream.MESSAGE_RETENTION_SPECIAL_VALUES_MAP["unlimited"]:
old_retention_period = f"{old_value} days"
new_retention_period = _("Forever")
summary_line = _("Messages in this stream will now be retained forever.")
else:
old_retention_period = f"{old_value} days"
new_retention_period = f"{new_value} days"
summary_line = f"Messages in this stream will now be automatically deleted {new_value} days after they are sent."
notification_string = _(
"{user} has changed the [message retention period](/help/message-retention-policy) for this stream:\n"
"* **Old retention period**: {old_retention_period}\n"
"* **New retention period**: {new_retention_period}\n\n"
"{summary_line}"
)
notification_string = notification_string.format(
user=user_mention,
old_retention_period=old_retention_period,
new_retention_period=new_retention_period,
summary_line=summary_line,
)
internal_send_stream_message(
sender, stream, Realm.STREAM_EVENTS_NOTIFICATION_TOPIC, notification_string
)
def do_change_stream_message_retention_days(
stream: Stream, acting_user: UserProfile, message_retention_days: Optional[int] = None
) -> None:
old_message_retention_days_value = stream.message_retention_days
with transaction.atomic():
stream.message_retention_days = message_retention_days
stream.save(update_fields=["message_retention_days"])
RealmAuditLog.objects.create(
realm=stream.realm,
acting_user=acting_user,
modified_stream=stream,
event_type=RealmAuditLog.STREAM_MESSAGE_RETENTION_DAYS_CHANGED,
event_time=timezone_now(),
extra_data=orjson.dumps(
{
RealmAuditLog.OLD_VALUE: old_message_retention_days_value,
RealmAuditLog.NEW_VALUE: message_retention_days,
}
).decode(),
)
event = dict(
op="update",
type="stream",
property="message_retention_days",
value=message_retention_days,
stream_id=stream.id,
name=stream.name,
)
send_event(stream.realm, event, can_access_stream_user_ids(stream))
send_change_stream_message_retention_days_notification(
user_profile=acting_user,
stream=stream,
old_value=old_message_retention_days_value,
new_value=message_retention_days,
)
def set_realm_permissions_based_on_org_type(realm: Realm) -> None:
"""This function implements overrides for the default configuration
for new organizations when the administrator selected specific
organization types.
This substantially simplifies our /help/ advice for folks setting
up new organizations of these types.
"""
# Custom configuration for educational organizations. The present
# defaults are designed for a single class, not a department or
# larger institution, since those are more common.
if (
realm.org_type == Realm.ORG_TYPES["education_nonprofit"]["id"]
or realm.org_type == Realm.ORG_TYPES["education"]["id"]
):
# Limit email address visibility and user creation to administrators.
realm.email_address_visibility = Realm.EMAIL_ADDRESS_VISIBILITY_ADMINS
realm.invite_to_realm_policy = Realm.POLICY_ADMINS_ONLY
# Restrict public stream creation to staff, but allow private
# streams (useful for study groups, etc.).
realm.create_public_stream_policy = Realm.POLICY_ADMINS_ONLY
# Don't allow members (students) to manage user groups or
# stream subscriptions.
realm.user_group_edit_policy = Realm.POLICY_MODERATORS_ONLY
realm.invite_to_stream_policy = Realm.POLICY_MODERATORS_ONLY
# Allow moderators (TAs?) to move topics between streams.
realm.move_messages_between_streams_policy = Realm.POLICY_MODERATORS_ONLY
def setup_realm_internal_bots(realm: Realm) -> None:
"""Create this realm's internal bots.
This function is idempotent; it does nothing for a bot that
already exists.
"""
internal_bots = [
(bot["name"], bot["email_template"] % (settings.INTERNAL_BOT_DOMAIN,))
for bot in settings.REALM_INTERNAL_BOTS
]
create_users(realm, internal_bots, bot_type=UserProfile.DEFAULT_BOT)
bots = UserProfile.objects.filter(
realm=realm,
email__in=[bot_info[1] for bot_info in internal_bots],
bot_owner__isnull=True,
)
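    # Internal bots own themselves; backfill bot_owner for any
    # internal bot rows where it isn't set yet.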
for bot in bots:
bot.bot_owner = bot
bot.save()
def do_create_realm(
string_id: str,
name: str,
*,
emails_restricted_to_domains: Optional[bool] = None,
email_address_visibility: Optional[int] = None,
description: Optional[str] = None,
invite_required: Optional[bool] = None,
plan_type: Optional[int] = None,
org_type: Optional[int] = None,
date_created: Optional[datetime.datetime] = None,
is_demo_organization: Optional[bool] = False,
enable_spectator_access: Optional[bool] = False,
) -> Realm:
if string_id == settings.SOCIAL_AUTH_SUBDOMAIN:
raise AssertionError("Creating a realm on SOCIAL_AUTH_SUBDOMAIN is not allowed!")
if Realm.objects.filter(string_id=string_id).exists():
raise AssertionError(f"Realm {string_id} already exists!")
if not server_initialized():
logging.info("Server not yet initialized. Creating the internal realm first.")
create_internal_realm()
kwargs: Dict[str, Any] = {}
if emails_restricted_to_domains is not None:
kwargs["emails_restricted_to_domains"] = emails_restricted_to_domains
if email_address_visibility is not None:
kwargs["email_address_visibility"] = email_address_visibility
if description is not None:
kwargs["description"] = description
if invite_required is not None:
kwargs["invite_required"] = invite_required
if plan_type is not None:
kwargs["plan_type"] = plan_type
if org_type is not None:
kwargs["org_type"] = org_type
if enable_spectator_access is not None:
kwargs["enable_spectator_access"] = enable_spectator_access
if date_created is not None:
# The date_created parameter is intended only for use by test
# suites that want to backdate the date of a realm's creation.
assert not settings.PRODUCTION
kwargs["date_created"] = date_created
with transaction.atomic():
realm = Realm(string_id=string_id, name=name, **kwargs)
if is_demo_organization:
realm.demo_organization_scheduled_deletion_date = (
realm.date_created + datetime.timedelta(days=settings.DEMO_ORG_DEADLINE_DAYS)
)
set_realm_permissions_based_on_org_type(realm)
realm.save()
RealmAuditLog.objects.create(
realm=realm, event_type=RealmAuditLog.REALM_CREATED, event_time=realm.date_created
)
RealmUserDefault.objects.create(realm=realm)
create_system_user_groups_for_realm(realm)
# Create stream once Realm object has been saved
notifications_stream = ensure_stream(
realm,
Realm.DEFAULT_NOTIFICATION_STREAM_NAME,
stream_description="Everyone is added to this stream by default. Welcome! :octopus:",
acting_user=None,
)
realm.notifications_stream = notifications_stream
# With the current initial streams situation, the only public
# stream is the notifications_stream.
DefaultStream.objects.create(stream=notifications_stream, realm=realm)
signup_notifications_stream = ensure_stream(
realm,
Realm.INITIAL_PRIVATE_STREAM_NAME,
invite_only=True,
stream_description="A private stream for core team members.",
acting_user=None,
)
realm.signup_notifications_stream = signup_notifications_stream
realm.save(update_fields=["notifications_stream", "signup_notifications_stream"])
if plan_type is None and settings.BILLING_ENABLED:
do_change_realm_plan_type(realm, Realm.PLAN_TYPE_LIMITED, acting_user=None)
admin_realm = get_realm(settings.SYSTEM_BOT_REALM)
sender = get_system_bot(settings.NOTIFICATION_BOT, admin_realm.id)
# Send a notification to the admin realm
signup_message = _("Signups enabled")
try:
signups_stream = get_signups_stream(admin_realm)
topic = realm.display_subdomain
internal_send_stream_message(
sender,
signups_stream,
topic,
signup_message,
)
except Stream.DoesNotExist: # nocoverage
# If the signups stream hasn't been created in the admin
# realm, don't auto-create it to send to it; just do nothing.
pass
setup_realm_internal_bots(realm)
return realm
def update_scheduled_email_notifications_time(
user_profile: UserProfile, old_batching_period: int, new_batching_period: int
) -> None:
existing_scheduled_emails = ScheduledMessageNotificationEmail.objects.filter(
user_profile=user_profile
)
scheduled_timestamp_change = datetime.timedelta(
seconds=new_batching_period
) - datetime.timedelta(seconds=old_batching_period)
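    # e.g. moving from a 120s to a 300s batching period pushes every
    # pending email 180s later; a shorter new period pulls them earlier.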
existing_scheduled_emails.update(
scheduled_timestamp=F("scheduled_timestamp") + scheduled_timestamp_change
)
@transaction.atomic(durable=True)
def do_change_user_setting(
user_profile: UserProfile,
setting_name: str,
setting_value: Union[bool, str, int],
*,
acting_user: Optional[UserProfile],
) -> None:
old_value = getattr(user_profile, setting_name)
event_time = timezone_now()
if setting_name == "timezone":
assert isinstance(setting_value, str)
setting_value = canonicalize_timezone(setting_value)
else:
property_type = UserProfile.property_types[setting_name]
assert isinstance(setting_value, property_type)
setattr(user_profile, setting_name, setting_value)
# TODO: Move these database actions into a transaction.atomic block.
user_profile.save(update_fields=[setting_name])
if setting_name in UserProfile.notification_setting_types:
# Prior to all personal settings being managed by property_types,
# these were only created for notification settings.
#
# TODO: Start creating these for all settings, and do a
# backfilled=True migration.
RealmAuditLog.objects.create(
realm=user_profile.realm,
event_type=RealmAuditLog.USER_SETTING_CHANGED,
event_time=event_time,
acting_user=acting_user,
modified_user=user_profile,
extra_data=orjson.dumps(
{
RealmAuditLog.OLD_VALUE: old_value,
RealmAuditLog.NEW_VALUE: setting_value,
"property": setting_name,
}
).decode(),
)
# Disabling digest emails should clear a user's email queue
if setting_name == "enable_digest_emails" and not setting_value:
clear_scheduled_emails(user_profile.id, ScheduledEmail.DIGEST)
if setting_name == "email_notifications_batching_period_seconds":
assert isinstance(old_value, int)
assert isinstance(setting_value, int)
update_scheduled_email_notifications_time(user_profile, old_value, setting_value)
event = {
"type": "user_settings",
"op": "update",
"property": setting_name,
"value": setting_value,
}
if setting_name == "default_language":
assert isinstance(setting_value, str)
event["language_name"] = get_language_name(setting_value)
transaction.on_commit(lambda: send_event(user_profile.realm, event, [user_profile.id]))
if setting_name in UserProfile.notification_settings_legacy:
# This legacy event format is for backwards-compatibility with
# clients that don't support the new user_settings event type.
# We only send this for settings added before Feature level 89.
legacy_event = {
"type": "update_global_notifications",
"user": user_profile.email,
"notification_name": setting_name,
"setting": setting_value,
}
transaction.on_commit(
lambda: send_event(user_profile.realm, legacy_event, [user_profile.id])
)
if setting_name in UserProfile.display_settings_legacy or setting_name == "timezone":
# This legacy event format is for backwards-compatibility with
# clients that don't support the new user_settings event type.
# We only send this for settings added before Feature level 89.
legacy_event = {
"type": "update_display_settings",
"user": user_profile.email,
"setting_name": setting_name,
"setting": setting_value,
}
if setting_name == "default_language":
assert isinstance(setting_value, str)
legacy_event["language_name"] = get_language_name(setting_value)
transaction.on_commit(
lambda: send_event(user_profile.realm, legacy_event, [user_profile.id])
)
# Updates to the time zone display setting are sent to all users
if setting_name == "timezone":
payload = dict(
email=user_profile.email,
user_id=user_profile.id,
timezone=canonicalize_timezone(user_profile.timezone),
)
timezone_event = dict(type="realm_user", op="update", person=payload)
transaction.on_commit(
lambda: send_event(
user_profile.realm,
timezone_event,
active_user_ids(user_profile.realm_id),
)
)
if setting_name == "enable_drafts_synchronization" and setting_value is False:
# Delete all of the drafts from the backend but don't send delete events
# for them since all that's happened is that we stopped syncing changes,
# not deleted every previously synced draft - to do that use the DELETE
# endpoint.
Draft.objects.filter(user_profile=user_profile).delete()
def lookup_default_stream_groups(
default_stream_group_names: List[str], realm: Realm
) -> List[DefaultStreamGroup]:
default_stream_groups = []
for group_name in default_stream_group_names:
try:
default_stream_group = DefaultStreamGroup.objects.get(name=group_name, realm=realm)
except DefaultStreamGroup.DoesNotExist:
raise JsonableError(_("Invalid default stream group {}").format(group_name))
default_stream_groups.append(default_stream_group)
return default_stream_groups
def notify_default_streams(realm: Realm) -> None:
event = dict(
type="default_streams",
default_streams=streams_to_dicts_sorted(get_default_streams_for_realm(realm.id)),
)
transaction.on_commit(lambda: send_event(realm, event, active_non_guest_user_ids(realm.id)))
def notify_default_stream_groups(realm: Realm) -> None:
event = dict(
type="default_stream_groups",
default_stream_groups=default_stream_groups_to_dicts_sorted(
get_default_stream_groups(realm)
),
)
transaction.on_commit(lambda: send_event(realm, event, active_non_guest_user_ids(realm.id)))
def do_add_default_stream(stream: Stream) -> None:
realm_id = stream.realm_id
stream_id = stream.id
if not DefaultStream.objects.filter(realm_id=realm_id, stream_id=stream_id).exists():
DefaultStream.objects.create(realm_id=realm_id, stream_id=stream_id)
notify_default_streams(stream.realm)
@transaction.atomic(savepoint=False)
def do_remove_default_stream(stream: Stream) -> None:
realm_id = stream.realm_id
stream_id = stream.id
DefaultStream.objects.filter(realm_id=realm_id, stream_id=stream_id).delete()
notify_default_streams(stream.realm)
def do_create_default_stream_group(
realm: Realm, group_name: str, description: str, streams: List[Stream]
) -> None:
default_streams = get_default_streams_for_realm(realm.id)
for stream in streams:
if stream in default_streams:
raise JsonableError(
_(
"'{stream_name}' is a default stream and cannot be added to '{group_name}'",
).format(stream_name=stream.name, group_name=group_name)
)
check_default_stream_group_name(group_name)
(group, created) = DefaultStreamGroup.objects.get_or_create(
name=group_name, realm=realm, description=description
)
if not created:
raise JsonableError(
_(
"Default stream group '{group_name}' already exists",
).format(group_name=group_name)
)
group.streams.set(streams)
notify_default_stream_groups(realm)
def do_add_streams_to_default_stream_group(
realm: Realm, group: DefaultStreamGroup, streams: List[Stream]
) -> None:
default_streams = get_default_streams_for_realm(realm.id)
for stream in streams:
if stream in default_streams:
raise JsonableError(
_(
"'{stream_name}' is a default stream and cannot be added to '{group_name}'",
).format(stream_name=stream.name, group_name=group.name)
)
if stream in group.streams.all():
raise JsonableError(
_(
"Stream '{stream_name}' is already present in default stream group '{group_name}'",
).format(stream_name=stream.name, group_name=group.name)
)
group.streams.add(stream)
group.save()
notify_default_stream_groups(realm)
def do_remove_streams_from_default_stream_group(
realm: Realm, group: DefaultStreamGroup, streams: List[Stream]
) -> None:
for stream in streams:
if stream not in group.streams.all():
raise JsonableError(
_(
"Stream '{stream_name}' is not present in default stream group '{group_name}'",
).format(stream_name=stream.name, group_name=group.name)
)
group.streams.remove(stream)
group.save()
notify_default_stream_groups(realm)
def do_change_default_stream_group_name(
realm: Realm, group: DefaultStreamGroup, new_group_name: str
) -> None:
if group.name == new_group_name:
raise JsonableError(
_("This default stream group is already named '{}'").format(new_group_name)
)
if DefaultStreamGroup.objects.filter(name=new_group_name, realm=realm).exists():
raise JsonableError(_("Default stream group '{}' already exists").format(new_group_name))
group.name = new_group_name
group.save()
notify_default_stream_groups(realm)
def do_change_default_stream_group_description(
realm: Realm, group: DefaultStreamGroup, new_description: str
) -> None:
group.description = new_description
group.save()
notify_default_stream_groups(realm)
def do_remove_default_stream_group(realm: Realm, group: DefaultStreamGroup) -> None:
group.delete()
notify_default_stream_groups(realm)
def get_default_streams_for_realm(realm_id: int) -> List[Stream]:
return [
default.stream
for default in DefaultStream.objects.select_related().filter(realm_id=realm_id)
]
def get_default_subs(user_profile: UserProfile) -> List[Stream]:
# Right now default streams are realm-wide. This wrapper gives us flexibility
# to some day further customize how we set up default streams for new users.
return get_default_streams_for_realm(user_profile.realm_id)
# returns default streams in JSON serializable format
def streams_to_dicts_sorted(streams: List[Stream]) -> List[Dict[str, Any]]:
return sorted((stream.to_dict() for stream in streams), key=lambda elt: elt["name"])
def default_stream_groups_to_dicts_sorted(groups: List[DefaultStreamGroup]) -> List[Dict[str, Any]]:
return sorted((group.to_dict() for group in groups), key=lambda elt: elt["name"])
def do_update_user_activity_interval(
user_profile: UserProfile, log_time: datetime.datetime
) -> None:
effective_end = log_time + UserActivityInterval.MIN_INTERVAL_LENGTH
# This code isn't perfect, because with various races we might end
# up creating two overlapping intervals, but that shouldn't happen
# often, and can be corrected for in post-processing
try:
last = UserActivityInterval.objects.filter(user_profile=user_profile).order_by("-end")[0]
# Two intervals overlap iff each interval ends after the other
# begins. In this case, we just extend the old interval to
# include the new interval.
if log_time <= last.end and effective_end >= last.start:
last.end = max(last.end, effective_end)
last.start = min(last.start, log_time)
last.save(update_fields=["start", "end"])
return
except IndexError:
pass
# Otherwise, the intervals don't overlap, so we should make a new one
UserActivityInterval.objects.create(
user_profile=user_profile, start=log_time, end=effective_end
)
@statsd_increment("user_activity")
def do_update_user_activity(
user_profile_id: int, client_id: int, query: str, count: int, log_time: datetime.datetime
) -> None:
(activity, created) = UserActivity.objects.get_or_create(
user_profile_id=user_profile_id,
client_id=client_id,
query=query,
defaults={"last_visit": log_time, "count": count},
)
if not created:
activity.count += count
activity.last_visit = log_time
activity.save(update_fields=["last_visit", "count"])
def send_presence_changed(user_profile: UserProfile, presence: UserPresence) -> None:
# Most presence data is sent to clients in the main presence
    # endpoint in response to the user's own presence; this results in
    # data that is 1-2 minutes stale about who is online. The flaw with
    # this plan is that when a user comes back online and then immediately
    # sends a message, recipients may still see that user as offline!
    # We solve that by sending an immediate presence update to clients.
#
# See https://zulip.readthedocs.io/en/latest/subsystems/presence.html for
# internals documentation on presence.
user_ids = active_user_ids(user_profile.realm_id)
if len(user_ids) > settings.USER_LIMIT_FOR_SENDING_PRESENCE_UPDATE_EVENTS:
        # These immediate presence updates generate quadratic work for
        # Tornado (each event contains a linear number of users, and the
        # frequency of users coming online also grows linearly with the
        # userbase). In
# organizations with thousands of users, this can overload
# Tornado, especially if much of the realm comes online at the
# same time.
#
# The utility of these live-presence updates goes down as
# organizations get bigger (since one is much less likely to
# be paying attention to the sidebar); so beyond a limit, we
# stop sending them at all.
return
presence_dict = presence.to_dict()
event = dict(
type="presence",
email=user_profile.email,
user_id=user_profile.id,
server_timestamp=time.time(),
presence={presence_dict["client"]: presence_dict},
)
send_event(user_profile.realm, event, user_ids)
def consolidate_client(client: Client) -> Client:
    # The web app reports its client as 'website'. The desktop app
    # reports its client as ZulipDesktop, because it sets a custom
    # user agent. We want both to count as web users, so we alias
    # ZulipDesktop to website.
if client.name in ["ZulipDesktop"]:
return get_client("website")
else:
return client
@statsd_increment("user_presence")
def do_update_user_presence(
user_profile: UserProfile, client: Client, log_time: datetime.datetime, status: int
) -> None:
client = consolidate_client(client)
defaults = dict(
timestamp=log_time,
status=status,
realm_id=user_profile.realm_id,
)
(presence, created) = UserPresence.objects.get_or_create(
user_profile=user_profile,
client=client,
defaults=defaults,
)
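    # Presence data older than 70 seconds (1 minute, 10 seconds) is treated as stale.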
stale_status = (log_time - presence.timestamp) > datetime.timedelta(minutes=1, seconds=10)
was_idle = presence.status == UserPresence.IDLE
became_online = (status == UserPresence.ACTIVE) and (stale_status or was_idle)
# If an object was created, it has already been saved.
#
# We suppress changes from ACTIVE to IDLE before stale_status is reached;
# this protects us from the user having two clients open: one active, the
# other idle. Without this check, we would constantly toggle their status
# between the two states.
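    # (Operator precedence note: the condition below parses as
    # `(not created and stale_status) or was_idle or (status == presence.status)`.)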
if not created and stale_status or was_idle or status == presence.status:
# The following block attempts to only update the "status"
# field in the event that it actually changed. This is
# important to avoid flushing the UserPresence cache when the
# data it would return to a client hasn't actually changed
# (see the UserPresence post_save hook for details).
presence.timestamp = log_time
update_fields = ["timestamp"]
if presence.status != status:
presence.status = status
update_fields.append("status")
presence.save(update_fields=update_fields)
if not user_profile.realm.presence_disabled and (created or became_online):
send_presence_changed(user_profile, presence)
def update_user_activity_interval(user_profile: UserProfile, log_time: datetime.datetime) -> None:
event = {"user_profile_id": user_profile.id, "time": datetime_to_timestamp(log_time)}
queue_json_publish("user_activity_interval", event)
def update_user_presence(
user_profile: UserProfile,
client: Client,
log_time: datetime.datetime,
status: int,
new_user_input: bool,
) -> None:
event = {
"user_profile_id": user_profile.id,
"status": status,
"time": datetime_to_timestamp(log_time),
"client": client.name,
}
queue_json_publish("user_presence", event)
if new_user_input:
update_user_activity_interval(user_profile, log_time)
def do_update_user_status(
user_profile: UserProfile,
away: Optional[bool],
status_text: Optional[str],
client_id: int,
emoji_name: Optional[str],
emoji_code: Optional[str],
reaction_type: Optional[str],
) -> None:
if away is None:
status = None
elif away:
status = UserStatus.AWAY
else:
status = UserStatus.NORMAL
realm = user_profile.realm
update_user_status(
user_profile_id=user_profile.id,
status=status,
status_text=status_text,
client_id=client_id,
emoji_name=emoji_name,
emoji_code=emoji_code,
reaction_type=reaction_type,
)
event = dict(
type="user_status",
user_id=user_profile.id,
)
if away is not None:
event["away"] = away
if status_text is not None:
event["status_text"] = status_text
if emoji_name is not None:
event["emoji_name"] = emoji_name
event["emoji_code"] = emoji_code
event["reaction_type"] = reaction_type
send_event(realm, event, active_user_ids(realm.id))
@dataclass
class ReadMessagesEvent:
messages: List[int]
all: bool
type: str = field(default="update_message_flags", init=False)
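    # `op` and `operation` carry the same value; the legacy `operation`
    # name appears to be kept so that older clients still understand the event.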
op: str = field(default="add", init=False)
operation: str = field(default="add", init=False)
flag: str = field(default="read", init=False)
def do_mark_all_as_read(user_profile: UserProfile) -> int:
log_statsd_event("bankruptcy")
# First, we clear mobile push notifications. This is safer in the
# event that the below logic times out and we're killed.
all_push_message_ids = (
UserMessage.objects.filter(
user_profile=user_profile,
)
.extra(
where=[UserMessage.where_active_push_notification()],
)
.values_list("message_id", flat=True)[0:10000]
)
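    # The [0:10000] slice above caps how many push-notified messages we
    # clear in this single pass.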
do_clear_mobile_push_notifications_for_ids([user_profile.id], all_push_message_ids)
msgs = UserMessage.objects.filter(user_profile=user_profile).extra(
where=[UserMessage.where_unread()],
)
count = msgs.update(
flags=F("flags").bitor(UserMessage.flags.read),
)
event = asdict(
ReadMessagesEvent(
messages=[], # we don't send messages, since the client reloads anyway
all=True,
)
)
event_time = timezone_now()
send_event(user_profile.realm, event, [user_profile.id])
do_increment_logging_stat(
user_profile, COUNT_STATS["messages_read::hour"], None, event_time, increment=count
)
do_increment_logging_stat(
user_profile,
COUNT_STATS["messages_read_interactions::hour"],
None,
event_time,
increment=min(1, count),
)
return count
def do_mark_stream_messages_as_read(
user_profile: UserProfile, stream_recipient_id: int, topic_name: Optional[str] = None
) -> int:
log_statsd_event("mark_stream_as_read")
msgs = UserMessage.objects.filter(
user_profile=user_profile,
)
msgs = msgs.filter(message__recipient_id=stream_recipient_id)
if topic_name:
msgs = filter_by_topic_name_via_message(
query=msgs,
topic_name=topic_name,
)
msgs = msgs.extra(
where=[UserMessage.where_unread()],
)
message_ids = list(msgs.values_list("message_id", flat=True))
count = msgs.update(
flags=F("flags").bitor(UserMessage.flags.read),
)
event = asdict(
ReadMessagesEvent(
messages=message_ids,
all=False,
)
)
event_time = timezone_now()
send_event(user_profile.realm, event, [user_profile.id])
do_clear_mobile_push_notifications_for_ids([user_profile.id], message_ids)
do_increment_logging_stat(
user_profile, COUNT_STATS["messages_read::hour"], None, event_time, increment=count
)
do_increment_logging_stat(
user_profile,
COUNT_STATS["messages_read_interactions::hour"],
None,
event_time,
increment=min(1, count),
)
return count
def do_mark_muted_user_messages_as_read(
user_profile: UserProfile,
muted_user: UserProfile,
) -> int:
messages = UserMessage.objects.filter(
user_profile=user_profile, message__sender=muted_user
).extra(where=[UserMessage.where_unread()])
message_ids = list(messages.values_list("message_id", flat=True))
count = messages.update(
flags=F("flags").bitor(UserMessage.flags.read),
)
event = asdict(
ReadMessagesEvent(
messages=message_ids,
all=False,
)
)
event_time = timezone_now()
send_event(user_profile.realm, event, [user_profile.id])
do_clear_mobile_push_notifications_for_ids([user_profile.id], message_ids)
do_increment_logging_stat(
user_profile, COUNT_STATS["messages_read::hour"], None, event_time, increment=count
)
do_increment_logging_stat(
user_profile,
COUNT_STATS["messages_read_interactions::hour"],
None,
event_time,
increment=min(1, count),
)
return count
def do_update_mobile_push_notification(
message: Message,
prior_mention_user_ids: Set[int],
mentions_user_ids: Set[int],
stream_push_user_ids: Set[int],
) -> None:
# Called during the message edit code path to remove mobile push
# notifications for users who are no longer mentioned following
# the edit. See #15428 for details.
#
# A perfect implementation would also support updating the message
# in a sent notification if a message was edited to mention a
# group rather than a user (or vice versa), though it is likely
# not worth the effort to do such a change.
if not message.is_stream_message():
return
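    # Users who were mentioned before the edit but no longer are; we skip
    # anyone who receives stream-level push notifications, since their
    # notification remains valid regardless of the mention.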
remove_notify_users = prior_mention_user_ids - mentions_user_ids - stream_push_user_ids
do_clear_mobile_push_notifications_for_ids(list(remove_notify_users), [message.id])
def do_clear_mobile_push_notifications_for_ids(
user_profile_ids: List[int], message_ids: List[int]
) -> None:
if len(message_ids) == 0:
return
# This function supports clearing notifications for several users
# only for the message-edit use case where we'll have a single message_id.
assert len(user_profile_ids) == 1 or len(message_ids) == 1
messages_by_user = defaultdict(list)
notifications_to_update = list(
UserMessage.objects.filter(
message_id__in=message_ids,
user_profile_id__in=user_profile_ids,
)
.extra(
where=[UserMessage.where_active_push_notification()],
)
.values_list("user_profile_id", "message_id")
)
for (user_id, message_id) in notifications_to_update:
messages_by_user[user_id].append(message_id)
for (user_profile_id, event_message_ids) in messages_by_user.items():
queue_json_publish(
"missedmessage_mobile_notifications",
{
"type": "remove",
"user_profile_id": user_profile_id,
"message_ids": event_message_ids,
},
)
def do_update_message_flags(
user_profile: UserProfile, operation: str, flag: str, messages: List[int]
) -> int:
valid_flags = [item for item in UserMessage.flags if item not in UserMessage.NON_API_FLAGS]
if flag not in valid_flags:
raise JsonableError(_("Invalid flag: '{}'").format(flag))
if flag in UserMessage.NON_EDITABLE_FLAGS:
raise JsonableError(_("Flag not editable: '{}'").format(flag))
if operation not in ("add", "remove"):
raise JsonableError(_("Invalid message flag operation: '{}'").format(operation))
flagattr = getattr(UserMessage.flags, flag)
msgs = UserMessage.objects.filter(user_profile=user_profile, message_id__in=messages)
um_message_ids = {um.message_id for um in msgs}
historical_message_ids = list(set(messages) - um_message_ids)
# Users can mutate flags for messages that don't have a UserMessage yet.
# First, validate that the user is even allowed to access these message_ids.
for message_id in historical_message_ids:
access_message(user_profile, message_id)
# And then create historical UserMessage records. See the called function for more context.
create_historical_user_messages(user_id=user_profile.id, message_ids=historical_message_ids)
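    # bitor sets the flag bit on every matching row; bitand with the
    # bit's complement clears it.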
if operation == "add":
count = msgs.update(flags=F("flags").bitor(flagattr))
elif operation == "remove":
count = msgs.update(flags=F("flags").bitand(~flagattr))
event = {
"type": "update_message_flags",
"op": operation,
"operation": operation,
"flag": flag,
"messages": messages,
"all": False,
}
if flag == "read" and operation == "remove":
# When removing the read flag (i.e. marking messages as
# unread), extend the event with an additional object with
# details on the messages required to update the client's
# `unread_msgs` data structure.
raw_unread_data = get_raw_unread_data(user_profile, messages)
event["message_details"] = format_unread_message_details(user_profile.id, raw_unread_data)
send_event(user_profile.realm, event, [user_profile.id])
if flag == "read" and operation == "add":
event_time = timezone_now()
do_clear_mobile_push_notifications_for_ids([user_profile.id], messages)
do_increment_logging_stat(
user_profile, COUNT_STATS["messages_read::hour"], None, event_time, increment=count
)
do_increment_logging_stat(
user_profile,
COUNT_STATS["messages_read_interactions::hour"],
None,
event_time,
increment=min(1, count),
)
return count
class MessageUpdateUserInfoResult(TypedDict):
message_user_ids: Set[int]
mention_user_ids: Set[int]
def maybe_send_resolve_topic_notifications(
*,
user_profile: UserProfile,
stream: Stream,
old_topic: str,
new_topic: str,
changed_messages: List[Message],
) -> None:
# Note that topics will have already been stripped in check_update_message.
#
# This logic is designed to treat removing a weird "✔ ✔✔ "
# prefix as unresolving the topic.
if old_topic.lstrip(RESOLVED_TOPIC_PREFIX) != new_topic.lstrip(RESOLVED_TOPIC_PREFIX):
return
topic_resolved: bool = new_topic.startswith(RESOLVED_TOPIC_PREFIX) and not old_topic.startswith(
RESOLVED_TOPIC_PREFIX
)
topic_unresolved: bool = old_topic.startswith(
RESOLVED_TOPIC_PREFIX
) and not new_topic.startswith(RESOLVED_TOPIC_PREFIX)
if not topic_resolved and not topic_unresolved:
# If there's some other weird topic that does not toggle the
# state of "topic starts with RESOLVED_TOPIC_PREFIX", we do
# nothing. Any other logic could result in cases where we send
# these notifications in a non-alternating fashion.
#
# Note that it is still possible for an individual topic to
# have multiple "This topic was marked as resolved"
# notifications in a row: one can send new messages to the
# pre-resolve topic and then resolve the topic created that
# way to get multiple in the resolved topic. And then an
        # administrator can delete the messages in between. We consider this
# to be a fundamental risk of irresponsible message deletion,
# not a bug with the "resolve topics" feature.
return
# Compute the users who either sent or reacted to messages that
# were moved via the "resolve topic' action. Only those users
# should be eligible for this message being managed as unread.
affected_participant_ids = (set(message.sender_id for message in changed_messages)) | set(
Reaction.objects.filter(message__in=changed_messages).values_list(
"user_profile_id", flat=True
)
)
sender = get_system_bot(settings.NOTIFICATION_BOT, user_profile.realm_id)
user_mention = silent_mention_syntax_for_user(user_profile)
with override_language(stream.realm.default_language):
if topic_resolved:
notification_string = _("{user} has marked this topic as resolved.")
elif topic_unresolved:
notification_string = _("{user} has marked this topic as unresolved.")
internal_send_stream_message(
sender,
stream,
new_topic,
notification_string.format(
user=user_mention,
),
limit_unread_user_ids=affected_participant_ids,
)
def send_message_moved_breadcrumbs(
user_profile: UserProfile,
old_stream: Stream,
old_topic: str,
old_thread_notification_string: Optional[str],
new_stream: Stream,
new_topic: Optional[str],
new_thread_notification_string: Optional[str],
changed_messages_count: int,
) -> None:
# Since moving content between streams is highly disruptive,
    # it's worth adding a couple of tombstone messages showing what
# happened.
sender = get_system_bot(settings.NOTIFICATION_BOT, old_stream.realm_id)
if new_topic is None:
new_topic = old_topic
user_mention = silent_mention_syntax_for_user(user_profile)
old_topic_link = f"#**{old_stream.name}>{old_topic}**"
new_topic_link = f"#**{new_stream.name}>{new_topic}**"
if new_thread_notification_string is not None:
with override_language(new_stream.realm.default_language):
internal_send_stream_message(
sender,
new_stream,
new_topic,
new_thread_notification_string.format(
old_location=old_topic_link,
user=user_mention,
changed_messages_count=changed_messages_count,
),
)
if old_thread_notification_string is not None:
with override_language(old_stream.realm.default_language):
# Send a notification to the old stream that the topic was moved.
internal_send_stream_message(
sender,
old_stream,
old_topic,
old_thread_notification_string.format(
user=user_mention,
new_location=new_topic_link,
changed_messages_count=changed_messages_count,
),
)
def get_user_info_for_message_updates(message_id: int) -> MessageUpdateUserInfoResult:
# We exclude UserMessage.flags.historical rows since those
# users did not receive the message originally, and thus
# probably are not relevant for reprocessed alert_words,
# mentions and similar rendering features. This may be a
# decision we change in the future.
query = UserMessage.objects.filter(
message=message_id,
flags=~UserMessage.flags.historical,
).values("user_profile_id", "flags")
rows = list(query)
message_user_ids = {row["user_profile_id"] for row in rows}
mask = UserMessage.flags.mentioned | UserMessage.flags.wildcard_mentioned
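    # A row counts as a mention if either the personal or wildcard
    # mention bit is set in its flags.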
mention_user_ids = {row["user_profile_id"] for row in rows if int(row["flags"]) & mask}
return dict(
message_user_ids=message_user_ids,
mention_user_ids=mention_user_ids,
)
def update_user_message_flags(
rendering_result: MessageRenderingResult, ums: Iterable[UserMessage]
) -> None:
wildcard = rendering_result.mentions_wildcard
mentioned_ids = rendering_result.mentions_user_ids
ids_with_alert_words = rendering_result.user_ids_with_alert_words
changed_ums: Set[UserMessage] = set()
def update_flag(um: UserMessage, should_set: bool, flag: int) -> None:
if should_set:
if not (um.flags & flag):
um.flags |= flag
changed_ums.add(um)
else:
if um.flags & flag:
um.flags &= ~flag
changed_ums.add(um)
for um in ums:
has_alert_word = um.user_profile_id in ids_with_alert_words
update_flag(um, has_alert_word, UserMessage.flags.has_alert_word)
mentioned = um.user_profile_id in mentioned_ids
update_flag(um, mentioned, UserMessage.flags.mentioned)
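        # A wildcard mention affects every recipient, so the same
        # boolean applies to each row.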
update_flag(um, wildcard, UserMessage.flags.wildcard_mentioned)
for um in changed_ums:
um.save(update_fields=["flags"])
def update_to_dict_cache(
changed_messages: List[Message], realm_id: Optional[int] = None
) -> List[int]:
"""Updates the message as stored in the to_dict cache (for serving
messages)."""
items_for_remote_cache = {}
message_ids = []
changed_messages_to_dict = MessageDict.to_dict_uncached(changed_messages, realm_id)
for msg_id, msg in changed_messages_to_dict.items():
message_ids.append(msg_id)
key = to_dict_cache_key_id(msg_id)
items_for_remote_cache[key] = (msg,)
cache_set_many(items_for_remote_cache)
return message_ids
def do_update_embedded_data(
user_profile: UserProfile,
message: Message,
content: Optional[str],
rendering_result: MessageRenderingResult,
) -> None:
timestamp = timezone_now()
event: Dict[str, Any] = {
"type": "update_message",
"user_id": None,
"edit_timestamp": datetime_to_timestamp(timestamp),
"message_id": message.id,
"rendering_only": True,
}
changed_messages = [message]
rendered_content: Optional[str] = None
ums = UserMessage.objects.filter(message=message.id)
if content is not None:
update_user_message_flags(rendering_result, ums)
rendered_content = rendering_result.rendered_content
message.rendered_content = rendered_content
message.rendered_content_version = markdown_version
event["content"] = content
event["rendered_content"] = rendered_content
message.save(update_fields=["content", "rendered_content"])
event["message_ids"] = update_to_dict_cache(changed_messages)
def user_info(um: UserMessage) -> Dict[str, Any]:
return {
"id": um.user_profile_id,
"flags": um.flags_list(),
}
send_event(user_profile.realm, event, list(map(user_info, ums)))
class DeleteMessagesEvent(TypedDict, total=False):
type: str
message_ids: List[int]
message_type: str
topic: str
stream_id: int
# We use transaction.atomic to support select_for_update in the attachment codepath.
@transaction.atomic(savepoint=False)
def do_update_message(
user_profile: UserProfile,
target_message: Message,
new_stream: Optional[Stream],
topic_name: Optional[str],
propagate_mode: str,
send_notification_to_old_thread: bool,
send_notification_to_new_thread: bool,
content: Optional[str],
rendering_result: Optional[MessageRenderingResult],
prior_mention_user_ids: Set[int],
mention_data: Optional[MentionData] = None,
) -> int:
"""
The main function for message editing. A message edit event can
modify:
* the message's content (in which case the caller will have
set both content and rendered_content),
* the topic, in which case the caller will have set topic_name
    * or both the message's content and the topic
* or stream and/or topic, in which case the caller will have set
new_stream and/or topic_name.
    With topic edits, propagate_mode determines whether other messages
also have their topics edited.
"""
timestamp = timezone_now()
target_message.last_edit_time = timestamp
event: Dict[str, Any] = {
"type": "update_message",
"user_id": user_profile.id,
"edit_timestamp": datetime_to_timestamp(timestamp),
"message_id": target_message.id,
"rendering_only": False,
}
edit_history_event: EditHistoryEvent = {
"user_id": user_profile.id,
"timestamp": event["edit_timestamp"],
}
changed_messages = [target_message]
realm = user_profile.realm
stream_being_edited = None
if target_message.is_stream_message():
stream_id = target_message.recipient.type_id
stream_being_edited = get_stream_by_id_in_realm(stream_id, realm)
event["stream_name"] = stream_being_edited.name
event["stream_id"] = stream_being_edited.id
ums = UserMessage.objects.filter(message=target_message.id)
if content is not None:
assert rendering_result is not None
# mention_data is required if there's a content edit.
assert mention_data is not None
# add data from group mentions to mentions_user_ids.
for group_id in rendering_result.mentions_user_group_ids:
members = mention_data.get_group_members(group_id)
rendering_result.mentions_user_ids.update(members)
update_user_message_flags(rendering_result, ums)
# One could imagine checking realm.allow_edit_history here and
# modifying the events based on that setting, but doing so
# doesn't really make sense. We need to send the edit event
# to clients regardless, and a client already had access to
# the original/pre-edit content of the message anyway. That
# setting must be enforced on the client side, and making a
# change here simply complicates the logic for clients parsing
# edit history events.
event["orig_content"] = target_message.content
event["orig_rendered_content"] = target_message.rendered_content
edit_history_event["prev_content"] = target_message.content
edit_history_event["prev_rendered_content"] = target_message.rendered_content
edit_history_event[
"prev_rendered_content_version"
] = target_message.rendered_content_version
target_message.content = content
target_message.rendered_content = rendering_result.rendered_content
target_message.rendered_content_version = markdown_version
event["content"] = content
event["rendered_content"] = rendering_result.rendered_content
event["prev_rendered_content_version"] = target_message.rendered_content_version
event["is_me_message"] = Message.is_status_message(
content, rendering_result.rendered_content
)
# target_message.has_image and target_message.has_link will have been
# already updated by Markdown rendering in the caller.
target_message.has_attachment = check_attachment_reference_change(
target_message, rendering_result
)
if target_message.is_stream_message():
if topic_name is not None:
new_topic_name = topic_name
else:
new_topic_name = target_message.topic_name()
stream_topic: Optional[StreamTopicTarget] = StreamTopicTarget(
stream_id=stream_id,
topic_name=new_topic_name,
)
else:
stream_topic = None
info = get_recipient_info(
realm_id=realm.id,
recipient=target_message.recipient,
sender_id=target_message.sender_id,
stream_topic=stream_topic,
possible_wildcard_mention=mention_data.message_has_wildcards(),
)
event["online_push_user_ids"] = list(info["online_push_user_ids"])
event["pm_mention_push_disabled_user_ids"] = list(info["pm_mention_push_disabled_user_ids"])
event["pm_mention_email_disabled_user_ids"] = list(
info["pm_mention_email_disabled_user_ids"]
)
event["stream_push_user_ids"] = list(info["stream_push_user_ids"])
event["stream_email_user_ids"] = list(info["stream_email_user_ids"])
event["muted_sender_user_ids"] = list(info["muted_sender_user_ids"])
event["prior_mention_user_ids"] = list(prior_mention_user_ids)
event["presence_idle_user_ids"] = filter_presence_idle_user_ids(info["active_user_ids"])
event["all_bot_user_ids"] = list(info["all_bot_user_ids"])
if rendering_result.mentions_wildcard:
event["wildcard_mention_user_ids"] = list(info["wildcard_mention_user_ids"])
else:
event["wildcard_mention_user_ids"] = []
do_update_mobile_push_notification(
target_message,
prior_mention_user_ids,
rendering_result.mentions_user_ids,
info["stream_push_user_ids"],
)
if topic_name is not None or new_stream is not None:
orig_topic_name = target_message.topic_name()
event["propagate_mode"] = propagate_mode
if new_stream is not None:
assert content is None
assert target_message.is_stream_message()
assert stream_being_edited is not None
edit_history_event["prev_stream"] = stream_being_edited.id
edit_history_event["stream"] = new_stream.id
event[ORIG_TOPIC] = orig_topic_name
target_message.recipient_id = new_stream.recipient_id
event["new_stream_id"] = new_stream.id
event["propagate_mode"] = propagate_mode
# When messages are moved from one stream to another, some
# users may lose access to those messages, including guest
# users and users not subscribed to the new stream (if it is a
# private stream). For those users, their experience is as
# though the messages were deleted, and we should send a
# delete_message event to them instead.
subs_to_old_stream = get_active_subscriptions_for_stream_id(
stream_id, include_deactivated_users=True
).select_related("user_profile")
subs_to_new_stream = list(
get_active_subscriptions_for_stream_id(
new_stream.id, include_deactivated_users=True
).select_related("user_profile")
)
old_stream_sub_ids = [user.user_profile_id for user in subs_to_old_stream]
new_stream_sub_ids = [user.user_profile_id for user in subs_to_new_stream]
# Get users who aren't subscribed to the new_stream.
subs_losing_usermessages = [
sub for sub in subs_to_old_stream if sub.user_profile_id not in new_stream_sub_ids
]
        # Users who can no longer access the message without some action
# from administrators.
subs_losing_access = [
sub
for sub in subs_losing_usermessages
if sub.user_profile.is_guest or not new_stream.is_public()
]
ums = ums.exclude(
user_profile_id__in=[sub.user_profile_id for sub in subs_losing_usermessages]
)
subs_gaining_usermessages = []
if not new_stream.is_history_public_to_subscribers():
            # For private streams with history not public to subscribers,
            # we find the users who are not present in the messages' old
            # stream and create new UserMessage rows for them so that they
            # can access these messages.
subs_gaining_usermessages += [
user_id for user_id in new_stream_sub_ids if user_id not in old_stream_sub_ids
]
if topic_name is not None:
topic_name = truncate_topic(topic_name)
target_message.set_topic_name(topic_name)
# These fields have legacy field names.
event[ORIG_TOPIC] = orig_topic_name
event[TOPIC_NAME] = topic_name
event[TOPIC_LINKS] = topic_links(target_message.sender.realm_id, topic_name)
edit_history_event["prev_topic"] = orig_topic_name
edit_history_event["topic"] = topic_name
update_edit_history(target_message, timestamp, edit_history_event)
delete_event_notify_user_ids: List[int] = []
if propagate_mode in ["change_later", "change_all"]:
assert topic_name is not None or new_stream is not None
assert stream_being_edited is not None
# Other messages should only get topic/stream fields in their edit history.
topic_only_edit_history_event: EditHistoryEvent = {
"user_id": edit_history_event["user_id"],
"timestamp": edit_history_event["timestamp"],
}
if topic_name is not None:
topic_only_edit_history_event["prev_topic"] = edit_history_event["prev_topic"]
topic_only_edit_history_event["topic"] = edit_history_event["topic"]
if new_stream is not None:
topic_only_edit_history_event["prev_stream"] = edit_history_event["prev_stream"]
topic_only_edit_history_event["stream"] = edit_history_event["stream"]
messages_list = update_messages_for_topic_edit(
acting_user=user_profile,
edited_message=target_message,
propagate_mode=propagate_mode,
orig_topic_name=orig_topic_name,
topic_name=topic_name,
new_stream=new_stream,
old_stream=stream_being_edited,
edit_history_event=topic_only_edit_history_event,
last_edit_time=timestamp,
)
changed_messages += messages_list
if new_stream is not None:
assert stream_being_edited is not None
changed_message_ids = [msg.id for msg in changed_messages]
if subs_gaining_usermessages:
ums_to_create = []
for message_id in changed_message_ids:
for user_profile_id in subs_gaining_usermessages:
# The fact that the user didn't have a UserMessage originally means we can infer that the user
# was not mentioned in the original message (even if mention syntax was present, it would not
# take effect for a user who was not subscribed). If we were editing the message's content, we
# would rerender the message and then use the new stream's data to determine whether this is
# a mention of a subscriber; but as we are not doing so, we choose to preserve the "was this
# mention syntax an actual mention" decision made during the original rendering for implementation
# simplicity. As a result, the only flag to consider applying here is read.
um = UserMessageLite(
user_profile_id=user_profile_id,
message_id=message_id,
flags=UserMessage.flags.read,
)
ums_to_create.append(um)
bulk_insert_ums(ums_to_create)
# Delete UserMessage objects for users who will no
# longer have access to these messages. Note: This could be
# very expensive, since it's N guest users x M messages.
UserMessage.objects.filter(
user_profile_id__in=[sub.user_profile_id for sub in subs_losing_usermessages],
message_id__in=changed_message_ids,
).delete()
delete_event: DeleteMessagesEvent = {
"type": "delete_message",
"message_ids": changed_message_ids,
"message_type": "stream",
"stream_id": stream_being_edited.id,
"topic": orig_topic_name,
}
delete_event_notify_user_ids = [sub.user_profile_id for sub in subs_losing_access]
send_event(user_profile.realm, delete_event, delete_event_notify_user_ids)
# This does message.save(update_fields=[...])
save_message_for_edit_use_case(message=target_message)
realm_id: Optional[int] = None
if stream_being_edited is not None:
realm_id = stream_being_edited.realm_id
event["message_ids"] = update_to_dict_cache(changed_messages, realm_id)
def user_info(um: UserMessage) -> Dict[str, Any]:
return {
"id": um.user_profile_id,
"flags": um.flags_list(),
}
    # The following block arranges that users who are subscribed to a
    # stream and can see history from before they subscribed get
    # live-updates when old messages are edited (e.g. if the user does
    # a topic edit themself).
    #
    # We still don't send an update event to users who are not
    # subscribed to this stream and don't have a UserMessage row. This
    # means if a non-subscriber is viewing the narrow, they won't get
    # real-time updates. This is a balance between sending
    # message-edit notifications for every public stream to every user
    # in the organization (too expensive, and also not what we do for
    # newly sent messages anyway) and having magical live-updates
    # where possible.
users_to_be_notified = list(map(user_info, ums))
if stream_being_edited is not None:
if stream_being_edited.is_history_public_to_subscribers:
subscriptions = get_active_subscriptions_for_stream_id(
stream_id, include_deactivated_users=False
)
# We exclude long-term idle users, since they by
# definition have no active clients.
subscriptions = subscriptions.exclude(user_profile__long_term_idle=True)
            # Remove duplicates by excluding the ids of users already
            # in the users_to_be_notified list. This is the case where a
            # user both has a UserMessage row and is a current
            # subscriber.
subscriptions = subscriptions.exclude(
user_profile_id__in=[um.user_profile_id for um in ums]
)
if new_stream is not None:
assert delete_event_notify_user_ids is not None
subscriptions = subscriptions.exclude(
user_profile_id__in=delete_event_notify_user_ids
)
# All users that are subscribed to the stream must be
# notified when a message is edited
subscriber_ids = set(subscriptions.values_list("user_profile_id", flat=True))
if new_stream is not None:
                # TODO: Guest users don't see the new moved topic
                # unless the breadcrumb message for the new stream is
                # enabled. Excluding these users from receiving this
                # event helps us avoid an error traceback for our
                # clients. We should figure out a way to inform the
                # guest users of this new topic if sending a 'message'
                # event for these messages is not an option.
#
# Don't send this event to guest subs who are not
# subscribers of the old stream but are subscribed to
# the new stream; clients will be confused.
old_stream_unsubbed_guests = [
sub
for sub in subs_to_new_stream
if sub.user_profile.is_guest and sub.user_profile_id not in subscriber_ids
]
subscriptions = subscriptions.exclude(
user_profile_id__in=[sub.user_profile_id for sub in old_stream_unsubbed_guests]
)
subscriber_ids = set(subscriptions.values_list("user_profile_id", flat=True))
users_to_be_notified += list(map(subscriber_info, sorted(list(subscriber_ids))))
# Migrate muted topic configuration in the following circumstances:
#
# * If propagate_mode is change_all, do so unconditionally.
#
# * If propagate_mode is change_later, it's likely that we want to
# move these only when it appears that the intent is to move
# most of the topic, not just the last 1-2 messages which may
# have been "off topic". At present we do so unconditionally.
#
# * Never move muted topic configuration with change_one.
#
# We may want more complex behavior in cases where one appears to
# be merging topics (E.g. there are existing messages in the
# target topic).
#
# Moving a topic to another stream is complicated in that we want
# to avoid creating a UserTopic row for the user in a stream that
# they don't have access to; doing so could leak information about
# the existence of a private stream to some users. See the
# moved_all_visible_messages below for related details.
#
# So for now, we require new_stream=None for this feature.
if propagate_mode != "change_one" and (topic_name is not None or new_stream is not None):
assert stream_being_edited is not None
for muting_user in get_users_muting_topic(stream_being_edited.id, orig_topic_name):
# TODO: Ideally, this would be a bulk update operation,
# because we are doing database operations in a loop here.
#
# This loop is only acceptable in production because it is
# rare for more than a few users to have muted an
# individual topic that is being moved; as of this
# writing, no individual topic in Zulip Cloud had been
# muted by more than 100 users.
if new_stream is not None and muting_user.id in delete_event_notify_user_ids:
# If the messages are being moved to a stream the user
# cannot access, then we treat this as the
# messages/topic being deleted for this user. Unmute
# the topic for such users.
do_unmute_topic(muting_user, stream_being_edited, orig_topic_name)
else:
# Otherwise, we move the muted topic record for the user.
# We call remove_topic_mute rather than do_unmute_topic to
# avoid sending two events with new muted topics in
# immediate succession; this is correct only because
# muted_topics events always send the full set of topics.
remove_topic_mute(muting_user, stream_being_edited.id, orig_topic_name)
do_mute_topic(
muting_user,
new_stream if new_stream is not None else stream_being_edited,
topic_name if topic_name is not None else orig_topic_name,
)
send_event(user_profile.realm, event, users_to_be_notified)
if len(changed_messages) > 0 and new_stream is not None and stream_being_edited is not None:
# Notify users that the topic was moved.
changed_messages_count = len(changed_messages)
if propagate_mode == "change_all":
moved_all_visible_messages = True
else:
            # With other propagate modes, if the user in fact moved
            # all messages in the topic, we want to explain it was a
            # full-topic move.
#
# For security model reasons, we don't want to allow a
# user to take any action that would leak information
# about older messages they cannot access (E.g. the only
# remaining messages are in a stream without shared
# history). The bulk_access_messages call below addresses
# that concern.
#
# bulk_access_messages is inefficient for this task, since
# we just want to do the exists() version of this
# query. But it's nice to reuse code, and this bulk
# operation is likely cheaper than a `GET /messages`
# unless the topic has thousands of messages of history.
unmoved_messages = messages_for_topic(
stream_being_edited.recipient_id,
orig_topic_name,
)
visible_unmoved_messages = bulk_access_messages(
user_profile, unmoved_messages, stream=stream_being_edited
)
moved_all_visible_messages = len(visible_unmoved_messages) == 0
old_thread_notification_string = None
if send_notification_to_old_thread:
if moved_all_visible_messages:
old_thread_notification_string = gettext_lazy(
"This topic was moved to {new_location} by {user}."
)
elif changed_messages_count == 1:
old_thread_notification_string = gettext_lazy(
"A message was moved from this topic to {new_location} by {user}."
)
else:
old_thread_notification_string = gettext_lazy(
"{changed_messages_count} messages were moved from this topic to {new_location} by {user}."
)
new_thread_notification_string = None
if send_notification_to_new_thread:
if moved_all_visible_messages:
new_thread_notification_string = gettext_lazy(
"This topic was moved here from {old_location} by {user}."
)
elif changed_messages_count == 1:
new_thread_notification_string = gettext_lazy(
"A message was moved here from {old_location} by {user}."
)
else:
new_thread_notification_string = gettext_lazy(
"{changed_messages_count} messages were moved here from {old_location} by {user}."
)
send_message_moved_breadcrumbs(
user_profile,
stream_being_edited,
orig_topic_name,
old_thread_notification_string,
new_stream,
topic_name,
new_thread_notification_string,
changed_messages_count,
)
if (
topic_name is not None
and new_stream is None
and content is None
and len(changed_messages) > 0
):
assert stream_being_edited is not None
maybe_send_resolve_topic_notifications(
user_profile=user_profile,
stream=stream_being_edited,
old_topic=orig_topic_name,
new_topic=topic_name,
changed_messages=changed_messages,
)
return len(changed_messages)
def do_delete_messages(realm: Realm, messages: Iterable[Message]) -> None:
    # The messages in a delete_message event all belong to the same
    # topic, or consist of a single private message, as any other
    # behaviour is not possible with the current callers of this method.
messages = list(messages)
message_ids = [message.id for message in messages]
if not message_ids:
return
event: DeleteMessagesEvent = {
"type": "delete_message",
"message_ids": message_ids,
}
sample_message = messages[0]
message_type = "stream"
users_to_notify = []
if not sample_message.is_stream_message():
assert len(messages) == 1
message_type = "private"
ums = UserMessage.objects.filter(message_id__in=message_ids)
users_to_notify = [um.user_profile_id for um in ums]
archiving_chunk_size = retention.MESSAGE_BATCH_SIZE
if message_type == "stream":
stream_id = sample_message.recipient.type_id
event["stream_id"] = stream_id
event["topic"] = sample_message.topic_name()
subscriptions = get_active_subscriptions_for_stream_id(
stream_id, include_deactivated_users=False
)
# We exclude long-term idle users, since they by definition have no active clients.
subscriptions = subscriptions.exclude(user_profile__long_term_idle=True)
users_to_notify = list(subscriptions.values_list("user_profile_id", flat=True))
archiving_chunk_size = retention.STREAM_MESSAGE_BATCH_SIZE
move_messages_to_archive(message_ids, realm=realm, chunk_size=archiving_chunk_size)
event["message_type"] = message_type
transaction.on_commit(lambda: send_event(realm, event, users_to_notify))
def do_delete_messages_by_sender(user: UserProfile) -> None:
message_ids = list(
Message.objects.filter(sender=user).values_list("id", flat=True).order_by("id")
)
if message_ids:
move_messages_to_archive(message_ids, chunk_size=retention.STREAM_MESSAGE_BATCH_SIZE)
# In general, it's better to avoid using .values() because it makes
# the code pretty ugly, but in this case, it has a significant
# performance impact when loading subscriptions for users with large
# numbers of subscriptions, so it's worth optimizing.
def gather_subscriptions_helper(
user_profile: UserProfile,
include_subscribers: bool = True,
) -> SubscriptionInfo:
realm = user_profile.realm
all_streams: QuerySet[RawStreamDict] = get_active_streams(realm).values(
*Stream.API_FIELDS,
# The realm_id and recipient_id are generally not needed in the API.
"realm_id",
"recipient_id",
# email_token isn't public to some users with access to
# the stream, so doesn't belong in API_FIELDS.
"email_token",
)
recip_id_to_stream_id: Dict[int, int] = {
stream["recipient_id"]: stream["id"] for stream in all_streams
}
all_streams_map: Dict[int, RawStreamDict] = {stream["id"]: stream for stream in all_streams}
sub_dicts_query: Iterable[RawSubscriptionDict] = (
get_stream_subscriptions_for_user(user_profile)
.values(
*Subscription.API_FIELDS,
"recipient_id",
"active",
)
.order_by("recipient_id")
)
# We only care about subscriptions for active streams.
sub_dicts: List[RawSubscriptionDict] = [
sub_dict
for sub_dict in sub_dicts_query
if recip_id_to_stream_id.get(sub_dict["recipient_id"])
]
def get_stream_id(sub_dict: RawSubscriptionDict) -> int:
return recip_id_to_stream_id[sub_dict["recipient_id"]]
traffic_stream_ids = {get_stream_id(sub_dict) for sub_dict in sub_dicts}
recent_traffic = get_streams_traffic(stream_ids=traffic_stream_ids)
# Okay, now we finally get to populating our main results, which
# will be these three lists.
subscribed: List[SubscriptionStreamDict] = []
unsubscribed: List[SubscriptionStreamDict] = []
never_subscribed: List[NeverSubscribedStreamDict] = []
sub_unsub_stream_ids = set()
for sub_dict in sub_dicts:
stream_id = get_stream_id(sub_dict)
sub_unsub_stream_ids.add(stream_id)
raw_stream_dict = all_streams_map[stream_id]
stream_dict = build_stream_dict_for_sub(
user=user_profile,
sub_dict=sub_dict,
raw_stream_dict=raw_stream_dict,
recent_traffic=recent_traffic,
)
# is_active is represented in this structure by which list we include it in.
is_active = sub_dict["active"]
if is_active:
subscribed.append(stream_dict)
else:
unsubscribed.append(stream_dict)
if user_profile.can_access_public_streams():
never_subscribed_stream_ids = set(all_streams_map) - sub_unsub_stream_ids
else:
web_public_stream_ids = {stream["id"] for stream in all_streams if stream["is_web_public"]}
never_subscribed_stream_ids = web_public_stream_ids - sub_unsub_stream_ids
never_subscribed_streams = [
all_streams_map[stream_id] for stream_id in never_subscribed_stream_ids
]
for raw_stream_dict in never_subscribed_streams:
is_public = not raw_stream_dict["invite_only"]
if is_public or user_profile.is_realm_admin:
slim_stream_dict = build_stream_dict_for_never_sub(
raw_stream_dict=raw_stream_dict, recent_traffic=recent_traffic
)
never_subscribed.append(slim_stream_dict)
if include_subscribers:
# The highly optimized bulk_get_subscriber_user_ids wants to know which
# streams we are subscribed to, for validation purposes, and it uses that
# info to know if it's allowed to find OTHER subscribers.
subscribed_stream_ids = {
get_stream_id(sub_dict) for sub_dict in sub_dicts if sub_dict["active"]
}
subscriber_map = bulk_get_subscriber_user_ids(
all_streams,
user_profile,
subscribed_stream_ids,
)
for lst in [subscribed, unsubscribed]:
for stream_dict in lst:
assert isinstance(stream_dict["stream_id"], int)
stream_id = stream_dict["stream_id"]
stream_dict["subscribers"] = subscriber_map[stream_id]
for slim_stream_dict in never_subscribed:
assert isinstance(slim_stream_dict["stream_id"], int)
stream_id = slim_stream_dict["stream_id"]
slim_stream_dict["subscribers"] = subscriber_map[stream_id]
subscribed.sort(key=lambda x: x["name"])
unsubscribed.sort(key=lambda x: x["name"])
never_subscribed.sort(key=lambda x: x["name"])
return SubscriptionInfo(
subscriptions=subscribed,
unsubscribed=unsubscribed,
never_subscribed=never_subscribed,
)
def gather_subscriptions(
user_profile: UserProfile,
include_subscribers: bool = False,
) -> Tuple[List[SubscriptionStreamDict], List[SubscriptionStreamDict]]:
helper_result = gather_subscriptions_helper(
user_profile,
include_subscribers=include_subscribers,
)
subscribed = helper_result.subscriptions
unsubscribed = helper_result.unsubscribed
return (subscribed, unsubscribed)
class ActivePresenceIdleUserData(TypedDict):
alerted: bool
notifications_data: UserMessageNotificationsData
def get_active_presence_idle_user_ids(
realm: Realm,
sender_id: int,
active_users_data: List[ActivePresenceIdleUserData],
) -> List[int]:
"""
Given a list of active_user_ids, we build up a subset
of those users who fit these criteria:
* They are likely to need notifications.
* They are no longer "present" according to the
UserPresence table.
"""
if realm.presence_disabled:
return []
user_ids = set()
for user_data in active_users_data:
user_notifications_data: UserMessageNotificationsData = user_data["notifications_data"]
alerted = user_data["alerted"]
# We only need to know the presence idle state for a user if this message would be notifiable
# for them if they were indeed idle. Only including those users in the calculation below is a
# very important optimization for open communities with many inactive users.
if user_notifications_data.is_notifiable(sender_id, idle=True) or alerted:
user_ids.add(user_notifications_data.user_id)
return filter_presence_idle_user_ids(user_ids)
def filter_presence_idle_user_ids(user_ids: Set[int]) -> List[int]:
# Given a set of user IDs (the recipients of a message), accesses
# the UserPresence table to determine which of these users are
# currently idle and should potentially get email notifications
    # (and push notifications with
# user_profile.enable_online_push_notifications=False).
#
# We exclude any presence data from ZulipMobile for the purpose of
# triggering these notifications; the mobile app can more
# effectively do its own client-side filtering of notification
# sounds/etc. for the case that the user is actively doing a PM
# conversation in the app.
if not user_ids:
return []
# Matches presence.js constant
OFFLINE_THRESHOLD_SECS = 140
recent = timezone_now() - datetime.timedelta(seconds=OFFLINE_THRESHOLD_SECS)
rows = (
UserPresence.objects.filter(
user_profile_id__in=user_ids,
status=UserPresence.ACTIVE,
timestamp__gte=recent,
)
.exclude(client__name="ZulipMobile")
.distinct("user_profile_id")
.values("user_profile_id")
)
active_user_ids = {row["user_profile_id"] for row in rows}
idle_user_ids = user_ids - active_user_ids
return sorted(idle_user_ids)
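# Illustrative sketch (added for clarity; hypothetical IDs, not part of the
# original module): with the 140-second threshold above, any passed-in user
# whose most recent non-ZulipMobile ACTIVE presence row is older than that
# is treated as idle:
#
#     filter_presence_idle_user_ids({10, 11, 12})
#     # -> sorted list of the IDs lacking recent presence, e.g. [11, 12]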
def do_send_confirmation_email(
invitee: PreregistrationUser,
referrer: UserProfile,
email_language: str,
invite_expires_in_days: Union[Optional[int], UnspecifiedValue] = UnspecifiedValue(),
) -> str:
"""
Send the confirmation/welcome e-mail to an invited user.
"""
activation_url = create_confirmation_link(
invitee, Confirmation.INVITATION, validity_in_days=invite_expires_in_days
)
context = {
"referrer_full_name": referrer.full_name,
"referrer_email": referrer.delivery_email,
"activate_url": activation_url,
"referrer_realm_name": referrer.realm.name,
}
send_email(
"zerver/emails/invitation",
to_emails=[invitee.email],
from_address=FromAddress.tokenized_no_reply_address(),
language=email_language,
context=context,
realm=referrer.realm,
)
return activation_url
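# Usage sketch (hypothetical objects, added for illustration): callers usually
# pass the realm's configured language and an optional expiration window:
#
#     url = do_send_confirmation_email(
#         invitee, referrer, referrer.realm.default_language, invite_expires_in_days=10
#     )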
def email_not_system_bot(email: str) -> None:
if is_cross_realm_bot_email(email):
msg = email_reserved_for_system_bots_error(email)
code = msg
raise ValidationError(
msg,
code=code,
params=dict(deactivated=False),
)
def estimate_recent_invites(realms: Collection[Realm], *, days: int) -> int:
"""An upper bound on the number of invites sent in the last `days` days"""
recent_invites = RealmCount.objects.filter(
realm__in=realms,
property="invites_sent::day",
end_time__gte=timezone_now() - datetime.timedelta(days=days),
).aggregate(Sum("value"))["value__sum"]
if recent_invites is None:
return 0
return recent_invites
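# Worked example (hypothetical numbers, added for illustration): if RealmCount
# holds "invites_sent::day" rows with values 5 and 7 inside the window,
# Sum("value") yields 12; with no matching rows the aggregate is None and 0 is
# returned instead.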
def check_invite_limit(realm: Realm, num_invitees: int) -> None:
"""Discourage using invitation emails as a vector for carrying spam."""
msg = _(
"To protect users, Zulip limits the number of invitations you can send in one day. Because you have reached the limit, no invitations were sent."
)
if not settings.OPEN_REALM_CREATION:
return
recent_invites = estimate_recent_invites([realm], days=1)
if num_invitees + recent_invites > realm.max_invites:
raise InvitationError(
msg,
[],
sent_invitations=False,
daily_limit_reached=True,
)
default_max = settings.INVITES_DEFAULT_REALM_DAILY_MAX
newrealm_age = datetime.timedelta(days=settings.INVITES_NEW_REALM_DAYS)
if realm.date_created <= timezone_now() - newrealm_age:
# If this isn't a "newly-created" realm, we're done. The
# remaining code applies an aggregate limit across all
# "new" realms, to address sudden bursts of spam realms.
return
if realm.max_invites > default_max:
# If a user is on a realm where we've bumped up
# max_invites, then we exempt them from invite limits.
return
new_realms = Realm.objects.filter(
date_created__gte=timezone_now() - newrealm_age,
_max_invites__lte=default_max,
).all()
for days, count in settings.INVITES_NEW_REALM_LIMIT_DAYS:
recent_invites = estimate_recent_invites(new_realms, days=days)
if num_invitees + recent_invites > count:
raise InvitationError(
msg,
[],
sent_invitations=False,
daily_limit_reached=True,
)
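# Worked example (hypothetical numbers, added for illustration): when
# OPEN_REALM_CREATION is enabled, with realm.max_invites == 100 and an
# estimated 95 invites sent in the last day, num_invitees == 10 trips the
# per-realm check (10 + 95 > 100) and raises InvitationError with
# daily_limit_reached=True, while num_invitees == 5 would pass it.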
def do_invite_users(
user_profile: UserProfile,
invitee_emails: Collection[str],
streams: Collection[Stream],
*,
invite_expires_in_days: Optional[int],
invite_as: int = PreregistrationUser.INVITE_AS["MEMBER"],
) -> None:
num_invites = len(invitee_emails)
check_invite_limit(user_profile.realm, num_invites)
if settings.BILLING_ENABLED:
from corporate.lib.registration import check_spare_licenses_available_for_inviting_new_users
check_spare_licenses_available_for_inviting_new_users(user_profile.realm, num_invites)
realm = user_profile.realm
if not realm.invite_required:
# Inhibit joining an open realm to send spam invitations.
min_age = datetime.timedelta(days=settings.INVITES_MIN_USER_AGE_DAYS)
if user_profile.date_joined > timezone_now() - min_age and not user_profile.is_realm_admin:
raise InvitationError(
_(
"Your account is too new to send invites for this organization. "
"Ask an organization admin, or a more experienced user."
),
[],
sent_invitations=False,
)
good_emails: Set[str] = set()
errors: List[Tuple[str, str, bool]] = []
validate_email_allowed_in_realm = get_realm_email_validator(user_profile.realm)
for email in invitee_emails:
if email == "":
continue
email_error = validate_email_is_valid(
email,
validate_email_allowed_in_realm,
)
if email_error:
errors.append((email, email_error, False))
else:
good_emails.add(email)
"""
    good_emails are emails that look OK so far,
    but we still need to make sure they're not
    going to conflict with existing users.
"""
error_dict = get_existing_user_errors(user_profile.realm, good_emails)
skipped: List[Tuple[str, str, bool]] = []
for email in error_dict:
msg, deactivated = error_dict[email]
skipped.append((email, msg, deactivated))
good_emails.remove(email)
validated_emails = list(good_emails)
if errors:
raise InvitationError(
_("Some emails did not validate, so we didn't send any invitations."),
errors + skipped,
sent_invitations=False,
)
if skipped and len(skipped) == len(invitee_emails):
# All e-mails were skipped, so we didn't actually invite anyone.
raise InvitationError(
_("We weren't able to invite anyone."), skipped, sent_invitations=False
)
# We do this here rather than in the invite queue processor since this
# is used for rate limiting invitations, rather than keeping track of
# when exactly invitations were sent
do_increment_logging_stat(
user_profile.realm,
COUNT_STATS["invites_sent::day"],
None,
timezone_now(),
increment=len(validated_emails),
)
# Now that we are past all the possible errors, we actually create
# the PreregistrationUser objects and trigger the email invitations.
for email in validated_emails:
# The logged in user is the referrer.
prereg_user = PreregistrationUser(
email=email, referred_by=user_profile, invited_as=invite_as, realm=user_profile.realm
)
prereg_user.save()
stream_ids = [stream.id for stream in streams]
prereg_user.streams.set(stream_ids)
event = {
"prereg_id": prereg_user.id,
"referrer_id": user_profile.id,
"email_language": user_profile.realm.default_language,
"invite_expires_in_days": invite_expires_in_days,
}
queue_json_publish("invites", event)
if skipped:
raise InvitationError(
_(
"Some of those addresses are already using Zulip, "
"so we didn't send them an invitation. We did send "
"invitations to everyone else!"
),
skipped,
sent_invitations=True,
)
notify_invites_changed(user_profile.realm)
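# Usage sketch (hypothetical addresses and stream, added for illustration):
#
#     do_invite_users(
#         user_profile,
#         ["[email protected]", "[email protected]"],
#         [stream],
#         invite_expires_in_days=10,
#     )
#
# raises InvitationError on validation or limit problems and otherwise queues
# one "invites" event per validated address.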
def get_invitation_expiry_date(confirmation_obj: Confirmation) -> Optional[int]:
expiry_date = confirmation_obj.expiry_date
if expiry_date is None:
return expiry_date
return datetime_to_timestamp(expiry_date)
def do_get_invites_controlled_by_user(user_profile: UserProfile) -> List[Dict[str, Any]]:
"""
Returns a list of dicts representing invitations that can be controlled by user_profile.
This isn't necessarily the same as all the invitations generated by the user, as administrators
    can also control invitations that they did not themselves create.
"""
if user_profile.is_realm_admin:
prereg_users = filter_to_valid_prereg_users(
PreregistrationUser.objects.filter(referred_by__realm=user_profile.realm)
)
else:
prereg_users = filter_to_valid_prereg_users(
PreregistrationUser.objects.filter(referred_by=user_profile)
)
invites = []
for invitee in prereg_users:
invites.append(
dict(
email=invitee.email,
invited_by_user_id=invitee.referred_by.id,
invited=datetime_to_timestamp(invitee.invited_at),
expiry_date=get_invitation_expiry_date(invitee.confirmation.get()),
id=invitee.id,
invited_as=invitee.invited_as,
is_multiuse=False,
)
)
if not user_profile.is_realm_admin:
# We do not return multiuse invites to non-admin users.
return invites
multiuse_confirmation_objs = Confirmation.objects.filter(
realm=user_profile.realm, type=Confirmation.MULTIUSE_INVITE
).filter(Q(expiry_date__gte=timezone_now()) | Q(expiry_date=None))
for confirmation_obj in multiuse_confirmation_objs:
invite = confirmation_obj.content_object
assert invite is not None
invites.append(
dict(
invited_by_user_id=invite.referred_by.id,
invited=datetime_to_timestamp(confirmation_obj.date_sent),
expiry_date=get_invitation_expiry_date(confirmation_obj),
id=invite.id,
link_url=confirmation_url(
confirmation_obj.confirmation_key,
user_profile.realm,
Confirmation.MULTIUSE_INVITE,
),
invited_as=invite.invited_as,
is_multiuse=True,
)
)
return invites
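# Shape sketch (illustrative, added for clarity): each email-invitation entry
# looks roughly like
#     {"email": ..., "invited_by_user_id": ..., "invited": <timestamp>,
#      "expiry_date": <timestamp or None>, "id": ..., "invited_as": ...,
#      "is_multiuse": False}
# while multiuse entries carry "link_url" instead of "email".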
def get_valid_invite_confirmations_generated_by_user(
user_profile: UserProfile,
) -> List[Confirmation]:
prereg_user_ids = filter_to_valid_prereg_users(
PreregistrationUser.objects.filter(referred_by=user_profile)
).values_list("id", flat=True)
confirmations = list(
Confirmation.objects.filter(type=Confirmation.INVITATION, object_id__in=prereg_user_ids)
)
multiuse_invite_ids = MultiuseInvite.objects.filter(referred_by=user_profile).values_list(
"id", flat=True
)
confirmations += list(
Confirmation.objects.filter(
type=Confirmation.MULTIUSE_INVITE,
object_id__in=multiuse_invite_ids,
).filter(Q(expiry_date__gte=timezone_now()) | Q(expiry_date=None))
)
return confirmations
def revoke_invites_generated_by_user(user_profile: UserProfile) -> None:
confirmations_to_revoke = get_valid_invite_confirmations_generated_by_user(user_profile)
now = timezone_now()
for confirmation in confirmations_to_revoke:
confirmation.expiry_date = now
Confirmation.objects.bulk_update(confirmations_to_revoke, ["expiry_date"])
if len(confirmations_to_revoke):
notify_invites_changed(realm=user_profile.realm)
def do_create_multiuse_invite_link(
referred_by: UserProfile,
invited_as: int,
invite_expires_in_days: Optional[int],
streams: Sequence[Stream] = [],
) -> str:
realm = referred_by.realm
invite = MultiuseInvite.objects.create(realm=realm, referred_by=referred_by)
if streams:
invite.streams.set(streams)
invite.invited_as = invited_as
invite.save()
notify_invites_changed(referred_by.realm)
return create_confirmation_link(
invite, Confirmation.MULTIUSE_INVITE, validity_in_days=invite_expires_in_days
)
def do_revoke_user_invite(prereg_user: PreregistrationUser) -> None:
email = prereg_user.email
realm = prereg_user.realm
assert realm is not None
# Delete both the confirmation objects and the prereg_user object.
# TODO: Probably we actually want to set the confirmation objects
# to a "revoked" status so that we can give the invited user a better
# error message.
content_type = ContentType.objects.get_for_model(PreregistrationUser)
Confirmation.objects.filter(content_type=content_type, object_id=prereg_user.id).delete()
prereg_user.delete()
clear_scheduled_invitation_emails(email)
notify_invites_changed(realm)
def do_revoke_multi_use_invite(multiuse_invite: MultiuseInvite) -> None:
realm = multiuse_invite.referred_by.realm
content_type = ContentType.objects.get_for_model(MultiuseInvite)
Confirmation.objects.filter(content_type=content_type, object_id=multiuse_invite.id).delete()
multiuse_invite.delete()
notify_invites_changed(realm)
def do_resend_user_invite_email(prereg_user: PreregistrationUser) -> int:
    # These two assertions hold structurally for the caller's code paths.
assert prereg_user.referred_by is not None
assert prereg_user.realm is not None
check_invite_limit(prereg_user.referred_by.realm, 1)
prereg_user.invited_at = timezone_now()
prereg_user.save()
expiry_date = prereg_user.confirmation.get().expiry_date
if expiry_date is None:
invite_expires_in_days = None
else:
# The resent invitation is reset to expire as long after the
# reminder is sent as it lasted originally.
invite_expires_in_days = (expiry_date - prereg_user.invited_at).days
prereg_user.confirmation.clear()
do_increment_logging_stat(
prereg_user.realm, COUNT_STATS["invites_sent::day"], None, prereg_user.invited_at
)
clear_scheduled_invitation_emails(prereg_user.email)
# We don't store the custom email body, so just set it to None
event = {
"prereg_id": prereg_user.id,
"referrer_id": prereg_user.referred_by.id,
"email_language": prereg_user.referred_by.realm.default_language,
"invite_expires_in_days": invite_expires_in_days,
}
queue_json_publish("invites", event)
return datetime_to_timestamp(prereg_user.invited_at)
def notify_realm_emoji(realm: Realm) -> None:
event = dict(type="realm_emoji", op="update", realm_emoji=realm.get_emoji())
send_event(realm, event, active_user_ids(realm.id))
def check_add_realm_emoji(
realm: Realm, name: str, author: UserProfile, image_file: IO[bytes]
) -> RealmEmoji:
try:
realm_emoji = RealmEmoji(realm=realm, name=name, author=author)
realm_emoji.full_clean()
realm_emoji.save()
except django.db.utils.IntegrityError:
# Match the string in upload_emoji.
raise JsonableError(_("A custom emoji with this name already exists."))
emoji_file_name = get_emoji_file_name(image_file.name, realm_emoji.id)
# The only user-controlled portion of 'emoji_file_name' is an extension,
    # which cannot contain '..', '/', or '\', making it difficult to exploit.
emoji_file_name = mark_sanitized(emoji_file_name)
emoji_uploaded_successfully = False
is_animated = False
try:
is_animated = upload_emoji_image(image_file, emoji_file_name, author)
emoji_uploaded_successfully = True
finally:
if not emoji_uploaded_successfully:
realm_emoji.delete()
realm_emoji.file_name = emoji_file_name
realm_emoji.is_animated = is_animated
realm_emoji.save(update_fields=["file_name", "is_animated"])
notify_realm_emoji(realm_emoji.realm)
return realm_emoji
def do_remove_realm_emoji(realm: Realm, name: str) -> None:
emoji = RealmEmoji.objects.get(realm=realm, name=name, deactivated=False)
emoji.deactivated = True
emoji.save(update_fields=["deactivated"])
notify_realm_emoji(realm)
def notify_alert_words(user_profile: UserProfile, words: Sequence[str]) -> None:
event = dict(type="alert_words", alert_words=words)
send_event(user_profile.realm, event, [user_profile.id])
def do_add_alert_words(user_profile: UserProfile, alert_words: Iterable[str]) -> None:
words = add_user_alert_words(user_profile, alert_words)
notify_alert_words(user_profile, words)
def do_remove_alert_words(user_profile: UserProfile, alert_words: Iterable[str]) -> None:
words = remove_user_alert_words(user_profile, alert_words)
notify_alert_words(user_profile, words)
def do_mute_topic(
user_profile: UserProfile,
stream: Stream,
topic: str,
date_muted: Optional[datetime.datetime] = None,
) -> None:
if date_muted is None:
date_muted = timezone_now()
add_topic_mute(user_profile, stream.id, stream.recipient_id, topic, date_muted)
event = dict(type="muted_topics", muted_topics=get_topic_mutes(user_profile))
send_event(user_profile.realm, event, [user_profile.id])
def do_unmute_topic(user_profile: UserProfile, stream: Stream, topic: str) -> None:
# Note: If you add any new code to this function, the
# remove_topic_mute call in do_update_message will need to be
# updated for correctness.
try:
remove_topic_mute(user_profile, stream.id, topic)
except UserTopic.DoesNotExist:
raise JsonableError(_("Topic is not muted"))
event = dict(type="muted_topics", muted_topics=get_topic_mutes(user_profile))
send_event(user_profile.realm, event, [user_profile.id])
def do_mute_user(
user_profile: UserProfile,
muted_user: UserProfile,
date_muted: Optional[datetime.datetime] = None,
) -> None:
if date_muted is None:
date_muted = timezone_now()
add_user_mute(user_profile, muted_user, date_muted)
do_mark_muted_user_messages_as_read(user_profile, muted_user)
event = dict(type="muted_users", muted_users=get_user_mutes(user_profile))
send_event(user_profile.realm, event, [user_profile.id])
RealmAuditLog.objects.create(
realm=user_profile.realm,
acting_user=user_profile,
modified_user=user_profile,
event_type=RealmAuditLog.USER_MUTED,
event_time=date_muted,
extra_data=orjson.dumps({"muted_user_id": muted_user.id}).decode(),
)
def do_unmute_user(mute_object: MutedUser) -> None:
user_profile = mute_object.user_profile
muted_user = mute_object.muted_user
mute_object.delete()
event = dict(type="muted_users", muted_users=get_user_mutes(user_profile))
send_event(user_profile.realm, event, [user_profile.id])
RealmAuditLog.objects.create(
realm=user_profile.realm,
acting_user=user_profile,
modified_user=user_profile,
event_type=RealmAuditLog.USER_UNMUTED,
event_time=timezone_now(),
extra_data=orjson.dumps({"unmuted_user_id": muted_user.id}).decode(),
)
def do_mark_hotspot_as_read(user: UserProfile, hotspot: str) -> None:
UserHotspot.objects.get_or_create(user=user, hotspot=hotspot)
event = dict(type="hotspots", hotspots=get_next_hotspots(user))
send_event(user.realm, event, [user.id])
def notify_linkifiers(realm: Realm) -> None:
realm_linkifiers = linkifiers_for_realm(realm.id)
event: Dict[str, object] = dict(type="realm_linkifiers", realm_linkifiers=realm_linkifiers)
send_event(realm, event, active_user_ids(realm.id))
# Below is code for backwards compatibility. The now deprecated
# "realm_filters" event-type is used by older clients, and uses
# tuples.
realm_filters = realm_filters_for_realm(realm.id)
event = dict(type="realm_filters", realm_filters=realm_filters)
send_event(realm, event, active_user_ids(realm.id))
# NOTE: Regexes must be simple enough that they can be easily translated to JavaScript
# RegExp syntax. In addition to JS-compatible syntax, the following features are available:
# * Named groups will be converted to numbered groups automatically
# * Inline-regex flags will be stripped, and where possible translated to RegExp-wide flags
def do_add_linkifier(realm: Realm, pattern: str, url_format_string: str) -> int:
pattern = pattern.strip()
url_format_string = url_format_string.strip()
linkifier = RealmFilter(realm=realm, pattern=pattern, url_format_string=url_format_string)
linkifier.full_clean()
linkifier.save()
notify_linkifiers(realm)
return linkifier.id
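# Illustrative example (hypothetical pattern, added for clarity): a linkifier
# for ticket references might be added as
#
#     do_add_linkifier(realm, r"#(?P<id>[0-9]+)", "https://example.com/ticket/%(id)s")
#
# with the named group translated for the web client as described in the NOTE above.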
def do_remove_linkifier(
realm: Realm, pattern: Optional[str] = None, id: Optional[int] = None
) -> None:
if pattern is not None:
RealmFilter.objects.get(realm=realm, pattern=pattern).delete()
else:
RealmFilter.objects.get(realm=realm, id=id).delete()
notify_linkifiers(realm)
def do_update_linkifier(realm: Realm, id: int, pattern: str, url_format_string: str) -> None:
pattern = pattern.strip()
url_format_string = url_format_string.strip()
linkifier = RealmFilter.objects.get(realm=realm, id=id)
linkifier.pattern = pattern
linkifier.url_format_string = url_format_string
linkifier.full_clean()
linkifier.save(update_fields=["pattern", "url_format_string"])
notify_linkifiers(realm)
@transaction.atomic(durable=True)
def do_add_realm_domain(
realm: Realm, domain: str, allow_subdomains: bool, *, acting_user: Optional[UserProfile]
) -> RealmDomain:
realm_domain = RealmDomain.objects.create(
realm=realm, domain=domain, allow_subdomains=allow_subdomains
)
RealmAuditLog.objects.create(
realm=realm,
acting_user=acting_user,
event_type=RealmAuditLog.REALM_DOMAIN_ADDED,
event_time=timezone_now(),
extra_data=orjson.dumps(
{
"realm_domains": get_realm_domains(realm),
"added_domain": {"domain": domain, "allow_subdomains": allow_subdomains},
}
).decode(),
)
event = dict(
type="realm_domains",
op="add",
realm_domain=dict(
domain=realm_domain.domain, allow_subdomains=realm_domain.allow_subdomains
),
)
transaction.on_commit(lambda: send_event(realm, event, active_user_ids(realm.id)))
return realm_domain
@transaction.atomic(durable=True)
def do_change_realm_domain(
realm_domain: RealmDomain, allow_subdomains: bool, *, acting_user: Optional[UserProfile]
) -> None:
realm_domain.allow_subdomains = allow_subdomains
realm_domain.save(update_fields=["allow_subdomains"])
RealmAuditLog.objects.create(
realm=realm_domain.realm,
acting_user=acting_user,
event_type=RealmAuditLog.REALM_DOMAIN_CHANGED,
event_time=timezone_now(),
extra_data=orjson.dumps(
{
"realm_domains": get_realm_domains(realm_domain.realm),
"changed_domain": {
"domain": realm_domain.domain,
"allow_subdomains": realm_domain.allow_subdomains,
},
}
).decode(),
)
event = dict(
type="realm_domains",
op="change",
realm_domain=dict(
domain=realm_domain.domain, allow_subdomains=realm_domain.allow_subdomains
),
)
transaction.on_commit(
lambda: send_event(realm_domain.realm, event, active_user_ids(realm_domain.realm_id))
)
@transaction.atomic(durable=True)
def do_remove_realm_domain(
realm_domain: RealmDomain, *, acting_user: Optional[UserProfile]
) -> None:
realm = realm_domain.realm
domain = realm_domain.domain
realm_domain.delete()
RealmAuditLog.objects.create(
realm=realm,
acting_user=acting_user,
event_type=RealmAuditLog.REALM_DOMAIN_REMOVED,
event_time=timezone_now(),
extra_data=orjson.dumps(
{
"realm_domains": get_realm_domains(realm),
"removed_domain": {
"domain": realm_domain.domain,
"allow_subdomains": realm_domain.allow_subdomains,
},
}
).decode(),
)
if RealmDomain.objects.filter(realm=realm).count() == 0 and realm.emails_restricted_to_domains:
# If this was the last realm domain, we mark the realm as no
        # longer restricted to domains, because the feature doesn't do
# anything if there are no domains, and this is probably less
# confusing than the alternative.
do_set_realm_property(realm, "emails_restricted_to_domains", False, acting_user=acting_user)
event = dict(type="realm_domains", op="remove", domain=domain)
transaction.on_commit(lambda: send_event(realm, event, active_user_ids(realm.id)))
def notify_realm_playgrounds(realm: Realm, realm_playgrounds: List[RealmPlaygroundDict]) -> None:
event = dict(type="realm_playgrounds", realm_playgrounds=realm_playgrounds)
transaction.on_commit(lambda: send_event(realm, event, active_user_ids(realm.id)))
@transaction.atomic(durable=True)
def do_add_realm_playground(
realm: Realm, *, acting_user: Optional[UserProfile], **kwargs: Any
) -> int:
realm_playground = RealmPlayground(realm=realm, **kwargs)
    # We expect full_clean to always pass since thorough input validation
    # is performed in the view (using check_url, check_pygments_language, etc.)
# before calling this function.
realm_playground.full_clean()
realm_playground.save()
realm_playgrounds = get_realm_playgrounds(realm)
RealmAuditLog.objects.create(
realm=realm,
acting_user=acting_user,
event_type=RealmAuditLog.REALM_PLAYGROUND_ADDED,
event_time=timezone_now(),
extra_data=orjson.dumps(
{
"realm_playgrounds": realm_playgrounds,
"added_playground": RealmPlaygroundDict(
id=realm_playground.id,
name=realm_playground.name,
pygments_language=realm_playground.pygments_language,
url_prefix=realm_playground.url_prefix,
),
}
).decode(),
)
notify_realm_playgrounds(realm, realm_playgrounds)
return realm_playground.id
@transaction.atomic(durable=True)
def do_remove_realm_playground(
realm: Realm, realm_playground: RealmPlayground, *, acting_user: Optional[UserProfile]
) -> None:
removed_playground = {
"name": realm_playground.name,
"pygments_language": realm_playground.pygments_language,
"url_prefix": realm_playground.url_prefix,
}
realm_playground.delete()
realm_playgrounds = get_realm_playgrounds(realm)
RealmAuditLog.objects.create(
realm=realm,
acting_user=acting_user,
event_type=RealmAuditLog.REALM_PLAYGROUND_REMOVED,
event_time=timezone_now(),
extra_data=orjson.dumps(
{
"realm_playgrounds": realm_playgrounds,
"removed_playground": removed_playground,
}
).decode(),
)
notify_realm_playgrounds(realm, realm_playgrounds)
def get_occupied_streams(realm: Realm) -> QuerySet:
# TODO: Make a generic stub for QuerySet
"""Get streams with subscribers"""
exists_expression = Exists(
Subscription.objects.filter(
active=True,
is_user_active=True,
user_profile__realm=realm,
recipient_id=OuterRef("recipient_id"),
),
)
occupied_streams = (
Stream.objects.filter(realm=realm, deactivated=False)
.annotate(occupied=exists_expression)
.filter(occupied=True)
)
return occupied_streams
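# Query sketch (illustrative, added for clarity): the Exists() annotation above
# is roughly equivalent to SQL of the form
#     SELECT ... FROM stream WHERE realm_id = ... AND NOT deactivated
#       AND EXISTS (SELECT 1 FROM subscription
#                   WHERE recipient_id = stream.recipient_id AND active ...)
# (table names abbreviated; shown only to clarify the ORM construction).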
def get_web_public_streams(realm: Realm) -> List[Dict[str, Any]]: # nocoverage
query = get_web_public_streams_queryset(realm)
streams = Stream.get_client_data(query)
return streams
def do_get_streams(
user_profile: UserProfile,
include_public: bool = True,
include_web_public: bool = False,
include_subscribed: bool = True,
include_all_active: bool = False,
include_default: bool = False,
include_owner_subscribed: bool = False,
) -> List[Dict[str, Any]]:
# This function is only used by API clients now.
if include_all_active and not user_profile.is_realm_admin:
raise JsonableError(_("User not authorized for this query"))
include_public = include_public and user_profile.can_access_public_streams()
# Start out with all active streams in the realm.
query = Stream.objects.filter(realm=user_profile.realm, deactivated=False)
if include_all_active:
streams = Stream.get_client_data(query)
else:
        # We construct a query as the OR (|) of the various sources
# this user requested streams from.
query_filter: Optional[Q] = None
def add_filter_option(option: Q) -> None:
nonlocal query_filter
if query_filter is None:
query_filter = option
else:
query_filter |= option
if include_subscribed:
subscribed_stream_ids = get_subscribed_stream_ids_for_user(user_profile)
recipient_check = Q(id__in=set(subscribed_stream_ids))
add_filter_option(recipient_check)
if include_public:
invite_only_check = Q(invite_only=False)
add_filter_option(invite_only_check)
if include_web_public:
# This should match get_web_public_streams_queryset
web_public_check = Q(
is_web_public=True,
invite_only=False,
history_public_to_subscribers=True,
deactivated=False,
)
add_filter_option(web_public_check)
if include_owner_subscribed and user_profile.is_bot:
bot_owner = user_profile.bot_owner
assert bot_owner is not None
owner_stream_ids = get_subscribed_stream_ids_for_user(bot_owner)
owner_subscribed_check = Q(id__in=set(owner_stream_ids))
add_filter_option(owner_subscribed_check)
if query_filter is not None:
query = query.filter(query_filter)
streams = Stream.get_client_data(query)
else:
# Don't bother going to the database with no valid sources
streams = []
streams.sort(key=lambda elt: elt["name"])
if include_default:
is_default = {}
default_streams = get_default_streams_for_realm(user_profile.realm_id)
for default_stream in default_streams:
is_default[default_stream.id] = True
for stream in streams:
stream["is_default"] = is_default.get(stream["stream_id"], False)
return streams
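# Composition sketch (illustrative, added for clarity): with include_subscribed
# and include_public both True, query_filter ends up as
#     Q(id__in=subscribed_stream_ids) | Q(invite_only=False)
# so a stream is returned if it matches any of the requested sources.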
def notify_attachment_update(
user_profile: UserProfile, op: str, attachment_dict: Dict[str, Any]
) -> None:
event = {
"type": "attachment",
"op": op,
"attachment": attachment_dict,
"upload_space_used": user_profile.realm.currently_used_upload_space_bytes(),
}
send_event(user_profile.realm, event, [user_profile.id])
def do_claim_attachments(message: Message, potential_path_ids: List[str]) -> bool:
claimed = False
for path_id in potential_path_ids:
user_profile = message.sender
is_message_realm_public = False
is_message_web_public = False
if message.is_stream_message():
stream = Stream.objects.get(id=message.recipient.type_id)
is_message_realm_public = stream.is_public()
is_message_web_public = stream.is_web_public
if not validate_attachment_request(user_profile, path_id):
# Technically, there are 2 cases here:
            # * The user put something in their message that has the form
            #   of an upload, but doesn't correspond to a file that actually
            #   exists.  validate_attachment_request will return None.
# * The user is trying to send a link to a file they don't have permission to
# access themselves. validate_attachment_request will return False.
#
# Either case is unusual and suggests a UI bug that got
# the user in this situation, so we log in these cases.
logging.warning(
"User %s tried to share upload %s in message %s, but lacks permission",
user_profile.id,
path_id,
message.id,
)
continue
claimed = True
attachment = claim_attachment(
user_profile, path_id, message, is_message_realm_public, is_message_web_public
)
notify_attachment_update(user_profile, "update", attachment.to_dict())
return claimed
def do_delete_old_unclaimed_attachments(weeks_ago: int) -> None:
old_unclaimed_attachments = get_old_unclaimed_attachments(weeks_ago)
for attachment in old_unclaimed_attachments:
delete_message_image(attachment.path_id)
attachment.delete()
def check_attachment_reference_change(
message: Message, rendering_result: MessageRenderingResult
) -> bool:
    # For an unsaved message edit (message.* has been updated, but not
# saved to the database), adjusts Attachment data to correspond to
# the new content.
prev_attachments = {a.path_id for a in message.attachment_set.all()}
new_attachments = set(rendering_result.potential_attachment_path_ids)
if new_attachments == prev_attachments:
return bool(prev_attachments)
to_remove = list(prev_attachments - new_attachments)
if len(to_remove) > 0:
attachments_to_update = Attachment.objects.filter(path_id__in=to_remove).select_for_update()
message.attachment_set.remove(*attachments_to_update)
to_add = list(new_attachments - prev_attachments)
if len(to_add) > 0:
do_claim_attachments(message, to_add)
return message.attachment_set.exists()
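# Worked example (hypothetical path IDs, added for clarity): if the message
# previously referenced {"a", "b"} and the edited content references {"b", "c"},
# then to_remove == ["a"], to_add == ["c"], and the return value reports whether
# any attachment is still linked to the message afterwards.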
def notify_realm_custom_profile_fields(realm: Realm) -> None:
fields = custom_profile_fields_for_realm(realm.id)
event = dict(type="custom_profile_fields", fields=[f.as_dict() for f in fields])
send_event(realm, event, active_user_ids(realm.id))
def try_add_realm_default_custom_profile_field(
realm: Realm, field_subtype: str
) -> CustomProfileField:
field_data = DEFAULT_EXTERNAL_ACCOUNTS[field_subtype]
custom_profile_field = CustomProfileField(
realm=realm,
name=field_data["name"],
field_type=CustomProfileField.EXTERNAL_ACCOUNT,
hint=field_data["hint"],
field_data=orjson.dumps(dict(subtype=field_subtype)).decode(),
)
custom_profile_field.save()
custom_profile_field.order = custom_profile_field.id
custom_profile_field.save(update_fields=["order"])
notify_realm_custom_profile_fields(realm)
return custom_profile_field
def try_add_realm_custom_profile_field(
realm: Realm,
name: str,
field_type: int,
hint: str = "",
field_data: Optional[ProfileFieldData] = None,
) -> CustomProfileField:
custom_profile_field = CustomProfileField(realm=realm, name=name, field_type=field_type)
custom_profile_field.hint = hint
if (
custom_profile_field.field_type == CustomProfileField.SELECT
or custom_profile_field.field_type == CustomProfileField.EXTERNAL_ACCOUNT
):
custom_profile_field.field_data = orjson.dumps(field_data or {}).decode()
custom_profile_field.save()
custom_profile_field.order = custom_profile_field.id
custom_profile_field.save(update_fields=["order"])
notify_realm_custom_profile_fields(realm)
return custom_profile_field
def do_remove_realm_custom_profile_field(realm: Realm, field: CustomProfileField) -> None:
"""
Deleting a field will also delete the user profile data
    associated with it in the CustomProfileFieldValue model.
"""
field.delete()
notify_realm_custom_profile_fields(realm)
def do_remove_realm_custom_profile_fields(realm: Realm) -> None:
CustomProfileField.objects.filter(realm=realm).delete()
def try_update_realm_custom_profile_field(
realm: Realm,
field: CustomProfileField,
name: str,
hint: str = "",
field_data: Optional[ProfileFieldData] = None,
) -> None:
field.name = name
field.hint = hint
if (
field.field_type == CustomProfileField.SELECT
or field.field_type == CustomProfileField.EXTERNAL_ACCOUNT
):
field.field_data = orjson.dumps(field_data or {}).decode()
field.save()
notify_realm_custom_profile_fields(realm)
def try_reorder_realm_custom_profile_fields(realm: Realm, order: List[int]) -> None:
    order_mapping = {field_id: position for position, field_id in enumerate(order)}
custom_profile_fields = CustomProfileField.objects.filter(realm=realm)
for custom_profile_field in custom_profile_fields:
if custom_profile_field.id not in order_mapping:
raise JsonableError(_("Invalid order mapping."))
for custom_profile_field in custom_profile_fields:
custom_profile_field.order = order_mapping[custom_profile_field.id]
custom_profile_field.save(update_fields=["order"])
notify_realm_custom_profile_fields(realm)
def notify_user_update_custom_profile_data(
user_profile: UserProfile, field: Dict[str, Union[int, str, List[int], None]]
) -> None:
data = dict(id=field["id"], value=field["value"])
if field["rendered_value"]:
data["rendered_value"] = field["rendered_value"]
payload = dict(user_id=user_profile.id, custom_profile_field=data)
event = dict(type="realm_user", op="update", person=payload)
send_event(user_profile.realm, event, active_user_ids(user_profile.realm.id))
def do_update_user_custom_profile_data_if_changed(
user_profile: UserProfile,
data: List[Dict[str, Union[int, ProfileDataElementValue]]],
) -> None:
with transaction.atomic():
for custom_profile_field in data:
field_value, created = CustomProfileFieldValue.objects.get_or_create(
user_profile=user_profile, field_id=custom_profile_field["id"]
)
# field_value.value is a TextField() so we need to have field["value"]
# in string form to correctly make comparisons and assignments.
if isinstance(custom_profile_field["value"], str):
custom_profile_field_value_string = custom_profile_field["value"]
else:
custom_profile_field_value_string = orjson.dumps(
custom_profile_field["value"]
).decode()
if not created and field_value.value == custom_profile_field_value_string:
# If the field value isn't actually being changed to a different one,
# we have nothing to do here for this field.
continue
field_value.value = custom_profile_field_value_string
if field_value.field.is_renderable():
field_value.rendered_value = render_stream_description(
custom_profile_field_value_string
)
field_value.save(update_fields=["value", "rendered_value"])
else:
field_value.save(update_fields=["value"])
notify_user_update_custom_profile_data(
user_profile,
{
"id": field_value.field_id,
"value": field_value.value,
"rendered_value": field_value.rendered_value,
"type": field_value.field.field_type,
},
)
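# Example of the string-form handling above (hypothetical values, added for
# clarity): a list-valued field such as [1, 2] is serialized with orjson to
# "[1,2]" before comparison and storage in the TextField, while a plain string
# value like "Boston" is used as-is.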
def check_remove_custom_profile_field_value(user_profile: UserProfile, field_id: int) -> None:
try:
custom_profile_field = CustomProfileField.objects.get(realm=user_profile.realm, id=field_id)
field_value = CustomProfileFieldValue.objects.get(
field=custom_profile_field, user_profile=user_profile
)
field_value.delete()
notify_user_update_custom_profile_data(
user_profile,
{
"id": field_id,
"value": None,
"rendered_value": None,
"type": custom_profile_field.field_type,
},
)
except CustomProfileField.DoesNotExist:
raise JsonableError(_("Field id {id} not found.").format(id=field_id))
except CustomProfileFieldValue.DoesNotExist:
pass
def do_send_create_user_group_event(user_group: UserGroup, members: List[UserProfile]) -> None:
event = dict(
type="user_group",
op="add",
group=dict(
name=user_group.name,
members=[member.id for member in members],
description=user_group.description,
id=user_group.id,
is_system_group=user_group.is_system_group,
),
)
send_event(user_group.realm, event, active_user_ids(user_group.realm_id))
def check_add_user_group(
realm: Realm, name: str, initial_members: List[UserProfile], description: str
) -> None:
try:
user_group = create_user_group(name, initial_members, realm, description=description)
do_send_create_user_group_event(user_group, initial_members)
except django.db.utils.IntegrityError:
raise JsonableError(_("User group '{}' already exists.").format(name))
def do_send_user_group_update_event(user_group: UserGroup, data: Dict[str, str]) -> None:
event = dict(type="user_group", op="update", group_id=user_group.id, data=data)
send_event(user_group.realm, event, active_user_ids(user_group.realm_id))
def do_update_user_group_name(user_group: UserGroup, name: str) -> None:
try:
user_group.name = name
user_group.save(update_fields=["name"])
except django.db.utils.IntegrityError:
raise JsonableError(_("User group '{}' already exists.").format(name))
do_send_user_group_update_event(user_group, dict(name=name))
def do_update_user_group_description(user_group: UserGroup, description: str) -> None:
user_group.description = description
user_group.save(update_fields=["description"])
do_send_user_group_update_event(user_group, dict(description=description))
def do_update_outgoing_webhook_service(
bot_profile: UserProfile, service_interface: int, service_payload_url: str
) -> None:
# TODO: First service is chosen because currently one bot can only have one service.
# Update this once multiple services are supported.
service = get_bot_services(bot_profile.id)[0]
service.base_url = service_payload_url
service.interface = service_interface
service.save()
send_event(
bot_profile.realm,
dict(
type="realm_bot",
op="update",
bot=dict(
user_id=bot_profile.id,
services=[
dict(
base_url=service.base_url, interface=service.interface, token=service.token
)
],
),
),
bot_owner_user_ids(bot_profile),
)
def do_update_bot_config_data(bot_profile: UserProfile, config_data: Dict[str, str]) -> None:
for key, value in config_data.items():
set_bot_config(bot_profile, key, value)
updated_config_data = get_bot_config(bot_profile)
send_event(
bot_profile.realm,
dict(
type="realm_bot",
op="update",
bot=dict(
user_id=bot_profile.id,
services=[dict(config_data=updated_config_data)],
),
),
bot_owner_user_ids(bot_profile),
)
def get_service_dicts_for_bot(user_profile_id: int) -> List[Dict[str, Any]]:
user_profile = get_user_profile_by_id(user_profile_id)
services = get_bot_services(user_profile_id)
service_dicts: List[Dict[str, Any]] = []
if user_profile.bot_type == UserProfile.OUTGOING_WEBHOOK_BOT:
service_dicts = [
{
"base_url": service.base_url,
"interface": service.interface,
"token": service.token,
}
for service in services
]
elif user_profile.bot_type == UserProfile.EMBEDDED_BOT:
try:
service_dicts = [
{
"config_data": get_bot_config(user_profile),
"service_name": services[0].name,
}
]
# A ConfigError just means that there are no config entries for user_profile.
except ConfigError:
pass
return service_dicts
def get_service_dicts_for_bots(
bot_dicts: List[Dict[str, Any]], realm: Realm
) -> Dict[int, List[Dict[str, Any]]]:
bot_profile_ids = [bot_dict["id"] for bot_dict in bot_dicts]
bot_services_by_uid: Dict[int, List[Service]] = defaultdict(list)
for service in Service.objects.filter(user_profile_id__in=bot_profile_ids):
bot_services_by_uid[service.user_profile_id].append(service)
embedded_bot_ids = [
bot_dict["id"] for bot_dict in bot_dicts if bot_dict["bot_type"] == UserProfile.EMBEDDED_BOT
]
embedded_bot_configs = get_bot_configs(embedded_bot_ids)
service_dicts_by_uid: Dict[int, List[Dict[str, Any]]] = {}
for bot_dict in bot_dicts:
bot_profile_id = bot_dict["id"]
bot_type = bot_dict["bot_type"]
services = bot_services_by_uid[bot_profile_id]
service_dicts: List[Dict[str, Any]] = []
if bot_type == UserProfile.OUTGOING_WEBHOOK_BOT:
service_dicts = [
{
"base_url": service.base_url,
"interface": service.interface,
"token": service.token,
}
for service in services
]
elif bot_type == UserProfile.EMBEDDED_BOT:
            if bot_profile_id in embedded_bot_configs:
bot_config = embedded_bot_configs[bot_profile_id]
service_dicts = [
{
"config_data": bot_config,
"service_name": services[0].name,
}
]
service_dicts_by_uid[bot_profile_id] = service_dicts
return service_dicts_by_uid
def get_owned_bot_dicts(
user_profile: UserProfile, include_all_realm_bots_if_admin: bool = True
) -> List[Dict[str, Any]]:
if user_profile.is_realm_admin and include_all_realm_bots_if_admin:
result = get_bot_dicts_in_realm(user_profile.realm)
else:
result = UserProfile.objects.filter(
realm=user_profile.realm, is_bot=True, bot_owner=user_profile
).values(*bot_dict_fields)
services_by_ids = get_service_dicts_for_bots(result, user_profile.realm)
return [
{
"email": botdict["email"],
"user_id": botdict["id"],
"full_name": botdict["full_name"],
"bot_type": botdict["bot_type"],
"is_active": botdict["is_active"],
"api_key": botdict["api_key"],
"default_sending_stream": botdict["default_sending_stream__name"],
"default_events_register_stream": botdict["default_events_register_stream__name"],
"default_all_public_streams": botdict["default_all_public_streams"],
"owner_id": botdict["bot_owner_id"],
"avatar_url": avatar_url_from_dict(botdict),
"services": services_by_ids[botdict["id"]],
}
for botdict in result
]
def do_send_user_group_members_update_event(
event_name: str, user_group: UserGroup, user_ids: List[int]
) -> None:
event = dict(type="user_group", op=event_name, group_id=user_group.id, user_ids=user_ids)
transaction.on_commit(
lambda: send_event(user_group.realm, event, active_user_ids(user_group.realm_id))
)
@transaction.atomic(savepoint=False)
def bulk_add_members_to_user_group(user_group: UserGroup, user_profile_ids: List[int]) -> None:
memberships = [
UserGroupMembership(user_group_id=user_group.id, user_profile_id=user_id)
for user_id in user_profile_ids
]
UserGroupMembership.objects.bulk_create(memberships)
do_send_user_group_members_update_event("add_members", user_group, user_profile_ids)
@transaction.atomic(savepoint=False)
def remove_members_from_user_group(user_group: UserGroup, user_profile_ids: List[int]) -> None:
UserGroupMembership.objects.filter(
user_group_id=user_group.id, user_profile_id__in=user_profile_ids
).delete()
do_send_user_group_members_update_event("remove_members", user_group, user_profile_ids)
def do_send_delete_user_group_event(realm: Realm, user_group_id: int, realm_id: int) -> None:
event = dict(type="user_group", op="remove", group_id=user_group_id)
send_event(realm, event, active_user_ids(realm_id))
def check_delete_user_group(user_group_id: int, user_profile: UserProfile) -> None:
user_group = access_user_group_by_id(user_group_id, user_profile)
user_group.delete()
do_send_delete_user_group_event(user_profile.realm, user_group_id, user_profile.realm.id)
def do_send_realm_reactivation_email(realm: Realm, *, acting_user: Optional[UserProfile]) -> None:
url = create_confirmation_link(realm, Confirmation.REALM_REACTIVATION)
RealmAuditLog.objects.create(
realm=realm,
acting_user=acting_user,
event_type=RealmAuditLog.REALM_REACTIVATION_EMAIL_SENT,
event_time=timezone_now(),
)
context = {"confirmation_url": url, "realm_uri": realm.uri, "realm_name": realm.name}
language = realm.default_language
send_email_to_admins(
"zerver/emails/realm_reactivation",
realm,
from_address=FromAddress.tokenized_no_reply_address(),
from_name=FromAddress.security_email_from_name(language=language),
language=language,
context=context,
)
def do_set_zoom_token(user: UserProfile, token: Optional[Dict[str, object]]) -> None:
user.zoom_token = token
user.save(update_fields=["zoom_token"])
send_event(
user.realm,
dict(type="has_zoom_token", value=token is not None),
[user.id],
)
def notify_realm_export(user_profile: UserProfile) -> None:
# In the future, we may want to send this event to all realm admins.
event = dict(type="realm_export", exports=get_realm_exports_serialized(user_profile))
send_event(user_profile.realm, event, [user_profile.id])
def do_delete_realm_export(user_profile: UserProfile, export: RealmAuditLog) -> None:
# Give mypy a hint so it knows `orjson.loads`
# isn't being passed an `Optional[str]`.
export_extra_data = export.extra_data
assert export_extra_data is not None
export_data = orjson.loads(export_extra_data)
export_path = export_data.get("export_path")
if export_path:
# Allow removal even if the export failed.
delete_export_tarball(export_path)
export_data.update(deleted_timestamp=timezone_now().timestamp())
export.extra_data = orjson.dumps(export_data).decode()
export.save(update_fields=["extra_data"])
notify_realm_export(user_profile)
def get_topic_messages(user_profile: UserProfile, stream: Stream, topic_name: str) -> List[Message]:
query = UserMessage.objects.filter(
user_profile=user_profile,
message__recipient=stream.recipient,
).order_by("id")
return [um.message for um in filter_by_topic_name_via_message(query, topic_name)]
| 37.247503 | 153 | 0.679024 |
4a1ed0fa53bd8b89e682d3d4ffc5a6db7fb7e5a6 | 1,988 | py | Python | docs/manual/tuple_functor.py | Risto97/pygears | 19393e85101a16762cb3bbbf3010946ef69217f2 | [ "MIT" ] | 120 | 2018-04-23T08:29:04.000Z | 2022-03-30T14:41:52.000Z | docs/manual/tuple_functor.py | FZP1607152286/pygears | a0b21d445e1d5c89ad66751447b8253536b835ee | [ "MIT" ] | 12 | 2019-07-09T17:12:58.000Z | 2022-03-18T09:05:10.000Z | docs/manual/tuple_functor.py | FZP1607152286/pygears | a0b21d445e1d5c89ad66751447b8253536b835ee | [ "MIT" ] | 12 | 2019-05-10T19:42:08.000Z | 2022-03-28T18:26:44.000Z |
from bdp import block, cap, path, text, fig, prev, p
part = block(
text_margin=p(0.5, 0.5),
alignment="nw",
dotted=True,
group='tight',
group_margin=[p(1, 3), p(1, 2)])
comp = block(size=p(6, 4), nodesep=(6, 2))
ps_comp = block(size=p(6, 6), nodesep=(2, 3))
bus_cap = cap(length=0.4, width=0.6, inset=0, type='Stealth')
bus = path(
style=(None, bus_cap), line_width=0.3, double=True, border_width=0.06)
bus_text = text(font="\\scriptsize", margin=p(0, 0.5))
functor = part("Tuple Functor")
functor['split'] = comp("Split", size=(4, 6))
functor['f1'] = comp(
"*2", size=(4, 4)).right(functor['split']).aligny(functor['split'].w(2),
prev().s(0))
functor['f2'] = comp("*2", size=(4, 4)).below(functor['f1'])
functor['concat'] = comp(
"Concat", size=(4, 6)).right(functor['f1']).aligny(functor['split'].p)
producer = comp("Producer").left(functor['split'], 1).aligny(
functor['split'].e(0.5),
prev().e(0.5))
fig << producer
prod2split = bus(producer.e(0.5), functor['split'].w(0.5))
fig << prod2split
fig << bus_text("(u16, u16)").align(prod2split.pos(0.5), prev().s(0.5, 0.2))
for i in range(2):
conn = bus(
functor['split'].e(i * 4 + 1),
functor[f'f{i+1}'].w(0.5) - (2, 0),
functor[f'f{i+1}'].w(0.5),
routedef='-|')
fig << bus_text("u16").align(conn.pos(0), prev().s(-0.4, 0.1))
fig << conn
conn = bus(
functor[f'f{i+1}'].e(0.5),
functor[f'f{i+1}'].e(0.5) + (2, 0),
functor['concat'].w(i * 4 + 1),
routedef='|-')
fig << bus_text("u17").align(conn.pos(1), prev().s(1.4, 0.1))
fig << conn
consumer = comp("Consumer").right(functor['concat'], 1).aligny(
functor['concat'].e(0.5),
prev().e(0.5))
fig << consumer
con2cons = bus(functor['concat'].e(0.5), consumer.w(0.5))
fig << con2cons
fig << bus_text("(u17, u17)").align(con2cons.pos(0.5), prev().s(0.5, 0.2))
fig << functor
# render_fig(fig)
| 31.555556 | 76 | 0.553823 |
4a1ed13d20f2790a0021afdbe5dc288419d537f1 | 3,930 | py | Python | qiime2/core/testing/pipeline.py | turanoo/qiime2 | 2af79e1a81b35b396b1a80e01617dba0f4e10446 | [
"BSD-3-Clause"
] | null | null | null | qiime2/core/testing/pipeline.py | turanoo/qiime2 | 2af79e1a81b35b396b1a80e01617dba0f4e10446 | [
"BSD-3-Clause"
] | null | null | null | qiime2/core/testing/pipeline.py | turanoo/qiime2 | 2af79e1a81b35b396b1a80e01617dba0f4e10446 | [
"BSD-3-Clause"
] | null | null | null | # ----------------------------------------------------------------------------
# Copyright (c) 2016-2018, QIIME 2 development team.
#
# Distributed under the terms of the Modified BSD License.
#
# The full license is in the file LICENSE, distributed with this software.
# ----------------------------------------------------------------------------
from .type import SingleInt, Mapping
def parameter_only_pipeline(ctx, int1, int2=2, metadata=None):
identity_with_optional_metadata = ctx.get_action(
'dummy_plugin', 'identity_with_optional_metadata')
concatenate_ints = ctx.get_action('dummy_plugin', 'concatenate_ints')
ints1 = ctx.make_artifact('IntSequence2', [int1, int2, 3])
ints2, = identity_with_optional_metadata(ints1, metadata)
ints3, = identity_with_optional_metadata(ints1, metadata)
more_ints, = concatenate_ints(ints3, ints2, ints1, int1=int1, int2=int2)
return ints1, more_ints
def typical_pipeline(ctx, int_sequence, mapping, do_extra_thing, add=1):
split_ints = ctx.get_action('dummy_plugin', 'split_ints')
most_common_viz = ctx.get_action('dummy_plugin', 'most_common_viz')
left, right = split_ints(int_sequence)
if do_extra_thing:
left = ctx.make_artifact(
'IntSequence1', [i + add for i in left.view(list)])
val, = mapping.view(dict).values()
# Some kind of runtime failure
if val != '42':
raise ValueError("Bad mapping")
left_viz, = most_common_viz(left)
right_viz, = most_common_viz(right)
return mapping, left, right, left_viz, right_viz
def optional_artifact_pipeline(ctx, int_sequence, single_int=None):
optional_artifact_method = ctx.get_action(
'dummy_plugin', 'optional_artifacts_method')
if single_int is None:
# not a nested pipeline, just sharing the ctx object
single_int = pointless_pipeline(ctx)
num1 = single_int.view(int)
ints, = optional_artifact_method(int_sequence, num1)
return ints
def visualizer_only_pipeline(ctx, mapping):
no_input_viz = ctx.get_action('dummy_plugin', 'no_input_viz')
mapping_viz = ctx.get_action('dummy_plugin', 'mapping_viz')
viz1, = no_input_viz()
viz2, = mapping_viz(mapping, mapping, 'foo', 'bar')
return viz1, viz2
def pipelines_in_pipeline(ctx, int_sequence, mapping):
pointless_pipeline = ctx.get_action('dummy_plugin', 'pointless_pipeline')
typical_pipeline = ctx.get_action('dummy_plugin', 'typical_pipeline')
visualizer_only_pipeline = ctx.get_action(
'dummy_plugin', 'visualizer_only_pipeline')
results = []
results += pointless_pipeline()
typical_results = typical_pipeline(int_sequence, mapping, True)
results += typical_results
results += visualizer_only_pipeline(typical_results[0])
return tuple(results)
def pointless_pipeline(ctx):
# Use a real type expression instead of a string.
return ctx.make_artifact(SingleInt, 4)
def failing_pipeline(ctx, int_sequence, break_from='arity'):
merge_mappings = ctx.get_action('dummy_plugin', 'merge_mappings')
list_ = int_sequence.view(list)
if list_:
integer = list_[0]
else:
integer = 0
# Made here so that we can make sure it gets cleaned up
wrong_output = ctx.make_artifact(SingleInt, integer)
if break_from == 'arity':
return int_sequence, int_sequence, int_sequence
elif break_from == 'return-view':
return None
elif break_from == 'type':
return wrong_output
elif break_from == 'method':
a = ctx.make_artifact(Mapping, {'foo': 'a'})
b = ctx.make_artifact(Mapping, {'foo': 'b'})
# has the same key
merge_mappings(a, b)
elif break_from == 'no-plugin':
ctx.get_action('not%a$plugin', 'foo')
elif break_from == 'no-action':
ctx.get_action('dummy_plugin', 'not%a$method')
else:
raise ValueError('this never works')
| 33.589744 | 78 | 0.670738 |
4a1ed1be74059784d57ab401b83feaa7d93eac15 | 5,384 | py | Python | gr-azure-software-radio/python/qa_blob_source.py | pomeroy3/azure-software-radio | a336e5670483e1085cfa1d754c6917c9e4cc6aec | [
"MIT"
] | null | null | null | gr-azure-software-radio/python/qa_blob_source.py | pomeroy3/azure-software-radio | a336e5670483e1085cfa1d754c6917c9e4cc6aec | [
"MIT"
] | null | null | null | gr-azure-software-radio/python/qa_blob_source.py | pomeroy3/azure-software-radio | a336e5670483e1085cfa1d754c6917c9e4cc6aec | [
"MIT"
] | null | null | null | # pylint: disable=missing-function-docstring, no-self-use, missing-class-docstring, no-member
#!/usr/bin/env python
# -*- coding: utf-8 -*-
#
# Copyright (c) Microsoft Corporation.
# Licensed under the GNU General Public License v3.0 or later.
# See License.txt in the project root for license information.
#
import uuid
from unittest.mock import patch
from gnuradio import gr, gr_unittest
from gnuradio import blocks
import numpy as np
from azure_software_radio import BlobSource
class qa_BlobSource(gr_unittest.TestCase):
# pylint: disable=invalid-name
def setUp(self):
self.blob_connection_string = (
"DefaultEndpointsProtocol=https;AccountName=accountname;AccountKey=accountkey;"
+ "EndpointSuffix=core.windows.net"
)
self.test_blob_container_name = str(uuid.uuid4())
self.tb = gr.top_block(catch_exceptions=False)
# pylint: disable=invalid-name
def tearDown(self):
self.tb = None
def test_instance(self):
'''
Ensure we don't throw errors in the constructor when given inputs with valid formats
'''
instance = BlobSource(np_dtype=np.complex64,
vlen=1,
authentication_method="connection_string",
connection_str=self.blob_connection_string,
container_name=self.test_blob_container_name,
blob_name='test-instance',
queue_size=4)
# really only checking that the init didn't throw an exception above, but adding the check
# below to keep flake8 happy
self.assertIsNotNone(instance)
def test_chunk_residue(self):
'''
Test that we don't crash if we get back a non-integer number of samples from a blob chunk
'''
blob_name = 'test-blob.npy'
num_samples = 500
src_data = np.arange(0, num_samples, 1, dtype=np.complex64)
op = BlobSource(np_dtype=np.complex64,
vlen=1,
authentication_method="connection_string",
connection_str=self.blob_connection_string,
container_name=self.test_blob_container_name,
blob_name=blob_name,
queue_size=4)
src_data_bytes = src_data.tobytes()
# don't send the last 2 bytes of the last sample
chunk = src_data_bytes[:-2]
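        # (np.complex64 samples are 8 bytes wide, so truncating 2 bytes leaves a
        # 6-byte residue belonging to the final sample -- see the check below)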
data, chunk_residue = op.chunk_to_array(chunk=chunk, chunk_residue=b'')
# check data - it should include all samples except the last one
self.assertEqual(data.tobytes(), src_data[:-1].tobytes())
# the chunk residue should be the first 6 bytes of the last sample
self.assertEqual(chunk_residue, src_data_bytes[-8:-2])
def test_chunk_residue_merge(self):
'''
Test that we can glue samples back together if we get them in separate chunks
'''
blob_name = 'test-blob.npy'
num_samples = 500
src_data = np.arange(0, num_samples, 1, dtype=np.complex64)
op = BlobSource(np_dtype=np.complex64,
vlen=1,
authentication_method="connection_string",
connection_str=self.blob_connection_string,
container_name=self.test_blob_container_name,
blob_name=blob_name,
queue_size=4,
retry_total=0)
src_data_bytes = src_data.tobytes()
# don't send the first 2 bytes of the first sample
chunk_residue_in = src_data_bytes[:2]
chunk = src_data_bytes[2:]
data, chunk_residue = op.chunk_to_array(chunk=chunk, chunk_residue=chunk_residue_in)
# check data - it should include all samples with nothing left in the residue
self.assertEqual(data.tobytes(), src_data.tobytes())
self.assertEqual(len(chunk_residue), 0)
@patch.object(BlobSource, 'blob_auth_and_container_info_is_valid', return_value=True)
def test_end_to_end_run(self, _):
'''
Test the block properly starts up, reads data from the blob data queue, and cleanly
shuts down
'''
blob_name = 'test-blob.npy'
num_samples = 500
src_data = np.arange(0, num_samples, 1, dtype=np.complex64)
dst = blocks.vector_sink_c()
# prevent setup_blob_iterator from making Azure API calls
with patch.object(BlobSource, 'setup_blob_iterator', spec=iter) as mock_iter:
# add in a list of chunks we want to pretend the blob API gave us
mock_iter.return_value = iter([src_data.tobytes()])
op = BlobSource(np_dtype=np.complex64,
vlen=1,
authentication_method="connection_string",
connection_str=self.blob_connection_string,
container_name=self.test_blob_container_name,
blob_name=blob_name,
queue_size=4,
retry_total=0)
self.tb.connect(op, dst)
self.tb.run()
self.assertEqual(dst.data(), src_data.tolist())
if __name__ == '__main__':
gr_unittest.run(qa_BlobSource)
| 36.378378 | 98 | 0.607912 |
4a1ed36317272dc025f5e863f0a8168d4b466cdb | 1,532 | py | Python | misc/updatePepperVersion.py | korpling/pepper | 3bf595f1a8c95ff0c503a366c0ed612972f45dc9 | [
"Apache-2.0"
] | 18 | 2015-01-16T12:09:51.000Z | 2020-12-14T14:28:34.000Z | misc/updatePepperVersion.py | korpling/pepper | 3bf595f1a8c95ff0c503a366c0ed612972f45dc9 | [
"Apache-2.0"
] | 103 | 2015-01-05T12:42:04.000Z | 2022-01-29T13:22:05.000Z | misc/updatePepperVersion.py | korpling/pepper | 3bf595f1a8c95ff0c503a366c0ed612972f45dc9 | [
"Apache-2.0"
] | 6 | 2015-07-22T13:55:05.000Z | 2018-03-10T13:10:29.000Z | #!/usr/bin/python
# This script replaces the version in a pom.xml file of the parent project. It is used to update the pepper-parent project version.
import sys
import os
from xml.etree.ElementTree import dump
import xml.etree.ElementTree as ET
print "+--------------------------------------------------------+"
print "| |"
print "| Updates the version of the dependency to pepper-parent |"
print "| |"
print "+--------------------------------------------------------+"
# extract arguments for path and version
if len(sys.argv) < 2:
	print "Please pass a new version for Pepper: python updatePepperVersion.py NEW_VERSION (PATH)?"
	sys.exit(1)
newVersion = sys.argv[1]
rootPath = "./"
if len(sys.argv) > 2:
	rootPath = sys.argv[2]
print 'newVersion: ',newVersion
print 'path: ',rootPath
#name of file to read and write
pomXML="pom.xml"
#read all files in passed path argument or current path
for root, subFolders, files in os.walk(rootPath):
if pomXML in files:
currentfile= os.path.join(root, pomXML)
print 'parsing ',currentfile,' ...'
#set namespace for ELementTree, otherwise the namespace is printed in output prefixing every element
ET.register_namespace('', "http://maven.apache.org/POM/4.0.0")
mydoc = ET.parse(currentfile)
namespace = "{http://maven.apache.org/POM/4.0.0}"
#find version
for e in mydoc.findall('{0}parent/{0}version'.format(namespace)):
e.text=newVersion
mydoc.write(currentfile)
| 36.47619 | 131 | 0.622715 |
4a1ed3d3054ebc6d9151b4b1247e3bdc40889e0f | 2,020 | py | Python | server/tasks/accounts.py | anon24816/discovery-artifact-manager | 79eac10fe87949996d36fcef2f3b7282c19bccad | [
"Apache-2.0"
] | 38 | 2017-07-20T17:54:08.000Z | 2022-02-20T02:16:31.000Z | server/tasks/accounts.py | anon24816/discovery-artifact-manager | 79eac10fe87949996d36fcef2f3b7282c19bccad | [
"Apache-2.0"
] | 183 | 2017-03-23T17:17:24.000Z | 2022-02-09T00:07:17.000Z | server/tasks/accounts.py | anon24816/discovery-artifact-manager | 79eac10fe87949996d36fcef2f3b7282c19bccad | [
"Apache-2.0"
] | 42 | 2017-03-23T19:20:20.000Z | 2022-02-20T02:17:09.000Z | # Copyright 2017, Google Inc. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Contains definitions/getters for GitHub and package manager accounts."""
import os
from collections import namedtuple
from google.cloud import datastore
GitHubAccount = namedtuple('GitHubAccount',
'name email username personal_access_token')
NpmAccount = namedtuple('NpmAccount', 'auth_token')
RubyGemsAccount = namedtuple('RubyGemsAccount', 'api_key')
def _get(type_):
    """Returns the first Datastore entity of kind ``type_.__name__`` as a
    ``type_`` namedtuple."""
    client = datastore.Client()
    obj = list(client.query(kind=type_.__name__).fetch())[0]
    return type_(*[obj[x] for x in type_._fields])
def get_github_account():
"""Returns the GitHub account stored in Datastore.
Returns:
GitHubAccount: a GitHub account.
"""
# Allow environment variables to set github details for
# easy local debugging.
env = os.environ # for brevity
github_token = env.get("GITHUB_TOKEN")
if github_token:
return GitHubAccount(
env["GITHUB_USER"],
env["GITHUB_EMAIL"],
env["GITHUB_USERNAME"],
github_token)
return _get(GitHubAccount)
def get_npm_account():
"""Returns the npm account stored in Datastore.
Returns:
NpmAccount: an npm account.
"""
return _get(NpmAccount)
def get_rubygems_account():
"""Returns the RubyGems account stored in Datastore.
Returns:
RubyGemsAccount: a RubyGems account.
"""
return _get(RubyGemsAccount)
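# Usage sketch (assumes this module is importable as ``tasks.accounts`` and that
# the Datastore entities described above already exist):
#
#   from tasks import accounts
#   gh = accounts.get_github_account()
#   token = gh.personal_access_token
#
# Each getter expects a Datastore entity of kind "GitHubAccount", "NpmAccount"
# or "RubyGemsAccount" whose properties match the namedtuple fields above.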
| 29.275362 | 75 | 0.69802 |
4a1ed5445a192ea56824d9b1240931add21ec065 | 57,688 | py | Python | src/sage/modular/pollack_stevens/modsym.py | Blues1998/sage | b5c9cf037cbce672101725f269470135b9b2c5c4 | [
"BSL-1.0"
] | null | null | null | src/sage/modular/pollack_stevens/modsym.py | Blues1998/sage | b5c9cf037cbce672101725f269470135b9b2c5c4 | [
"BSL-1.0"
] | null | null | null | src/sage/modular/pollack_stevens/modsym.py | Blues1998/sage | b5c9cf037cbce672101725f269470135b9b2c5c4 | [
"BSL-1.0"
] | null | null | null | # -*- coding: utf-8 -*-
r"""
Element class for Pollack-Stevens' Modular Symbols
This is the class of elements in the spaces of Pollack-Steven's modular symbols as described in [PS2011]_.
EXAMPLES::
sage: E = EllipticCurve('11a')
sage: phi = E.pollack_stevens_modular_symbol(); phi
Modular symbol of level 11 with values in Sym^0 Q^2
sage: phi.weight() # Note that weight k=2 of a modular form corresponds here to weight 0
0
sage: phi.values()
[-1/5, 1, 0]
sage: phi.is_ordinary(11)
True
sage: phi_lift = phi.lift(11, 5, eigensymbol = True) # long time
sage: phi_lift.padic_lseries().series(5) # long time
O(11^5) + (10 + 3*11 + 6*11^2 + 9*11^3 + O(11^4))*T + (6 + 3*11 + 2*11^2 + O(11^3))*T^2 + (2 + 2*11 + O(11^2))*T^3 + (5 + O(11))*T^4 + O(T^5)
::
sage: A = ModularSymbols(Gamma1(8),4).decomposition()[0].plus_submodule().new_subspace()
sage: from sage.modular.pollack_stevens.space import ps_modsym_from_simple_modsym_space
sage: phi = ps_modsym_from_simple_modsym_space(A)
sage: phi.values()
[(-1, 0, 0), (1, 0, 0), (-9, -6, -4)]
"""
#*****************************************************************************
# Copyright (C) 2012 Robert Pollack <[email protected]>
#
# Distributed under the terms of the GNU General Public License (GPL)
# as published by the Free Software Foundation; either version 2 of
# the License, or (at your option) any later version.
# http://www.gnu.org/licenses/
#******************************************************************************
from __future__ import print_function
from __future__ import absolute_import
import operator
from sage.structure.element import ModuleElement
from sage.structure.richcmp import op_EQ, op_NE
from sage.rings.integer_ring import ZZ
from sage.rings.rational_field import QQ
from sage.misc.cachefunc import cached_method
from sage.rings.padics.factory import Qp
from sage.rings.polynomial.all import PolynomialRing
from sage.rings.padics.padic_generic import pAdicGeneric
from sage.arith.all import next_prime, gcd, kronecker
from sage.misc.misc import verbose
from sage.rings.padics.precision_error import PrecisionError
from sage.categories.action import Action
from .manin_map import ManinMap
from .sigma0 import Sigma0
from .fund_domain import M2Z
minusproj = [1, 0, 0, -1]  # entries of the matrix [[1, 0], [0, -1]], used to form plus/minus parts
def _iterate_Up(Phi, p, M, ap, q, aq, check):
r"""
Return an overconvergent Hecke-eigensymbol lifting self -- self must be a
`p`-ordinary eigensymbol
INPUT:
- ``p`` -- prime
- ``M`` -- integer equal to the number of moments
- ``ap`` -- Hecke eigenvalue at `p`
- ``q`` -- prime
- ``aq`` -- Hecke eigenvalue at `q`
OUTPUT:
- Hecke-eigenvalue overconvergent modular symbol lifting self.
EXAMPLES::
sage: E = EllipticCurve('57a')
sage: p = 5
sage: prec = 4
sage: phi = E.pollack_stevens_modular_symbol()
sage: phi_stabilized = phi.p_stabilize(p,M = prec)
sage: Phi = phi_stabilized.lift(p,prec) # indirect doctest
"""
if ap.valuation(p) > 0:
raise ValueError("Lifting non-ordinary eigensymbols not implemented (issue #20)")
## Act by Hecke to ensure values are in D and not D^dag after solving difference equation
verbose("Applying Hecke", level = 2)
apinv = ~ap
Phi = apinv * Phi.hecke(p)
## Killing eisenstein part
verbose("Killing eisenstein part with q = %s" % q, level = 2)
k = Phi.parent().weight()
Phi = ((q ** (k + 1) + 1) * Phi - Phi.hecke(q))
## Iterating U_p
verbose("Iterating U_p", level = 2)
Psi = apinv * Phi.hecke(p)
for attempts in range(M-1):
verbose("%s attempt (val = %s/%s)" % (attempts + 1,(Phi-Psi).valuation(),M), level = 2)
Phi = Psi
Psi = apinv * Phi.hecke(p)
Psi._normalize()
Phi = ~(q ** (k + 1) + 1 - aq) * Phi
return Phi
class PSModSymAction(Action):
def __init__(self, actor, MSspace):
r"""
Create the action
EXAMPLES::
sage: E = EllipticCurve('11a')
sage: phi = E.pollack_stevens_modular_symbol()
sage: g = phi._map._codomain._act._Sigma0(matrix(ZZ,2,2,[1,2,3,4]))
sage: phi * g # indirect doctest
Modular symbol of level 11 with values in Sym^0 Q^2
"""
Action.__init__(self, actor, MSspace, False, operator.mul)
def _act_(self, g, sym):
r"""
Return the result of sym * g
EXAMPLES::
sage: E = EllipticCurve('11a')
sage: phi = E.pollack_stevens_modular_symbol()
sage: g = phi._map._codomain._act._Sigma0(matrix(ZZ,2,2,[2,1,5,-1]))
sage: phi * g # indirect doctest
Modular symbol of level 11 with values in Sym^0 Q^2
"""
return sym.__class__(sym._map * g, sym.parent(), construct=True)
class PSModularSymbolElement(ModuleElement):
def __init__(self, map_data, parent, construct=False):
r"""
Initialize a modular symbol
EXAMPLES::
sage: E = EllipticCurve('37a')
sage: phi = E.pollack_stevens_modular_symbol()
"""
ModuleElement.__init__(self, parent)
if construct:
self._map = map_data
else:
self._map = ManinMap(parent._coefficients, parent._source, map_data)
def _repr_(self):
r"""
Return the string representation of the symbol.
EXAMPLES::
sage: E = EllipticCurve('11a')
sage: phi = E.pollack_stevens_modular_symbol()
sage: phi._repr_()
'Modular symbol of level 11 with values in Sym^0 Q^2'
"""
return "Modular symbol of level %s with values in %s" % (self.parent().level(), self.parent().coefficient_module())
def dict(self):
r"""
Return dictionary on the modular symbol self, where keys are generators and values are the corresponding values of self on generators
EXAMPLES::
sage: E = EllipticCurve('11a')
sage: phi = E.pollack_stevens_modular_symbol()
sage: Set([x.moment(0) for x in phi.dict().values()]) == Set([-1/5, 1, 0])
True
"""
D = {}
for g in self.parent().source().gens():
D[g] = self._map[g]
return D
def weight(self):
r"""
Return the weight of this Pollack-Stevens modular symbol.
This is `k-2`, where `k` is the usual notion of weight for modular
forms!
EXAMPLES::
sage: E = EllipticCurve('11a')
sage: phi = E.pollack_stevens_modular_symbol()
sage: phi.weight()
0
"""
return self.parent().weight()
def values(self):
r"""
Return the values of the symbol self on our chosen generators
(generators are listed in ``self.dict().keys()``)
EXAMPLES::
sage: E = EllipticCurve('11a')
sage: phi = E.pollack_stevens_modular_symbol()
sage: phi.values()
[-1/5, 1, 0]
sage: sorted(phi.dict())
[
[-1 -1] [ 0 -1] [1 0]
[ 3 2], [ 1 3], [0 1]
]
sage: sorted(phi.values()) == sorted(phi.dict().values())
True
"""
return [self._map[g] for g in self.parent().source().gens()]
def _normalize(self, **kwds):
"""
Normalize all of the values of the symbol self
EXAMPLES::
sage: E = EllipticCurve('11a')
sage: phi = E.pollack_stevens_modular_symbol()
sage: phi._normalize()
Modular symbol of level 11 with values in Sym^0 Q^2
sage: phi._normalize().values()
[-1/5, 1, 0]
"""
for val in self._map:
val.normalize(**kwds)
return self
def _richcmp_(self, other, op):
"""
Check if self == other.
Here self and other have the same parent.
EXAMPLES::
sage: E = EllipticCurve('11a')
sage: phi = E.pollack_stevens_modular_symbol()
sage: phi == phi
True
sage: phi == 2*phi
False
sage: psi = EllipticCurve('37a').pollack_stevens_modular_symbol()
sage: psi == phi
False
"""
if op not in [op_EQ, op_NE]:
return NotImplemented
b = all(self._map[g] == other._map[g]
for g in self.parent().source().gens())
return b == (op == op_EQ)
def _add_(self, right):
"""
Return self + right
EXAMPLES::
sage: E = EllipticCurve('11a')
sage: phi = E.pollack_stevens_modular_symbol()
sage: phi.values()
[-1/5, 1, 0]
sage: phi + phi
Modular symbol of level 11 with values in Sym^0 Q^2
sage: (phi + phi).values()
[-2/5, 2, 0]
"""
return self.__class__(self._map + right._map, self.parent(), construct=True)
def _lmul_(self, right):
"""
Return self * right
EXAMPLES::
sage: E = EllipticCurve('11a')
sage: phi = E.pollack_stevens_modular_symbol()
sage: phi.values()
[-1/5, 1, 0]
sage: 2*phi
Modular symbol of level 11 with values in Sym^0 Q^2
sage: (2*phi).values()
[-2/5, 2, 0]
"""
return self.__class__(self._map * right, self.parent(), construct=True)
def _rmul_(self, right):
"""
Return self * right
EXAMPLES::
sage: E = EllipticCurve('11a')
sage: phi = E.pollack_stevens_modular_symbol()
sage: phi.values()
[-1/5, 1, 0]
sage: phi*2
Modular symbol of level 11 with values in Sym^0 Q^2
sage: (phi*2).values()
[-2/5, 2, 0]
"""
return self.__class__(self._map * right, self.parent(), construct=True)
def _sub_(self, right):
"""
Return self - right
EXAMPLES::
sage: E = EllipticCurve('11a')
sage: phi = E.pollack_stevens_modular_symbol()
sage: phi.values()
[-1/5, 1, 0]
sage: phi - phi
Modular symbol of level 11 with values in Sym^0 Q^2
sage: (phi - phi).values()
[0, 0, 0]
"""
return self.__class__(self._map - right._map, self.parent(), construct=True)
def _get_prime(self, p=None, alpha=None, allow_none=False):
"""
Combine a prime specified by the user with the prime from the parent.
INPUT:
- ``p`` -- an integer or None (default None); if specified
needs to match the prime of the parent.
- ``alpha`` -- an element or None (default None); if p-adic
can contribute a prime.
- ``allow_none`` -- boolean (default False); whether to allow
no prime to be specified.
OUTPUT:
- a prime or None. If ``allow_none`` is False then a
``ValueError`` will be raised rather than returning None if no
prime can be determined.
EXAMPLES::
sage: from sage.modular.pollack_stevens.distributions import Symk
sage: D = OverconvergentDistributions(0, 5, 10)
sage: M = PollackStevensModularSymbols(Gamma0(5), coefficients=D)
sage: f = M(1); f._get_prime()
5
sage: f._get_prime(5)
5
sage: f._get_prime(7)
Traceback (most recent call last):
...
ValueError: inconsistent prime
sage: f._get_prime(alpha=Qp(5)(1))
5
sage: D = Symk(0)
sage: M = PollackStevensModularSymbols(Gamma0(2), coefficients=D)
sage: f = M(1); f._get_prime(allow_none=True) is None
True
sage: f._get_prime(alpha=Qp(7)(1))
7
sage: f._get_prime(7,alpha=Qp(7)(1))
7
sage: f._get_prime()
Traceback (most recent call last):
...
ValueError: you must specify a prime
"""
pp = self.parent().prime()
ppp = ((alpha is not None) and hasattr(alpha.parent(), 'prime')
and alpha.parent().prime()) or None
p = ZZ(p) or pp or ppp
if not p:
if not allow_none:
raise ValueError("you must specify a prime")
elif (pp and p != pp) or (ppp and p != ppp):
raise ValueError("inconsistent prime")
return p
def plus_part(self):
r"""
Return the plus part of self -- i.e. ``self + self | [1,0,0,-1]``.
Note that we haven't divided by 2. Is this a problem?
OUTPUT:
- self + self | [1,0,0,-1]
EXAMPLES::
sage: E = EllipticCurve('11a')
sage: phi = E.pollack_stevens_modular_symbol()
sage: phi.values()
[-1/5, 1, 0]
sage: (phi.plus_part()+phi.minus_part()) == 2 * phi
True
"""
S0N = Sigma0(self.parent().level())
return self + self * S0N(minusproj)
def minus_part(self):
r"""
Return the minus part of self -- i.e. self - self | [1,0,0,-1]
Note that we haven't divided by 2. Is this a problem?
OUTPUT:
- self -- self | [1,0,0,-1]
EXAMPLES::
sage: E = EllipticCurve('11a')
sage: phi = E.pollack_stevens_modular_symbol()
sage: phi.values()
[-1/5, 1, 0]
sage: (phi.plus_part()+phi.minus_part()) == phi * 2
True
"""
S0N = Sigma0(self.parent().level())
return self - self * S0N(minusproj)
def hecke(self, ell, algorithm="prep"):
r"""
Return self | `T_{\ell}` by making use of the precomputations in
self.prep_hecke()
INPUT:
- ``ell`` -- a prime
- ``algorithm`` -- a string, either 'prep' (default) or
'naive'
OUTPUT:
- The image of this element under the Hecke operator
`T_{\ell}`
ALGORITHMS:
- If ``algorithm == 'prep'``, precomputes a list of matrices
that only depend on the level, then uses them to speed up
the action.
- If ``algorithm == 'naive'``, just acts by the matrices
defining the Hecke operator. That is, it computes
sum_a self | [1,a,0,ell] + self | [ell,0,0,1],
the last term occurring only if the level is prime to ell.
EXAMPLES::
sage: E = EllipticCurve('11a')
sage: phi = E.pollack_stevens_modular_symbol()
sage: phi.values()
[-1/5, 1, 0]
sage: phi.hecke(2) == phi * E.ap(2)
True
sage: phi.hecke(3) == phi * E.ap(3)
True
sage: phi.hecke(5) == phi * E.ap(5)
True
sage: phi.hecke(101) == phi * E.ap(101)
True
sage: all(phi.hecke(p, algorithm='naive') == phi * E.ap(p) for p in [2,3,5,101]) # long time
True
"""
return self.__class__(self._map.hecke(ell, algorithm),
self.parent(), construct=True)
def valuation(self, p=None):
r"""
Return the valuation of ``self`` at `p`.
Here the valuation is the minimum of the valuations of the
values of ``self``.
INPUT:
- ``p`` - prime
OUTPUT:
- The valuation of ``self`` at `p`
EXAMPLES::
sage: E = EllipticCurve('11a')
sage: phi = E.pollack_stevens_modular_symbol()
sage: phi.values()
[-1/5, 1, 0]
sage: phi.valuation(2)
0
sage: phi.valuation(3)
0
sage: phi.valuation(5)
-1
sage: phi.valuation(7)
0
sage: phi.valuation()
Traceback (most recent call last):
...
ValueError: you must specify a prime
sage: phi2 = phi.lift(11, M=2)
sage: phi2.valuation()
0
sage: phi2.valuation(3)
Traceback (most recent call last):
...
ValueError: inconsistent prime
sage: phi2.valuation(11)
0
"""
q = self._get_prime(p)
return min([val.valuation(q) for val in self._map])
def diagonal_valuation(self, p):
"""
Return the minimum of the diagonal valuation on the values of self
INPUT:
- ``p`` -- a positive integral prime
EXAMPLES::
sage: E = EllipticCurve('11a')
sage: phi = E.pollack_stevens_modular_symbol()
sage: phi.values()
[-1/5, 1, 0]
sage: phi.diagonal_valuation(2)
0
sage: phi.diagonal_valuation(3)
0
sage: phi.diagonal_valuation(5)
-1
sage: phi.diagonal_valuation(7)
0
"""
return min([val.diagonal_valuation(p) for val in self._map])
@cached_method
def is_Tq_eigensymbol(self, q, p=None, M=None):
r"""
Determine if self is an eigenvector for `T_q` modulo `p^M`
INPUT:
- ``q`` -- prime of the Hecke operator
- ``p`` -- prime we are working modulo
- ``M`` -- degree of accuracy of approximation
OUTPUT:
- True/False
EXAMPLES::
sage: E = EllipticCurve('11a')
sage: phi = E.pollack_stevens_modular_symbol()
sage: phi.values()
[-1/5, 1, 0]
sage: phi_ord = phi.p_stabilize(p = 3, ap = E.ap(3), M = 10, ordinary = True)
sage: phi_ord.is_Tq_eigensymbol(2,3,10)
True
sage: phi_ord.is_Tq_eigensymbol(2,3,100)
False
sage: phi_ord.is_Tq_eigensymbol(2,3,1000)
False
sage: phi_ord.is_Tq_eigensymbol(3,3,10)
True
sage: phi_ord.is_Tq_eigensymbol(3,3,100)
False
"""
try:
self.Tq_eigenvalue(q, p, M)
return True
except ValueError:
return False
# what happens if a cached method raises an error? Is it
# recomputed each time?
@cached_method
def Tq_eigenvalue(self, q, p=None, M=None, check=True):
r"""
Eigenvalue of `T_q` modulo `p^M`
INPUT:
- ``q`` -- prime of the Hecke operator
- ``p`` -- prime we are working modulo (default: None)
- ``M`` -- degree of accuracy of approximation (default: None)
- ``check`` -- check that ``self`` is an eigensymbol
OUTPUT:
- Constant `c` such that `self|T_q - c * self` has valuation greater than
or equal to `M` (if it exists), otherwise raises ValueError
EXAMPLES::
sage: E = EllipticCurve('11a')
sage: phi = E.pollack_stevens_modular_symbol()
sage: phi.values()
[-1/5, 1, 0]
sage: phi_ord = phi.p_stabilize(p = 3, ap = E.ap(3), M = 10, ordinary = True)
sage: phi_ord.Tq_eigenvalue(2,3,10) + 2
O(3^10)
sage: phi_ord.Tq_eigenvalue(3,3,10)
2 + 3^2 + 2*3^3 + 2*3^4 + 2*3^6 + 3^8 + 2*3^9 + O(3^10)
sage: phi_ord.Tq_eigenvalue(3,3,100)
Traceback (most recent call last):
...
ValueError: result not determined to high enough precision
"""
qhecke = self.hecke(q)
gens = self.parent().source().gens()
if p is None:
p = self.parent().prime()
i = 0
g = gens[i]
verbose("Computing eigenvalue", level = 2)
while self._map[g].moment(0).is_zero():
if not qhecke._map[g].moment(0).is_zero():
raise ValueError("not a scalar multiple")
i += 1
try:
g = gens[i]
except IndexError:
raise ValueError("self is zero")
aq = self.parent().base_ring()(self._map[g].find_scalar_from_zeroth_moment(qhecke._map[g], p, M, check))
verbose("Found eigenvalues of %s" % aq, level = 2)
if check:
verbose("Checking that this is actually an eigensymbol", level = 2)
if p is None or M is None or not ZZ(p).is_prime():
for g in gens[1:]:
try:
if not (qhecke._map[g] - aq * self._map[g]).is_zero():
# using != did not work
raise ValueError("not a scalar multiple")
except PrecisionError:
if qhecke._map[g] != aq * self._map[g]:
raise ValueError("not a scalar multiple")
else:
verbose('p = %s, M = %s' % (p, M), level = 2)
if qhecke != aq * self:
raise ValueError("not a scalar multiple")
# if not aq.parent().is_exact() and M is not None:
# aq.add_bigoh(M)
return aq
def is_ordinary(self, p=None, P=None):
r"""
Return true if the `p`-th eigenvalue is a `p`-adic unit.
INPUT:
- ``p`` - a positive integral prime, or None (default None)
- ``P`` - a prime of the base ring above `p`, or None. This is ignored
unless the base ring is a number field.
OUTPUT:
- True/False
EXAMPLES::
sage: E = EllipticCurve('11a1')
sage: phi = E.pollack_stevens_modular_symbol()
sage: phi.is_ordinary(2)
False
sage: E.ap(2)
-2
sage: phi.is_ordinary(3)
True
sage: E.ap(3)
-1
sage: phip = phi.p_stabilize(3,20)
sage: phip.is_ordinary()
True
A number field example. Here there are multiple primes above `p`, and
`\phi` is ordinary at one but not the other.::
sage: f = Newforms(32, 8, names='a')[1]
sage: K = f.hecke_eigenvalue_field()
sage: a = f[3]
sage: from sage.modular.pollack_stevens.space import ps_modsym_from_simple_modsym_space
sage: phi = ps_modsym_from_simple_modsym_space(f.modular_symbols(1))
sage: phi.is_ordinary(K.ideal(3, 1/16*a + 3/2)) != phi.is_ordinary(K.ideal(3, 1/16*a + 5/2))
True
sage: phi.is_ordinary(3)
Traceback (most recent call last):
...
TypeError: P must be an ideal
"""
# q is the prime below p, if base is a number field; q = p otherwise
if p is None:
if self.parent().prime() == 0:
raise ValueError("need to specify a prime")
q = p = self.parent().prime()
elif p in ZZ:
q = p
else:
q = p.smallest_integer()
if not q.is_prime():
raise ValueError("p is not prime")
if (self.parent().prime() != q) and (self.parent().prime() != 0):
raise ValueError("prime does not match coefficient module's prime")
aq = self.Tq_eigenvalue(q)
return aq.valuation(p) == 0
def evaluate_twisted(self, a, chi):
r"""
Return `\Phi_{\chi}(\{a/p\}-\{\infty\})` where `\Phi` is ``self`` and
`\chi` is a quadratic character
INPUT:
- ``a`` -- integer in the range range(p)
- ``chi`` -- the modulus of a quadratic character.
OUTPUT:
The distribution `\Phi_{\chi}(\{a/p\}-\{\infty\})`.
EXAMPLES::
sage: E = EllipticCurve('17a1')
sage: L = E.padic_lseries(5, implementation="pollackstevens", precision=4) #long time
sage: D = L.quadratic_twist() # long time
sage: L.symbol().evaluate_twisted(1,D) # long time
(1 + 5 + 3*5^2 + 5^3 + O(5^4), 5^2 + O(5^3), 1 + O(5^2), 2 + O(5))
sage: E = EllipticCurve('40a4')
sage: L = E.padic_lseries(7, implementation="pollackstevens", precision=4) #long time
sage: D = L.quadratic_twist() # long time
sage: L.symbol().evaluate_twisted(1,D) # long time
(4 + 6*7 + 3*7^2 + O(7^4), 6*7 + 6*7^2 + O(7^3), 6 + O(7^2), 1 + O(7))
"""
p = self.parent().prime()
S0p = Sigma0(p)
Dists = self.parent().coefficient_module()
M = Dists.precision_cap()
p = Dists.prime()
twisted_dist = Dists.zero()
m_map = self._map
for b in range(1, abs(chi) + 1):
if gcd(b, chi) == 1:
M1 = S0p([1, (b / abs(chi)) % p ** M, 0, 1])
new_dist = m_map(M1 * M2Z([a, 1, p, 0])) * M1
new_dist = new_dist.scale(kronecker(chi, b)).normalize()
twisted_dist += new_dist
return twisted_dist.normalize()
def _consistency_check(self):
"""
Check that the map really does satisfy the Manin relations loop (for debugging).
The two and three torsion relations are checked and it is checked that the symbol
adds up correctly around the fundamental domain
EXAMPLES::
sage: E = EllipticCurve('37a1')
sage: phi = E.pollack_stevens_modular_symbol()
sage: phi._consistency_check()
This modular symbol satisfies the Manin relations
"""
f = self._map
MR = self._map._manin
## Test two torsion relations
for g in MR.reps_with_two_torsion():
gamg = MR.two_torsion_matrix(g)
if not (f[g] * gamg + f[g]).is_zero():
raise ValueError("Two torsion relation failed with", g)
## Test three torsion relations
for g in MR.reps_with_three_torsion():
gamg = MR.three_torsion_matrix(g)
if not (f[g] * (gamg ** 2) + f[g] * gamg + f[g]).is_zero():
raise ValueError("Three torsion relation failed with", g)
## Test that the symbol adds to 0 around the boundary of the
## fundamental domain
t = self.parent().coefficient_module().zero()
for g in MR.gens()[1:]:
if not(g in MR.reps_with_two_torsion()
or g in MR.reps_with_three_torsion()):
t += f[g] * MR.gammas[g] - f[g]
else:
if g in MR.reps_with_two_torsion():
t -= f[g]
else:
t -= f[g] # what ?? same thing ??
id = MR.gens()[0]
if f[id] * MR.gammas[id] - f[id] != -t:
print(t)
print(f[id] * MR.gammas[id] - f[id])
raise ValueError("Does not add up correctly around loop")
print("This modular symbol satisfies the Manin relations")
class PSModularSymbolElement_symk(PSModularSymbolElement):
def _find_alpha(self, p, k, M=None, ap=None, new_base_ring=None, ordinary=True, check=True, find_extraprec=True):
r"""
Find `\alpha`, a `U_p` eigenvalue, which is found as a root of
the polynomial `x^2 - a_p * x + p^{k+1} \chi(p)`.
INPUT:
- ``p`` -- prime
- ``k`` -- Pollack-Stevens weight
- ``M`` -- precision (default: None) of `\QQ_p`
- ``ap`` -- Hecke eigenvalue at `p` (default: None)
- ``new_base_ring`` -- field of definition of `\alpha` (default: None)
- ``ordinary`` -- True if the prime is ordinary (default: True)
- ``check`` -- check to see if the prime is ordinary (default: True)
- ``find_extraprec`` -- setting this to True finds extra precision (default: True)
OUTPUT:
The output is a tuple (``alpha``, ``new_base_ring``,
``newM``, ``eisenloss``,``q``,``aq``), with
- ``alpha`` -- `U_p` eigenvalue
- ``new_base_ring`` -- field of definition of `\alpha` with precision at least ``newM``
- ``newM`` -- new precision
- ``eisenloss`` -- loss of precision
- ``q`` -- a prime not equal to `p` which was used to find extra precision
- ``aq`` -- the Hecke eigenvalue `a_q` corresponding to `q`
EXAMPLES::
sage: E = EllipticCurve('11a')
sage: p = 5
sage: M = 10
sage: k = 0
sage: phi = E.pollack_stevens_modular_symbol()
sage: phi._find_alpha(p,k,M)
(1 + 4*5 + 3*5^2 + 2*5^3 + 4*5^4 + 4*5^5 + 4*5^6 + 3*5^7 + 2*5^8 + 3*5^9 + 3*5^10 + 3*5^12 + 2*5^13 + O(5^14), 5-adic Field with capped relative precision 14, 13, 1, 2, -2)
"""
if ap is None:
ap = self.Tq_eigenvalue(p, check=check)
if check and ap.valuation(p) > 0:
raise ValueError("p is not ordinary")
chi = self._map._codomain._character
if chi is not None:
eps = chi(p)
else:
eps = 1
poly = PolynomialRing(ap.parent(), 'x')([p ** (k + 1) * eps, -ap, 1])
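        # poly is x^2 - a_p*x + eps*p^(k+1); for ordinary p its unit root is alpha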
if new_base_ring is None:
# These should actually be completions of disc.parent()
if p == 2:
# is this the right precision adjustment for p=2?
new_base_ring = Qp(2, M + 1)
else:
new_base_ring = Qp(p, M)
set_padicbase = True
else:
set_padicbase = False
try:
verbose("finding alpha: rooting %s in %s" % (poly, new_base_ring), level = 2)
poly = poly.change_ring(new_base_ring)
(v0, e0), (v1, e1) = poly.roots()
except (TypeError, ValueError):
raise ValueError("new base ring must contain a root of x^2 - ap * x + p^(k+1)")
if v0.valuation(p) > 0:
v0, v1 = v1, v0
if ordinary:
alpha = v0
else:
alpha = v1
if find_extraprec:
newM, eisenloss, q, aq = self._find_extraprec(p, M, alpha, check)
else:
newM, eisenloss, q, aq = M, None, None, None
if set_padicbase:
# We want to ensure that the relative precision of alpha
# and (alpha-1) are both at least *newM*, where newM is
# obtained from self._find_extraprec
prec_cap = None
verbose("testing prec_rel: newM = %s, alpha = %s" % (newM, alpha),
level=2)
if alpha.precision_relative() < newM:
prec_cap = newM + alpha.valuation(p) + (1 if p == 2 else 0)
if ordinary:
a1val = (alpha - 1).valuation(p)
verbose("a1val = %s" % a1val, level=2)
if a1val > 0 and ap != 1 + p ** (k + 1):
# if ap = 1 + p**(k+1) then alpha=1 and we need to give up.
if prec_cap is None:
prec_cap = newM + a1val + (1 if p == 2 else 0)
else:
prec_cap = max(prec_cap, newM + a1val + (1 if p == 2 else 0))
verbose("prec_cap = %s" % prec_cap, level=2)
if prec_cap is not None:
new_base_ring = Qp(p, prec_cap)
return self._find_alpha(p=p, k=k, M=M, ap=ap, new_base_ring=new_base_ring, ordinary=ordinary, check=False, find_extraprec=find_extraprec)
return alpha, new_base_ring, newM, eisenloss, q, aq
def p_stabilize(self, p=None, M=20, alpha=None, ap=None, new_base_ring=None, ordinary=True, check=True):
r"""
Return the `p`-stabilization of self to level `N p` on which `U_p` acts by `\alpha`.
Note that since `\alpha` is `p`-adic, the resulting symbol
is just an approximation to the true `p`-stabilization
(depending on how well `\alpha` is approximated).
INPUT:
- ``p`` -- prime not dividing the level of self
- ``M`` -- (default: 20) precision of `\QQ_p`
- ``alpha`` -- `U_p` eigenvalue
- ``ap`` -- Hecke eigenvalue
- ``new_base_ring`` -- change of base ring
- ``ordinary`` -- (default: True) whether to return the ordinary
(at ``p``) eigensymbol.
- ``check`` -- (default: True) whether to perform extra sanity checks
OUTPUT:
A modular symbol with the same Hecke eigenvalues as
self away from `p` and eigenvalue `\alpha` at `p`.
The eigenvalue `\alpha` depends on the parameter ``ordinary``.
If ``ordinary`` == True: the unique modular symbol of level
`N p` with the same Hecke eigenvalues as self away from
`p` and unit eigenvalue at `p`; else the unique modular
symbol of level `N p` with the same Hecke eigenvalues as
self away from `p` and non-unit eigenvalue at `p`.
EXAMPLES::
sage: E = EllipticCurve('11a')
sage: p = 5
sage: prec = 4
sage: phi = E.pollack_stevens_modular_symbol()
sage: phis = phi.p_stabilize(p,M = prec)
sage: phis
Modular symbol of level 55 with values in Sym^0 Q_5^2
sage: phis.hecke(7) == phis*E.ap(7)
True
sage: phis.hecke(5) == phis*E.ap(5)
False
sage: phis.hecke(3) == phis*E.ap(3)
True
sage: phis.Tq_eigenvalue(5)
1 + 4*5 + 3*5^2 + 2*5^3 + O(5^4)
sage: phis.Tq_eigenvalue(5,M = 3)
1 + 4*5 + 3*5^2 + O(5^3)
sage: phis = phi.p_stabilize(p,M = prec,ordinary=False)
sage: phis.Tq_eigenvalue(5)
5 + 5^2 + 2*5^3 + O(5^5)
A complicated example (with nontrivial character)::
sage: chi = DirichletGroup(24)([-1, -1, -1])
sage: f = Newforms(chi,names='a')[0]
sage: from sage.modular.pollack_stevens.space import ps_modsym_from_simple_modsym_space
sage: phi = ps_modsym_from_simple_modsym_space(f.modular_symbols(1))
sage: phi11, h11 = phi.completions(11,20)[0]
sage: phi11s = phi11.p_stabilize()
sage: phi11s.is_Tq_eigensymbol(11) # long time
True
"""
if check:
p = self._get_prime(p, alpha)
k = self.parent().weight()
M = ZZ(M)
verbose("p stabilizing: M = %s" % M, level=2)
if alpha is None:
alpha, new_base_ring, newM, eisenloss, q, aq = self._find_alpha(p, k, M + 1, ap, new_base_ring, ordinary, check, find_extraprec=False)
new_base_ring = Qp(p, M) if p != 2 else Qp(p, M + 1)
else:
if new_base_ring is None:
new_base_ring = alpha.parent()
if check:
if ap is None:
ap = self.base_ring()(alpha + p ** (k + 1) / alpha)
elif alpha ** 2 - ap * alpha + p ** (k + 1) != 0:
raise ValueError("alpha must be a root of x^2 - a_p*x + p^(k+1)")
if self.hecke(p) != ap * self:
raise ValueError("alpha must be a root of x^2 - a_p*x + p^(k+1)")
verbose("found alpha = %s" % alpha, level = 2)
V = self.parent()._p_stabilize_parent_space(p, new_base_ring)
return self.__class__(self._map.p_stabilize(p, alpha, V), V, construct=True)
def completions(self, p, M):
r"""
If `K` is the base_ring of self, this function takes all maps
`K\to \QQ_p` and applies them to self return a list of
(modular symbol,map: `K\to \QQ_p`) as map varies over all such maps.
.. NOTE::
This only returns all completions when `p` splits completely in `K`
INPUT:
- ``p`` -- prime
- ``M`` -- precision
OUTPUT:
- A list of tuples (modular symbol,map: `K\to \QQ_p`) as map varies over all such maps
EXAMPLES::
sage: from sage.modular.pollack_stevens.space import ps_modsym_from_simple_modsym_space
sage: D = ModularSymbols(67,2,1).cuspidal_submodule().new_subspace().decomposition()[1]
sage: f = ps_modsym_from_simple_modsym_space(D)
sage: S = f.completions(41,10); S
[(Modular symbol of level 67 with values in Sym^0 Q_41^2, Ring morphism:
From: Number Field in alpha with defining polynomial x^2 + 3*x + 1
To: 41-adic Field with capped relative precision 10
Defn: alpha |--> 5 + 22*41 + 19*41^2 + 10*41^3 + 28*41^4 + 22*41^5 + 9*41^6 + 25*41^7 + 40*41^8 + 8*41^9 + O(41^10)), (Modular symbol of level 67 with values in Sym^0 Q_41^2, Ring morphism:
From: Number Field in alpha with defining polynomial x^2 + 3*x + 1
To: 41-adic Field with capped relative precision 10
Defn: alpha |--> 33 + 18*41 + 21*41^2 + 30*41^3 + 12*41^4 + 18*41^5 + 31*41^6 + 15*41^7 + 32*41^9 + O(41^10))]
sage: TestSuite(S[0][0]).run(skip=['_test_category'])
"""
K = self.base_ring()
R = Qp(p, M + 10)['x']
x = R.gen()
if K == QQ:
f = x - 1
else:
f = K.defining_polynomial()
v = R(f).roots()
if len(v) == 0:
L = Qp(p, M).extension(f, names='a')
# a = L.gen()
V = self.parent().change_ring(L)
Dist = V.coefficient_module()
psi = K.hom([K.gen()], L)
embedded_sym = self.parent().element_class(self._map.apply(psi, codomain=Dist, to_moments=True), V, construct=True)
ans = [embedded_sym, psi]
return ans
else:
roots = [r[0] for r in v]
ans = []
V = self.parent().change_ring(Qp(p, M))
Dist = V.coefficient_module()
for r in roots:
psi = K.hom([r], Qp(p, M))
embedded_sym = self.parent().element_class(self._map.apply(psi, codomain=Dist, to_moments=True), V, construct=True)
ans.append((embedded_sym, psi))
return ans
def lift(self, p=None, M=None, alpha=None, new_base_ring=None,
algorithm = None, eigensymbol=False, check=True):
r"""
Return a (`p`-adic) overconvergent modular symbol with
`M` moments which lifts self up to an Eisenstein error
Here the Eisenstein error is a symbol whose system of Hecke
eigenvalues equals `\ell+1` for `T_\ell` when `\ell`
does not divide `Np` and 1 for `U_q` when `q` divides `Np`.
INPUT:
- ``p`` -- prime
- ``M`` -- integer equal to the number of moments
- ``alpha`` -- `U_p` eigenvalue
- ``new_base_ring`` -- change of base ring
- ``algorithm`` -- 'stevens' or 'greenberg' (default 'stevens')
- ``eigensymbol`` -- if True, lifts to Hecke eigensymbol (self must
be a `p`-ordinary eigensymbol)
(Note: ``eigensymbol = True`` does *not* just indicate to the code that
self is an eigensymbol; it solves a wholly different problem, lifting
an eigensymbol to an eigensymbol.)
OUTPUT:
An overconvergent modular symbol whose specialization equals self, up
to some Eisenstein error if ``eigensymbol`` is False. If ``eigensymbol
= True`` then the output will be an overconvergent Hecke eigensymbol
(and it will lift the input exactly, the Eisenstein error disappears).
EXAMPLES::
sage: E = EllipticCurve('11a')
sage: f = E.pollack_stevens_modular_symbol()
sage: g = f.lift(11,4,algorithm='stevens',eigensymbol=True)
sage: g.is_Tq_eigensymbol(2)
True
sage: g.Tq_eigenvalue(3)
10 + 10*11 + 10*11^2 + 10*11^3 + O(11^4)
sage: g.Tq_eigenvalue(11)
1 + O(11^4)
We check that lifting and then specializing gives back the original symbol::
sage: g.specialize() == f
True
Another example, which showed precision loss in an earlier version of the code::
sage: E = EllipticCurve('37a')
sage: p = 5
sage: prec = 4
sage: phi = E.pollack_stevens_modular_symbol()
sage: Phi = phi.p_stabilize_and_lift(p,prec, algorithm='stevens', eigensymbol=True) # long time
sage: Phi.Tq_eigenvalue(5,M = 4) # long time
3 + 2*5 + 4*5^2 + 2*5^3 + O(5^4)
Another example::
sage: from sage.modular.pollack_stevens.padic_lseries import pAdicLseries
sage: E = EllipticCurve('37a')
sage: p = 5
sage: prec = 6
sage: phi = E.pollack_stevens_modular_symbol()
sage: Phi = phi.p_stabilize_and_lift(p=p,M=prec,alpha=None,algorithm='stevens',eigensymbol=True) #long time
sage: L = pAdicLseries(Phi) # long time
sage: L.symbol() is Phi # long time
True
Examples using Greenberg's algorithm::
sage: E = EllipticCurve('11a')
sage: phi = E.pollack_stevens_modular_symbol()
sage: Phi = phi.lift(11,8,algorithm='greenberg',eigensymbol=True)
sage: Phi2 = phi.lift(11,8,algorithm='stevens',eigensymbol=True)
sage: Phi == Phi2
True
An example in higher weight::
sage: from sage.modular.pollack_stevens.space import ps_modsym_from_simple_modsym_space
sage: f = ps_modsym_from_simple_modsym_space(Newforms(7, 4)[0].modular_symbols(1))
sage: fs = f.p_stabilize(5)
sage: FsG = fs.lift(M=6, eigensymbol=True,algorithm='greenberg') # long time
sage: FsG.values()[0] # long time
5^-1 * (2*5 + 5^2 + 3*5^3 + 4*5^4 + O(5^7), O(5^6), 2*5^2 + 3*5^3 + O(5^5), O(5^4), 5^2 + O(5^3), O(5^2))
sage: FsS = fs.lift(M=6, eigensymbol=True,algorithm='stevens') # long time
sage: FsS == FsG # long time
True
"""
if p is None:
p = self.parent().prime()
if p == 0:
raise ValueError("must specify a prime")
elif (self.parent().prime() != 0) and p != self.parent().prime():
raise ValueError("inconsistent prime")
if M is None:
M = self.parent().precision_cap() + 1
elif M <= 1:
raise ValueError("M must be at least 2")
else:
M = ZZ(M)
if new_base_ring is None:
if isinstance(self.parent().base_ring(), pAdicGeneric):
new_base_ring = self.parent().base_ring()
else:
# We may need extra precision in solving the difference equation
if algorithm == 'greenberg':
extraprec = 0
else:
extraprec = (M - 1).exact_log(p) # DEBUG: was M-1
# should eventually be a completion
new_base_ring = Qp(p, M + extraprec)
if algorithm is None:
# The default algorithm is Greenberg's, if possible.
algorithm = 'greenberg' if eigensymbol else 'stevens'
elif algorithm == 'greenberg':
if not eigensymbol:
raise ValueError("Greenberg's algorithm only works"
" for eigensymbols. Try 'stevens'")
elif algorithm != 'stevens':
raise ValueError("algorithm %s not recognized" % algorithm)
if eigensymbol:
# We need some extra precision due to the fact that solving
# the difference equation can give denominators.
if alpha is None:
verbose('Finding alpha with M = %s' % M, level = 2)
alpha = self.Tq_eigenvalue(p, M=M + 1, check=check)
newM, eisenloss, q, aq = self._find_extraprec(p, M + 1, alpha, check)
Phi = self._lift_to_OMS(p, newM, new_base_ring, algorithm)
Phi = _iterate_Up(Phi, p, newM, alpha, q, aq, check)
Phi = Phi.reduce_precision(M)
return Phi._normalize(include_zeroth_moment = True)
else:
return self._lift_to_OMS(p, M, new_base_ring, algorithm)
def _lift_to_OMS(self, p, M, new_base_ring, algorithm = 'greenberg'):
r"""
Return a (`p`-adic) overconvergent modular symbol with
`M` moments which lifts self up to an Eisenstein error
Here the Eisenstein error is a symbol whose system of Hecke
eigenvalues equals `\ell+1` for `T_\ell` when `\ell`
does not divide `Np` and 1 for `U_q` when `q` divides `Np`.
INPUT:
- ``p`` -- prime
- ``M`` -- integer equal to the number of moments
- ``new_base_ring`` -- new base ring
- ``algorithm`` -- (default: 'greenberg') a string, either 'greenberg'
or 'stevens', specifying whether to use
the lifting algorithm of M.Greenberg or that of Pollack--Stevens.
The latter one solves the difference equation, which is not needed. The
option to use Pollack--Stevens' algorithm here is just for historical reasons.
OUTPUT:
- An overconvergent modular symbol whose specialization
equals self up to some Eisenstein error.
EXAMPLES::
sage: E = EllipticCurve('11a')
sage: f = E.pollack_stevens_modular_symbol()
sage: f._lift_to_OMS(11,4,Qp(11,4))
Modular symbol of level 11 with values in Space of 11-adic distributions with k=0 action and precision cap 4
"""
D = {}
manin = self.parent().source()
MSS = self.parent()._lift_parent_space(p, M, new_base_ring)
if algorithm == 'greenberg':
for g in manin.gens():
D[g] = self._map[g].lift(p, M, new_base_ring)
elif algorithm == 'stevens':
half = ZZ(1) / ZZ(2)
for g in manin.gens()[1:]:
twotor = g in manin.reps_with_two_torsion()
threetor = g in manin.reps_with_three_torsion()
if twotor:
# See [PS2011] section 4.1
gam = manin.two_torsion_matrix(g)
mu = self._map[g].lift(p, M, new_base_ring)
D[g] = (mu - mu * gam) * half
elif threetor:
# See [PS2011] section 4.1
gam = manin.three_torsion_matrix(g)
mu = self._map[g].lift(p, M, new_base_ring)
D[g] = (2 * mu - mu * gam - mu * (gam ** 2)) * half
else:
# no two or three torsion
D[g] = self._map[g].lift(p, M, new_base_ring)
t = self.parent().coefficient_module().lift(p, M, new_base_ring).zero()
## This loops adds up around the boundary of fundamental
## domain except the two vertical lines
for g in manin.gens()[1:]:
twotor = g in manin.reps_with_two_torsion()
threetor = g in manin.reps_with_three_torsion()
if twotor or threetor:
t = t - D[g]
else:
t += D[g] * manin.gammas[g] - D[g]
## t now should be sum Phi(D_i) | (gamma_i - 1) - sum
## Phi(D'_i) - sum Phi(D''_i)
## (Here I'm using the opposite sign convention of [PS2011]
## regarding D'_i and D''_i)
D[manin.gen(0)] = -t.solve_difference_equation() # Check this!
else:
raise NotImplementedError
return MSS(D)
def _find_aq(self, p, M, check):
r"""
Helper function for finding Hecke eigenvalue `aq` for a prime `q`
not equal to `p`. This is called in the case when `alpha = 1 (mod p^M)`
(with `alpha` a `U_p`-eigenvalue), which creates the need to use
other Hecke eigenvalues (and `alpha`s), because of division by `(alpha - 1)`.
INPUT:
- ``p`` -- working prime
- ``M`` -- precision
- ``check`` -- checks that ``self`` is a `T_q` eigensymbol
OUTPUT:
Tuple ``(q, aq, eisenloss)``, with
- ``q`` -- a prime not equal to `p`
- ``aq`` -- Hecke eigenvalue at `q`
- ``eisenloss`` -- the `p`-adic valuation of `a_q - q^{k+1} - 1`
EXAMPLES::
sage: E = EllipticCurve('11a')
sage: f = E.pollack_stevens_modular_symbol()
sage: f._find_aq(5,10,True)
(2, -2, 1)
"""
N = self.parent().level()
q = ZZ(2)
k = self.parent().weight()
aq = self.Tq_eigenvalue(q, check=check)
eisenloss = (aq - q ** (k + 1) - 1).valuation(p)
while ((q == p) or (N % q == 0) or (eisenloss >= M)) and (q < 50):
q = next_prime(q)
aq = self.Tq_eigenvalue(q, check=check)
if q != p:
eisenloss = (aq - q ** (k + 1) - 1).valuation(p)
else:
eisenloss = (aq - 1).valuation(p)
if q >= 50:
raise ValueError("The symbol appears to be eisenstein -- "
"not implemented yet")
return q, aq, eisenloss
def _find_extraprec(self, p, M, alpha, check):
r"""
Find the extra precision needed to account for:
1) The denominators in the Hecke eigenvalue
2) the denominators appearing when solving the difference equation,
3) those denominators who might be also present in self.
INPUT :
- ``p`` -- working prime
- ``M`` -- precision
- ``alpha`` -- the Up-eigenvalue
- ``check`` -- whether to check that ``self`` is a `T_q` eigensymbol
OUTPUT :
A tuple (newM, eisenloss, q, aq), where ``newM`` is the new precision, `q` is
a prime different from `p`, and ``aq`` is the eigenvalue of `T_q` of the eigensymbol.
The value ``eisenloss`` is the loss of precision accounted for in the denominators of the Hecke
eigenvalue.
EXAMPLES::
sage: E = EllipticCurve('11a')
sage: p = 5
sage: M = 10
sage: k = 0
sage: phi = E.pollack_stevens_modular_symbol()
sage: alpha = phi.Tq_eigenvalue(p)
sage: phi._find_extraprec(p,M,alpha,True)
(13, 1, 2, -2)
"""
q, aq, eisenloss = self._find_aq(p, M, check)
newM = M + eisenloss
# We also need to add precision to account for denominators appearing while solving the difference equation.
eplog = (newM - 1).exact_log(p)
while eplog < (newM + eplog).exact_log(p):
eplog = (newM + eplog).exact_log(p)
verbose("M = %s, newM = %s, eplog=%s" % (M, newM, eplog), level=2)
newM += eplog
# We also need to add precision to account for denominators that might be present in self
s = self.valuation(p)
if s < 0:
newM += -s
return newM, eisenloss, q, aq
def p_stabilize_and_lift(self, p, M, alpha=None, ap=None,
new_base_ring=None,
ordinary=True, algorithm='greenberg', eigensymbol=False,
check=True):
"""
`p`-stabilize and lift self
INPUT:
- ``p`` -- prime, not dividing the level of self
- ``M`` -- precision
- ``alpha`` -- (default: None) the `U_p` eigenvalue, if known
- ``ap`` -- (default: None) the Hecke eigenvalue at p (before stabilizing), if known
- ``new_base_ring`` -- (default: None) if specified, force the resulting eigensymbol to take values in the given ring
- ``ordinary`` -- (default: True) whether to return the ordinary
(at ``p``) eigensymbol.
- ``algorithm`` -- (default: 'greenberg') a string, either 'greenberg'
or 'stevens', specifying whether to use
the lifting algorithm of M.Greenberg or that of Pollack--Stevens.
The latter one solves the difference equation, which is not needed. The
option to use Pollack--Stevens' algorithm here is just for historical reasons.
- ``eigensymbol`` -- (default: False) if True, return an overconvergent eigensymbol. Otherwise just perform a naive lift
- ``check`` -- (default: True) whether to perform extra sanity checks
OUTPUT:
`p`-stabilized and lifted version of self.
EXAMPLES::
sage: E = EllipticCurve('11a')
sage: f = E.pollack_stevens_modular_symbol()
sage: g = f.p_stabilize_and_lift(3,10) # long time
sage: g.Tq_eigenvalue(5) # long time
1 + O(3^10)
sage: g.Tq_eigenvalue(7) # long time
1 + 2*3 + 2*3^2 + 2*3^3 + 2*3^4 + 2*3^5 + 2*3^6 + 2*3^7 + 2*3^8 + 2*3^9 + O(3^10)
sage: g.Tq_eigenvalue(3) # long time
2 + 3^2 + 2*3^3 + 2*3^4 + 2*3^6 + 3^8 + 2*3^9 + O(3^10)
"""
if check:
p = self._get_prime(p, alpha)
k = self.parent().weight()
M = ZZ(M)
# alpha will be the eigenvalue of Up
M0 = M + 1
if alpha is None:
alpha, new_base_ring, newM, eisenloss, q, aq = self._find_alpha(p, k, M0, ap, new_base_ring, ordinary, check)
if new_base_ring is None:
new_base_ring = alpha.parent()
newM, eisenloss, q, aq = self._find_extraprec(p, M0, alpha, check)
if hasattr(new_base_ring, 'precision_cap') and newM > new_base_ring.precision_cap():
raise ValueError("Not enough precision in new base ring")
# Now we can stabilize
self = self.p_stabilize(p=p, alpha=alpha, ap=ap, M=newM,
new_base_ring=new_base_ring, check=check)
# And use the standard lifting function for eigensymbols
Phi = self._lift_to_OMS(p, newM, new_base_ring, algorithm)
Phi = _iterate_Up(Phi, p=p, M=newM, ap=alpha, q=q, aq=aq, check=check)
Phi = Phi.reduce_precision(M)
return Phi._normalize(include_zeroth_moment = True)
class PSModularSymbolElement_dist(PSModularSymbolElement):
def reduce_precision(self, M):
r"""
Only hold on to `M` moments of each value of self
EXAMPLES::
sage: D = OverconvergentDistributions(0, 5, 10)
sage: M = PollackStevensModularSymbols(Gamma0(5), coefficients=D)
sage: f = M(1)
sage: f.reduce_precision(1)
Modular symbol of level 5 with values in Space of 5-adic distributions with k=0 action and precision cap 10
"""
return self.__class__(self._map.reduce_precision(M), self.parent(),
construct=True)
def precision_relative(self):
r"""
Return the number of moments of each value of self
EXAMPLES::
sage: D = OverconvergentDistributions(0, 5, 10)
sage: M = PollackStevensModularSymbols(Gamma0(5), coefficients=D)
sage: f = M(1)
sage: f.precision_relative()
1
"""
return min([len(a._moments) for a in self._map])
def specialize(self, new_base_ring=None):
r"""
Return the underlying classical symbol of weight `k` - i.e.,
applies the canonical map `D_k \to Sym^k` to all values of
self.
EXAMPLES::
sage: D = OverconvergentDistributions(0, 5, 10); M = PollackStevensModularSymbols(Gamma0(5), coefficients=D); M
Space of overconvergent modular symbols for Congruence Subgroup Gamma0(5) with sign 0
and values in Space of 5-adic distributions with k=0 action and precision cap 10
sage: f = M(1)
sage: f.specialize()
Modular symbol of level 5 with values in Sym^0 Z_5^2
sage: f.specialize().values()
[1 + O(5), 1 + O(5), 1 + O(5)]
sage: f.values()
[1 + O(5), 1 + O(5), 1 + O(5)]
sage: f.specialize().parent()
Space of modular symbols for Congruence Subgroup Gamma0(5) with sign 0 and values in Sym^0 Z_5^2
sage: f.specialize().parent().coefficient_module()
Sym^0 Z_5^2
sage: f.specialize().parent().coefficient_module().is_symk()
True
sage: f.specialize(Qp(5,20))
Modular symbol of level 5 with values in Sym^0 Q_5^2
"""
if new_base_ring is None:
new_base_ring = self.base_ring()
return self.__class__(self._map.specialize(new_base_ring),
self.parent()._specialize_parent_space(new_base_ring), construct=True)
def padic_lseries(self,*args, **kwds):
"""
Return the `p`-adic L-series of this modular symbol.
EXAMPLES::
sage: E = EllipticCurve('37a')
sage: phi = E.pollack_stevens_modular_symbol()
sage: L = phi.lift(37, M=6, eigensymbol=True).padic_lseries(); L # long time
37-adic L-series of Modular symbol of level 37 with values in Space of 37-adic distributions with k=0 action and precision cap 7
sage: L.series(2) # long time
O(37^6) + (4 + 37 + 36*37^2 + 19*37^3 + 21*37^4 + O(37^5))*T + O(T^2)
"""
from sage.modular.pollack_stevens.padic_lseries import pAdicLseries
return pAdicLseries(self, *args, **kwds)
| 36.465234 | 203 | 0.538154 |
4a1ed5881824dbd97fbb19b2f97ab5b32e9b20a7 | 6,238 | py | Python | sputnik/thirdparty/weibopy/streaming.py | errord/sputnik | b83c635a9a160dcd5809265c0d9d231ade33e5ea | [
"BSD-3-Clause"
] | null | null | null | sputnik/thirdparty/weibopy/streaming.py | errord/sputnik | b83c635a9a160dcd5809265c0d9d231ade33e5ea | [
"BSD-3-Clause"
] | null | null | null | sputnik/thirdparty/weibopy/streaming.py | errord/sputnik | b83c635a9a160dcd5809265c0d9d231ade33e5ea | [
"BSD-3-Clause"
] | 1 | 2018-03-04T04:48:44.000Z | 2018-03-04T04:48:44.000Z |
# Copyright 2009-2010 Joshua Roesslein
# See LICENSE for details.
import httplib
from socket import timeout
from threading import Thread
from time import sleep
import urllib
from sputnik.thirdparty.weibopy.auth import BasicAuthHandler
from sputnik.thirdparty.weibopy.models import Status
from sputnik.thirdparty.weibopy.api import API
from sputnik.thirdparty.weibopy.error import WeibopError
from sputnik.thirdparty.weibopy.utils import import_simplejson
json = import_simplejson()
STREAM_VERSION = 1
class StreamListener(object):
def __init__(self, api=None):
self.api = api or API()
def on_data(self, data):
"""Called when raw data is received from connection.
Override this method if you wish to manually handle
the stream data. Return False to stop stream and close connection.
"""
if 'in_reply_to_status_id' in data:
status = Status.parse(self.api, json.loads(data))
if self.on_status(status) is False:
return False
elif 'delete' in data:
delete = json.loads(data)['delete']['status']
if self.on_delete(delete['id'], delete['user_id']) is False:
return False
elif 'limit' in data:
if self.on_limit(json.loads(data)['limit']['track']) is False:
return False
def on_status(self, status):
"""Called when a new status arrives"""
return
def on_delete(self, status_id, user_id):
"""Called when a delete notice arrives for a status"""
return
def on_limit(self, track):
"""Called when a limitation notice arrvies"""
return
def on_error(self, status_code):
"""Called when a non-200 status code is returned"""
return False
def on_timeout(self):
"""Called when stream connection times out"""
return
class Stream(object):
host = 'stream.twitter.com'
def __init__(self, username, password, listener, timeout=5.0, retry_count = None,
retry_time = 10.0, snooze_time = 5.0, buffer_size=1500, headers=None):
self.auth = BasicAuthHandler(username, password)
self.running = False
self.timeout = timeout
self.retry_count = retry_count
self.retry_time = retry_time
self.snooze_time = snooze_time
self.buffer_size = buffer_size
self.listener = listener
self.api = API()
self.headers = headers or {}
self.body = None
def _run(self):
# setup
self.auth.apply_auth(None, None, self.headers, None)
# enter loop
error_counter = 0
conn = None
while self.running:
if self.retry_count and error_counter > self.retry_count:
# quit if error count greater than retry count
break
try:
conn = httplib.HTTPConnection(self.host)
conn.connect()
conn.sock.settimeout(self.timeout)
conn.request('POST', self.url, self.body, headers=self.headers)
resp = conn.getresponse()
if resp.status != 200:
if self.listener.on_error(resp.status) is False:
break
error_counter += 1
sleep(self.retry_time)
else:
error_counter = 0
self._read_loop(resp)
except timeout:
if self.listener.on_timeout() == False:
break
if self.running is False:
break
conn.close()
sleep(self.snooze_time)
except Exception:
# any other exception is fatal, so kill loop
break
# cleanup
self.running = False
if conn:
conn.close()
def _read_loop(self, resp):
data = ''
while self.running:
if resp.isclosed():
break
# read length
length = ''
while True:
c = resp.read(1)
if c == '\n':
break
length += c
length = length.strip()
if length.isdigit():
length = int(length)
else:
continue
# read data and pass into listener
data = resp.read(length)
if self.listener.on_data(data) is False:
self.running = False
def _start(self, async):
self.running = True
if async:
Thread(target=self._run).start()
else:
self._run()
def firehose(self, count=None, async=False):
if self.running:
raise WeibopError('Stream object already connected!')
self.url = '/%i/statuses/firehose.json?delimited=length' % STREAM_VERSION
if count:
self.url += '&count=%s' % count
self._start(async)
def retweet(self, async=False):
if self.running:
raise WeibopError('Stream object already connected!')
self.url = '/%i/statuses/retweet.json?delimited=length' % STREAM_VERSION
self._start(async)
def sample(self, count=None, async=False):
if self.running:
raise WeibopError('Stream object already connected!')
self.url = '/%i/statuses/sample.json?delimited=length' % STREAM_VERSION
if count:
self.url += '&count=%s' % count
self._start(async)
def filter(self, follow=None, track=None, async=False):
params = {}
self.headers['Content-type'] = "application/x-www-form-urlencoded"
if self.running:
raise WeibopError('Stream object already connected!')
self.url = '/%i/statuses/filter.json?delimited=length' % STREAM_VERSION
if follow:
params['follow'] = ','.join(map(str, follow))
if track:
params['track'] = ','.join(map(str, track))
self.body = urllib.urlencode(params)
self._start(async)
def disconnect(self):
if self.running is False:
return
self.running = False
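
# --- Illustrative usage sketch (not part of the original module) ---
# Rough idea of how StreamListener and Stream are meant to be wired together;
# the credentials and the listener subclass below are placeholders, and the
# stream only does anything against a live endpoint.
#
# class PrintListener(StreamListener):
#     def on_status(self, status):
#         print(status.text)                    # handle each parsed status
#
# stream = Stream('username', 'password', PrintListener())
# stream.filter(track=['keyword'], async=True)  # runs _run() in a background Thread
# ...
# stream.disconnect()                           # stops the read loop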
| 31.664975 | 90 | 0.567329 |
4a1ed609c95b91cf8f7e7fa1bbc35613502849e6 | 4,789 | py | Python | PyPoll/pyPoll.py | ElizaDeBarros/Python-challenge | e1f1efa580c319a66f077cc663e9b8eccf6aa0aa | [
"RSA-MD"
] | null | null | null | PyPoll/pyPoll.py | ElizaDeBarros/Python-challenge | e1f1efa580c319a66f077cc663e9b8eccf6aa0aa | [
"RSA-MD"
] | null | null | null | PyPoll/pyPoll.py | ElizaDeBarros/Python-challenge | e1f1efa580c319a66f077cc663e9b8eccf6aa0aa | [
"RSA-MD"
] | null | null | null | import os
import csv
# Create a path
csv_path = os.path.join('Resources', 'election_data.csv')
total_votes = []
candidates_list = [] # list that will store the name of each candidate on the poll
candidates = []
votes = []
vote_count = {}
winner = "name"
listing_print = []
# Open file
with open(csv_path,'r', newline='') as csv_file:
csv_reader = csv.reader(csv_file)
# Skips header
csv_header = next(csv_reader, None)
for row in csv_reader:
total_votes.append(row) # creates a list with element rows of the file
candidates.append(row[2]) # creates a list with with elements of the third column of the file
votes.append(row[0]) # creates a list with with elements of the first column of the file
candidates_list.append(candidates[0]) # adds the first name of the "candidates" list to the "candidates_list" list
for i in range(1, len(candidates)): # loops through "candidates" list starting on second element
if candidates[i] not in candidates_list: # checks if the name(i) on the "candidates" list is not in the "candidates_list" list
candidates_list.append(candidates[i]) # if the condition above us true, adds the name(i) of the "candidates" to the "candidates_list" list
for i in range(len(candidates_list)): # loops through the entire "candidates_list" list
vote_count["votes_" + str(candidates_list[i])] = [] # creates a list for each candidate listed in the "candidates_list"
for j in range(len(candidates)): # nested loop through the entire "candidates" list
if candidates[j] == candidates_list[i]: # compares each element of the "candidates" with each element of the "candidates_list"
vote_count["votes_" + str(candidates_list[i])].append(votes[j]) # adds elements of the first column of the file to its correspondent candidate list
print("Election Results")
print("-------------------------")
print("Total Votes: " + str(len(total_votes))) # len of "total_votes" corresponds to total number of votes, which is printed in this line
print("-------------------------")
# for each candidate in the "candidates_list" list prints the name of the candidate, the percentage of votes for the candidate and the number of votes for the candidate
# the length of each "vote_count" list corresponds to the votes casted for the candidate that owns the list
for i in range(len(candidates_list)):
print(candidates_list[i] + ": " + str(round((len(vote_count["votes_" + str(candidates_list[i])]) / len(total_votes)) *100, 3)) + "% (" + str(len(vote_count["votes_" + str(candidates_list[i])])) + ")")
winner = candidates_list[0] # initially associates the winner with the first element of the "candidates_list" list
for i in range(1, len(candidates_list)): # loops through the elements of the "candidates_list" from the second to the last element
    # tests if candidate i has more votes than the current winner (comparing against the running winner, not just the previous candidate)
    if len(vote_count["votes_" + str(candidates_list[i])]) > len(vote_count["votes_" + str(winner)]):
        winner = candidates_list[i] # if the statement above is true, candidate i becomes the new winner
print("-------------------------")
print("Winner: " + winner)
elem1 = "Election Results"
elem2 = "-------------------------"
elem3 = "Total Votes: " + str(len(total_votes))
elem4 = "-------------------------"
for i in range(len(candidates_list)):
listing_print.append(candidates_list[i] + ": " + str(round((len(vote_count["votes_" + str(candidates_list[i])]) / len(total_votes)) *100, 3)) + "% (" + str(len(vote_count["votes_" + str(candidates_list[i])])) + ")")
elem5 = "-------------------------"
elem6 = "Winner: " + winner
# creates lists of characters related to the printed lines above
textList = [elem1,elem2,elem3,elem4]
textList1 = [elem5, elem6]
os.mkdir('Analysis') # created the "Analysis" directory
output_path = os.path.join('Analysis', 'analysis_results.txt') # creates the path where the output file will be written
out = open(output_path, 'w', newline="") # creates and opens the output file
# writes results to the output file
for line in textList:
out.write(line)
out.write("\n")
for line in listing_print:
out.write(str(line)+'\n')
for line in textList1:
out.write(line)
out.write("\n")
# closes the file
out.close() | 52.626374 | 223 | 0.630403 |
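
# --- Illustrative alternative (not part of the original script) ---
# The per-candidate tally and winner above can also be computed with
# collections.Counter, assuming the same CSV layout (candidate name in the
# third column):
#
# from collections import Counter
# tally = Counter(candidates)
# winner_name, winner_votes = tally.most_common(1)[0]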
4a1ed7274a2b3101e96db40b844ad5d030174c09 | 7,291 | py | Python | orange3/Orange/widgets/visualize/utils/tree/rules.py | rgschmitz1/BioDepot-workflow-builder | f74d904eeaf91ec52ec9b703d9fb38e9064e5a66 | [
"MIT"
] | 54 | 2017-01-08T17:21:49.000Z | 2021-11-02T08:46:07.000Z | orange3/Orange/widgets/visualize/utils/tree/rules.py | Synthia-3/BioDepot-workflow-builder | 4ee93abe2d79465755e82a145af3b6a6e1e79fd4 | [
"MIT"
] | 22 | 2017-03-28T06:03:14.000Z | 2021-07-28T05:43:55.000Z | orange3/Orange/widgets/visualize/utils/tree/rules.py | Synthia-3/BioDepot-workflow-builder | 4ee93abe2d79465755e82a145af3b6a6e1e79fd4 | [
"MIT"
] | 21 | 2017-01-26T21:12:09.000Z | 2022-01-31T21:34:59.000Z | r"""Rules for classification and regression trees.
Tree visualisations usually need to show the rules of nodes, these classes make
merging these rules simple (otherwise you have repeating rules e.g. `age < 3`
and `age < 2` which can be merged into `age < 2`.
Subclasses of the `Rule` class should provide a nice interface to merge rules
together through the `merge_with` method. Of course, this should not be forced
where it doesn't make sense e.g. merging a discrete rule (e.g.
:math:`x \in \{red, blue, green\}`) and a continuous rule (e.g.
:math:`x \leq 5`).
"""
import warnings
class Rule:
"""The base Rule class for tree rules."""
def merge_with(self, rule):
"""Merge the current rule with the given rule.
Parameters
----------
rule : Rule
Returns
-------
Rule
"""
raise NotImplementedError()
@property
def description(self):
return str(self)
class DiscreteRule(Rule):
"""Discrete rule class for handling Indicator rules.
Parameters
----------
attr_name : str
equals : bool
Should indicate whether or not the rule equals the value or not.
value : object
Examples
--------
>>> print(DiscreteRule('age', True, 30))
age = 30
>>> print(DiscreteRule('name', False, 'John'))
name ≠ John
Notes
-----
.. note:: Merging discrete rules is currently not implemented, the new rule
is simply returned and a warning is issued.
"""
def __init__(self, attr_name, equals, value):
self.attr_name = attr_name
self.equals = equals
self.value = value
def merge_with(self, rule):
# It does not make sense to merge discrete rules, since they can only
# be eq or not eq.
warnings.warn("Merged two discrete rules `%s` and `%s`" % (self, rule))
return rule
@property
def description(self):
return "{} {}".format("=" if self.equals else "≠", self.value)
def __str__(self):
return "{} {} {}".format(
self.attr_name, "=" if self.equals else "≠", self.value
)
def __repr__(self):
return "DiscreteRule(attr_name='%s', equals=%s, value=%s)" % (
self.attr_name,
self.equals,
self.value,
)
class ContinuousRule(Rule):
"""Continuous rule class for handling numeric rules.
Parameters
----------
attr_name : str
greater : bool
Should indicate whether the variable must be greater than the value.
value : int
inclusive : bool, optional
Should the variable range include the value or not
(LT <> LTE | GT <> GTE). Default is False.
Examples
--------
>>> print(ContinuousRule('age', False, 30, inclusive=True))
age ≤ 30.000
>>> print(ContinuousRule('age', True, 30))
age > 30.000
Notes
-----
.. note:: Continuous rules can currently only be merged with other
continuous rules.
"""
def __init__(self, attr_name, greater, value, inclusive=False):
self.attr_name = attr_name
self.greater = greater
self.value = value
self.inclusive = inclusive
def merge_with(self, rule):
if not isinstance(rule, ContinuousRule):
raise NotImplementedError(
"Continuous rules can currently only be "
"merged with other continuous rules"
)
# Handle when both have same sign
if self.greater == rule.greater:
# When both are GT
if self.greater is True:
larger = max(self.value, rule.value)
return ContinuousRule(self.attr_name, self.greater, larger)
# When both are LT
else:
smaller = min(self.value, rule.value)
return ContinuousRule(self.attr_name, self.greater, smaller)
# When they have different signs we need to return an interval rule
else:
lt_rule, gt_rule = (rule, self) if self.greater else (self, rule)
return IntervalRule(self.attr_name, gt_rule, lt_rule)
@property
def description(self):
return "%s %.3f" % (">" if self.greater else "≤", self.value)
def __str__(self):
return "%s %s %.3f" % (self.attr_name, ">" if self.greater else "≤", self.value)
def __repr__(self):
return (
"ContinuousRule(attr_name='%s', greater=%s, value=%s, "
"inclusive=%s)" % (self.attr_name, self.greater, self.value, self.inclusive)
)
class IntervalRule(Rule):
"""Interval rule class for ranges of continuous values.
Parameters
----------
attr_name : str
left_rule : ContinuousRule
The smaller (left) part of the interval.
right_rule : ContinuousRule
The larger (right) part of the interval.
Examples
--------
>>> print(IntervalRule('Rule',
>>> ContinuousRule('Rule', True, 1, inclusive=True),
>>> ContinuousRule('Rule', False, 3)))
Rule ∈ [1.000, 3.000)
Notes
-----
.. note:: Currently, only cases which appear in classification and
regression trees are implemented. An interval can not be made up of two
parts (e.g. (-∞, -1) ∪ (1, ∞)).
"""
def __init__(self, attr_name, left_rule, right_rule):
if not isinstance(left_rule, ContinuousRule):
raise AttributeError(
"The left rule must be an instance of the `ContinuousRule` " "class."
)
if not isinstance(right_rule, ContinuousRule):
raise AttributeError(
"The right rule must be an instance of the `ContinuousRule` " "class."
)
self.attr_name = attr_name
self.left_rule = left_rule
self.right_rule = right_rule
def merge_with(self, rule):
if isinstance(rule, ContinuousRule):
if rule.greater:
return IntervalRule(
self.attr_name, self.left_rule.merge_with(rule), self.right_rule
)
else:
return IntervalRule(
self.attr_name, self.left_rule, self.right_rule.merge_with(rule)
)
elif isinstance(rule, IntervalRule):
return IntervalRule(
self.attr_name,
self.left_rule.merge_with(rule.left_rule),
self.right_rule.merge_with(rule.right_rule),
)
@property
def description(self):
return "∈ %s%.3f, %.3f%s" % (
"[" if self.left_rule.inclusive else "(",
self.left_rule.value,
self.right_rule.value,
"]" if self.right_rule.inclusive else ")",
)
def __str__(self):
return "%s ∈ %s%.3f, %.3f%s" % (
self.attr_name,
"[" if self.left_rule.inclusive else "(",
self.left_rule.value,
self.right_rule.value,
"]" if self.right_rule.inclusive else ")",
)
def __repr__(self):
return "IntervalRule(attr_name='%s', left_rule=%s, right_rule=%s)" % (
self.attr_name,
repr(self.left_rule),
repr(self.right_rule),
)
| 30.004115 | 88 | 0.575641 |
4a1ed7917503793175292c0ccc035fc9b96d96e3 | 2,690 | py | Python | freyr_app/core/crawlers/custom_crawlers/novgorod_ru.py | blanchefort/freyrmonitoring | 5bf10ba86d3f88390f91106426dd964289f5aee6 | [
"MIT"
] | 2 | 2021-06-01T20:27:14.000Z | 2021-10-01T23:24:45.000Z | freyr_app/core/crawlers/custom_crawlers/novgorod_ru.py | blanchefort/freyrmonitoring | 5bf10ba86d3f88390f91106426dd964289f5aee6 | [
"MIT"
] | null | null | null | freyr_app/core/crawlers/custom_crawlers/novgorod_ru.py | blanchefort/freyrmonitoring | 5bf10ba86d3f88390f91106426dd964289f5aee6 | [
"MIT"
] | null | null | null | from ..freyr_crawler import FreyrCrawler
from dateutil import parser
import re
class NovgorodRuCrawler(FreyrCrawler):
def __init__(self):
super(NovgorodRuCrawler, self).__init__()
self.domain = 'novgorod.ru'
def latest_posts(self):
"""Получаем последные статьи
"""
items = []
response = self.session.get('https://news.novgorod.ru/news/')
if response.status_code != 200:
return items
self._collect_external_links(response)
links_to_download = [l for l in response.html.absolute_links
if '/news/' in l and '/licence/' not in l]
for link in links_to_download:
response = self.session.get(link)
if response.status_code == 200:
self._collect_external_links(response)
items.append({
'url': link,
'title': response.html.find('h1', first=True).text,
'text': response.html.find('.news-text-col', first=True).text,
'date': parser.parse(response.html.find('time', first=True).attrs['datetime']),
'views': response.html.find('.news-header-3', first=True).text,
})
return items
def latest_comments(self, url):
"""Получаем последные комментарии
"""
items = []
post_id = re.findall( r'\d+', url)
if len(post_id) == 0:
return items
url = 'https://news.novgorod.ru/news/comments/' + post_id[-1] + '/'
response = self.session.get(url)
if response.status_code == 200:
self._collect_external_links(response)
if response.html.find('.caq_container', first=True) and response.html.find('.caq_container', first=True).find('.caq_comment'):
comment_blocks = response.html.find('.caq_container', first=True).find('.caq_comment')
for comment in comment_blocks:
if comment.find('.caq_comment_body', first=True):
items.append({
'url': url,
'username': comment.find('.username', first=True).text,
'system_id': int(comment.find('.caq_comment_body', first=True).attrs['data-id']),
'text': comment.find('.caq_comment_body', first=True).text,
'likes': int(comment.find('.vote_up_result', first=True).text.replace('+', '')),
'dislikes': int(comment.find('.vote_down_result', first=True).text.replace('-', ''))
})
return items | 48.035714 | 138 | 0.545353 |
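
# --- Illustrative usage sketch (not part of the original module) ---
# Assuming the FreyrCrawler base class supplies the HTTP `session` used above,
# a caller would typically fetch the posts first and then their comments:
#
# crawler = NovgorodRuCrawler()
# for post in crawler.latest_posts():
#     comments = crawler.latest_comments(post['url'])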
4a1ed807723d13946d72c8ce37d8732f7917ca53 | 582 | py | Python | run_tests.py | xpdAcq/sidewinder-spec | 810639071083400888f4455f9e4fc90cd7116ef4 | [
"BSD-3-Clause"
] | null | null | null | run_tests.py | xpdAcq/sidewinder-spec | 810639071083400888f4455f9e4fc90cd7116ef4 | [
"BSD-3-Clause"
] | 13 | 2016-03-23T17:37:02.000Z | 2018-03-05T17:18:29.000Z | run_tests.py | xpdAcq/SHEDsidewinder | 810639071083400888f4455f9e4fc90cd7116ef4 | [
"BSD-3-Clause"
] | 1 | 2018-03-05T17:42:17.000Z | 2018-03-05T17:42:17.000Z | #!/usr/bin/env python
import sys
import pytest
import os
if __name__ == '__main__':
# show output results from every test function
args = ['-v']
# show the message output for skipped and expected failure tests
if len(sys.argv) > 1:
args.extend(sys.argv[1:])
print('pytest arguments: {}'.format(args))
# # compute coverage stats for xpdAcq
# args.extend(['--cov', 'xpdAcq'])
# call pytest and exit with the return code from pytest so that
# travis will fail correctly if tests fail
exit_res = pytest.main(args)
sys.exit(exit_res)
| 30.631579 | 68 | 0.666667 |
4a1ed8f969b78acd2834b6ff039b178800268804 | 9,959 | py | Python | tests/test_pipelines_feature_extraction.py | HimashiRathnayake/adapter-transformers | d9c06ecbf4aaa33756e848b8fc5b3ec65f5ff4f4 | [
"Apache-2.0"
] | 50,404 | 2019-09-26T09:55:55.000Z | 2022-03-31T23:07:49.000Z | tests/test_pipelines_feature_extraction.py | HimashiRathnayake/adapter-transformers | d9c06ecbf4aaa33756e848b8fc5b3ec65f5ff4f4 | [
"Apache-2.0"
] | 13,179 | 2019-09-26T10:10:57.000Z | 2022-03-31T23:17:08.000Z | tests/test_pipelines_feature_extraction.py | HimashiRathnayake/adapter-transformers | d9c06ecbf4aaa33756e848b8fc5b3ec65f5ff4f4 | [
"Apache-2.0"
] | 13,337 | 2019-09-26T10:49:38.000Z | 2022-03-31T23:06:17.000Z | # Copyright 2020 The HuggingFace Team. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
import unittest
from transformers import MODEL_MAPPING, TF_MODEL_MAPPING, CLIPConfig, FeatureExtractionPipeline, LxmertConfig, pipeline
from transformers.testing_utils import is_pipeline_test, nested_simplify, require_tf, require_torch
from .test_pipelines_common import PipelineTestCaseMeta
@is_pipeline_test
class FeatureExtractionPipelineTests(unittest.TestCase, metaclass=PipelineTestCaseMeta):
model_mapping = MODEL_MAPPING
tf_model_mapping = TF_MODEL_MAPPING
@require_torch
def test_small_model_pt(self):
feature_extractor = pipeline(
task="feature-extraction", model="hf-internal-testing/tiny-random-distilbert", framework="pt"
)
outputs = feature_extractor("This is a test")
self.assertEqual(
nested_simplify(outputs),
[[[2.287, 1.234, 0.042, 1.53, 1.306, 0.879, -0.526, -1.71, -1.276, 0.756, -0.775, -1.048, -0.25, -0.595, -0.137, -0.598, 2.022, -0.812, 0.284, -0.488, -0.391, -0.403, -0.525, -0.061, -0.228, 1.086, 0.378, -0.14, 0.599, -0.087, -2.259, -0.098], [1.676, 0.232, -1.508, -0.145, 1.798, -1.388, 1.331, -0.37, -0.939, 0.043, 0.06, -0.414, -1.408, 0.24, 0.622, -0.55, -0.569, 1.873, -0.706, 1.924, -0.254, 1.927, -0.423, 0.152, -0.952, 0.509, -0.496, -0.968, 0.093, -1.049, -0.65, 0.312], [0.207, -0.775, -1.822, 0.321, -0.71, -0.201, 0.3, 1.146, -0.233, -0.753, -0.305, 1.309, -1.47, -0.21, 1.802, -1.555, -1.175, 1.323, -0.303, 0.722, -0.076, 0.103, -1.406, 1.931, 0.091, 0.237, 1.172, 1.607, 0.253, -0.9, -1.068, 0.438], [0.615, 1.077, 0.171, -0.175, 1.3, 0.901, -0.653, -0.138, 0.341, -0.654, -0.184, -0.441, -0.424, 0.356, -0.075, 0.26, -1.023, 0.814, 0.524, -0.904, -0.204, -0.623, 1.234, -1.03, 2.594, 0.56, 1.831, -0.199, -1.508, -0.492, -1.687, -2.165], [0.129, 0.008, -1.279, -0.412, -0.004, 1.663, 0.196, 0.104, 0.123, 0.119, 0.635, 1.757, 2.334, -0.799, -1.626, -1.26, 0.595, -0.316, -1.399, 0.232, 0.264, 1.386, -1.171, -0.256, -0.256, -1.944, 1.168, -0.368, -0.714, -0.51, 0.454, 1.148], [-0.32, 0.29, -1.309, -0.177, 0.453, 0.636, -0.024, 0.509, 0.931, -1.754, -1.575, 0.786, 0.046, -1.165, -1.416, 1.373, 1.293, -0.285, -1.541, -1.186, -0.106, -0.994, 2.001, 0.972, -0.02, 1.654, -0.236, 0.643, 1.02, 0.572, -0.914, -0.154], [0.7, -0.937, 0.441, 0.25, 0.78, -0.022, 0.282, -0.095, 1.558, -0.336, 1.706, 0.884, 1.28, 0.198, -0.796, 1.218, -1.769, 1.197, -0.342, -0.177, -0.645, 1.364, 0.008, -0.597, -0.484, -2.772, -0.696, -0.632, -0.34, -1.527, -0.562, 0.862], [2.504, 0.831, -1.271, -0.033, 0.298, -0.735, 1.339, 1.74, 0.233, -1.424, -0.819, -0.761, 0.291, 0.853, -0.092, -0.885, 0.164, 1.025, 0.907, 0.749, -1.515, -0.545, -1.365, 0.271, 0.034, -2.005, 0.031, 0.244, 0.621, 0.176, 0.336, -1.196], [-0.711, 0.591, -1.001, -0.946, 0.784, -1.66, 1.545, 0.799, -0.857, 1.148, 0.213, -0.285, 0.464, -0.139, 0.79, -1.663, -1.121, 0.575, -0.178, -0.508, 1.565, -0.242, -0.346, 1.024, -1.135, -0.158, -2.101, 0.275, 2.009, -0.425, 0.716, 0.981], [0.912, -1.186, -0.846, -0.421, -1.315, -0.827, 0.309, 0.533, 1.029, -2.343, 1.513, -1.238, 1.487, -0.849, 0.896, -0.927, -0.459, 0.159, 0.177, 0.873, 0.935, 1.433, -0.485, 0.737, 1.327, -0.338, 1.608, -0.47, -0.445, -1.118, -0.213, -0.446], [-0.434, -1.362, -1.098, -1.068, 1.507, 0.003, 0.413, -0.395, 0.897, -0.237, 1.405, -0.344, 1.693, 0.677, 0.097, -0.257, -0.602, 1.026, -1.229, 0.855, -0.713, 1.014, 0.443, 0.238, 0.425, -2.184, 1.933, -1.157, -1.132, -0.597, -0.785, 0.967], [0.58, -0.971, 0.789, -0.468, -0.576, 1.779, 1.747, 1.715, -1.939, 0.125, 0.656, -0.042, -1.024, -1.767, 0.107, -0.408, -0.866, -1.774, 1.248, 0.939, -0.033, 1.523, 1.168, -0.744, 0.209, -0.168, -0.316, 0.207, -0.432, 0.047, -0.646, -0.664], [-0.185, -0.613, -1.695, 1.602, -0.32, -0.277, 0.967, 0.728, -0.965, -0.234, 1.069, -0.63, -1.631, 0.711, 0.426, 1.298, -0.191, -0.467, -0.771, 0.971, -0.118, -1.577, -2.064, -0.055, -0.59, 0.642, -0.997, 1.251, 0.538, 1.367, 0.106, 1.704]]]) # fmt: skip
@require_tf
def test_small_model_tf(self):
feature_extractor = pipeline(
task="feature-extraction", model="hf-internal-testing/tiny-random-distilbert", framework="tf"
)
outputs = feature_extractor("This is a test")
self.assertEqual(
nested_simplify(outputs),
[[[2.287, 1.234, 0.042, 1.53, 1.306, 0.879, -0.526, -1.71, -1.276, 0.756, -0.775, -1.048, -0.25, -0.595, -0.137, -0.598, 2.022, -0.812, 0.284, -0.488, -0.391, -0.403, -0.525, -0.061, -0.228, 1.086, 0.378, -0.14, 0.599, -0.087, -2.259, -0.098], [1.676, 0.232, -1.508, -0.145, 1.798, -1.388, 1.331, -0.37, -0.939, 0.043, 0.06, -0.414, -1.408, 0.24, 0.622, -0.55, -0.569, 1.873, -0.706, 1.924, -0.254, 1.927, -0.423, 0.152, -0.952, 0.509, -0.496, -0.968, 0.093, -1.049, -0.65, 0.312], [0.207, -0.775, -1.822, 0.321, -0.71, -0.201, 0.3, 1.146, -0.233, -0.753, -0.305, 1.309, -1.47, -0.21, 1.802, -1.555, -1.175, 1.323, -0.303, 0.722, -0.076, 0.103, -1.406, 1.931, 0.091, 0.237, 1.172, 1.607, 0.253, -0.9, -1.068, 0.438], [0.615, 1.077, 0.171, -0.175, 1.3, 0.901, -0.653, -0.138, 0.341, -0.654, -0.184, -0.441, -0.424, 0.356, -0.075, 0.26, -1.023, 0.814, 0.524, -0.904, -0.204, -0.623, 1.234, -1.03, 2.594, 0.56, 1.831, -0.199, -1.508, -0.492, -1.687, -2.165], [0.129, 0.008, -1.279, -0.412, -0.004, 1.663, 0.196, 0.104, 0.123, 0.119, 0.635, 1.757, 2.334, -0.799, -1.626, -1.26, 0.595, -0.316, -1.399, 0.232, 0.264, 1.386, -1.171, -0.256, -0.256, -1.944, 1.168, -0.368, -0.714, -0.51, 0.454, 1.148], [-0.32, 0.29, -1.309, -0.177, 0.453, 0.636, -0.024, 0.509, 0.931, -1.754, -1.575, 0.786, 0.046, -1.165, -1.416, 1.373, 1.293, -0.285, -1.541, -1.186, -0.106, -0.994, 2.001, 0.972, -0.02, 1.654, -0.236, 0.643, 1.02, 0.572, -0.914, -0.154], [0.7, -0.937, 0.441, 0.25, 0.78, -0.022, 0.282, -0.095, 1.558, -0.336, 1.706, 0.884, 1.28, 0.198, -0.796, 1.218, -1.769, 1.197, -0.342, -0.177, -0.645, 1.364, 0.008, -0.597, -0.484, -2.772, -0.696, -0.632, -0.34, -1.527, -0.562, 0.862], [2.504, 0.831, -1.271, -0.033, 0.298, -0.735, 1.339, 1.74, 0.233, -1.424, -0.819, -0.761, 0.291, 0.853, -0.092, -0.885, 0.164, 1.025, 0.907, 0.749, -1.515, -0.545, -1.365, 0.271, 0.034, -2.005, 0.031, 0.244, 0.621, 0.176, 0.336, -1.196], [-0.711, 0.591, -1.001, -0.946, 0.784, -1.66, 1.545, 0.799, -0.857, 1.148, 0.213, -0.285, 0.464, -0.139, 0.79, -1.663, -1.121, 0.575, -0.178, -0.508, 1.565, -0.242, -0.346, 1.024, -1.135, -0.158, -2.101, 0.275, 2.009, -0.425, 0.716, 0.981], [0.912, -1.186, -0.846, -0.421, -1.315, -0.827, 0.309, 0.533, 1.029, -2.343, 1.513, -1.238, 1.487, -0.849, 0.896, -0.927, -0.459, 0.159, 0.177, 0.873, 0.935, 1.433, -0.485, 0.737, 1.327, -0.338, 1.608, -0.47, -0.445, -1.118, -0.213, -0.446], [-0.434, -1.362, -1.098, -1.068, 1.507, 0.003, 0.413, -0.395, 0.897, -0.237, 1.405, -0.344, 1.693, 0.677, 0.097, -0.257, -0.602, 1.026, -1.229, 0.855, -0.713, 1.014, 0.443, 0.238, 0.425, -2.184, 1.933, -1.157, -1.132, -0.597, -0.785, 0.967], [0.58, -0.971, 0.789, -0.468, -0.576, 1.779, 1.747, 1.715, -1.939, 0.125, 0.656, -0.042, -1.024, -1.767, 0.107, -0.408, -0.866, -1.774, 1.248, 0.939, -0.033, 1.523, 1.168, -0.744, 0.209, -0.168, -0.316, 0.207, -0.432, 0.047, -0.646, -0.664], [-0.185, -0.613, -1.695, 1.602, -0.32, -0.277, 0.967, 0.728, -0.965, -0.234, 1.069, -0.63, -1.631, 0.711, 0.426, 1.298, -0.191, -0.467, -0.771, 0.971, -0.118, -1.577, -2.064, -0.055, -0.59, 0.642, -0.997, 1.251, 0.538, 1.367, 0.106, 1.704]]]) # fmt: skip
def get_shape(self, input_, shape=None):
if shape is None:
shape = []
if isinstance(input_, list):
subshapes = [self.get_shape(in_, shape) for in_ in input_]
if all(s == 0 for s in subshapes):
shape.append(len(input_))
else:
subshape = subshapes[0]
shape = [len(input_), *subshape]
elif isinstance(input_, float):
return 0
else:
raise ValueError("We expect lists of floats, nothing else")
return shape
def run_pipeline_test(self, model, tokenizer, feature_extractor):
if tokenizer is None:
self.skipTest("No tokenizer")
return
elif isinstance(model.config, (LxmertConfig, CLIPConfig)):
self.skipTest(
"This is an Lxmert bimodal model, we need to find a more consistent way to switch on those models."
)
return
elif model.config.is_encoder_decoder:
self.skipTest(
"""encoder_decoder models are trickier for this pipeline.
                Do we want encoder + decoder inputs to get some features?
                Do we want encoder-only features?
For now ignore those.
"""
)
return
feature_extractor = FeatureExtractionPipeline(
model=model, tokenizer=tokenizer, feature_extractor=feature_extractor
)
outputs = feature_extractor("This is a test")
shape = self.get_shape(outputs)
self.assertEqual(shape[0], 1)
# If we send too small input
# there's a bug within FunnelModel (output with shape [1, 4, 2, 1] doesn't match the broadcast shape [1, 4, 2, 2])
outputs = feature_extractor(["This is a test", "Another longer test"])
shape = self.get_shape(outputs)
self.assertEqual(shape[0], 2)
| 100.59596 | 3,143 | 0.568631 |
4a1ed9206be6449325c190ad56e38228ed123840 | 2,835 | py | Python | scripts/unstoppable.py | shoorano/damn-vulnerable-defi-python | d5ba2126b2f8307fb888f93bb5068fed331d9aea | [
"MIT"
] | null | null | null | scripts/unstoppable.py | shoorano/damn-vulnerable-defi-python | d5ba2126b2f8307fb888f93bb5068fed331d9aea | [
"MIT"
] | null | null | null | scripts/unstoppable.py | shoorano/damn-vulnerable-defi-python | d5ba2126b2f8307fb888f93bb5068fed331d9aea | [
"MIT"
] | null | null | null | #!/usr/bin/python3
from brownie import DamnValuableToken, UnstoppableLender, ReceiverUnstoppable, accounts
def main():
UnstoppableChecker().main()
class UnstoppableChecker():
"""class to run pass/fail check on challenge exploit: unstoppable"""
def __init__(self):
self.deployer = accounts[0]
self.attacker = accounts[1]
self.some_user = accounts[2]
self.other_accounts = accounts[3:]
self.TOKENS_IN_POOL = self.ether(10**6)
self.INITIAL_ATTACKER_BALANCE = self.ether(100)
self.token = DamnValuableToken.deploy({'from': self.deployer})
self.pool = UnstoppableLender.deploy(self.token.address, {'from': self.deployer})
self.receiverContract = ReceiverUnstoppable.deploy(self.pool.address, {"from": self.some_user})
def main(self):
"""runs setup, exploit and test_exploit, results are logged by respective classes"""
self.setup()
self.test_contract_pre_exploit()
self.exploit()
self.test_contract_post_exploit()
def setup(self):
"""performs required deployments and token transfers prior to running the exploit"""
self.token.approve(self.pool.address, self.TOKENS_IN_POOL + self.INITIAL_ATTACKER_BALANCE/2, {"from": self.deployer})
self.pool.depositTokens(self.TOKENS_IN_POOL, {"from": self.deployer})
self.token.transfer(self.attacker, self.INITIAL_ATTACKER_BALANCE, {"from": self.deployer})
print(f"Setup Check 1 passed: {self.token.balanceOf(self.pool.address) == self.TOKENS_IN_POOL}")
print(f"Setup Check 2 passed: {self.token.balanceOf(self.attacker) == self.INITIAL_ATTACKER_BALANCE}")
def test_contract_pre_exploit(self):
"""tests contracts executeFlashLoan method pre-exploit"""
print("PRE EXPLOIT TEST RUNNING...")
self.receiverContract.executeFlashLoan(10, {"from": self.some_user})
def exploit(self):
"""WRITE EXPLOIT HERE"""
print("RUNNING EXPLOIT...")
self.token.transfer(self.pool.address, self.INITIAL_ATTACKER_BALANCE, {"from": self.attacker })
return
def exploit_outcome(self):
"""returns True if exploit outcome is as expected"""
return self.receiverContract.executeFlashLoan(10, {"from": self.some_user}).revert_msg == "Transaction reverted without a reason string"
def test_contract_post_exploit(self):
"""tests contracts executeFlashLoan method post exploit"""
print("POST EXPLOIT TEST RUNNING...")
try:
assert(self.exploit_outcome())
except:
print("Exploit did not pass: \u274E \n Expected a transaction revert")
return
print("Passed: \u2705")
def ether(self, amount):
"""receives number and converts to wei"""
return amount*10**18
| 41.691176 | 144 | 0.675132 |
4a1ed99b479e4ea45809da463b3ae0321bf335aa | 3,644 | py | Python | pahelix/networks/involution_block.py | agave233/PaddleHelix | e5578f72c2a203a27d9df7da111f1ced826c1429 | [
"Apache-2.0"
] | 454 | 2020-11-21T01:02:45.000Z | 2022-03-29T12:53:40.000Z | pahelix/networks/involution_block.py | chupvl/PaddleHelix | 6e082f89b8090c3c360593d40a08bffc884165dd | [
"Apache-2.0"
] | 161 | 2020-12-12T06:35:54.000Z | 2022-03-27T11:31:13.000Z | pahelix/networks/involution_block.py | chupvl/PaddleHelix | 6e082f89b8090c3c360593d40a08bffc884165dd | [
"Apache-2.0"
] | 108 | 2020-12-07T09:01:10.000Z | 2022-03-31T14:42:29.000Z | # Copyright (c) 2021 PaddlePaddle Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""
Involution Block
"""
import paddle
from paddle import nn
import paddle.nn.functional as F
import numpy as np
import math
class Involution2D(nn.Layer):
"""
Involution module.
Args:
in_channel: The channel size of input.
out_channel: The channel size of output.
sigma_mapping: Sigma mapping.
kernel_size: Kernel size.
stride: Stride size.
groups: Group size.
reduce_ratio: The ratio of reduce.
dilation: The dilation size.
padding: The padding size.
Returns:
output: Tbe output of Involution2D block.
References:
[1] Involution: Inverting the Inherence of Convolution for Visual Recognition. https://arxiv.org/abs/2103.06255
"""
def __init__(self, in_channel, out_channel, sigma_mapping=None, kernel_size=7, stride=1,
groups=1, reduce_ratio=1, dilation=1, padding=3):
"""
Initialization
"""
super(Involution2D, self).__init__()
self.in_channel = in_channel
self.out_channel = out_channel
self.kernel_size = kernel_size
self.stride = stride
self.groups = groups
self.reduce_ratio = reduce_ratio
self.dilation = dilation
self.padding = padding
self.sigma_mapping = nn.Sequential(
nn.BatchNorm2D(num_features=self.out_channel // self.reduce_ratio),
nn.ReLU()
)
self.initial_mapping = nn.Conv2D(in_channels=self.in_channel, out_channels=self.out_channel,
kernel_size=1, stride=1, padding=0)
self.o_mapping = nn.AvgPool2D(kernel_size=self.stride, stride=self.stride)
self.reduce_mapping = nn.Conv2D(in_channels=self.in_channel, out_channels=self.out_channel // self.reduce_ratio,
kernel_size=1, stride=1, padding=0)
self.span_mapping = nn.Conv2D(in_channels=self.out_channel // self.reduce_ratio,
out_channels=self.kernel_size * self.kernel_size * self.groups,
kernel_size=1, stride=1, padding=0)
def forward(self, x):
"""
Involution block
"""
batch_size, _, height, width = x.shape
temp_mapping = self.initial_mapping(x)
input_unfolded = F.unfold(temp_mapping, self.kernel_size, strides=self.stride,
paddings=self.padding, dilations=self.dilation)
input_unfolded = input_unfolded.view(batch_size, self.groups, self.out_channel // self.groups,
self.kernel_size * self.kernel_size, height, width)
kernel = self.span_mapping(self.sigma_mapping(self.reduce_mapping(self.o_mapping(x))))
kernel = kernel.view(batch_size, self.groups, self.kernel_size * self.kernel_size, height, width).unsqueeze(2)
output = paddle.sum(kernel * input_unfolded, axis=3).view(batch_size, -1, height, width)
return output | 40.488889 | 121 | 0.64764 |
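
# --- Illustrative usage sketch (not part of the original module) ---
# Rough idea of how the block above is intended to be called; the shapes are
# assumptions based on the constructor defaults (kernel_size=7, stride=1,
# padding=3), which preserve the spatial size.
#
# block = Involution2D(in_channel=16, out_channel=16)
# x = paddle.randn([2, 16, 32, 32])  # (batch, channels, height, width)
# y = block(x)                       # expected shape: [2, 16, 32, 32]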
4a1eda95307aae492b4a8a43b2d6f395075f373e | 389 | py | Python | Single_Number.py | Ahmed--Mohsen/leetcode | ad8967a5d85ac54f53b3fcce04df1b4bdec5fd9e | [
"MIT"
] | 4 | 2016-01-03T20:57:21.000Z | 2017-07-06T23:22:40.000Z | Single_Number.py | Ahmed--Mohsen/leetcode | ad8967a5d85ac54f53b3fcce04df1b4bdec5fd9e | [
"MIT"
] | null | null | null | Single_Number.py | Ahmed--Mohsen/leetcode | ad8967a5d85ac54f53b3fcce04df1b4bdec5fd9e | [
"MIT"
] | null | null | null | """
Given an array of integers, every element appears twice except for one. Find that single one.
Note:
Your algorithm should have a linear runtime complexity. Could you implement it without using extra memory?
"""
class Solution:
# @param A, a list of integer
# @return an integer
def singleNumber(self, A):
missing = 0
for a in A:
missing = missing ^ a
return missing | 22.882353 | 106 | 0.717224 |
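
# Illustrative check (not part of the original solution): every value except
# one appears twice, so XOR-ing all elements cancels the pairs and leaves the
# single value.
if __name__ == "__main__":
    print(Solution().singleNumber([4, 1, 2, 1, 2]))  # 4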
4a1edab5d0ce23ec38fe931a08e40df9e9c9be5b | 686 | py | Python | html_print.py | evandrocoan/SublimePackageDefault | ff47d47b59fc3ff2c1146168605ed85ace1f3b0e | [
"Unlicense",
"MIT"
] | 3 | 2018-12-08T21:44:45.000Z | 2019-03-01T03:22:32.000Z | html_print.py | evandrocoan/SublimePackageDefault | ff47d47b59fc3ff2c1146168605ed85ace1f3b0e | [
"Unlicense",
"MIT"
] | null | null | null | html_print.py | evandrocoan/SublimePackageDefault | ff47d47b59fc3ff2c1146168605ed85ace1f3b0e | [
"Unlicense",
"MIT"
] | null | null | null | import sublime_plugin
import tempfile
import pathlib
import webbrowser
class HtmlPrintCommand(sublime_plugin.TextCommand):
def run(self, edit):
view = self.view
html = view.export_to_html(enclosing_tags=True, font_size=False)
with tempfile.NamedTemporaryFile('w', suffix=".html", encoding='utf-8', delete=False) as f:
f.write("<html><head><meta charset=\"UTF-8\"></head><body>")
f.write(html)
f.write('<script>window.print()</script></body></html>')
url = pathlib.Path(f.name).as_uri()
controller = webbrowser.get(using=view.settings().get('print_using_browser'))
controller.open_new_tab(url)
| 32.666667 | 99 | 0.658892 |
4a1edb56c3fb600dba63f2d5c2e4320763b1744f | 4,651 | py | Python | classtides.py | KanahS/CTA200 | 5d426a7e15296aae114f9cbd0770b333004fed07 | [
"MIT"
] | null | null | null | classtides.py | KanahS/CTA200 | 5d426a7e15296aae114f9cbd0770b333004fed07 | [
"MIT"
] | null | null | null | classtides.py | KanahS/CTA200 | 5d426a7e15296aae114f9cbd0770b333004fed07 | [
"MIT"
] | null | null | null | #!/usr/bin/env python
# coding: utf-8
import numpy as np
import matplotlib
import matplotlib.pyplot as plt
import rebound as rb
import reboundx as rx
tup_num = 7
e_b = np.linspace(0, 0.7, tup_num)
a_p = np.linspace(1, 5, tup_num)
Qex = []
for x in range(4,7):
Q = 10**x
Qex.append(Q)
Np = 5
tup_list = []
for Q in Qex:
for e in e_b:
for a in a_p:
tup_list.append((Q,e,a,Np))
Nq = len(Qex)
Ne = len(e_b)
Na = len(a_p)
def survival(initial):
Q, eb, ap, Np = initial[0], initial[1], initial[2], initial[3]
sim = rb.Simulation()
sim.integrator = "whfast"
mu = 0.5
m1 = 1
m2 = abs((m1*mu)/(1-mu))
sim.add(m=m1, hash="Binary 1")
sim.add(m=m2, a=1, e= eb, hash="Binary 2")
#initializing Np massless planets
for p in range(Np):
f_plan = np.random.rand()*2.*np.pi
sim.add(m=0, a= ap, e=0, f= f_plan)
#array to keep track of survival times
sim.move_to_com()
# Adding Tidal Elements
rebx = rx.Extras(sim)
tides = rebx.load_force("tides_constant_time_lag")
rebx.add_force(tides)
ps = sim.particles
k2 = ps[0].params["tctl_k1"] = 0.035
nb = ps[1].n
ps[0].params["tctl_tau"] = 3/(2*Q*k2*nb)
tau = ps[0].params["tctl_tau"]
directory_orbit = '/mnt/raid-cita/ksmith/ClassOrbParamsTidesKC/'
filename_orbit = r"KCeb{:.3f}_ap{:.3f}_Np{:.1f}_tup{:.1f}_tau{:.4f}.bin".format(eb,ap,Np,tup_num,tau)
sim.automateSimulationArchive(directory_orbit+filename_orbit, interval=1e1, deletefile=True)
#integrate
N_times = int(100)
N_orbit = (1e4)*2*np.pi
times = np.linspace(0,N_orbit,N_times)
#array for survival times
surv = np.zeros(Np)
for i, time in enumerate(times):
nb = ps[1].n
r_eb = ps[1].e
N_re = (1.+(15./2.)*r_eb**2+(45./8.)*r_eb**4+(5./16.)*r_eb**6)/(1-r_eb**2)**6
Omega_re = (1+3*r_eb**2+(3./8.)*r_eb**4)/(1-r_eb**2)**(9./2.)
ps[0].params["Omega"] = N_re/Omega_re*nb
sim.integrate(time, exact_finish_time=0)
for num in reversed(range(2, sim.N)):
p = sim.particles[num]
if (p.x**2 + p.y**2) > (100)**2:
surv[num-2] = time
print(f'removing planet {num}')
sim.remove(num)
if sim.N==2:
break
surv[(surv==0)] = time
print(f'simulation finished, {len(sim.particles)-2} planets remaining')
return np.mean(surv)
pool = rb.InterruptiblePool(processes=16)
mapping = pool.map(func= survival, iterable= tup_list)
time_surv = np.reshape(mapping, [Nq,Ne,Na])
directory_surv = '/mnt/raid-cita/ksmith/ClassSurvTimesTides/'
txt_surv = f'map_tup{tup_num}plan{Np}.txt'
npy_surv = f'map_tup{tup_num}plan{Np}.npy'
bin_surv = f'map_tup{tup_num}plan{Np}.bin'
np.savetxt(directory_surv+txt_surv, mapping)
np.savetxt(directory_surv+npy_surv, mapping)
np.savetxt(directory_surv+bin_surv, mapping)
fig, ax = plt.subplots(1, Nq, figsize=(20,5), constrained_layout=True)
ax = ax.ravel()
SurvTimeArr = [time_surv[i,:,:] for i in range(Nq)]
for i in range(Nq):
pcm = ax[i].pcolormesh(e_b, a_p, SurvTimeArr[i].T, shading='auto')
a_b = 2.278 + 3.824*e_b - 1.71*(e_b**2)
a_c = 1.6 + 5.1*e_b + (- 2.22*(e_b**2)) + 4.12*0.5 + (- 4.27*e_b*0.5) + (- 5.09*(0.5**2)) + 4.61*(e_b**2)*(0.5**2)
ax[i].plot(e_b, a_c, color='lightsteelblue')
ax[i].scatter(e_b, a_c, color='lightsteelblue')
ax[i].plot(e_b, a_b, color='olive')
ax[i].scatter(e_b, a_b, color='olive')
ax[i].set_title('Q={:.1e}'.format(Qex[i]))
ax[i].set_xlabel('Binary Eccentricity (e)')
ax[i].set_ylabel('Planetary Semi-Major Axis (a)')
ax[i].set_xlim(0.0,0.7)
ax[i].set_ylim(1,5)
plt.colorbar(pcm, location='right',label='Test Particle Survival Times')
# older plots
#figure_all = time_surv
#i = 0
#figure = figure_all[i,:,:]
#plt.pcolormesh(e_b, a_p, figure.T, shading='auto')
#plt.title(f'Mean Survival Times (Q={Qex[i]})')
#plt.xlabel('Binary Eccentricity (e)')
#plt.ylabel('Planetary Semi-Major Axis (a)')
#plt.xlim(0.0,0.7)
#plt.ylim(1,5)
#a_c = 1.6 + 5.1*e_b - 2.22*(e_b**2) + 4.12*0.5 - 4.27*e_b*0.5 - 5.09*(0.5**2) + 4.61*(e_b**2)*(0.5**2)
#a_b = 2.278 + 3.824*e_b - 1.71*(e_b**2)
#plt.plot(e_b, a_c, color='lightsteelblue')
#plt.scatter(e_b, a_c, color='lightsteelblue')
#plt.plot(e_b, a_b, color='white')
#plt.scatter(e_b, a_b, color='white')
#plt.colorbar(label='Test Particle Survival Times')
plt.show()
directory_test = '/mnt/raid-cita/ksmith/'
completed = 'The simulation finished!'
name = 'DONE'
np.save(directory_test+name, completed)
| 28.187879 | 118 | 0.607396 |
4a1edbfd0a3fee44c30a321122252b23d97c0299 | 8,115 | py | Python | SipMask-mmdetection/configs/cascade_mask_rcnn_x101_32x4d_fpn_1x.py | anirudh-chakravarthy/SipMask | fc82b12c13abb091e271eb4f1b6734da18234443 | [
"MIT"
] | 1,141 | 2020-06-04T01:11:22.000Z | 2022-03-31T07:12:52.000Z | SipMask-mmdetection/configs/cascade_mask_rcnn_x101_32x4d_fpn_1x.py | anirudh-chakravarthy/SipMask | fc82b12c13abb091e271eb4f1b6734da18234443 | [
"MIT"
] | 98 | 2020-01-21T09:41:30.000Z | 2022-03-12T00:53:06.000Z | SipMask-mmdetection/configs/cascade_mask_rcnn_x101_32x4d_fpn_1x.py | anirudh-chakravarthy/SipMask | fc82b12c13abb091e271eb4f1b6734da18234443 | [
"MIT"
] | 233 | 2020-01-18T03:46:27.000Z | 2022-03-19T03:17:47.000Z | # model settings
model = dict(
type='CascadeRCNN',
num_stages=3,
pretrained='open-mmlab://resnext101_32x4d',
backbone=dict(
type='ResNeXt',
depth=101,
groups=32,
base_width=4,
num_stages=4,
out_indices=(0, 1, 2, 3),
frozen_stages=1,
norm_cfg=dict(type='BN', requires_grad=True),
style='pytorch'),
neck=dict(
type='FPN',
in_channels=[256, 512, 1024, 2048],
out_channels=256,
num_outs=5),
rpn_head=dict(
type='RPNHead',
in_channels=256,
feat_channels=256,
anchor_scales=[8],
anchor_ratios=[0.5, 1.0, 2.0],
anchor_strides=[4, 8, 16, 32, 64],
target_means=[.0, .0, .0, .0],
target_stds=[1.0, 1.0, 1.0, 1.0],
loss_cls=dict(
type='CrossEntropyLoss', use_sigmoid=True, loss_weight=1.0),
loss_bbox=dict(type='SmoothL1Loss', beta=1.0 / 9.0, loss_weight=1.0)),
bbox_roi_extractor=dict(
type='SingleRoIExtractor',
roi_layer=dict(type='RoIAlign', out_size=7, sample_num=2),
out_channels=256,
featmap_strides=[4, 8, 16, 32]),
bbox_head=[
dict(
type='SharedFCBBoxHead',
num_fcs=2,
in_channels=256,
fc_out_channels=1024,
roi_feat_size=7,
num_classes=81,
target_means=[0., 0., 0., 0.],
target_stds=[0.1, 0.1, 0.2, 0.2],
reg_class_agnostic=True,
loss_cls=dict(
type='CrossEntropyLoss', use_sigmoid=False, loss_weight=1.0),
loss_bbox=dict(type='SmoothL1Loss', beta=1.0, loss_weight=1.0)),
dict(
type='SharedFCBBoxHead',
num_fcs=2,
in_channels=256,
fc_out_channels=1024,
roi_feat_size=7,
num_classes=81,
target_means=[0., 0., 0., 0.],
target_stds=[0.05, 0.05, 0.1, 0.1],
reg_class_agnostic=True,
loss_cls=dict(
type='CrossEntropyLoss', use_sigmoid=False, loss_weight=1.0),
loss_bbox=dict(type='SmoothL1Loss', beta=1.0, loss_weight=1.0)),
dict(
type='SharedFCBBoxHead',
num_fcs=2,
in_channels=256,
fc_out_channels=1024,
roi_feat_size=7,
num_classes=81,
target_means=[0., 0., 0., 0.],
target_stds=[0.033, 0.033, 0.067, 0.067],
reg_class_agnostic=True,
loss_cls=dict(
type='CrossEntropyLoss', use_sigmoid=False, loss_weight=1.0),
loss_bbox=dict(type='SmoothL1Loss', beta=1.0, loss_weight=1.0))
],
mask_roi_extractor=dict(
type='SingleRoIExtractor',
roi_layer=dict(type='RoIAlign', out_size=14, sample_num=2),
out_channels=256,
featmap_strides=[4, 8, 16, 32]),
mask_head=dict(
type='FCNMaskHead',
num_convs=4,
in_channels=256,
conv_out_channels=256,
num_classes=81,
loss_mask=dict(
type='CrossEntropyLoss', use_mask=True, loss_weight=1.0)))
# model training and testing settings
train_cfg = dict(
rpn=dict(
assigner=dict(
type='MaxIoUAssigner',
pos_iou_thr=0.7,
neg_iou_thr=0.3,
min_pos_iou=0.3,
ignore_iof_thr=-1),
sampler=dict(
type='RandomSampler',
num=256,
pos_fraction=0.5,
neg_pos_ub=-1,
add_gt_as_proposals=False),
allowed_border=0,
pos_weight=-1,
debug=False),
rpn_proposal=dict(
nms_across_levels=False,
nms_pre=2000,
nms_post=2000,
max_num=2000,
nms_thr=0.7,
min_bbox_size=0),
rcnn=[
dict(
assigner=dict(
type='MaxIoUAssigner',
pos_iou_thr=0.5,
neg_iou_thr=0.5,
min_pos_iou=0.5,
ignore_iof_thr=-1),
sampler=dict(
type='RandomSampler',
num=512,
pos_fraction=0.25,
neg_pos_ub=-1,
add_gt_as_proposals=True),
mask_size=28,
pos_weight=-1,
debug=False),
dict(
assigner=dict(
type='MaxIoUAssigner',
pos_iou_thr=0.6,
neg_iou_thr=0.6,
min_pos_iou=0.6,
ignore_iof_thr=-1),
sampler=dict(
type='RandomSampler',
num=512,
pos_fraction=0.25,
neg_pos_ub=-1,
add_gt_as_proposals=True),
mask_size=28,
pos_weight=-1,
debug=False),
dict(
assigner=dict(
type='MaxIoUAssigner',
pos_iou_thr=0.7,
neg_iou_thr=0.7,
min_pos_iou=0.7,
ignore_iof_thr=-1),
sampler=dict(
type='RandomSampler',
num=512,
pos_fraction=0.25,
neg_pos_ub=-1,
add_gt_as_proposals=True),
mask_size=28,
pos_weight=-1,
debug=False)
],
stage_loss_weights=[1, 0.5, 0.25])
test_cfg = dict(
rpn=dict(
nms_across_levels=False,
nms_pre=1000,
nms_post=1000,
max_num=1000,
nms_thr=0.7,
min_bbox_size=0),
rcnn=dict(
score_thr=0.05,
nms=dict(type='nms', iou_thr=0.5),
max_per_img=100,
mask_thr_binary=0.5))
# dataset settings
dataset_type = 'CocoDataset'
data_root = 'data/coco/'
img_norm_cfg = dict(
mean=[123.675, 116.28, 103.53], std=[58.395, 57.12, 57.375], to_rgb=True)
train_pipeline = [
dict(type='LoadImageFromFile'),
dict(type='LoadAnnotations', with_bbox=True, with_mask=True),
dict(type='Resize', img_scale=(1333, 800), keep_ratio=True),
dict(type='RandomFlip', flip_ratio=0.5),
dict(type='Normalize', **img_norm_cfg),
dict(type='Pad', size_divisor=32),
dict(type='DefaultFormatBundle'),
dict(type='Collect', keys=['img', 'gt_bboxes', 'gt_labels', 'gt_masks']),
]
test_pipeline = [
dict(type='LoadImageFromFile'),
dict(
type='MultiScaleFlipAug',
img_scale=(1333, 800),
flip=False,
transforms=[
dict(type='Resize', keep_ratio=True),
dict(type='RandomFlip'),
dict(type='Normalize', **img_norm_cfg),
dict(type='Pad', size_divisor=32),
dict(type='ImageToTensor', keys=['img']),
dict(type='Collect', keys=['img']),
])
]
data = dict(
imgs_per_gpu=2,
workers_per_gpu=2,
train=dict(
type=dataset_type,
ann_file=data_root + 'annotations/instances_train2017.json',
img_prefix=data_root + 'train2017/',
pipeline=train_pipeline),
val=dict(
type=dataset_type,
ann_file=data_root + 'annotations/instances_val2017.json',
img_prefix=data_root + 'val2017/',
pipeline=test_pipeline),
test=dict(
type=dataset_type,
ann_file=data_root + 'annotations/instances_val2017.json',
img_prefix=data_root + 'val2017/',
pipeline=test_pipeline))
evaluation = dict(interval=1, metric=['bbox', 'segm'])
# optimizer
optimizer = dict(type='SGD', lr=0.02, momentum=0.9, weight_decay=0.0001)
optimizer_config = dict(grad_clip=dict(max_norm=35, norm_type=2))
# learning policy
lr_config = dict(
policy='step',
warmup='linear',
warmup_iters=500,
warmup_ratio=1.0 / 3,
step=[8, 11])
checkpoint_config = dict(interval=1)
# yapf:disable
log_config = dict(
interval=50,
hooks=[
dict(type='TextLoggerHook'),
# dict(type='TensorboardLoggerHook')
])
# yapf:enable
# runtime settings
total_epochs = 12
dist_params = dict(backend='nccl')
log_level = 'INFO'
work_dir = './work_dirs/cascade_mask_rcnn_x101_32x4d_fpn_1x'
load_from = None
resume_from = None
workflow = [('train', 1)]
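# --- Illustrative usage note (not part of the original config) ---
# In a standard mmdetection checkout, a config like this is typically launched
# with the bundled training scripts; paths and GPU count below are assumptions:
#
# python tools/train.py configs/cascade_mask_rcnn_x101_32x4d_fpn_1x.py
# ./tools/dist_train.sh configs/cascade_mask_rcnn_x101_32x4d_fpn_1x.py 8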
| 31.575875 | 78 | 0.550955 |
4a1edf425497f59254e90884a5f40d72078b973e | 555 | py | Python | setup.py | xsuite/xpart | c67d19df9d5b9b2d41fbb9e4b9562686367c175d | [
"MIT"
] | 1 | 2021-09-07T14:34:10.000Z | 2021-09-07T14:34:10.000Z | setup.py | xsuite/xpart | c67d19df9d5b9b2d41fbb9e4b9562686367c175d | [
"MIT"
] | null | null | null | setup.py | xsuite/xpart | c67d19df9d5b9b2d41fbb9e4b9562686367c175d | [
"MIT"
] | 5 | 2021-11-04T08:23:43.000Z | 2022-03-16T10:34:23.000Z | from setuptools import setup, find_packages, Extension
#######################################
# Prepare list of compiled extensions #
#######################################
extensions = []
#########
# Setup #
#########
setup(
name='xpart',
version='0.5.0',
description='Generation of Particle Ensembles',
url='https://github.com/xsuite/xpart',
packages=find_packages(),
ext_modules = extensions,
include_package_data=True,
install_requires=[
'numpy>=1.0',
'scipy',
'xobjects'
]
)
| 19.821429 | 54 | 0.517117 |
4a1ee027df94b1397a23d16e147c09ccfe571411 | 690 | py | Python | tests/TestUrlParser.py | lukebeer/GambolPutty | d642433bb2a6a54be6b4cfaa12a507994af8445a | [
"Apache-2.0"
] | null | null | null | tests/TestUrlParser.py | lukebeer/GambolPutty | d642433bb2a6a54be6b4cfaa12a507994af8445a | [
"Apache-2.0"
] | null | null | null | tests/TestUrlParser.py | lukebeer/GambolPutty | d642433bb2a6a54be6b4cfaa12a507994af8445a | [
"Apache-2.0"
] | 1 | 2019-12-03T11:36:32.000Z | 2019-12-03T11:36:32.000Z | import extendSysPath
import ModuleBaseTestCase
import unittest
import mock
import Utils
import UrlParser
class TestUrlParser(ModuleBaseTestCase.ModuleBaseTestCase):
def setUp(self):
super(TestUrlParser, self).setUp(UrlParser.UrlParser(gp=mock.Mock()))
def testHandleEvent(self):
self.test_object.configure({'source_field': 'uri'})
self.checkConfiguration()
data = Utils.getDefaultEventDict({'uri': 'http://en.wikipedia.org/wiki/Monty_Python/?gambol=putty'})
for event in self.test_object.handleEvent(data):
self.assert_('uri' in event and event['uri']['query'] == 'gambol=putty')
if __name__ == '__main__':
unittest.main() | 32.857143 | 108 | 0.711594 |
4a1ee077cbb2addc82c30811055d274961b81045 | 1,429 | py | Python | test/code/keras/test_keras_fast_net_regressor.py | IooHooI/NEURAL_NETWORKS | 1fff41658f01ebd61dc26745ad377d86f4716d30 | [
"Unlicense"
] | null | null | null | test/code/keras/test_keras_fast_net_regressor.py | IooHooI/NEURAL_NETWORKS | 1fff41658f01ebd61dc26745ad377d86f4716d30 | [
"Unlicense"
] | null | null | null | test/code/keras/test_keras_fast_net_regressor.py | IooHooI/NEURAL_NETWORKS | 1fff41658f01ebd61dc26745ad377d86f4716d30 | [
"Unlicense"
] | null | null | null | import logging
import unittest
from sklearn.model_selection import train_test_split
from sklearn.metrics import r2_score
from source.code.keras.kerasfastnetregressor import KerasFastTextRegressor
from source.code.preprocessing.dataloader import read_and_clean_feedback_data
from source.code.preprocessing.utils import create_sub_folders
import logging
import sys
logger = logging.getLogger()
logger.level = logging.DEBUG
stream_handler = logging.StreamHandler(sys.stdout)
logger.addHandler(stream_handler)
def fit_the_network(data_loader_function):
X, y = data_loader_function()
X_train, X_test, y_train, y_test = train_test_split(X, y, test_size=0.33, random_state=42)
create_sub_folders('../../../data/dataset/keras_model')
classifier = KerasFastTextRegressor(
chkpt_dir='../../../data/dataset/keras_model/model.h5'
)
classifier.fit(X_train, y_train)
return classifier, X_test, y_test
def predict_case(data_loader_function):
regressor, X_test, y_test = fit_the_network(data_loader_function)
y_pred = regressor.predict(X_test, y_test)
return y_test, y_pred
class TestKerasFastNetRegressor(unittest.TestCase):
def test_keras_fast_net_regression_predict(self):
y_test, y_pred = predict_case(read_and_clean_feedback_data)
self.assertEquals(len(y_test), len(y_pred))
logging.getLogger().info('R2-Score: {}'.format(r2_score(y_test, y_pred)))
| 30.404255 | 94 | 0.776767 |
4a1ee0ae14c74cd343432cbe8ac257fe79680b71 | 193 | py | Python | discord-voice-recorder.py | DMcP89/discord-voice-recorder | 99878e2525b1d54638733e9d8abcd85b24bc69bb | [
"MIT"
] | null | null | null | discord-voice-recorder.py | DMcP89/discord-voice-recorder | 99878e2525b1d54638733e9d8abcd85b24bc69bb | [
"MIT"
] | null | null | null | discord-voice-recorder.py | DMcP89/discord-voice-recorder | 99878e2525b1d54638733e9d8abcd85b24bc69bb | [
"MIT"
] | null | null | null | import discord
import asyncio
import MyClient
from threading import Thread
print("Discord.py Voice Recorder POC")
DISCORD_TOKEN = ""
client = MyClient.MyClient()
client.run(DISCORD_TOKEN)
| 13.785714 | 38 | 0.787565 |
4a1ee27f89f67418d65655412b2fa18cce0f6120 | 547 | py | Python | cli/src/accretion_cli/_commands/raw/add/__init__.py | mattsb42/accretion | 7cce5f4ed6d290bd9314b116be91417ded6b0f64 | [
"Apache-2.0"
] | 1 | 2019-10-19T11:18:17.000Z | 2019-10-19T11:18:17.000Z | cli/src/accretion_cli/_commands/raw/add/__init__.py | mattsb42/accretion | 7cce5f4ed6d290bd9314b116be91417ded6b0f64 | [
"Apache-2.0"
] | 13 | 2019-06-10T07:03:26.000Z | 2019-11-06T01:09:38.000Z | cli/src/accretion_cli/_commands/raw/add/__init__.py | mattsb42/accretion | 7cce5f4ed6d290bd9314b116be91417ded6b0f64 | [
"Apache-2.0"
] | null | null | null | """Commands for ``accretion raw add``."""
import click
from .artifact_builder import add_artifact_builder
from .builders import add_all_builders
from .layer_builder import add_layer_builder
from .regions import add_more_regions
__all__ = ("add_to_deployment",)
@click.group("add")
def add_to_deployment():
"""Add things to a deployment."""
add_to_deployment.add_command(add_artifact_builder)
add_to_deployment.add_command(add_layer_builder)
add_to_deployment.add_command(add_all_builders)
add_to_deployment.add_command(add_more_regions)
| 26.047619 | 51 | 0.822669 |
4a1ee38510414d2d0023ecf93657508045ae6bed | 9,420 | py | Python | docs/source/conf.py | caerusrisk/stomp.py | 090832e088693bfb980e838061cd9a23774c5ac3 | [
"Apache-2.0"
] | 408 | 2015-01-06T06:09:45.000Z | 2022-03-09T08:14:59.000Z | docs/source/conf.py | caerusrisk/stomp.py | 090832e088693bfb980e838061cd9a23774c5ac3 | [
"Apache-2.0"
] | 231 | 2015-01-13T08:23:34.000Z | 2022-03-29T02:29:34.000Z | docs/source/conf.py | caerusrisk/stomp.py | 090832e088693bfb980e838061cd9a23774c5ac3 | [
"Apache-2.0"
] | 171 | 2015-02-05T23:40:35.000Z | 2022-01-25T14:17:18.000Z | #!/usr/bin/env python3
# -*- coding: utf-8 -*-
#
# Stomp documentation build configuration file, created by
# sphinx-quickstart on Sun Sep 20 16:35:36 2015.
#
# This file is execfile()d with the current directory set to its
# containing dir.
#
# Note that not all possible configuration values are present in this
# autogenerated file.
#
# All configuration values have a default; values that are commented out
# serve to show the default.
import sys
import os
import shlex
# If extensions (or modules to document with autodoc) are in another directory,
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
#sys.path.insert(0, os.path.abspath('.'))
sys.path.insert(0, os.path.join(os.path.abspath('.'), '..', '..'))
import stomp
import stomp.connect
# -- General configuration ------------------------------------------------
# If your documentation needs a minimal Sphinx version, state it here.
#needs_sphinx = '1.0'
# Add any Sphinx extension module names here, as strings. They can be
# extensions coming with Sphinx (named 'sphinx.ext.*') or your custom
# ones.
extensions = [
'sphinx.ext.autodoc',
]
# Add any paths that contain templates here, relative to this directory.
templates_path = ['_templates']
# The suffix(es) of source filenames.
# You can specify multiple suffix as a list of string:
# source_suffix = ['.rst', '.md']
source_suffix = '.rst'
# The encoding of source files.
#source_encoding = 'utf-8-sig'
# The master toctree document.
master_doc = 'index'
# General information about the project.
project = 'Stomp'
copyright = '2015, Jason R Briggs'
author = 'Jason R Briggs'
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The short X.Y version.
version = '.'.join(map(str, stomp.__version__))
# The full version, including alpha/beta/rc tags.
release = version + ''
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
#
# This is also used if you do content translation via gettext catalogs.
# Usually you set "language" from the command line for these cases.
language = None
# There are two options for replacing |today|: either, you set today to some
# non-false value, then it is used:
#today = ''
# Else, today_fmt is used as the format for a strftime call.
#today_fmt = '%B %d, %Y'
# List of patterns, relative to source directory, that match files and
# directories to ignore when looking for source files.
exclude_patterns = ['**test**']
# The reST default role (used for this markup: `text`) to use for all
# documents.
#default_role = None
# If true, '()' will be appended to :func: etc. cross-reference text.
#add_function_parentheses = True
# If true, the current module name will be prepended to all description
# unit titles (such as .. function::).
#add_module_names = True
# If true, sectionauthor and moduleauthor directives will be shown in the
# output. They are ignored by default.
#show_authors = False
# The name of the Pygments (syntax highlighting) style to use.
pygments_style = 'sphinx'
# A list of ignored prefixes for module index sorting.
#modindex_common_prefix = []
# If true, keep warnings as "system message" paragraphs in the built documents.
#keep_warnings = False
# If true, `todo` and `todoList` produce output, else they produce nothing.
todo_include_todos = False
# -- Options for HTML output ----------------------------------------------
# The theme to use for HTML and HTML Help pages. See the documentation for
# a list of builtin themes.
html_theme = 'alabaster'
# Theme options are theme-specific and customize the look and feel of a theme
# further. For a list of options available for each theme, see the
# documentation.
#html_theme_options = {}
# Add any paths that contain custom themes here, relative to this directory.
#html_theme_path = []
# The name for this set of Sphinx documents. If None, it defaults to
# "<project> v<release> documentation".
#html_title = None
# A shorter title for the navigation bar. Default is the same as html_title.
#html_short_title = None
# The name of an image file (relative to this directory) to place at the top
# of the sidebar.
#html_logo = None
# The name of an image file (within the static path) to use as favicon of the
# docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
# pixels large.
#html_favicon = None
# Add any paths that contain custom static files (such as style sheets) here,
# relative to this directory. They are copied after the builtin static files,
# so a file named "default.css" will overwrite the builtin "default.css".
html_static_path = ['_static']
# Add any extra paths that contain custom files (such as robots.txt or
# .htaccess) here, relative to this directory. These files are copied
# directly to the root of the documentation.
#html_extra_path = []
# If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
# using the given strftime format.
#html_last_updated_fmt = '%b %d, %Y'
# If true, SmartyPants will be used to convert quotes and dashes to
# typographically correct entities.
#html_use_smartypants = True
# Custom sidebar templates, maps document names to template names.
#html_sidebars = {}
# Additional templates that should be rendered to pages, maps page names to
# template names.
#html_additional_pages = {}
# If false, no module index is generated.
#html_domain_indices = True
# If false, no index is generated.
#html_use_index = True
# If true, the index is split into individual pages for each letter.
#html_split_index = False
# If true, links to the reST sources are added to the pages.
#html_show_sourcelink = True
# If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
#html_show_sphinx = True
# If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
#html_show_copyright = True
# If true, an OpenSearch description file will be output, and all pages will
# contain a <link> tag referring to it. The value of this option must be the
# base URL from which the finished HTML is served.
#html_use_opensearch = ''
# This is the file name suffix for HTML files (e.g. ".xhtml").
#html_file_suffix = None
# Language to be used for generating the HTML full-text search index.
# Sphinx supports the following languages:
# 'da', 'de', 'en', 'es', 'fi', 'fr', 'hu', 'it', 'ja'
# 'nl', 'no', 'pt', 'ro', 'ru', 'sv', 'tr'
#html_search_language = 'en'
# A dictionary with options for the search language support, empty by default.
# Now only 'ja' uses this config value
#html_search_options = {'type': 'default'}
# The name of a javascript file (relative to the configuration directory) that
# implements a search results scorer. If empty, the default will be used.
#html_search_scorer = 'scorer.js'
# Output file base name for HTML help builder.
htmlhelp_basename = 'Stompdoc'
# -- Options for LaTeX output ---------------------------------------------
latex_elements = {
# The paper size ('letterpaper' or 'a4paper').
#'papersize': 'letterpaper',
# The font size ('10pt', '11pt' or '12pt').
#'pointsize': '10pt',
# Additional stuff for the LaTeX preamble.
#'preamble': '',
# Latex figure (float) alignment
#'figure_align': 'htbp',
}
# Grouping the document tree into LaTeX files. List of tuples
# (source start file, target name, title,
# author, documentclass [howto, manual, or own class]).
latex_documents = [
(master_doc, 'Stomp.tex', 'Stomp Documentation',
'Jason R Briggs', 'manual'),
]
# The name of an image file (relative to this directory) to place at the top of
# the title page.
#latex_logo = None
# For "manual" documents, if this is true, then toplevel headings are parts,
# not chapters.
#latex_use_parts = False
# If true, show page references after internal links.
#latex_show_pagerefs = False
# If true, show URL addresses after external links.
#latex_show_urls = False
# Documents to append as an appendix to all manuals.
#latex_appendices = []
# If false, no module index is generated.
#latex_domain_indices = True
# -- Options for manual page output ---------------------------------------
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [
(master_doc, 'stomp', 'Stomp Documentation',
[author], 1)
]
# If true, show URL addresses after external links.
#man_show_urls = False
# -- Options for Texinfo output -------------------------------------------
# Grouping the document tree into Texinfo files. List of tuples
# (source start file, target name, title, author,
# dir menu entry, description, category)
texinfo_documents = [
(master_doc, 'Stomp', 'Stomp Documentation',
author, 'Stomp', 'One line description of project.',
'Miscellaneous'),
]
# Documents to append as an appendix to all manuals.
#texinfo_appendices = []
# If false, no module index is generated.
#texinfo_domain_indices = True
# How to display URL addresses: 'footnote', 'no', or 'inline'.
#texinfo_show_urls = 'footnote'
# If true, do not generate a @detailmenu in the "Top" node's menu.
#texinfo_no_detailmenu = False
rst_prolog = """
.. # define a hard line break for HTML
.. |br| raw:: html
<br />
""" | 31.717172 | 79 | 0.713694 |
4a1ee39dc05c44cef1609fbe38dd4e548af7dedb | 438 | py | Python | ex054.py | gabrielsailvasantos/exercicios-python-CemV | 19dae7472113a7b9f603467c59dc8279dad8f7ad | [
"MIT"
] | null | null | null | ex054.py | gabrielsailvasantos/exercicios-python-CemV | 19dae7472113a7b9f603467c59dc8279dad8f7ad | [
"MIT"
] | null | null | null | ex054.py | gabrielsailvasantos/exercicios-python-CemV | 19dae7472113a7b9f603467c59dc8279dad8f7ad | [
"MIT"
] | null | null | null | from datetime import date
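# Reads the birth year of 7 people and counts how many are at least 21
# years old (stored in maior) and how many are younger (menor).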
ano = date.today()
maior = 0
menor = 0
cont = 0
anos = 0
for c in range(1, 8):
cont += 1
print('Em que ano a {}'.format(cont), end='')
anos = int(input(' pessoa nasceu? '))
res = ano.year - anos
if res >= 21:
maior += 1
else:
menor += 1
print('Ao todo tivemos {} pessoas maiores de idade'.format(maior))
print('E tambem tivemos {} pessoas menores de idade'.format(menor))
| 24.333333 | 67 | 0.600457 |
4a1ee4ba16c6289bda5abe8108195f3d13416f03 | 111 | py | Python | testing.py | antiplasti/plastic-detection-model | e5da6e84a07cb709838fb065536da883a6a4d6c0 | [
"MIT"
] | 63 | 2019-04-04T03:42:43.000Z | 2022-03-29T19:52:56.000Z | testing.py | prshnt19/Waste-Detection | 6993efb7ccf55fc84e844c26e3fa59d7c69f8fb5 | [
"MIT"
] | 3 | 2020-01-30T00:16:42.000Z | 2021-12-22T18:30:59.000Z | testing.py | prshnt19/Waste-Detection | 6993efb7ccf55fc84e844c26e3fa59d7c69f8fb5 | [
"MIT"
] | 33 | 2019-02-16T02:42:15.000Z | 2022-03-22T20:14:07.000Z | import classify
import base64
imagePath = "testing.png"
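# Classify the sample image and print the model's prediction.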
result = classify.analyse(imagePath)
print(result)
| 11.1 | 36 | 0.774775 |
4a1ee66ad51ca4176d30b95d07a9715034be343e | 424 | py | Python | home/migrations/0011_auto_20201130_1258.py | anselmobd/co2 | 69750e8a2e3f6fdebaccf07f48529b4c2920529d | [
"MIT"
] | null | null | null | home/migrations/0011_auto_20201130_1258.py | anselmobd/co2 | 69750e8a2e3f6fdebaccf07f48529b4c2920529d | [
"MIT"
] | null | null | null | home/migrations/0011_auto_20201130_1258.py | anselmobd/co2 | 69750e8a2e3f6fdebaccf07f48529b4c2920529d | [
"MIT"
] | null | null | null | # Generated by Django 3.1.3 on 2020-11-30 12:58
from django.db import migrations
import wagtail.core.fields
class Migration(migrations.Migration):
dependencies = [
('home', '0010_linkspage'),
]
operations = [
migrations.AlterField(
model_name='principalpage',
name='texto',
field=wagtail.core.fields.RichTextField(blank=True, null=True),
),
]
| 21.2 | 75 | 0.617925 |
4a1ee6cfd2e5f86095f665403651f6903879dcbc | 79,079 | py | Python | zerver/tests/test_decorators.py | DD2480-group7-2020/zulip | 9a1e18bcf383c38c35da168563a7345768c6d784 | [
"Apache-2.0"
] | 1 | 2020-02-28T11:26:19.000Z | 2020-02-28T11:26:19.000Z | zerver/tests/test_decorators.py | DD2480-group7-2020/zulip | 9a1e18bcf383c38c35da168563a7345768c6d784 | [
"Apache-2.0"
] | null | null | null | zerver/tests/test_decorators.py | DD2480-group7-2020/zulip | 9a1e18bcf383c38c35da168563a7345768c6d784 | [
"Apache-2.0"
] | null | null | null | # -*- coding: utf-8 -*-
import base64
import mock
import re
import os
from collections import defaultdict
from typing import Any, Dict, Iterable, List, Optional, Tuple
from django.test import TestCase
from django.http import HttpResponse, HttpRequest
from django.conf import settings
from zerver.forms import OurAuthenticationForm
from zerver.lib.actions import do_deactivate_realm, do_deactivate_user, \
do_reactivate_user, do_reactivate_realm, do_set_realm_property
from zerver.lib.exceptions import JsonableError, InvalidAPIKeyError, InvalidAPIKeyFormatError
from zerver.lib.initial_password import initial_password
from zerver.lib.test_helpers import (
HostRequestMock,
)
from zerver.lib.test_classes import (
ZulipTestCase,
)
from zerver.lib.response import json_response, json_success
from zerver.lib.users import get_api_key
from zerver.lib.user_agent import parse_user_agent
from zerver.lib.utils import generate_api_key, has_api_key_format
from zerver.lib.request import \
REQ, has_request_variables, RequestVariableMissingError, \
RequestVariableConversionError, RequestConfusingParmsError
from zerver.lib.webhooks.common import UnexpectedWebhookEventType
from zerver.decorator import (
api_key_only_webhook_view,
authenticated_json_view,
authenticated_rest_api_view,
authenticated_uploads_api_view,
authenticate_notify, cachify,
get_client_name, internal_notify_view, is_local_addr,
rate_limit, validate_api_key,
return_success_on_head_request, to_not_negative_int_or_none,
zulip_login_required
)
from zerver.lib.cache import ignore_unhashable_lru_cache, dict_to_items_tuple, items_tuple_to_dict
from zerver.lib.validator import (
check_string, check_dict, check_dict_only, check_bool, check_float, check_int, check_list, Validator,
check_variable_type, equals, check_none_or, check_url, check_short_string,
check_string_fixed_length, check_capped_string, check_color, to_non_negative_int,
check_string_or_int_list, check_string_or_int, check_int_in
)
from zerver.models import \
get_realm, get_user, UserProfile, Realm
import ujson
class DecoratorTestCase(TestCase):
def test_get_client_name(self) -> None:
class Request:
def __init__(self, GET: Dict[str, str], POST: Dict[str, str], META: Dict[str, str]) -> None:
self.GET = GET
self.POST = POST
self.META = META
req = Request(
GET=dict(),
POST=dict(),
META=dict(),
)
self.assertEqual(get_client_name(req, is_browser_view=True), 'website')
self.assertEqual(get_client_name(req, is_browser_view=False), 'Unspecified')
req = Request(
GET=dict(),
POST=dict(),
META=dict(HTTP_USER_AGENT='Mozilla/bla bla bla'),
)
self.assertEqual(get_client_name(req, is_browser_view=True), 'website')
self.assertEqual(get_client_name(req, is_browser_view=False), 'Mozilla')
req = Request(
GET=dict(),
POST=dict(),
META=dict(HTTP_USER_AGENT='ZulipDesktop/bla bla bla'),
)
self.assertEqual(get_client_name(req, is_browser_view=True), 'ZulipDesktop')
self.assertEqual(get_client_name(req, is_browser_view=False), 'ZulipDesktop')
req = Request(
GET=dict(),
POST=dict(),
META=dict(HTTP_USER_AGENT='ZulipMobile/bla bla bla'),
)
self.assertEqual(get_client_name(req, is_browser_view=True), 'ZulipMobile')
self.assertEqual(get_client_name(req, is_browser_view=False), 'ZulipMobile')
req = Request(
GET=dict(client='fancy phone'),
POST=dict(),
META=dict(),
)
self.assertEqual(get_client_name(req, is_browser_view=True), 'fancy phone')
self.assertEqual(get_client_name(req, is_browser_view=False), 'fancy phone')
def test_REQ_aliases(self) -> None:
@has_request_variables
def double(request: HttpRequest,
x: int=REQ(whence='number', aliases=['x', 'n'], converter=int)) -> int:
return x + x
class Request:
GET = {} # type: Dict[str, str]
POST = {} # type: Dict[str, str]
request = Request()
request.POST = dict(bogus='5555')
with self.assertRaises(RequestVariableMissingError):
double(request)
request.POST = dict(number='3')
self.assertEqual(double(request), 6)
request.POST = dict(x='4')
self.assertEqual(double(request), 8)
request.POST = dict(n='5')
self.assertEqual(double(request), 10)
request.POST = dict(number='6', x='7')
with self.assertRaises(RequestConfusingParmsError) as cm:
double(request)
self.assertEqual(str(cm.exception), "Can't decide between 'number' and 'x' arguments")
def test_REQ_converter(self) -> None:
def my_converter(data: str) -> List[int]:
lst = ujson.loads(data)
if not isinstance(lst, list):
raise ValueError('not a list')
if 13 in lst:
raise JsonableError('13 is an unlucky number!')
return [int(elem) for elem in lst]
@has_request_variables
def get_total(request: HttpRequest, numbers: Iterable[int]=REQ(converter=my_converter)) -> int:
return sum(numbers)
class Request:
GET = {} # type: Dict[str, str]
POST = {} # type: Dict[str, str]
request = Request()
with self.assertRaises(RequestVariableMissingError):
get_total(request)
request.POST['numbers'] = 'bad_value'
with self.assertRaises(RequestVariableConversionError) as cm:
get_total(request)
self.assertEqual(str(cm.exception), "Bad value for 'numbers': bad_value")
request.POST['numbers'] = ujson.dumps('{fun: unfun}')
with self.assertRaises(JsonableError) as cm:
get_total(request)
self.assertEqual(str(cm.exception), 'Bad value for \'numbers\': "{fun: unfun}"')
request.POST['numbers'] = ujson.dumps([2, 3, 5, 8, 13, 21])
with self.assertRaises(JsonableError) as cm:
get_total(request)
self.assertEqual(str(cm.exception), "13 is an unlucky number!")
request.POST['numbers'] = ujson.dumps([1, 2, 3, 4, 5, 6])
result = get_total(request)
self.assertEqual(result, 21)
def test_REQ_converter_and_validator_invalid(self) -> None:
with self.assertRaisesRegex(AssertionError, "converter and validator are mutually exclusive"):
@has_request_variables
def get_total(request: HttpRequest,
numbers: Iterable[int]=REQ(validator=check_list(check_int), # type: ignore # The condition being tested is in fact an error.
converter=lambda x: [])) -> int:
return sum(numbers) # nocoverage -- isn't intended to be run
def test_REQ_validator(self) -> None:
@has_request_variables
def get_total(request: HttpRequest,
numbers: Iterable[int]=REQ(validator=check_list(check_int))) -> int:
return sum(numbers)
class Request:
GET = {} # type: Dict[str, str]
POST = {} # type: Dict[str, str]
request = Request()
with self.assertRaises(RequestVariableMissingError):
get_total(request)
request.POST['numbers'] = 'bad_value'
with self.assertRaises(JsonableError) as cm:
get_total(request)
self.assertEqual(str(cm.exception), 'Argument "numbers" is not valid JSON.')
request.POST['numbers'] = ujson.dumps([1, 2, "what?", 4, 5, 6])
with self.assertRaises(JsonableError) as cm:
get_total(request)
self.assertEqual(str(cm.exception), 'numbers[2] is not an integer')
request.POST['numbers'] = ujson.dumps([1, 2, 3, 4, 5, 6])
result = get_total(request)
self.assertEqual(result, 21)
def test_REQ_str_validator(self) -> None:
@has_request_variables
def get_middle_characters(request: HttpRequest,
value: str=REQ(str_validator=check_string_fixed_length(5))) -> str:
return value[1:-1]
class Request:
GET = {} # type: Dict[str, str]
POST = {} # type: Dict[str, str]
request = Request()
with self.assertRaises(RequestVariableMissingError):
get_middle_characters(request)
request.POST['value'] = 'long_value'
with self.assertRaises(JsonableError) as cm:
get_middle_characters(request)
self.assertEqual(str(cm.exception), 'value has incorrect length 10; should be 5')
request.POST['value'] = 'valid'
result = get_middle_characters(request)
self.assertEqual(result, 'ali')
def test_REQ_argument_type(self) -> None:
@has_request_variables
def get_payload(request: HttpRequest,
payload: Dict[str, Any]=REQ(argument_type='body')) -> Dict[str, Any]:
return payload
request = HostRequestMock()
request.body = 'notjson'
with self.assertRaises(JsonableError) as cm:
get_payload(request)
self.assertEqual(str(cm.exception), 'Malformed JSON')
request.body = '{"a": "b"}'
self.assertEqual(get_payload(request), {'a': 'b'})
# Test we properly handle an invalid argument_type.
with self.assertRaises(Exception) as cm:
@has_request_variables
def test(request: HttpRequest,
payload: Any=REQ(argument_type="invalid")) -> None: # type: ignore # The condition being tested is in fact an error.
# Any is ok; exception should occur in decorator:
pass # nocoverage # this function isn't meant to be called
test(request)
def test_api_key_only_webhook_view(self) -> None:
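        # Trivial webhook handlers used to exercise the decorator's authentication,
        # logging, and rate-limiting behaviour.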
@api_key_only_webhook_view('ClientName')
def my_webhook(request: HttpRequest, user_profile: UserProfile) -> str:
return user_profile.email
@api_key_only_webhook_view('ClientName')
def my_webhook_raises_exception(request: HttpRequest, user_profile: UserProfile) -> None:
raise Exception("raised by webhook function")
@api_key_only_webhook_view('ClientName')
def my_webhook_raises_exception_unexpected_event(
request: HttpRequest, user_profile: UserProfile) -> None:
raise UnexpectedWebhookEventType("helloworld", "test_event")
webhook_bot_email = '[email protected]'
webhook_bot_realm = get_realm('zulip')
webhook_bot = get_user(webhook_bot_email, webhook_bot_realm)
webhook_bot_api_key = get_api_key(webhook_bot)
webhook_client_name = "ZulipClientNameWebhook"
request = HostRequestMock()
request.POST['api_key'] = 'X'*32
with self.assertRaisesRegex(JsonableError, "Invalid API key"):
my_webhook(request) # type: ignore # mypy doesn't seem to apply the decorator
# Start a valid request here
request.POST['api_key'] = webhook_bot_api_key
with mock.patch('logging.warning') as mock_warning:
with self.assertRaisesRegex(JsonableError,
"Account is not associated with this subdomain"):
api_result = my_webhook(request) # type: ignore # mypy doesn't seem to apply the decorator
mock_warning.assert_called_with(
"User {} ({}) attempted to access API on wrong "
"subdomain ({})".format(webhook_bot_email, 'zulip', ''))
with mock.patch('logging.warning') as mock_warning:
with self.assertRaisesRegex(JsonableError,
"Account is not associated with this subdomain"):
request.host = "acme." + settings.EXTERNAL_HOST
api_result = my_webhook(request) # type: ignore # mypy doesn't seem to apply the decorator
mock_warning.assert_called_with(
"User {} ({}) attempted to access API on wrong "
"subdomain ({})".format(webhook_bot_email, 'zulip', 'acme'))
request.host = "zulip.testserver"
# Test when content_type is application/json and request.body
# is valid JSON; exception raised in the webhook function
# should be re-raised
with mock.patch('zerver.decorator.webhook_logger.exception') as mock_exception:
with self.assertRaisesRegex(Exception, "raised by webhook function"):
request.body = "{}"
request.content_type = 'application/json'
my_webhook_raises_exception(request) # type: ignore # mypy doesn't seem to apply the decorator
# Test when content_type is not application/json; exception raised
# in the webhook function should be re-raised
with mock.patch('zerver.decorator.webhook_logger.exception') as mock_exception:
with self.assertRaisesRegex(Exception, "raised by webhook function"):
request.body = "notjson"
request.content_type = 'text/plain'
my_webhook_raises_exception(request) # type: ignore # mypy doesn't seem to apply the decorator
# Test when content_type is application/json but request.body
# is not valid JSON; invalid JSON should be logged and the
# exception raised in the webhook function should be re-raised
with mock.patch('zerver.decorator.webhook_logger.exception') as mock_exception:
with self.assertRaisesRegex(Exception, "raised by webhook function"):
request.body = "invalidjson"
request.content_type = 'application/json'
request.META['HTTP_X_CUSTOM_HEADER'] = 'custom_value'
my_webhook_raises_exception(request) # type: ignore # mypy doesn't seem to apply the decorator
message = """
user: {email} ({realm})
client: {client_name}
URL: {path_info}
content_type: {content_type}
custom_http_headers:
{custom_headers}
body:
{body}
"""
message = message.strip(' ')
mock_exception.assert_called_with(message.format(
email=webhook_bot_email,
realm=webhook_bot_realm.string_id,
client_name=webhook_client_name,
path_info=request.META.get('PATH_INFO'),
content_type=request.content_type,
custom_headers="HTTP_X_CUSTOM_HEADER: custom_value\n",
body=request.body,
))
# Test when an unexpected webhook event occurs
with mock.patch('zerver.decorator.webhook_unexpected_events_logger.exception') as mock_exception:
exception_msg = "The 'test_event' event isn't currently supported by the helloworld webhook"
with self.assertRaisesRegex(UnexpectedWebhookEventType, exception_msg):
request.body = "invalidjson"
request.content_type = 'application/json'
request.META['HTTP_X_CUSTOM_HEADER'] = 'custom_value'
my_webhook_raises_exception_unexpected_event(request) # type: ignore # mypy doesn't seem to apply the decorator
message = """
user: {email} ({realm})
client: {client_name}
URL: {path_info}
content_type: {content_type}
custom_http_headers:
{custom_headers}
body:
{body}
"""
message = message.strip(' ')
mock_exception.assert_called_with(message.format(
email=webhook_bot_email,
realm=webhook_bot_realm.string_id,
client_name=webhook_client_name,
path_info=request.META.get('PATH_INFO'),
content_type=request.content_type,
custom_headers="HTTP_X_CUSTOM_HEADER: custom_value\n",
body=request.body,
))
with self.settings(RATE_LIMITING=True):
with mock.patch('zerver.decorator.rate_limit_user') as rate_limit_mock:
api_result = my_webhook(request) # type: ignore # mypy doesn't seem to apply the decorator
# Verify rate limiting was attempted.
self.assertTrue(rate_limit_mock.called)
# Verify decorator set the magic _email field used by some of our back end logging.
self.assertEqual(request._email, webhook_bot_email)
# Verify the main purpose of the decorator, which is that it passed in the
# user_profile to my_webhook, allowing it return the correct
# email for the bot (despite the API caller only knowing the API key).
self.assertEqual(api_result, webhook_bot_email)
# Now deactivate the user
webhook_bot.is_active = False
webhook_bot.save()
with self.assertRaisesRegex(JsonableError, "Account is deactivated"):
my_webhook(request) # type: ignore # mypy doesn't seem to apply the decorator
        # Reactivate the user, but deactivate their realm.
webhook_bot.is_active = True
webhook_bot.save()
webhook_bot.realm.deactivated = True
webhook_bot.realm.save()
with self.assertRaisesRegex(JsonableError, "This organization has been deactivated"):
my_webhook(request) # type: ignore # mypy doesn't seem to apply the decorator
class SkipRateLimitingTest(ZulipTestCase):
def test_authenticated_rest_api_view(self) -> None:
@authenticated_rest_api_view(skip_rate_limiting=False)
def my_rate_limited_view(request: HttpRequest, user_profile: UserProfile) -> str:
return json_success() # nocoverage # mock prevents this from being called
@authenticated_rest_api_view(skip_rate_limiting=True)
def my_unlimited_view(request: HttpRequest, user_profile: UserProfile) -> str:
return json_success()
request = HostRequestMock(host="zulip.testserver")
request.META['HTTP_AUTHORIZATION'] = self.encode_credentials(self.example_email("hamlet"))
request.method = 'POST'
with mock.patch('zerver.decorator.rate_limit') as rate_limit_mock:
result = my_unlimited_view(request) # type: ignore # mypy doesn't seem to apply the decorator
self.assert_json_success(result)
self.assertFalse(rate_limit_mock.called)
with mock.patch('zerver.decorator.rate_limit') as rate_limit_mock:
result = my_rate_limited_view(request) # type: ignore # mypy doesn't seem to apply the decorator
# Don't assert json_success, since it'll be the rate_limit mock object
self.assertTrue(rate_limit_mock.called)
def test_authenticated_uploads_api_view(self) -> None:
@authenticated_uploads_api_view(skip_rate_limiting=False)
def my_rate_limited_view(request: HttpRequest, user_profile: UserProfile) -> str:
return json_success() # nocoverage # mock prevents this from being called
@authenticated_uploads_api_view(skip_rate_limiting=True)
def my_unlimited_view(request: HttpRequest, user_profile: UserProfile) -> str:
return json_success()
request = HostRequestMock(host="zulip.testserver")
request.method = 'POST'
request.POST['api_key'] = get_api_key(self.example_user("hamlet"))
with mock.patch('zerver.decorator.rate_limit') as rate_limit_mock:
result = my_unlimited_view(request) # type: ignore # mypy doesn't seem to apply the decorator
self.assert_json_success(result)
self.assertFalse(rate_limit_mock.called)
with mock.patch('zerver.decorator.rate_limit') as rate_limit_mock:
result = my_rate_limited_view(request) # type: ignore # mypy doesn't seem to apply the decorator
# Don't assert json_success, since it'll be the rate_limit mock object
self.assertTrue(rate_limit_mock.called)
def test_authenticated_json_view(self) -> None:
def my_view(request: HttpRequest, user_profile: UserProfile) -> str:
return json_success()
my_rate_limited_view = authenticated_json_view(my_view, skip_rate_limiting=False)
my_unlimited_view = authenticated_json_view(my_view, skip_rate_limiting=True)
request = HostRequestMock(host="zulip.testserver")
request.method = 'POST'
request.is_authenticated = True # type: ignore # HostRequestMock doesn't have is_authenticated
request.user = self.example_user("hamlet")
with mock.patch('zerver.decorator.rate_limit') as rate_limit_mock:
result = my_unlimited_view(request) # type: ignore # mypy doesn't seem to apply the decorator
self.assert_json_success(result)
self.assertFalse(rate_limit_mock.called)
with mock.patch('zerver.decorator.rate_limit') as rate_limit_mock:
result = my_rate_limited_view(request) # type: ignore # mypy doesn't seem to apply the decorator
# Don't assert json_success, since it'll be the rate_limit mock object
self.assertTrue(rate_limit_mock.called)
class DecoratorLoggingTestCase(ZulipTestCase):
def test_authenticated_rest_api_view_logging(self) -> None:
@authenticated_rest_api_view(webhook_client_name="ClientName")
def my_webhook_raises_exception(request: HttpRequest, user_profile: UserProfile) -> None:
raise Exception("raised by webhook function")
webhook_bot_email = '[email protected]'
webhook_bot_realm = get_realm('zulip')
request = HostRequestMock()
request.META['HTTP_AUTHORIZATION'] = self.encode_credentials(webhook_bot_email)
request.method = 'POST'
request.host = "zulip.testserver"
request.body = '{}'
request.POST['payload'] = '{}'
request.content_type = 'text/plain'
with mock.patch('zerver.decorator.webhook_logger.exception') as mock_exception:
with self.assertRaisesRegex(Exception, "raised by webhook function"):
my_webhook_raises_exception(request) # type: ignore # mypy doesn't seem to apply the decorator
message = """
user: {email} ({realm})
client: {client_name}
URL: {path_info}
content_type: {content_type}
custom_http_headers:
{custom_headers}
body:
{body}
"""
message = message.strip(' ')
mock_exception.assert_called_with(message.format(
email=webhook_bot_email,
realm=webhook_bot_realm.string_id,
client_name='ZulipClientNameWebhook',
path_info=request.META.get('PATH_INFO'),
content_type=request.content_type,
custom_headers=None,
body=request.body,
))
def test_authenticated_rest_api_view_logging_unexpected_event(self) -> None:
@authenticated_rest_api_view(webhook_client_name="ClientName")
def my_webhook_raises_exception(request: HttpRequest, user_profile: UserProfile) -> None:
raise UnexpectedWebhookEventType("helloworld", "test_event")
webhook_bot_email = '[email protected]'
webhook_bot_realm = get_realm('zulip')
request = HostRequestMock()
request.META['HTTP_AUTHORIZATION'] = self.encode_credentials(webhook_bot_email)
request.method = 'POST'
request.host = "zulip.testserver"
request.body = '{}'
request.POST['payload'] = '{}'
request.content_type = 'text/plain'
with mock.patch('zerver.decorator.webhook_unexpected_events_logger.exception') as mock_exception:
exception_msg = "The 'test_event' event isn't currently supported by the helloworld webhook"
with self.assertRaisesRegex(UnexpectedWebhookEventType, exception_msg):
my_webhook_raises_exception(request) # type: ignore # mypy doesn't seem to apply the decorator
message = """
user: {email} ({realm})
client: {client_name}
URL: {path_info}
content_type: {content_type}
custom_http_headers:
{custom_headers}
body:
{body}
"""
message = message.strip(' ')
mock_exception.assert_called_with(message.format(
email=webhook_bot_email,
realm=webhook_bot_realm.string_id,
client_name='ZulipClientNameWebhook',
path_info=request.META.get('PATH_INFO'),
content_type=request.content_type,
custom_headers=None,
body=request.body,
))
def test_authenticated_rest_api_view_with_non_webhook_view(self) -> None:
@authenticated_rest_api_view()
def non_webhook_view_raises_exception(request: HttpRequest, user_profile: UserProfile=None) -> None:
raise Exception("raised by a non-webhook view")
request = HostRequestMock()
request.META['HTTP_AUTHORIZATION'] = self.encode_credentials("[email protected]")
request.method = 'POST'
request.host = "zulip.testserver"
request.body = '{}'
request.content_type = 'application/json'
with mock.patch('zerver.decorator.webhook_logger.exception') as mock_exception:
with self.assertRaisesRegex(Exception, "raised by a non-webhook view"):
non_webhook_view_raises_exception(request)
self.assertFalse(mock_exception.called)
def test_authenticated_rest_api_view_errors(self) -> None:
user_profile = self.example_user("hamlet")
api_key = get_api_key(user_profile)
credentials = "%s:%s" % (user_profile.email, api_key)
api_auth = 'Digest ' + base64.b64encode(credentials.encode('utf-8')).decode('utf-8')
result = self.client_post('/api/v1/external/zendesk', {},
HTTP_AUTHORIZATION=api_auth)
self.assert_json_error(result, "This endpoint requires HTTP basic authentication.")
api_auth = 'Basic ' + base64.b64encode("foo".encode('utf-8')).decode('utf-8')
result = self.client_post('/api/v1/external/zendesk', {},
HTTP_AUTHORIZATION=api_auth)
self.assert_json_error(result, "Invalid authorization header for basic auth",
status_code=401)
result = self.client_post('/api/v1/external/zendesk', {})
self.assert_json_error(result, "Missing authorization header for basic auth",
status_code=401)
class RateLimitTestCase(TestCase):
def errors_disallowed(self) -> Any:
# Due to what is probably a hack in rate_limit(),
# some tests will give a false positive (or succeed
# for the wrong reason), unless we complain
        # about logging errors. There might be a more elegant way
        # to make logging errors fail than what I'm doing here.
class TestLoggingErrorException(Exception):
pass
return mock.patch('logging.error', side_effect=TestLoggingErrorException)
def test_internal_local_clients_skip_rate_limiting(self) -> None:
class Client:
name = 'internal'
class Request:
client = Client()
META = {'REMOTE_ADDR': '127.0.0.1'}
req = Request()
def f(req: Any) -> str:
return 'some value'
f = rate_limit()(f)
with self.settings(RATE_LIMITING=True):
with mock.patch('zerver.decorator.rate_limit_user') as rate_limit_mock:
with self.errors_disallowed():
self.assertEqual(f(req), 'some value')
self.assertFalse(rate_limit_mock.called)
def test_debug_clients_skip_rate_limiting(self) -> None:
class Client:
name = 'internal'
class Request:
client = Client()
META = {'REMOTE_ADDR': '3.3.3.3'}
req = Request()
def f(req: Any) -> str:
return 'some value'
f = rate_limit()(f)
with self.settings(RATE_LIMITING=True):
with mock.patch('zerver.decorator.rate_limit_user') as rate_limit_mock:
with self.errors_disallowed():
with self.settings(DEBUG_RATE_LIMITING=True):
self.assertEqual(f(req), 'some value')
self.assertFalse(rate_limit_mock.called)
def test_rate_limit_setting_of_false_bypasses_rate_limiting(self) -> None:
class Client:
name = 'external'
class Request:
client = Client()
META = {'REMOTE_ADDR': '3.3.3.3'}
user = 'stub' # any non-None value here exercises the correct code path
req = Request()
def f(req: Any) -> str:
return 'some value'
f = rate_limit()(f)
with self.settings(RATE_LIMITING=False):
with mock.patch('zerver.decorator.rate_limit_user') as rate_limit_mock:
with self.errors_disallowed():
self.assertEqual(f(req), 'some value')
self.assertFalse(rate_limit_mock.called)
def test_rate_limiting_happens_in_normal_case(self) -> None:
class Client:
name = 'external'
class Request:
client = Client()
META = {'REMOTE_ADDR': '3.3.3.3'}
user = 'stub' # any non-None value here exercises the correct code path
req = Request()
def f(req: Any) -> str:
return 'some value'
f = rate_limit()(f)
with self.settings(RATE_LIMITING=True):
with mock.patch('zerver.decorator.rate_limit_user') as rate_limit_mock:
with self.errors_disallowed():
self.assertEqual(f(req), 'some value')
self.assertTrue(rate_limit_mock.called)
class ValidatorTestCase(TestCase):
def test_check_string(self) -> None:
x = "hello" # type: Any
self.assertEqual(check_string('x', x), None)
x = 4
self.assertEqual(check_string('x', x), 'x is not a string')
def test_check_string_fixed_length(self) -> None:
x = "hello" # type: Any
self.assertEqual(check_string_fixed_length(5)('x', x), None)
x = 4
self.assertEqual(check_string_fixed_length(5)('x', x), 'x is not a string')
x = "helloz"
self.assertEqual(check_string_fixed_length(5)('x', x), 'x has incorrect length 6; should be 5')
x = "hi"
self.assertEqual(check_string_fixed_length(5)('x', x), 'x has incorrect length 2; should be 5')
def test_check_capped_string(self) -> None:
x = "hello" # type: Any
self.assertEqual(check_capped_string(5)('x', x), None)
x = 4
self.assertEqual(check_capped_string(5)('x', x), 'x is not a string')
x = "helloz"
self.assertEqual(check_capped_string(5)('x', x), 'x is too long (limit: 5 characters)')
x = "hi"
self.assertEqual(check_capped_string(5)('x', x), None)
def test_check_int_in(self) -> None:
self.assertEqual(check_int_in([1])("Test", 1), None)
self.assertEqual(check_int_in([1])("Test", 2), "Invalid Test")
self.assertEqual(check_int_in([1])("Test", "t"), "Test is not an integer")
def test_check_short_string(self) -> None:
x = "hello" # type: Any
self.assertEqual(check_short_string('x', x), None)
x = 'x' * 201
self.assertEqual(check_short_string('x', x), "x is too long (limit: 50 characters)")
x = 4
self.assertEqual(check_short_string('x', x), 'x is not a string')
def test_check_bool(self) -> None:
x = True # type: Any
self.assertEqual(check_bool('x', x), None)
x = 4
self.assertEqual(check_bool('x', x), 'x is not a boolean')
def test_check_int(self) -> None:
x = 5 # type: Any
self.assertEqual(check_int('x', x), None)
x = [{}]
self.assertEqual(check_int('x', x), 'x is not an integer')
def test_to_non_negative_int(self) -> None:
self.assertEqual(to_non_negative_int('5'), 5)
        with self.assertRaisesRegex(ValueError, 'argument is negative'):
            to_non_negative_int('-1')
        with self.assertRaisesRegex(ValueError, re.escape('5 is too large (max 4)')):
            to_non_negative_int('5', max_int_size=4)
        with self.assertRaisesRegex(ValueError, re.escape('%s is too large (max %s)' % (2**32, 2**32-1))):
            to_non_negative_int(str(2**32))
def test_check_to_not_negative_int_or_none(self) -> None:
self.assertEqual(to_not_negative_int_or_none('5'), 5)
self.assertEqual(to_not_negative_int_or_none(None), None)
with self.assertRaises(ValueError):
to_not_negative_int_or_none('-5')
def test_check_float(self) -> None:
x = 5.5 # type: Any
self.assertEqual(check_float('x', x), None)
x = 5
self.assertEqual(check_float('x', x), 'x is not a float')
x = [{}]
self.assertEqual(check_float('x', x), 'x is not a float')
def test_check_color(self) -> None:
x = ['#000099', '#80ffaa', '#80FFAA', '#abcd12', '#ffff00', '#ff0', '#f00'] # valid
y = ['000099', '#80f_aa', '#80fraa', '#abcd1234', 'blue'] # invalid
z = 5 # invalid
for hex_color in x:
error = check_color('color', hex_color)
self.assertEqual(error, None)
for hex_color in y:
error = check_color('color', hex_color)
self.assertEqual(error, 'color is not a valid hex color code')
error = check_color('color', z)
self.assertEqual(error, 'color is not a string')
def test_check_list(self) -> None:
x = 999 # type: Any
error = check_list(check_string)('x', x)
self.assertEqual(error, 'x is not a list')
x = ["hello", 5]
error = check_list(check_string)('x', x)
self.assertEqual(error, 'x[1] is not a string')
x = [["yo"], ["hello", "goodbye", 5]]
error = check_list(check_list(check_string))('x', x)
self.assertEqual(error, 'x[1][2] is not a string')
x = ["hello", "goodbye", "hello again"]
error = check_list(check_string, length=2)('x', x)
self.assertEqual(error, 'x should have exactly 2 items')
def test_check_dict(self) -> None:
keys = [
('names', check_list(check_string)),
('city', check_string),
] # type: List[Tuple[str, Validator]]
x = {
'names': ['alice', 'bob'],
'city': 'Boston',
} # type: Any
error = check_dict(keys)('x', x)
self.assertEqual(error, None)
x = 999
error = check_dict(keys)('x', x)
self.assertEqual(error, 'x is not a dict')
x = {}
error = check_dict(keys)('x', x)
self.assertEqual(error, 'names key is missing from x')
x = {
'names': ['alice', 'bob', {}]
}
error = check_dict(keys)('x', x)
self.assertEqual(error, 'x["names"][2] is not a string')
x = {
'names': ['alice', 'bob'],
'city': 5
}
error = check_dict(keys)('x', x)
self.assertEqual(error, 'x["city"] is not a string')
x = {
'names': ['alice', 'bob'],
'city': 'Boston'
}
error = check_dict(value_validator=check_string)('x', x)
self.assertEqual(error, 'x contains a value that is not a string')
x = {
'city': 'Boston'
}
error = check_dict(value_validator=check_string)('x', x)
self.assertEqual(error, None)
# test dict_only
x = {
'names': ['alice', 'bob'],
'city': 'Boston',
}
error = check_dict_only(keys)('x', x)
self.assertEqual(error, None)
x = {
'names': ['alice', 'bob'],
'city': 'Boston',
'state': 'Massachusetts',
}
error = check_dict_only(keys)('x', x)
self.assertEqual(error, 'Unexpected arguments: state')
# Test optional keys
optional_keys = [
('food', check_list(check_string)),
('year', check_int)
]
x = {
'names': ['alice', 'bob'],
'city': 'Boston',
'food': ['Lobster Spaghetti']
}
error = check_dict(keys)('x', x)
self.assertEqual(error, None) # since _allow_only_listed_keys is False
error = check_dict_only(keys)('x', x)
self.assertEqual(error, 'Unexpected arguments: food')
error = check_dict_only(keys, optional_keys)('x', x)
self.assertEqual(error, None)
x = {
'names': ['alice', 'bob'],
'city': 'Boston',
'food': 'Lobster Spaghetti'
}
error = check_dict_only(keys, optional_keys)('x', x)
self.assertEqual(error, 'x["food"] is not a list')
def test_encapsulation(self) -> None:
# There might be situations where we want deep
# validation, but the error message should be customized.
# This is an example.
def check_person(val: Any) -> Optional[str]:
error = check_dict([
('name', check_string),
('age', check_int),
])('_', val)
if error:
return 'This is not a valid person'
return None
person = {'name': 'King Lear', 'age': 42}
self.assertEqual(check_person(person), None)
nonperson = 'misconfigured data'
self.assertEqual(check_person(nonperson), 'This is not a valid person')
def test_check_variable_type(self) -> None:
x = 5 # type: Any
self.assertEqual(check_variable_type([check_string, check_int])('x', x), None)
x = 'x'
self.assertEqual(check_variable_type([check_string, check_int])('x', x), None)
x = [{}]
self.assertEqual(check_variable_type([check_string, check_int])('x', x), 'x is not an allowed_type')
def test_equals(self) -> None:
x = 5 # type: Any
self.assertEqual(equals(5)('x', x), None)
self.assertEqual(equals(6)('x', x), 'x != 6 (5 is wrong)')
def test_check_none_or(self) -> None:
x = 5 # type: Any
self.assertEqual(check_none_or(check_int)('x', x), None)
x = None
self.assertEqual(check_none_or(check_int)('x', x), None)
x = 'x'
self.assertEqual(check_none_or(check_int)('x', x), 'x is not an integer')
def test_check_url(self) -> None:
url = "http://127.0.0.1:5002/" # type: Any
self.assertEqual(check_url('url', url), None)
url = "http://zulip-bots.example.com/"
self.assertEqual(check_url('url', url), None)
url = "http://127.0.0"
self.assertEqual(check_url('url', url), 'url is not a URL')
url = 99.3
self.assertEqual(check_url('url', url), 'url is not a string')
def test_check_string_or_int_list(self) -> None:
x = "string" # type: Any
self.assertEqual(check_string_or_int_list('x', x), None)
x = [1, 2, 4]
self.assertEqual(check_string_or_int_list('x', x), None)
x = None
self.assertEqual(check_string_or_int_list('x', x), 'x is not a string or an integer list')
x = [1, 2, '3']
self.assertEqual(check_string_or_int_list('x', x), 'x[2] is not an integer')
def test_check_string_or_int(self) -> None:
x = "string" # type: Any
self.assertEqual(check_string_or_int('x', x), None)
x = 1
self.assertEqual(check_string_or_int('x', x), None)
x = None
self.assertEqual(check_string_or_int('x', x), 'x is not a string or integer')
class DeactivatedRealmTest(ZulipTestCase):
def test_send_deactivated_realm(self) -> None:
"""
rest_dispatch rejects requests in a deactivated realm, both /json and api
"""
realm = get_realm("zulip")
do_deactivate_realm(get_realm("zulip"))
result = self.client_post("/json/messages", {"type": "private",
"content": "Test message",
"client": "test suite",
"to": self.example_email("othello")})
self.assert_json_error_contains(result, "Not logged in", status_code=401)
# Even if a logged-in session was leaked, it still wouldn't work
realm.deactivated = False
realm.save()
self.login(self.example_email("hamlet"))
realm.deactivated = True
realm.save()
result = self.client_post("/json/messages", {"type": "private",
"content": "Test message",
"client": "test suite",
"to": self.example_email("othello")})
self.assert_json_error_contains(result, "has been deactivated", status_code=400)
result = self.api_post(self.example_email("hamlet"),
"/api/v1/messages", {"type": "private",
"content": "Test message",
"client": "test suite",
"to": self.example_email("othello")})
self.assert_json_error_contains(result, "has been deactivated", status_code=401)
def test_fetch_api_key_deactivated_realm(self) -> None:
"""
authenticated_json_view views fail in a deactivated realm
"""
realm = get_realm("zulip")
user_profile = self.example_user('hamlet')
email = user_profile.email
test_password = "abcd1234"
user_profile.set_password(test_password)
self.login(email)
realm.deactivated = True
realm.save()
result = self.client_post("/json/fetch_api_key", {"password": test_password})
self.assert_json_error_contains(result, "has been deactivated", status_code=400)
def test_webhook_deactivated_realm(self) -> None:
"""
Using a webhook while in a deactivated realm fails
"""
do_deactivate_realm(get_realm("zulip"))
user_profile = self.example_user("hamlet")
api_key = get_api_key(user_profile)
url = "/api/v1/external/jira?api_key=%s&stream=jira_custom" % (api_key,)
data = self.webhook_fixture_data('jira', 'created_v2')
result = self.client_post(url, data,
content_type="application/json")
self.assert_json_error_contains(result, "has been deactivated", status_code=400)
class LoginRequiredTest(ZulipTestCase):
def test_login_required(self) -> None:
"""
Verifies the zulip_login_required decorator blocks deactivated users.
"""
user_profile = self.example_user('hamlet')
email = user_profile.email
# Verify fails if logged-out
result = self.client_get('/accounts/accept_terms/')
self.assertEqual(result.status_code, 302)
# Verify succeeds once logged-in
self.login(email)
result = self.client_get('/accounts/accept_terms/')
self.assert_in_response("I agree to the", result)
# Verify fails if user deactivated (with session still valid)
user_profile.is_active = False
user_profile.save()
result = self.client_get('/accounts/accept_terms/')
self.assertEqual(result.status_code, 302)
# Verify succeeds if user reactivated
do_reactivate_user(user_profile)
self.login(email)
result = self.client_get('/accounts/accept_terms/')
self.assert_in_response("I agree to the", result)
# Verify fails if realm deactivated
user_profile.realm.deactivated = True
user_profile.realm.save()
result = self.client_get('/accounts/accept_terms/')
self.assertEqual(result.status_code, 302)
class FetchAPIKeyTest(ZulipTestCase):
def test_fetch_api_key_success(self) -> None:
email = self.example_email("cordelia")
self.login(email)
result = self.client_post("/json/fetch_api_key", {"password": initial_password(email)})
self.assert_json_success(result)
def test_fetch_api_key_email_address_visibility(self) -> None:
user_profile = self.example_user("cordelia")
email = user_profile.email
do_set_realm_property(user_profile.realm, "email_address_visibility",
Realm.EMAIL_ADDRESS_VISIBILITY_ADMINS)
self.login(email)
result = self.client_post("/json/fetch_api_key",
{"password": initial_password(email)})
self.assert_json_success(result)
def test_fetch_api_key_wrong_password(self) -> None:
email = self.example_email("cordelia")
self.login(email)
result = self.client_post("/json/fetch_api_key", {"password": "wrong_password"})
self.assert_json_error_contains(result, "password is incorrect")
class InactiveUserTest(ZulipTestCase):
def test_send_deactivated_user(self) -> None:
"""
rest_dispatch rejects requests from deactivated users, both /json and api
"""
user_profile = self.example_user('hamlet')
email = user_profile.email
self.login(email)
do_deactivate_user(user_profile)
result = self.client_post("/json/messages", {"type": "private",
"content": "Test message",
"client": "test suite",
"to": self.example_email("othello")})
self.assert_json_error_contains(result, "Not logged in", status_code=401)
# Even if a logged-in session was leaked, it still wouldn't work
do_reactivate_user(user_profile)
self.login(email)
user_profile.is_active = False
user_profile.save()
result = self.client_post("/json/messages", {"type": "private",
"content": "Test message",
"client": "test suite",
"to": self.example_email("othello")})
self.assert_json_error_contains(result, "Account is deactivated", status_code=400)
result = self.api_post(self.example_email("hamlet"),
"/api/v1/messages", {"type": "private",
"content": "Test message",
"client": "test suite",
"to": self.example_email("othello")})
self.assert_json_error_contains(result, "Account is deactivated", status_code=401)
def test_fetch_api_key_deactivated_user(self) -> None:
"""
authenticated_json_view views fail with a deactivated user
"""
user_profile = self.example_user('hamlet')
email = user_profile.email
test_password = "abcd1234"
user_profile.set_password(test_password)
user_profile.save()
self.login(email, password=test_password)
user_profile.is_active = False
user_profile.save()
result = self.client_post("/json/fetch_api_key", {"password": test_password})
self.assert_json_error_contains(result, "Account is deactivated", status_code=400)
def test_login_deactivated_user(self) -> None:
"""
logging in fails with an inactive user
"""
user_profile = self.example_user('hamlet')
do_deactivate_user(user_profile)
result = self.login_with_return(self.example_email("hamlet"))
self.assert_in_response(
"Your account is no longer active.",
result)
def test_login_deactivated_mirror_dummy(self) -> None:
"""
logging in fails with an inactive user
"""
user_profile = self.example_user('hamlet')
user_profile.is_mirror_dummy = True
user_profile.save()
password = initial_password(user_profile.email)
request = mock.MagicMock()
request.get_host.return_value = 'zulip.testserver'
# Test a mirror-dummy active user.
form = OurAuthenticationForm(request,
data={'username': user_profile.email,
'password': password})
with self.settings(AUTHENTICATION_BACKENDS=('zproject.backends.EmailAuthBackend',)):
self.assertTrue(form.is_valid())
# Test a mirror-dummy deactivated user.
do_deactivate_user(user_profile)
user_profile.save()
form = OurAuthenticationForm(request,
data={'username': user_profile.email,
'password': password})
with self.settings(AUTHENTICATION_BACKENDS=('zproject.backends.EmailAuthBackend',)):
self.assertFalse(form.is_valid())
self.assertIn("Please enter a correct email", str(form.errors))
# Test a non-mirror-dummy deactivated user.
user_profile.is_mirror_dummy = False
user_profile.save()
form = OurAuthenticationForm(request,
data={'username': user_profile.email,
'password': password})
with self.settings(AUTHENTICATION_BACKENDS=('zproject.backends.EmailAuthBackend',)):
self.assertFalse(form.is_valid())
self.assertIn("Your account is no longer active", str(form.errors))
def test_webhook_deactivated_user(self) -> None:
"""
Deactivated users can't use webhooks
"""
user_profile = self.example_user('hamlet')
do_deactivate_user(user_profile)
api_key = get_api_key(user_profile)
url = "/api/v1/external/jira?api_key=%s&stream=jira_custom" % (api_key,)
data = self.webhook_fixture_data('jira', 'created_v2')
result = self.client_post(url, data,
content_type="application/json")
self.assert_json_error_contains(result, "Account is deactivated", status_code=400)
class TestIncomingWebhookBot(ZulipTestCase):
def setUp(self) -> None:
super().setUp()
zulip_realm = get_realm('zulip')
self.webhook_bot = get_user('[email protected]', zulip_realm)
def test_webhook_bot_permissions(self) -> None:
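        # Incoming webhook bots may post messages but may not fetch them.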
result = self.api_post("[email protected]",
"/api/v1/messages", {"type": "private",
"content": "Test message",
"client": "test suite",
"to": self.example_email("othello")})
self.assert_json_success(result)
post_params = {"anchor": 1, "num_before": 1, "num_after": 1}
result = self.api_get("[email protected]", "/api/v1/messages", dict(post_params))
self.assert_json_error(result, 'This API is not available to incoming webhook bots.',
status_code=401)
class TestValidateApiKey(ZulipTestCase):
def setUp(self) -> None:
super().setUp()
zulip_realm = get_realm('zulip')
self.webhook_bot = get_user('[email protected]', zulip_realm)
self.default_bot = get_user('[email protected]', zulip_realm)
def test_has_api_key_format(self) -> None:
self.assertFalse(has_api_key_format("TooShort"))
# Has an invalid character:
self.assertFalse(has_api_key_format("32LONGXXXXXXXXXXXXXXXXXXXXXXXXX-"))
# Too long:
self.assertFalse(has_api_key_format("33LONGXXXXXXXXXXXXXXXXXXXXXXXXXXX"))
self.assertTrue(has_api_key_format("VIzRVw2CspUOnEm9Yu5vQiQtJNkvETkp"))
for i in range(0, 10):
self.assertTrue(has_api_key_format(generate_api_key()))
def test_validate_api_key_if_profile_does_not_exist(self) -> None:
with self.assertRaises(JsonableError):
validate_api_key(HostRequestMock(), '[email protected]', 'VIzRVw2CspUOnEm9Yu5vQiQtJNkvETkp')
def test_validate_api_key_if_api_key_does_not_match_profile_api_key(self) -> None:
with self.assertRaises(InvalidAPIKeyFormatError):
validate_api_key(HostRequestMock(), self.webhook_bot.email, 'not_32_length')
with self.assertRaises(InvalidAPIKeyError):
# We use default_bot's key but webhook_bot's email address to test
# the logic when an API key is passed and it doesn't belong to the
# user whose email address has been provided.
api_key = get_api_key(self.default_bot)
validate_api_key(HostRequestMock(), self.webhook_bot.email, api_key)
def test_validate_api_key_if_profile_is_not_active(self) -> None:
self._change_is_active_field(self.default_bot, False)
with self.assertRaises(JsonableError):
api_key = get_api_key(self.default_bot)
validate_api_key(HostRequestMock(), self.default_bot.email, api_key)
self._change_is_active_field(self.default_bot, True)
def test_validate_api_key_if_profile_is_incoming_webhook_and_is_webhook_is_unset(self) -> None:
with self.assertRaises(JsonableError):
api_key = get_api_key(self.webhook_bot)
validate_api_key(HostRequestMock(), self.webhook_bot.email, api_key)
def test_validate_api_key_if_profile_is_incoming_webhook_and_is_webhook_is_set(self) -> None:
api_key = get_api_key(self.webhook_bot)
profile = validate_api_key(HostRequestMock(host="zulip.testserver"),
self.webhook_bot.email, api_key,
is_webhook=True)
self.assertEqual(profile.id, self.webhook_bot.id)
def test_validate_api_key_if_email_is_case_insensitive(self) -> None:
api_key = get_api_key(self.default_bot)
profile = validate_api_key(HostRequestMock(host="zulip.testserver"), self.default_bot.email.upper(), api_key)
self.assertEqual(profile.id, self.default_bot.id)
def test_valid_api_key_if_user_is_on_wrong_subdomain(self) -> None:
with self.settings(RUNNING_INSIDE_TORNADO=False):
api_key = get_api_key(self.default_bot)
with mock.patch('logging.warning') as mock_warning:
with self.assertRaisesRegex(JsonableError,
"Account is not associated with this subdomain"):
validate_api_key(HostRequestMock(host=settings.EXTERNAL_HOST),
self.default_bot.email, api_key)
mock_warning.assert_called_with(
"User {} ({}) attempted to access API on wrong "
"subdomain ({})".format(self.default_bot.email, 'zulip', ''))
with mock.patch('logging.warning') as mock_warning:
with self.assertRaisesRegex(JsonableError,
"Account is not associated with this subdomain"):
validate_api_key(HostRequestMock(host='acme.' + settings.EXTERNAL_HOST),
self.default_bot.email, api_key)
mock_warning.assert_called_with(
"User {} ({}) attempted to access API on wrong "
"subdomain ({})".format(self.default_bot.email, 'zulip', 'acme'))
def _change_is_active_field(self, profile: UserProfile, value: bool) -> None:
profile.is_active = value
profile.save()
class TestInternalNotifyView(TestCase):
BORING_RESULT = 'boring'
class Request:
def __init__(self, POST: Dict[str, Any], META: Dict[str, Any]) -> None:
self.POST = POST
self.META = META
self.method = 'POST'
def internal_notify(self, is_tornado: bool, req: HttpRequest) -> HttpResponse:
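        # Wrap a no-op view with internal_notify_view and invoke it directly.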
boring_view = lambda req: self.BORING_RESULT
return internal_notify_view(is_tornado)(boring_view)(req)
def test_valid_internal_requests(self) -> None:
secret = 'random'
req = self.Request(
POST=dict(secret=secret),
META=dict(REMOTE_ADDR='127.0.0.1'),
) # type: HttpRequest
with self.settings(SHARED_SECRET=secret):
self.assertTrue(authenticate_notify(req))
self.assertEqual(self.internal_notify(False, req), self.BORING_RESULT)
self.assertEqual(req._email, 'internal')
with self.assertRaises(RuntimeError):
self.internal_notify(True, req)
req._tornado_handler = 'set'
with self.settings(SHARED_SECRET=secret):
self.assertTrue(authenticate_notify(req))
self.assertEqual(self.internal_notify(True, req), self.BORING_RESULT)
self.assertEqual(req._email, 'internal')
with self.assertRaises(RuntimeError):
self.internal_notify(False, req)
def test_internal_requests_with_broken_secret(self) -> None:
secret = 'random'
req = self.Request(
POST=dict(secret=secret),
META=dict(REMOTE_ADDR='127.0.0.1'),
)
with self.settings(SHARED_SECRET='broken'):
self.assertFalse(authenticate_notify(req))
self.assertEqual(self.internal_notify(True, req).status_code, 403)
def test_external_requests(self) -> None:
secret = 'random'
req = self.Request(
POST=dict(secret=secret),
META=dict(REMOTE_ADDR='3.3.3.3'),
)
with self.settings(SHARED_SECRET=secret):
self.assertFalse(authenticate_notify(req))
self.assertEqual(self.internal_notify(True, req).status_code, 403)
def test_is_local_address(self) -> None:
self.assertTrue(is_local_addr('127.0.0.1'))
self.assertTrue(is_local_addr('::1'))
self.assertFalse(is_local_addr('42.43.44.45'))
class TestHumanUsersOnlyDecorator(ZulipTestCase):
def test_human_only_endpoints(self) -> None:
post_endpoints = [
"/api/v1/users/me/apns_device_token",
"/api/v1/users/me/android_gcm_reg_id",
"/api/v1/users/me/enter-sends",
"/api/v1/users/me/hotspots",
"/api/v1/users/me/presence",
"/api/v1/users/me/tutorial_status",
"/api/v1/report/send_times",
"/api/v1/report/narrow_times",
"/api/v1/report/unnarrow_times",
]
for endpoint in post_endpoints:
result = self.api_post('[email protected]', endpoint)
self.assert_json_error(result, "This endpoint does not accept bot requests.")
patch_endpoints = [
"/api/v1/settings",
"/api/v1/settings/display",
"/api/v1/settings/notifications",
"/api/v1/users/me/profile_data"
]
for endpoint in patch_endpoints:
result = self.api_patch('[email protected]', endpoint)
self.assert_json_error(result, "This endpoint does not accept bot requests.")
delete_endpoints = [
"/api/v1/users/me/apns_device_token",
"/api/v1/users/me/android_gcm_reg_id",
]
for endpoint in delete_endpoints:
result = self.api_delete('[email protected]', endpoint)
self.assert_json_error(result, "This endpoint does not accept bot requests.")
class TestAuthenticatedJsonPostViewDecorator(ZulipTestCase):
def test_authenticated_json_post_view_if_everything_is_correct(self) -> None:
user_email = self.example_email('hamlet')
user_realm = get_realm('zulip')
self._login(user_email, user_realm)
response = self._do_test(user_email)
self.assertEqual(response.status_code, 200)
def test_authenticated_json_post_view_with_get_request(self) -> None:
user_email = self.example_email('hamlet')
user_realm = get_realm('zulip')
self._login(user_email, user_realm)
with mock.patch('logging.warning') as mock_warning:
result = self.client_get(r'/json/subscriptions/exists', {'stream': 'Verona'})
self.assertEqual(result.status_code, 405)
            mock_warning.assert_called_once()  # Check we logged the Method Not Allowed
self.assertEqual(mock_warning.call_args_list[0][0],
('Method Not Allowed (%s): %s', 'GET', '/json/subscriptions/exists'))
def test_authenticated_json_post_view_if_subdomain_is_invalid(self) -> None:
user_email = self.example_email('hamlet')
user_realm = get_realm('zulip')
self._login(user_email, user_realm)
with mock.patch('logging.warning') as mock_warning, \
mock.patch('zerver.decorator.get_subdomain', return_value=''):
self.assert_json_error_contains(self._do_test(user_email),
"Account is not associated with this "
"subdomain")
mock_warning.assert_called_with(
"User {} ({}) attempted to access API on wrong "
"subdomain ({})".format(user_email, 'zulip', ''))
with mock.patch('logging.warning') as mock_warning, \
mock.patch('zerver.decorator.get_subdomain', return_value='acme'):
self.assert_json_error_contains(self._do_test(user_email),
"Account is not associated with this "
"subdomain")
mock_warning.assert_called_with(
"User {} ({}) attempted to access API on wrong "
"subdomain ({})".format(user_email, 'zulip', 'acme'))
def test_authenticated_json_post_view_if_user_is_incoming_webhook(self) -> None:
user_email = '[email protected]'
user_realm = get_realm('zulip')
self._login(user_email, user_realm, password="test") # we set a password because user is a bot
self.assert_json_error_contains(self._do_test(user_email), "Webhook bots can only access webhooks")
def test_authenticated_json_post_view_if_user_is_not_active(self) -> None:
user_email = self.example_email('hamlet')
user_realm = get_realm('zulip')
self._login(user_email, user_realm, password="test")
# Get user_profile after _login so that we have the latest data.
user_profile = get_user(user_email, user_realm)
# we deactivate user manually because do_deactivate_user removes user session
user_profile.is_active = False
user_profile.save()
self.assert_json_error_contains(self._do_test(user_email), "Account is deactivated")
do_reactivate_user(user_profile)
def test_authenticated_json_post_view_if_user_realm_is_deactivated(self) -> None:
user_email = self.example_email('hamlet')
user_realm = get_realm('zulip')
user_profile = get_user(user_email, user_realm)
self._login(user_email, user_realm)
# we deactivate user's realm manually because do_deactivate_user removes user session
user_profile.realm.deactivated = True
user_profile.realm.save()
self.assert_json_error_contains(self._do_test(user_email), "This organization has been deactivated")
do_reactivate_realm(user_profile.realm)
def _do_test(self, user_email: str) -> HttpResponse:
stream_name = "stream name"
self.common_subscribe_to_streams(user_email, [stream_name])
data = {"password": initial_password(user_email), "stream": stream_name}
return self.client_post(r'/json/subscriptions/exists', data)
def _login(self, user_email: str, user_realm: Realm, password: str=None) -> None:
if password:
user_profile = get_user(user_email, user_realm)
user_profile.set_password(password)
user_profile.save()
self.login(user_email, password)
class TestAuthenticatedJsonViewDecorator(ZulipTestCase):
def test_authenticated_json_view_if_subdomain_is_invalid(self) -> None:
user_email = self.example_email("hamlet")
self.login(user_email)
with mock.patch('logging.warning') as mock_warning, \
mock.patch('zerver.decorator.get_subdomain', return_value=''):
self.assert_json_error_contains(self._do_test(str(user_email)),
"Account is not associated with this "
"subdomain")
mock_warning.assert_called_with(
"User {} ({}) attempted to access API on wrong "
"subdomain ({})".format(user_email, 'zulip', ''))
with mock.patch('logging.warning') as mock_warning, \
mock.patch('zerver.decorator.get_subdomain', return_value='acme'):
self.assert_json_error_contains(self._do_test(str(user_email)),
"Account is not associated with this "
"subdomain")
mock_warning.assert_called_with(
"User {} ({}) attempted to access API on wrong "
"subdomain ({})".format(user_email, 'zulip', 'acme'))
def _do_test(self, user_email: str) -> HttpResponse:
data = {"password": initial_password(user_email)}
return self.client_post(r'/accounts/webathena_kerberos_login/', data)
class TestZulipLoginRequiredDecorator(ZulipTestCase):
def test_zulip_login_required_if_subdomain_is_invalid(self) -> None:
user_email = self.example_email("hamlet")
self.login(user_email)
with mock.patch('zerver.decorator.get_subdomain', return_value='zulip'):
result = self.client_get('/accounts/accept_terms/')
self.assertEqual(result.status_code, 200)
with mock.patch('zerver.decorator.get_subdomain', return_value=''):
result = self.client_get('/accounts/accept_terms/')
self.assertEqual(result.status_code, 302)
with mock.patch('zerver.decorator.get_subdomain', return_value='acme'):
result = self.client_get('/accounts/accept_terms/')
self.assertEqual(result.status_code, 302)
def test_2fa_failure(self) -> None:
@zulip_login_required
def test_view(request: HttpRequest) -> HttpResponse:
return HttpResponse('Success')
request = HttpRequest()
request.META['SERVER_NAME'] = 'localhost'
request.META['SERVER_PORT'] = 80
request.META['PATH_INFO'] = ''
request.user = hamlet = self.example_user('hamlet')
request.user.is_verified = lambda: False
self.login(hamlet.email)
request.session = self.client.session
request.get_host = lambda: 'zulip.testserver'
response = test_view(request)
content = getattr(response, 'content')
self.assertEqual(content.decode(), 'Success')
with self.settings(TWO_FACTOR_AUTHENTICATION_ENABLED=True):
request = HttpRequest()
request.META['SERVER_NAME'] = 'localhost'
request.META['SERVER_PORT'] = 80
request.META['PATH_INFO'] = ''
request.user = hamlet = self.example_user('hamlet')
request.user.is_verified = lambda: False
self.login(hamlet.email)
request.session = self.client.session
request.get_host = lambda: 'zulip.testserver'
self.create_default_device(request.user)
response = test_view(request)
status_code = getattr(response, 'status_code')
self.assertEqual(status_code, 302)
url = getattr(response, 'url')
response_url = url.split("?")[0]
self.assertEqual(response_url, settings.HOME_NOT_LOGGED_IN)
def test_2fa_success(self) -> None:
@zulip_login_required
def test_view(request: HttpRequest) -> HttpResponse:
return HttpResponse('Success')
with self.settings(TWO_FACTOR_AUTHENTICATION_ENABLED=True):
request = HttpRequest()
request.META['SERVER_NAME'] = 'localhost'
request.META['SERVER_PORT'] = 80
request.META['PATH_INFO'] = ''
request.user = hamlet = self.example_user('hamlet')
request.user.is_verified = lambda: True
self.login(hamlet.email)
request.session = self.client.session
request.get_host = lambda: 'zulip.testserver'
self.create_default_device(request.user)
response = test_view(request)
content = getattr(response, 'content')
self.assertEqual(content.decode(), 'Success')
class TestRequireDecorators(ZulipTestCase):
def test_require_server_admin_decorator(self) -> None:
user_email = self.example_email('hamlet')
user_realm = get_realm('zulip')
self.login(user_email)
result = self.client_get('/activity')
self.assertEqual(result.status_code, 302)
user_profile = get_user(user_email, user_realm)
user_profile.is_staff = True
user_profile.save()
result = self.client_get('/activity')
self.assertEqual(result.status_code, 200)
def test_require_non_guest_user_decorator(self) -> None:
guest_user = self.example_user('polonius')
self.login(guest_user.email)
result = self.common_subscribe_to_streams(guest_user.email, ["Denmark"])
self.assert_json_error(result, "Not allowed for guest users")
def test_require_member_or_admin_decorator(self) -> None:
result = self.api_get("[email protected]", '/api/v1/bots')
self.assert_json_error(result, "This endpoint does not accept bot requests.")
guest_user = self.example_user('polonius')
self.login(guest_user.email)
result = self.client_get('/json/bots')
self.assert_json_error(result, "Not allowed for guest users")
class ReturnSuccessOnHeadRequestDecorator(ZulipTestCase):
def test_returns_200_if_request_method_is_head(self) -> None:
class HeadRequest:
method = 'HEAD'
request = HeadRequest()
@return_success_on_head_request
def test_function(request: HttpRequest) -> HttpResponse:
return json_response(msg=u'from_test_function') # nocoverage. isn't meant to be called
response = test_function(request)
self.assert_json_success(response)
self.assertNotEqual(ujson.loads(response.content).get('msg'), u'from_test_function')
def test_returns_normal_response_if_request_method_is_not_head(self) -> None:
class HeadRequest:
method = 'POST'
request = HeadRequest()
@return_success_on_head_request
def test_function(request: HttpRequest) -> HttpResponse:
return json_response(msg=u'from_test_function')
response = test_function(request)
self.assertEqual(ujson.loads(response.content).get('msg'), u'from_test_function')
class RestAPITest(ZulipTestCase):
def test_method_not_allowed(self) -> None:
self.login(self.example_email("hamlet"))
result = self.client_patch('/json/users')
self.assertEqual(result.status_code, 405)
self.assert_in_response('Method Not Allowed', result)
def test_options_method(self) -> None:
self.login(self.example_email("hamlet"))
result = self.client_options('/json/users')
self.assertEqual(result.status_code, 204)
self.assertEqual(str(result['Allow']), 'GET, HEAD, POST')
result = self.client_options('/json/streams/15')
self.assertEqual(result.status_code, 204)
self.assertEqual(str(result['Allow']), 'DELETE, PATCH')
def test_http_accept_redirect(self) -> None:
result = self.client_get('/json/users',
HTTP_ACCEPT='text/html')
self.assertEqual(result.status_code, 302)
self.assertTrue(result["Location"].endswith("/login/?next=/json/users"))
class CacheTestCase(ZulipTestCase):
def test_cachify_basics(self) -> None:
@cachify
def add(w: Any, x: Any, y: Any, z: Any) -> Any:
return w + x + y + z
for i in range(2):
self.assertEqual(add(1, 2, 4, 8), 15)
self.assertEqual(add('a', 'b', 'c', 'd'), 'abcd')
def test_cachify_is_per_call(self) -> None:
def test_greetings(greeting: str) -> Tuple[List[str], List[str]]:
result_log = [] # type: List[str]
work_log = [] # type: List[str]
@cachify
def greet(first_name: str, last_name: str) -> str:
msg = '%s %s %s' % (greeting, first_name, last_name)
work_log.append(msg)
return msg
result_log.append(greet('alice', 'smith'))
result_log.append(greet('bob', 'barker'))
result_log.append(greet('alice', 'smith'))
result_log.append(greet('cal', 'johnson'))
return (work_log, result_log)
work_log, result_log = test_greetings('hello')
self.assertEqual(work_log, [
'hello alice smith',
'hello bob barker',
'hello cal johnson',
])
self.assertEqual(result_log, [
'hello alice smith',
'hello bob barker',
'hello alice smith',
'hello cal johnson',
])
work_log, result_log = test_greetings('goodbye')
self.assertEqual(work_log, [
'goodbye alice smith',
'goodbye bob barker',
'goodbye cal johnson',
])
self.assertEqual(result_log, [
'goodbye alice smith',
'goodbye bob barker',
'goodbye alice smith',
'goodbye cal johnson',
])
class TestUserAgentParsing(ZulipTestCase):
def test_user_agent_parsing(self) -> None:
"""Test for our user agent parsing logic, using a large data set."""
user_agents_parsed = defaultdict(int) # type: Dict[str, int]
user_agents_path = os.path.join(settings.DEPLOY_ROOT, "zerver/tests/fixtures/user_agents_unique")
for line in open(user_agents_path).readlines():
line = line.strip()
match = re.match('^(?P<count>[0-9]+) "(?P<user_agent>.*)"$', line)
self.assertIsNotNone(match)
groupdict = match.groupdict()
count = groupdict["count"]
user_agent = groupdict["user_agent"]
ret = parse_user_agent(user_agent)
user_agents_parsed[ret["name"]] += int(count)
class TestIgnoreUnhashableLRUCache(ZulipTestCase):
def test_cache_hit(self) -> None:
@ignore_unhashable_lru_cache()
def f(arg: Any) -> Any:
return arg
def get_cache_info() -> Tuple[int, int, int]:
info = getattr(f, 'cache_info')()
hits = getattr(info, 'hits')
misses = getattr(info, 'misses')
currsize = getattr(info, 'currsize')
return hits, misses, currsize
def clear_cache() -> None:
getattr(f, 'cache_clear')()
# Check hashable argument.
result = f(1)
hits, misses, currsize = get_cache_info()
# First one should be a miss.
self.assertEqual(hits, 0)
self.assertEqual(misses, 1)
self.assertEqual(currsize, 1)
self.assertEqual(result, 1)
result = f(1)
hits, misses, currsize = get_cache_info()
# Second one should be a hit.
self.assertEqual(hits, 1)
self.assertEqual(misses, 1)
self.assertEqual(currsize, 1)
self.assertEqual(result, 1)
# Check unhashable argument.
result = f({1: 2})
hits, misses, currsize = get_cache_info()
# Cache should not be used.
self.assertEqual(hits, 1)
self.assertEqual(misses, 1)
self.assertEqual(currsize, 1)
self.assertEqual(result, {1: 2})
# Clear cache.
clear_cache()
hits, misses, currsize = get_cache_info()
self.assertEqual(hits, 0)
self.assertEqual(misses, 0)
self.assertEqual(currsize, 0)
def test_cache_hit_dict_args(self) -> None:
@ignore_unhashable_lru_cache()
@items_tuple_to_dict
def g(arg: Any) -> Any:
return arg
def get_cache_info() -> Tuple[int, int, int]:
info = getattr(g, 'cache_info')()
hits = getattr(info, 'hits')
misses = getattr(info, 'misses')
currsize = getattr(info, 'currsize')
return hits, misses, currsize
def clear_cache() -> None:
getattr(g, 'cache_clear')()
# Not used as a decorator on the definition to allow defining
# get_cache_info and clear_cache
f = dict_to_items_tuple(g)
# Check hashable argument.
result = f(1)
hits, misses, currsize = get_cache_info()
# First one should be a miss.
self.assertEqual(hits, 0)
self.assertEqual(misses, 1)
self.assertEqual(currsize, 1)
self.assertEqual(result, 1)
result = f(1)
hits, misses, currsize = get_cache_info()
# Second one should be a hit.
self.assertEqual(hits, 1)
self.assertEqual(misses, 1)
self.assertEqual(currsize, 1)
self.assertEqual(result, 1)
# Check dict argument.
result = f({1: 2})
hits, misses, currsize = get_cache_info()
# First one is a miss
self.assertEqual(hits, 1)
self.assertEqual(misses, 2)
self.assertEqual(currsize, 2)
self.assertEqual(result, {1: 2})
result = f({1: 2})
hits, misses, currsize = get_cache_info()
# Second one should be a hit.
self.assertEqual(hits, 2)
self.assertEqual(misses, 2)
self.assertEqual(currsize, 2)
self.assertEqual(result, {1: 2})
# Clear cache.
clear_cache()
hits, misses, currsize = get_cache_info()
self.assertEqual(hits, 0)
self.assertEqual(misses, 0)
self.assertEqual(currsize, 0)
| 41.122725 | 152 | 0.619153 |
4a1ee71cdb186ee6e94dcfad6891570a67644e1e | 1,708 | py | Python | config/wsgi.py | larrykamau/torrent_crawler | 17178f7a72e0fc4d8dc6752fcb76b0094dba40da | [
"MIT"
] | null | null | null | config/wsgi.py | larrykamau/torrent_crawler | 17178f7a72e0fc4d8dc6752fcb76b0094dba40da | [
"MIT"
] | null | null | null | config/wsgi.py | larrykamau/torrent_crawler | 17178f7a72e0fc4d8dc6752fcb76b0094dba40da | [
"MIT"
] | null | null | null | """
WSGI config for Torrent Scraper project.
This module contains the WSGI application used by Django's development server
and any production WSGI deployments. It should expose a module-level variable
named ``application``. Django's ``runserver`` and ``runfcgi`` commands discover
this application via the ``WSGI_APPLICATION`` setting.
Usually you will have the standard Django WSGI application here, but it also
might make sense to replace the whole Django WSGI application with a custom one
that later delegates to the Django one. For example, you could introduce WSGI
middleware here, or combine a Django application with an application of another
framework.
"""
import os
import sys
from django.core.wsgi import get_wsgi_application
# This allows easy placement of apps within the interior
# torrent_scraper directory.
app_path = os.path.abspath(
os.path.join(os.path.dirname(os.path.abspath(__file__)), os.pardir)
)
sys.path.append(os.path.join(app_path, "torrent_scraper"))
# We defer to a DJANGO_SETTINGS_MODULE already in the environment. This breaks
# if running multiple sites in the same mod_wsgi process. To fix this, use
# mod_wsgi daemon mode with each site in its own daemon process, or use
# os.environ["DJANGO_SETTINGS_MODULE"] = "config.settings.production"
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "config.settings.production")
# This application object is used by any WSGI server configured to use this
# file. This includes Django's development server, if the WSGI_APPLICATION
# setting points here.
application = get_wsgi_application()
# Apply WSGI middleware here.
# from helloworld.wsgi import HelloWorldApplication
# application = HelloWorldApplication(application)
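#
# Illustrative sketch (not part of the original project): a WSGI middleware is
# just a callable wrapping `application`. The class and header names below are
# hypothetical, shown only to make the wrapping pattern above concrete.
#
# class HeaderInjectionMiddleware:
#     def __init__(self, app):
#         self.app = app
#
#     def __call__(self, environ, start_response):
#         def custom_start_response(status, headers, exc_info=None):
#             headers.append(("X-Example", "torrent-scraper"))
#             return start_response(status, headers, exc_info)
#         return self.app(environ, custom_start_response)
#
# application = HeaderInjectionMiddleware(application)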
| 42.7 | 79 | 0.79918 |
4a1ee7a9e5dad195a8d4910cca1be8bc28a09cf7 | 2,884 | py | Python | doc/learning_examples/multi_target/lasso_example_multi_random_gbm.py | TianXie1999/selective-inference | ca02bbd84af5f5597944c75bde8337db9c69066a | [
"BSD-3-Clause"
] | 51 | 2016-03-31T16:34:15.000Z | 2022-01-16T04:32:58.000Z | doc/learning_examples/multi_target/lasso_example_multi_random_gbm.py | TianXie1999/selective-inference | ca02bbd84af5f5597944c75bde8337db9c69066a | [
"BSD-3-Clause"
] | 11 | 2016-04-07T00:19:58.000Z | 2021-10-03T18:31:14.000Z | doc/learning_examples/multi_target/lasso_example_multi_random_gbm.py | TianXie1999/selective-inference | ca02bbd84af5f5597944c75bde8337db9c69066a | [
"BSD-3-Clause"
] | 14 | 2015-10-28T17:29:05.000Z | 2021-08-16T21:04:30.000Z | import functools
import numpy as np
from scipy.stats import norm as ndist
import regreg.api as rr
from selection.tests.instance import gaussian_instance
from selection.learning.utils import full_model_inference, pivot_plot
from selection.learning.core import normal_sampler, split_sampler, gbm_fit  # split_sampler is used below
def simulate(n=200, p=100, s=10, signal=(0.5, 1), sigma=2, alpha=0.1, B=3000):
# description of statistical problem
X, y, truth = gaussian_instance(n=n,
p=p,
s=s,
equicorrelated=False,
rho=0.5,
sigma=sigma,
signal=signal,
random_signs=True,
scale=False)[:3]
dispersion = sigma**2
S = X.T.dot(y)
covS = dispersion * X.T.dot(X)
smooth_sampler = normal_sampler(S, covS)
splitting_sampler = split_sampler(X * y[:, None], covS)
def meta_algorithm(XTX, XTXi, lam, sampler):
p = XTX.shape[0]
success = np.zeros(p)
loss = rr.quadratic_loss((p,), Q=XTX)
pen = rr.l1norm(p, lagrange=lam)
scale = 0.5
noisy_S = sampler(scale=scale)
loss.quadratic = rr.identity_quadratic(0, 0, -noisy_S, 0)
problem = rr.simple_problem(loss, pen)
soln = problem.solve(max_its=100, tol=1.e-10)
success += soln != 0
return set(np.nonzero(success)[0])
XTX = X.T.dot(X)
XTXi = np.linalg.inv(XTX)
resid = y - X.dot(XTXi.dot(X.T.dot(y)))
dispersion = np.linalg.norm(resid)**2 / (n-p)
lam = 4. * np.sqrt(n)
selection_algorithm = functools.partial(meta_algorithm, XTX, XTXi, lam)
# run selection algorithm
return full_model_inference(X,
y,
truth,
selection_algorithm,
smooth_sampler,
success_params=(1, 1),
B=B,
fit_probability=gbm_fit,
fit_args={'ntrees':5000})
if __name__ == "__main__":
import statsmodels.api as sm
import matplotlib.pyplot as plt
import pandas as pd
U = np.linspace(0, 1, 101)
plt.clf()
for i in range(500):
df = simulate()
csvfile = 'lasso_multi_random_gbm.csv'
outbase = csvfile[:-4]
if df is not None and i > 0:
try: # concatenate to disk
df = pd.concat([df, pd.read_csv(csvfile)])
except FileNotFoundError:
pass
df.to_csv(csvfile, index=False)
if len(df['pivot']) > 0:
pivot_ax, length_ax = pivot_plot(df, outbase)
| 31.010753 | 78 | 0.507975 |
4a1ee807204e5ab396ded15710d7ba389e3a72b5 | 2,546 | py | Python | fuzzer/image_queue.py | xiaoningdu/deepstellar | 2624476c5d0b01b7af9373dff4fd6a3f56e495b4 | [
"Apache-2.0"
] | 10 | 2020-08-07T01:15:14.000Z | 2022-01-10T15:49:14.000Z | fuzzer/image_queue.py | xiaoningdu/deepstellar | 2624476c5d0b01b7af9373dff4fd6a3f56e495b4 | [
"Apache-2.0"
] | 1 | 2020-11-01T11:41:00.000Z | 2020-11-01T11:41:00.000Z | fuzzer/image_queue.py | xiaoningdu/deepstellar | 2624476c5d0b01b7af9373dff4fd6a3f56e495b4 | [
"Apache-2.0"
] | 4 | 2020-08-06T06:27:31.000Z | 2022-01-10T15:49:21.000Z | import time
import numpy as np
from fuzzer.lib.queue import FuzzQueue
from fuzzer.lib.queue import Seed
class ImageInputCorpus(FuzzQueue):
"""Class that holds inputs and associated coverage."""
def __init__(self, outdir, israndom, sample_function, cov_num, criteria):
"""Init the class.
Args:
          outdir: output directory where the fuzzing queue and crash artifacts
            are written.
          israndom: flag forwarded to the base FuzzQueue.
          sample_function: a function that looks at the whole current corpus and
            samples the next element to mutate in the fuzzing loop.
          cov_num: number of coverage counters, forwarded to the base FuzzQueue.
          criteria: coverage criteria identifier, forwarded to the base FuzzQueue.
Returns:
Initialized object.
"""
FuzzQueue.__init__(self, outdir, israndom, sample_function, cov_num, criteria)
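        # Lookup table that buckets a byte value (0-255) into a coarse class:
        # 0 and 1 map to themselves, 2-50 -> 2, 51-150 -> 4, 151-255 -> 128.
        # It is consumed by the class_loop_up helper defined in
        # save_if_interesting below.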
self.loopup = {}
self.loopup[0] = 0
self.loopup[1] = 1
self.loopup.update(self.loopup.fromkeys(range(2, 51), 2))
self.loopup.update(self.loopup.fromkeys(range(51, 151), 4))
self.loopup.update(self.loopup.fromkeys(range(151, 256), 128))
def save_if_interesting(self, seed, data, crash, dry_run=False, suffix=None):
"""Adds item to corpus if it exercises new coverage."""
def class_loop_up(x):
return self.loopup[x]
self.mutations_processed += 1
current_time = time.time()
if dry_run:
coverage = self.compute_cov()
self.dry_run_cov = coverage
if current_time - self.log_time > 2:
self.log_time = current_time
self.log()
describe_op = "src:%06d" % (seed.parent.id) if suffix is None else "src:%s" % (suffix)
if crash:
fn = "%s/crashes/id:%06d,%s.npy" % (self.out_dir, self.uniq_crashes, describe_op)
self.uniq_crashes += 1
self.last_crash_time = current_time
else:
fn = "%s/queue/id:%06d,%s.npy" % (self.out_dir, self.total_queue, describe_op)
if self.has_new_bits(seed) or dry_run:
self.last_reg_time = current_time
if self.sample_type != 'random2' or dry_run:
seed.queue_time = current_time
seed.id = self.total_queue
seed.fname = fn
seed.probability = self.REG_INIT_PROB
self.queue.append(seed)
del seed.coverage
else:
del seed
self.total_queue += 1
else:
del seed
return False
np.save(fn, data)
return True
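# Illustrative usage sketch (not part of the original module). The argument
# values and the sampler/criteria names are hypothetical; sample_function is
# assumed to follow the FuzzQueue contract of picking the next seed to mutate.
#
#     corpus = ImageInputCorpus("./fuzz_out", israndom=False,
#                               sample_function=my_sampler,
#                               cov_num=1000, criteria="coverage")
#     corpus.save_if_interesting(seed, mutated_image, crash=False, dry_run=False)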
| 36.898551 | 94 | 0.577769 |
4a1ee814225cfcc7d5620eac720d977fdb0ec054 | 751 | py | Python | pymetamap/__init__.py | nwams/Medline-Pubmed-Search-Engine | 4a21ffe3e972cd0675b69ff8cb797aad4c17f088 | [
"Apache-2.0"
] | 15 | 2016-01-30T04:18:21.000Z | 2021-05-20T21:25:39.000Z | build/lib/pymetamap/__init__.py | nwams/Medline-Pubmed-Search-Engine | 4a21ffe3e972cd0675b69ff8cb797aad4c17f088 | [
"Apache-2.0"
] | null | null | null | build/lib/pymetamap/__init__.py | nwams/Medline-Pubmed-Search-Engine | 4a21ffe3e972cd0675b69ff8cb797aad4c17f088 | [
"Apache-2.0"
] | 2 | 2015-11-13T03:28:40.000Z | 2016-05-03T22:38:37.000Z | # Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
from .MetaMap import MetaMap
from .Concept import Concept
from .Concept import Corpus
__all__ = (MetaMap, Concept, Corpus)
__authors__ = 'Anthony Rios'
__version__ = '0.1'
__email__ = '[email protected]'
| 34.136364 | 74 | 0.76964 |
4a1ee8ee0ec9a8eee6d9a8410f47c37cebf757e7 | 4,469 | py | Python | day52021.py | GeirOwe/adventOfCode | fee1420cb8ecce8b7aaf9d48472364be191ca2a2 | [
"MIT"
] | 1 | 2021-12-20T11:10:59.000Z | 2021-12-20T11:10:59.000Z | day52021.py | GeirOwe/adventOfCode | fee1420cb8ecce8b7aaf9d48472364be191ca2a2 | [
"MIT"
] | null | null | null | day52021.py | GeirOwe/adventOfCode | fee1420cb8ecce8b7aaf9d48472364be191ca2a2 | [
"MIT"
] | 1 | 2021-12-02T14:40:12.000Z | 2021-12-02T14:40:12.000Z | # Day5 - 2021 Advent of code
# source: https://adventofcode.com/2021/day/5
import os
import numpy as np
def clear_console():
os.system('clear')
    print('< .... AoC 2021 Day 5, part 1 .... >')
print()
return
def add_signal_to_row(fra, til, row, signalGrid):
if fra < til:
fromX = fra
toX = til
else:
fromX = til
toX = fra
#increase number in grid for all cells in row in the range
while fromX <= toX:
signalGrid[row, fromX] += 1
fromX += 1
return signalGrid
def add_signal_to_col(fra, til, column, signalGrid):
if fra < til:
fromY = fra
toY = til
else:
fromY = til
toY = fra
#increase number in grid for all cells in row in the range
while fromY <= toY:
signalGrid[fromY, column] += 1
fromY += 1
return signalGrid
def process_the_data(linesData, signalGrid):
    # read the data line by line
i = 0
while i < len(linesData):
        # if y1 == y2 ( linesData[i, 1] == linesData[i, 3] ) -> add signal along a row,
        #   from linesData[i, 0] through linesData[i, 2]
        # if x1 == x2 ( linesData[i, 0] == linesData[i, 2] ) -> add signal along a column,
        #   from linesData[i, 1] through linesData[i, 3]
y1 = linesData[i, 1]
y2 = linesData[i, 3]
x1 = linesData[i, 0]
x2 = linesData[i, 2]
#add signals to a row if y1 = y2
if y1 == y2:
signalGrid = add_signal_to_row(x1, x2, y1, signalGrid) # -> fromX, toX, row, grid
#add signals to a column if x1 = x2
if x1 == x2:
signalGrid = add_signal_to_col(y1, y2, x1, signalGrid) # -> fromY, toY, column, grid
i += 1
#the signalgrid is done -> now calculate no of points (cells) > 1
valueX = (signalGrid > 1).sum()
return valueX
def get_coord(row):
temp = row.split("->")
temp1 = temp[0].split(",")
temp2 = temp[1].split(",")
x1 = int(temp1[0].strip())
y1 = int(temp1[1].strip())
x2 = int(temp2[0].strip())
y2 = int(temp2[1].strip())
return x1, y1, x2, y2
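# Example (illustrative): get_coord("0,9 -> 5,9") returns (0, 9, 5, 9).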
def optimize_the_data(theData):
coords = []
noOfRows = 0
rowsInGrid = 0 #used when reshaping the arrayGrid
colsInGrid = 0 #used when reshaping the arrayGrid
for row in theData:
        # find the coordinates and track the largest x/y values; the grid is
        # indexed [y, x], so rowsInGrid follows y and colsInGrid follows x
        x1, y1, x2, y2 = get_coord(row)
        if y1 > rowsInGrid:
            rowsInGrid = y1
        if y2 > rowsInGrid:
            rowsInGrid = y2
        if x1 > colsInGrid:
            colsInGrid = x1
        if x2 > colsInGrid:
            colsInGrid = x2
if x1 == x2 or y1 == y2:
#add them to a list, reshape in the numpy array
coords.append(x1)
coords.append(y1)
coords.append(x2)
coords.append(y2)
noOfRows += 1
#move them into numpy arrays - to make it easier to process
#array (x, y) -> rows, columns -> : means all elements
#add 1 to rowsInGrid / colsInGrid since we started on zero
rowsInGrid += 1
colsInGrid += 1
linesData = np.array(coords, dtype = "int").reshape(noOfRows, 4)
signalGrid = np.zeros(int(rowsInGrid*colsInGrid), dtype = "int").reshape(rowsInGrid, colsInGrid)
#print(linesData[1, :], " -> expect 2nd row, all numbers -> 9 4 3 4")
#print(linesData)
#print(signalGrid)
print("rows: ", rowsInGrid, " cols: ", colsInGrid)
return linesData, signalGrid
def get_the_data():
#read the test puzzle input
#theData = open('day52021_test_puzzle_input.txt', 'r')
#read the puzzle input
theData = open('day52021_puzzle_input.txt', 'r')
#move data into a list - read a line and remove lineshift
data_list = []
for element in theData:
elementTrimmed = element.strip()
if elementTrimmed != "":
data_list.append(elementTrimmed)
return data_list
def start_the_engine():
#get the data and read them into a list
theData = get_the_data()
linesData, signalGrid = optimize_the_data(theData)
#process the data and return the answer
valueX = process_the_data(linesData, signalGrid)
print('the number of points where at least two lines overlap -> ', valueX,'\n')
return
#let's start
if __name__ == '__main__':
clear_console()
start_the_engine() | 31.695035 | 101 | 0.578429 |
4a1ee953889988ddd4a0a42709188b18693cbf15 | 2,289 | py | Python | tests/test_Terraform Linting CLI.py | schubergphilis/terraformlintingcli | dae3356ec092c9ff4e9c3da46fa896f694dced4f | [
"MIT"
] | 1 | 2020-02-17T09:13:31.000Z | 2020-02-17T09:13:31.000Z | tests/test_Terraform Linting CLI.py | schubergphilis/terraformlintingcli | dae3356ec092c9ff4e9c3da46fa896f694dced4f | [
"MIT"
] | null | null | null | tests/test_Terraform Linting CLI.py | schubergphilis/terraformlintingcli | dae3356ec092c9ff4e9c3da46fa896f694dced4f | [
"MIT"
] | null | null | null | #!/usr/bin/env python
# -*- coding: utf-8 -*-
# File: test_terraformlintingcli.py
#
# Copyright 2018 Costas Tyfoxylos
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to
# deal in the Software without restriction, including without limitation the
# rights to use, copy, modify, merge, publish, distribute, sublicense, and/or
# sell copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in
# all copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING
# FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER
# DEALINGS IN THE SOFTWARE.
#
"""
test_terraformlintingcli
----------------------------------
Tests for `terraformlintingcli` module.
.. _Google Python Style Guide:
http://google.github.io/styleguide/pyguide.html
"""
from betamax.fixtures import unittest
__author__ = '''Costas Tyfoxylos <[email protected]>'''
__docformat__ = '''google'''
__date__ = '''2018-05-24'''
__copyright__ = '''Copyright 2018, Costas Tyfoxylos'''
__credits__ = ["Costas Tyfoxylos"]
__license__ = '''MIT'''
__maintainer__ = '''Costas Tyfoxylos'''
__email__ = '''<[email protected]>'''
__status__ = '''Development''' # "Prototype", "Development", "Production".
class TestTerraformlintingcli(unittest.BetamaxTestCase):
def setUp(self):
"""
Test set up
This is where you can setup things that you use throughout the tests. This method is called before every test.
"""
pass
def tearDown(self):
"""
Test tear down
This is where you should tear down what you've setup in setUp before. This method is called after every test.
"""
pass
| 34.681818 | 118 | 0.711228 |
4a1ee96422b0e94eebbf5bb5fda3c54fc394e6ba | 2,623 | py | Python | irspack/user_cold_start/evaluator.py | kiminh/irspack | 45e448bb741b5f08b1b93d47ca293b981dd5f8af | [
"MIT"
] | 1 | 2021-01-11T18:34:30.000Z | 2021-01-11T18:34:30.000Z | irspack/user_cold_start/evaluator.py | kiminh/irspack | 45e448bb741b5f08b1b93d47ca293b981dd5f8af | [
"MIT"
] | null | null | null | irspack/user_cold_start/evaluator.py | kiminh/irspack | 45e448bb741b5f08b1b93d47ca293b981dd5f8af | [
"MIT"
] | null | null | null | from collections import OrderedDict
from typing import Any, Dict, List
import numpy as np
from irspack.user_cold_start.recommenders import base
from ..evaluator import METRIC_NAMES, EvaluatorCore, Metrics
from ..recommenders.base import InteractionMatrix
class UserColdStartEvaluator:
def __init__(
self,
X: InteractionMatrix,
profiles: base.ProfileMatrix,
mb_size: int = 1024,
n_thread: int = 1,
cutoff: int = 20,
):
assert X.shape[0] == profiles.shape[0]
self.core = EvaluatorCore(X.astype(np.float64), [])
self.profiles = profiles
self.n_users = X.shape[0]
self.n_items = X.shape[1]
self.dim_profile = profiles.shape[1]
self.mb_size = mb_size
self.n_thread = n_thread
self.cutoff = cutoff
def get_score(self, model: base.BaseUserColdStartRecommender) -> Dict[str, Any]:
metric_base = Metrics(self.n_items)
for start in range(0, self.n_users, self.mb_size):
end = min(start + self.mb_size, self.n_users)
score_mb = model.get_score(self.profiles[start:end])
metric = self.core.get_metrics(
score_mb, self.cutoff, start, self.n_thread, False
)
metric_base.merge(metric)
return metric_base.as_dict()
def get_scores(
self, model: base.BaseUserColdStartRecommender, cutoffs: List[int]
) -> Dict[str, float]:
result: Dict[str, float] = OrderedDict()
scores = self.get_scores_as_list(model, cutoffs)
for cutoff, score in zip(cutoffs, scores):
for metric_name in METRIC_NAMES:
result[f"{metric_name}@{cutoff}"] = score[metric_name]
return result
def get_scores_as_list(
self, model: base.BaseUserColdStartRecommender, cutoffs: List[int]
) -> List[Dict[str, float]]:
n_items = model.n_items
metrics: List[Metrics] = []
for c in cutoffs:
metrics.append(Metrics(n_items))
n_validated = self.n_users
block_end = n_validated
mb_size = self.mb_size
for chunk_start in range(0, block_end, mb_size):
chunk_end = min(chunk_start + mb_size, block_end)
score_mb = model.get_score(self.profiles[chunk_start:chunk_end])
for i, cutoff in enumerate(cutoffs):
chunked_metric = self.core.get_metrics(
score_mb, cutoff, chunk_start, self.n_thread, False
)
metrics[i].merge(chunked_metric)
return [item.as_dict() for item in metrics]
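# Illustrative usage sketch (not part of the original module); X, profiles and
# the recommender instance are assumed to already exist. get_scores returns an
# OrderedDict keyed as "<metric name>@<cutoff>" for every metric in METRIC_NAMES.
#
#     evaluator = UserColdStartEvaluator(X, profiles, cutoff=20)
#     scores = evaluator.get_scores(recommender, cutoffs=[5, 20])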
| 35.931507 | 84 | 0.62562 |
4a1eeaced06edca8043705752c41c776414ac77d | 1,417 | py | Python | app/recipe/serializers.py | baguskna/recipe-app-api | ba2a0f036a1688ad26fa58fe5dea3c198559d1a9 | [
"MIT"
] | null | null | null | app/recipe/serializers.py | baguskna/recipe-app-api | ba2a0f036a1688ad26fa58fe5dea3c198559d1a9 | [
"MIT"
] | null | null | null | app/recipe/serializers.py | baguskna/recipe-app-api | ba2a0f036a1688ad26fa58fe5dea3c198559d1a9 | [
"MIT"
] | null | null | null | from rest_framework import serializers
from core.models import Tag, Ingredient, Recipe
class TagSerializer(serializers.ModelSerializer):
"""Serializer for tag objects"""
class Meta:
model = Tag
fields = ('id', 'name')
read_only_fields = ('id',)
class IngredientSerializer(serializers.ModelSerializer):
"""Serializer for ingredient objects"""
class Meta:
model = Ingredient
fields = ('id', 'name')
read_only_fields = ('id',)
class RecipeSerializer(serializers.ModelSerializer):
"""Serializer for recipe objects"""
ingredients = serializers.PrimaryKeyRelatedField(
many=True,
queryset=Ingredient.objects.all()
)
tags = serializers.PrimaryKeyRelatedField(
many=True,
queryset=Tag.objects.all()
)
class Meta:
model = Recipe
fields = ('id', 'title', 'ingredients', 'tags', 'time_minutes', 'price', 'link')
read_only_fields = ('id',)
class RecipeDetailSerializer(RecipeSerializer):
"""Serializer for recipe detail objects"""
ingredients = IngredientSerializer(many=True, read_only=True)
tags = TagSerializer(many=True, read_only=True)
class RecipeImageSerializer(serializers.ModelSerializer):
"""Serializer for uploading images to recipes"""
class Meta:
model = Recipe
fields = ('id', 'image')
read_only_fields = ('id',)
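# Illustrative note (not part of the original module): RecipeSerializer exposes
# tags/ingredients only as primary keys, while RecipeDetailSerializer nests the
# full Tag/Ingredient objects read-only. A hypothetical read might look like:
#
#     RecipeDetailSerializer(recipe).data
#     # -> {'id': ..., 'title': ..., 'tags': [{'id': ..., 'name': ...}], ...}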
| 26.735849 | 88 | 0.659139 |
4a1eeb4e9f6088b73220341a8c3fe011772c93c8 | 20,927 | py | Python | pytorch_lightning/core/hooks.py | thepooons/pytorch-lightning | a053d758d03558d2aa5a328b2f6befbc133a0ebc | [
"Apache-2.0"
] | 1 | 2021-01-24T07:30:14.000Z | 2021-01-24T07:30:14.000Z | pytorch_lightning/core/hooks.py | thepooons/pytorch-lightning | a053d758d03558d2aa5a328b2f6befbc133a0ebc | [
"Apache-2.0"
] | null | null | null | pytorch_lightning/core/hooks.py | thepooons/pytorch-lightning | a053d758d03558d2aa5a328b2f6befbc133a0ebc | [
"Apache-2.0"
] | null | null | null | # Copyright The PyTorch Lightning team.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Various hooks to be used in the Lightning code."""
from typing import Any, Dict, List, Optional, Union
import torch
from torch.optim.optimizer import Optimizer
from torch.utils.data import DataLoader
from pytorch_lightning.utilities import move_data_to_device, rank_zero_warn
class ModelHooks:
"""Hooks to be used in LightningModule."""
def setup(self, stage: str):
"""
Called at the beginning of fit and test.
This is a good hook when you need to build models dynamically or adjust something about them.
This hook is called on every process when using DDP.
Args:
stage: either 'fit' or 'test'
Example::
class LitModel(...):
def __init__(self):
self.l1 = None
def prepare_data(self):
download_data()
tokenize()
# don't do this
self.something = else
def setup(stage):
data = Load_data(...)
self.l1 = nn.Linear(28, data.num_classes)
"""
def teardown(self, stage: str):
"""
Called at the end of fit and test.
Args:
stage: either 'fit' or 'test'
"""
def on_fit_start(self):
"""
Called at the very beginning of fit.
If on DDP it is called on every process
"""
def on_fit_end(self):
"""
Called at the very end of fit.
If on DDP it is called on every process
"""
def on_train_start(self) -> None:
"""
Called at the beginning of training before sanity check.
"""
# do something at the start of training
def on_train_end(self) -> None:
"""
Called at the end of training before logger experiment is closed.
"""
# do something at the end of training
def on_pretrain_routine_start(self) -> None:
"""
Called at the beginning of the pretrain routine (between fit and train start).
- fit
- pretrain_routine start
- pretrain_routine end
- training_start
"""
# do something at the start of the pretrain routine
def on_pretrain_routine_end(self) -> None:
"""
Called at the end of the pretrain routine (between fit and train start).
- fit
- pretrain_routine start
- pretrain_routine end
- training_start
"""
# do something at the end of the pretrain routine
def on_train_batch_start(
self, batch: Any, batch_idx: int, dataloader_idx: int
) -> None:
"""
Called in the training loop before anything happens for that batch.
If you return -1 here, you will skip training for the rest of the current epoch.
Args:
batch: The batched data as it is returned by the training DataLoader.
batch_idx: the index of the batch
dataloader_idx: the index of the dataloader
"""
# do something when the batch starts
def on_train_batch_end(self, outputs: Any, batch: Any, batch_idx: int, dataloader_idx: int) -> None:
"""
Called in the training loop after the batch.
Args:
outputs: The outputs of training_step_end(training_step(x))
batch: The batched data as it is returned by the training DataLoader.
batch_idx: the index of the batch
dataloader_idx: the index of the dataloader
"""
# do something when the batch ends
def on_validation_model_eval(self) -> None:
"""
Sets the model to eval during the val loop
"""
self.eval()
def on_validation_model_train(self) -> None:
"""
Sets the model to train during the val loop
"""
self.train()
def on_validation_batch_start(self, batch: Any, batch_idx: int, dataloader_idx: int) -> None:
"""
Called in the validation loop before anything happens for that batch.
Args:
batch: The batched data as it is returned by the validation DataLoader.
batch_idx: the index of the batch
dataloader_idx: the index of the dataloader
"""
# do something when the batch starts
def on_validation_batch_end(self, outputs: Any, batch: Any, batch_idx: int, dataloader_idx: int) -> None:
"""
Called in the validation loop after the batch.
Args:
outputs: The outputs of validation_step_end(validation_step(x))
batch: The batched data as it is returned by the validation DataLoader.
batch_idx: the index of the batch
dataloader_idx: the index of the dataloader
"""
# do something when the batch ends
def on_test_batch_start(self, batch: Any, batch_idx: int, dataloader_idx: int) -> None:
"""
Called in the test loop before anything happens for that batch.
Args:
batch: The batched data as it is returned by the test DataLoader.
batch_idx: the index of the batch
dataloader_idx: the index of the dataloader
"""
# do something when the batch starts
def on_test_batch_end(self, outputs: Any, batch: Any, batch_idx: int, dataloader_idx: int) -> None:
"""
Called in the test loop after the batch.
Args:
outputs: The outputs of test_step_end(test_step(x))
batch: The batched data as it is returned by the test DataLoader.
batch_idx: the index of the batch
dataloader_idx: the index of the dataloader
"""
# do something when the batch ends
def on_test_model_eval(self) -> None:
"""
Sets the model to eval during the test loop
"""
self.eval()
def on_test_model_train(self) -> None:
"""
Sets the model to train during the test loop
"""
self.train()
def on_epoch_start(self) -> None:
"""
Called in the training loop at the very beginning of the epoch.
"""
# do something when the epoch starts
def on_epoch_end(self) -> None:
"""
Called in the training loop at the very end of the epoch.
"""
# do something when the epoch ends
def on_train_epoch_start(self) -> None:
"""
Called in the training loop at the very beginning of the epoch.
"""
# do something when the epoch starts
def on_train_epoch_end(self, outputs) -> None:
"""
Called in the training loop at the very end of the epoch.
"""
# do something when the epoch ends
def on_validation_epoch_start(self) -> None:
"""
Called in the validation loop at the very beginning of the epoch.
"""
# do something when the epoch starts
def on_validation_epoch_end(self) -> None:
"""
Called in the validation loop at the very end of the epoch.
"""
# do something when the epoch ends
def on_test_epoch_start(self) -> None:
"""
Called in the test loop at the very beginning of the epoch.
"""
# do something when the epoch starts
def on_test_epoch_end(self) -> None:
"""
Called in the test loop at the very end of the epoch.
"""
# do something when the epoch ends
def on_before_zero_grad(self, optimizer: Optimizer) -> None:
"""
Called after optimizer.step() and before optimizer.zero_grad().
Called in the training loop after taking an optimizer step and before zeroing grads.
Good place to inspect weight information with weights updated.
This is where it is called::
for optimizer in optimizers:
optimizer.step()
model.on_before_zero_grad(optimizer) # < ---- called here
optimizer.zero_grad()
Args:
optimizer: The optimizer for which grads should be zeroed.
"""
# do something with the optimizer or inspect it.
def on_after_backward(self) -> None:
"""
Called in the training loop after loss.backward() and before optimizers do anything.
This is the ideal place to inspect or log gradient information.
Example::
def on_after_backward(self):
# example to inspect gradient information in tensorboard
if self.trainer.global_step % 25 == 0: # don't make the tf file huge
params = self.state_dict()
for k, v in params.items():
grads = v
name = k
self.logger.experiment.add_histogram(tag=name, values=grads,
global_step=self.trainer.global_step)
"""
class DataHooks:
"""Hooks to be used with LightningDataModule."""
def prepare_data(self) -> None:
"""
Use this to download and prepare data.
.. warning:: DO NOT set state to the model (use `setup` instead)
since this is NOT called on every GPU in DDP/TPU
Example::
def prepare_data(self):
# good
download_data()
tokenize()
etc()
# bad
self.split = data_split
self.some_state = some_other_state()
In DDP prepare_data can be called in two ways (using Trainer(prepare_data_per_node)):
1. Once per node. This is the default and is only called on LOCAL_RANK=0.
2. Once in total. Only called on GLOBAL_RANK=0.
Example::
# DEFAULT
# called once per node on LOCAL_RANK=0 of that node
Trainer(prepare_data_per_node=True)
# call on GLOBAL_RANK=0 (great for shared file systems)
Trainer(prepare_data_per_node=False)
This is called before requesting the dataloaders:
.. code-block:: python
model.prepare_data()
if ddp/tpu: init()
model.setup(stage)
model.train_dataloader()
model.val_dataloader()
model.test_dataloader()
"""
def train_dataloader(self) -> DataLoader:
"""
Implement a PyTorch DataLoader for training.
Return:
Single PyTorch :class:`~torch.utils.data.DataLoader`.
The dataloader you return will not be called every epoch unless you set
:paramref:`~pytorch_lightning.trainer.Trainer.reload_dataloaders_every_epoch` to ``True``.
For data processing use the following pattern:
- download in :meth:`prepare_data`
- process and split in :meth:`setup`
However, the above are only necessary for distributed processing.
.. warning:: do not assign state in prepare_data
- :meth:`~pytorch_lightning.trainer.Trainer.fit`
- ...
- :meth:`prepare_data`
- :meth:`setup`
- :meth:`train_dataloader`
Note:
Lightning adds the correct sampler for distributed and arbitrary hardware.
There is no need to set it yourself.
Example:
.. code-block:: python
def train_dataloader(self):
transform = transforms.Compose([transforms.ToTensor(),
transforms.Normalize((0.5,), (1.0,))])
dataset = MNIST(root='/path/to/mnist/', train=True, transform=transform,
download=True)
loader = torch.utils.data.DataLoader(
dataset=dataset,
batch_size=self.batch_size,
shuffle=True
)
return loader
"""
rank_zero_warn(
"`train_dataloader` must be implemented to be used with the Lightning Trainer"
)
def test_dataloader(self) -> Union[DataLoader, List[DataLoader]]:
r"""
Implement one or multiple PyTorch DataLoaders for testing.
The dataloader you return will not be called every epoch unless you set
:paramref:`~pytorch_lightning.trainer.Trainer.reload_dataloaders_every_epoch` to ``True``.
For data processing use the following pattern:
- download in :meth:`prepare_data`
- process and split in :meth:`setup`
However, the above are only necessary for distributed processing.
.. warning:: do not assign state in prepare_data
- :meth:`~pytorch_lightning.trainer.Trainer.fit`
- ...
- :meth:`prepare_data`
- :meth:`setup`
- :meth:`train_dataloader`
- :meth:`val_dataloader`
- :meth:`test_dataloader`
Note:
Lightning adds the correct sampler for distributed and arbitrary hardware.
There is no need to set it yourself.
Return:
Single or multiple PyTorch DataLoaders.
Example:
.. code-block:: python
def test_dataloader(self):
transform = transforms.Compose([transforms.ToTensor(),
transforms.Normalize((0.5,), (1.0,))])
dataset = MNIST(root='/path/to/mnist/', train=False, transform=transform,
download=True)
loader = torch.utils.data.DataLoader(
dataset=dataset,
batch_size=self.batch_size,
shuffle=False
)
return loader
# can also return multiple dataloaders
def test_dataloader(self):
return [loader_a, loader_b, ..., loader_n]
Note:
If you don't need a test dataset and a :meth:`test_step`, you don't need to implement
this method.
Note:
In the case where you return multiple test dataloaders, the :meth:`test_step`
will have an argument ``dataloader_idx`` which matches the order here.
"""
def val_dataloader(self) -> Union[DataLoader, List[DataLoader]]:
r"""
Implement one or multiple PyTorch DataLoaders for validation.
The dataloader you return will not be called every epoch unless you set
:paramref:`~pytorch_lightning.trainer.Trainer.reload_dataloaders_every_epoch` to ``True``.
It's recommended that all data downloads and preparation happen in :meth:`prepare_data`.
- :meth:`~pytorch_lightning.trainer.Trainer.fit`
- ...
- :meth:`prepare_data`
- :meth:`train_dataloader`
- :meth:`val_dataloader`
- :meth:`test_dataloader`
Note:
Lightning adds the correct sampler for distributed and arbitrary hardware
There is no need to set it yourself.
Return:
Single or multiple PyTorch DataLoaders.
Examples:
.. code-block:: python
def val_dataloader(self):
transform = transforms.Compose([transforms.ToTensor(),
transforms.Normalize((0.5,), (1.0,))])
dataset = MNIST(root='/path/to/mnist/', train=False,
transform=transform, download=True)
loader = torch.utils.data.DataLoader(
dataset=dataset,
batch_size=self.batch_size,
shuffle=False
)
return loader
# can also return multiple dataloaders
def val_dataloader(self):
return [loader_a, loader_b, ..., loader_n]
Note:
If you don't need a validation dataset and a :meth:`validation_step`, you don't need to
implement this method.
Note:
In the case where you return multiple validation dataloaders, the :meth:`validation_step`
will have an argument ``dataloader_idx`` which matches the order here.
"""
def transfer_batch_to_device(self, batch: Any, device: Optional[torch.device] = None) -> Any:
"""
Override this hook if your :class:`~torch.utils.data.DataLoader` returns tensors
wrapped in a custom data structure.
The data types listed below (and any arbitrary nesting of them) are supported out of the box:
- :class:`torch.Tensor` or anything that implements `.to(...)`
- :class:`list`
- :class:`dict`
- :class:`tuple`
- :class:`torchtext.data.batch.Batch`
For anything else, you need to define how the data is moved to the target device (CPU, GPU, TPU, ...).
Example::
def transfer_batch_to_device(self, batch, device)
if isinstance(batch, CustomBatch):
# move all tensors in your custom data structure to the device
batch.samples = batch.samples.to(device)
batch.targets = batch.targets.to(device)
else:
batch = super().transfer_batch_to_device(data, device)
return batch
Args:
batch: A batch of data that needs to be transferred to a new device.
device: The target device as defined in PyTorch.
Returns:
A reference to the data on the new device.
Note:
This hook should only transfer the data and not modify it, nor should it move the data to
any other device than the one passed in as argument (unless you know what you are doing).
Note:
This hook only runs on single GPU training and DDP.
If you need multi-GPU support for your custom batch objects in ``dp`` or ``ddp2``,
you need to define your custom :class:`~torch.nn.parallel.DistributedDataParallel` or
:class:`~pytorch_lightning.overrides.data_parallel.LightningDistributedDataParallel` and
override :meth:`~pytorch_lightning.core.lightning.LightningModule.configure_ddp`.
See Also:
- :func:`~pytorch_lightning.utilities.apply_func.move_data_to_device`
- :func:`~pytorch_lightning.utilities.apply_func.apply_to_collection`
"""
device = device or self.device
return move_data_to_device(batch, device)
class CheckpointHooks:
"""Hooks to be used with Checkpointing."""
def on_load_checkpoint(self, checkpoint: Dict[str, Any]) -> None:
r"""
Called by Lightning to restore your model.
If you saved something with :meth:`on_save_checkpoint` this is your chance to restore this.
Args:
checkpoint: Loaded checkpoint
Example:
.. code-block:: python
def on_load_checkpoint(self, checkpoint):
# 99% of the time you don't need to implement this method
self.something_cool_i_want_to_save = checkpoint['something_cool_i_want_to_save']
Note:
Lightning auto-restores global step, epoch, and train state including amp scaling.
There is no need for you to restore anything regarding training.
"""
def on_save_checkpoint(self, checkpoint: Dict[str, Any]) -> None:
r"""
Called by Lightning when saving a checkpoint to give you a chance to store anything
else you might want to save.
Args:
checkpoint: Checkpoint to be saved
Example:
.. code-block:: python
def on_save_checkpoint(self, checkpoint):
# 99% of use cases you don't need to implement this method
checkpoint['something_cool_i_want_to_save'] = my_cool_pickable_object
Note:
Lightning saves all aspects of training (epoch, global step, etc...)
including amp scaling.
There is no need for you to store anything about training.
"""
| 34.8203 | 110 | 0.586228 |
4a1eeb5bfbeb7a04f8e22f871c34cff0a7fc00cf | 1,524 | py | Python | var/spack/repos/builtin/packages/hto4l/package.py | Loewe2/spack | 58192fb5b41a2ea3211d00de717d026738ede4eb | [
"ECL-2.0",
"Apache-2.0",
"MIT-0",
"MIT"
] | null | null | null | var/spack/repos/builtin/packages/hto4l/package.py | Loewe2/spack | 58192fb5b41a2ea3211d00de717d026738ede4eb | [
"ECL-2.0",
"Apache-2.0",
"MIT-0",
"MIT"
] | 5 | 2021-11-29T17:23:20.000Z | 2022-03-02T17:29:12.000Z | var/spack/repos/builtin/packages/hto4l/package.py | Loewe2/spack | 58192fb5b41a2ea3211d00de717d026738ede4eb | [
"ECL-2.0",
"Apache-2.0",
"MIT-0",
"MIT"
] | null | null | null | # Copyright 2013-2022 Lawrence Livermore National Security, LLC and other
# Spack Project Developers. See the top-level COPYRIGHT file for details.
#
# SPDX-License-Identifier: (Apache-2.0 OR MIT)
from spack import *
class Hto4l(MakefilePackage):
"""Hto4l is an event generator for the SM Higgs decay into 4 charged leptons
up to NLOPS electroweak accuracy and in presence of dimension-6 operators."""
homepage = "https://www2.pv.infn.it/~hepcomplex/hto4l.html"
url = "http://www2.pv.infn.it/~hepcomplex/releases/hto4l/Hto4l-v2.02.tar.bz2"
maintainers = ['haralmha']
version('2.02', sha256='1a7061689ddaf6bde1f12032479c529a9787d7b038ed55a0325398bd531aadf6')
depends_on('gsl')
@when('@2.02')
def patch(self):
filter_file(r'FFLAGS = -O1 -g -ffixed-line-length-none ' +
r'-fno-range-check \$\(DEF\)QUAD=0 \$\(DEF\)U77EXT=0',
'FFLAGS = -O1 -g -ffixed-line-length-none -std=legacy ' +
'-fno-range-check $(DEF)QUAD=0 $(DEF)U77EXT=0',
'LoopTools-2.10/makefile-lxplus')
filter_file(r'-mkdir \$\(PREFIX\)', '-mkdir -p $(PREFIX)',
'LoopTools-2.10/makefile-lxplus')
filter_file(r'-mkdir \$\(LIBDIR\) \$\(BINDIR\) \$\(INCLUDEDIR\)',
'-mkdir -p $(LIBDIR) $(BINDIR) $(INCLUDEDIR)',
'LoopTools-2.10/makefile-lxplus')
def install(self, spec, prefix):
mkdir(prefix.bin)
install('Hto4l', prefix.bin)
| 40.105263 | 94 | 0.60958 |
4a1eebda665b2e5e3c3ed71d441dc44004b650e2 | 2,309 | py | Python | timesketch/lib/graphs/win_services.py | deralexxx/timesketch | 61b83da377b589438a00ab4ca40aaf048e6907c7 | [
"Apache-2.0"
] | null | null | null | timesketch/lib/graphs/win_services.py | deralexxx/timesketch | 61b83da377b589438a00ab4ca40aaf048e6907c7 | [
"Apache-2.0"
] | null | null | null | timesketch/lib/graphs/win_services.py | deralexxx/timesketch | 61b83da377b589438a00ab4ca40aaf048e6907c7 | [
"Apache-2.0"
] | null | null | null | # Copyright 2020 Google Inc. All rights reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
"""Graph plugin for Windows services."""
from timesketch.lib.graphs.interface import BaseGraphPlugin
from timesketch.lib.graphs import manager
class WinServiceGraph(BaseGraphPlugin):
"""Graph plugin for Windows services."""
NAME = "WinService"
DISPLAY_NAME = "Windows services"
def generate(self):
"""Generate the graph.
Returns:
Graph object instance.
"""
query = "event_identifier:7045"
return_fields = ["computer_name", "username", "strings"]
events = self.event_stream(query_string=query, return_fields=return_fields)
for event in events:
computer_name = event["_source"].get("computer_name", "UNKNOWN")
username = event["_source"].get("username", "UNKNOWN")
event_strings = event["_source"].get("strings", [])
# Skip event if we don't have enough data to build the graph.
try:
service_name = event_strings[0]
image_path = event_strings[1]
service_type = event_strings[2]
start_type = event_strings[3]
except IndexError:
continue
computer = self.graph.add_node(computer_name, {"type": "computer"})
user = self.graph.add_node(username, {"type": "user"})
service = self.graph.add_node(
service_name, {"type": "winservice", "image_path": image_path}
)
self.graph.add_edge(user, service, start_type, event)
self.graph.add_edge(service, computer, service_type, event)
self.graph.commit()
return self.graph
manager.GraphManager.register_graph(WinServiceGraph)
| 34.984848 | 83 | 0.650931 |
4a1eecefb4c9f4b547a623c3113e15dc442c4532 | 186 | py | Python | Dataset/Leetcode/valid/14/242.py | kkcookies99/UAST | fff81885aa07901786141a71e5600a08d7cb4868 | [
"MIT"
] | null | null | null | Dataset/Leetcode/valid/14/242.py | kkcookies99/UAST | fff81885aa07901786141a71e5600a08d7cb4868 | [
"MIT"
] | null | null | null | Dataset/Leetcode/valid/14/242.py | kkcookies99/UAST | fff81885aa07901786141a71e5600a08d7cb4868 | [
"MIT"
] | null | null | null | class Solution(object):
def XXX(self, strs):
"""
:type strs: List[str]
:rtype: str
"""
import os
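        # os.path.commonprefix does a plain character-by-character comparison,
        # so it returns the longest common prefix of the strings directly.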
return os.path.commonprefix(strs)
| 16.909091 | 41 | 0.494624 |
4a1eee580df8e84b7d3020555015019db6640260 | 119 | py | Python | tests/__init__.py | ZAurele/alpha-py | b6330f1e714d07a2010ebe500d5ccdf4cc637998 | [
"MIT"
] | null | null | null | tests/__init__.py | ZAurele/alpha-py | b6330f1e714d07a2010ebe500d5ccdf4cc637998 | [
"MIT"
] | null | null | null | tests/__init__.py | ZAurele/alpha-py | b6330f1e714d07a2010ebe500d5ccdf4cc637998 | [
"MIT"
] | null | null | null | from .api import *
from .basic_test import *
from .configurations import *
from .database import *
from .utils import * | 23.8 | 29 | 0.756303 |
4a1eeea34661692ea4556eebb7cbec94fb5d925a | 14,607 | py | Python | retirement/tests/tests_viewset_WaitQueue.py | Jerome-Celle/Blitz-API | 7dfb7b837ed47b11afcfaa5f5aee831c1aa4e5e0 | [
"MIT"
] | null | null | null | retirement/tests/tests_viewset_WaitQueue.py | Jerome-Celle/Blitz-API | 7dfb7b837ed47b11afcfaa5f5aee831c1aa4e5e0 | [
"MIT"
] | null | null | null | retirement/tests/tests_viewset_WaitQueue.py | Jerome-Celle/Blitz-API | 7dfb7b837ed47b11afcfaa5f5aee831c1aa4e5e0 | [
"MIT"
] | null | null | null | import json
from datetime import datetime, timedelta
import pytz
from django.conf import settings
from django.contrib.auth import get_user_model
from django.urls import reverse
from django.utils import timezone
from rest_framework import status
from rest_framework.test import APIClient, APITestCase
from blitz_api.factories import AdminFactory, UserFactory
from ..models import Retirement, WaitQueue
User = get_user_model()
LOCAL_TIMEZONE = pytz.timezone(settings.TIME_ZONE)
class WaitQueueTests(APITestCase):
@classmethod
def setUpClass(cls):
super(WaitQueueTests, cls).setUpClass()
cls.client = APIClient()
cls.user = UserFactory()
cls.user2 = UserFactory()
cls.admin = AdminFactory()
def setUp(self):
self.retirement = Retirement.objects.create(
name="mega_retirement",
details="This is a description of the mega retirement.",
seats=400,
address_line1="123 random street",
postal_code="123 456",
state_province="Random state",
country="Random country",
price=199,
start_time=LOCAL_TIMEZONE.localize(datetime(2130, 1, 15, 8)),
end_time=LOCAL_TIMEZONE.localize(datetime(2130, 1, 17, 12)),
min_day_refund=7,
min_day_exchange=7,
refund_rate=50,
is_active=True,
activity_language='FR',
next_user_notified=3,
accessibility=True,
form_url="example.com",
carpool_url='example2.com',
review_url='example3.com',
has_shared_rooms=True,
)
self.wait_queue_subscription = WaitQueue.objects.create(
user=self.user2,
retirement=self.retirement,
)
def test_create(self):
"""
Ensure we can subscribe a user to a retirement wait_queue.
"""
self.client.force_authenticate(user=self.user)
data = {
'retirement': reverse(
'retirement:retirement-detail', args=[self.retirement.id]
),
# The 'user' field is ignored when the calling user is not admin.
# The field is REQUIRED nonetheless.
'user': reverse('user-detail', args=[self.admin.id]),
}
response = self.client.post(
reverse('retirement:waitqueue-list'),
data,
format='json',
)
self.assertEqual(
response.status_code,
status.HTTP_201_CREATED,
response.content,
)
content = {
'list_size': 2,
'retirement': 'http://testserver/retirement/retirements/' +
str(self.retirement.id),
'user': ''.join(['http://testserver/users/', str(self.user.id)]),
'created_at': json.loads(response.content)['created_at'],
}
response_data = json.loads(response.content)
del response_data['id']
del response_data['url']
self.assertEqual(
response_data,
content
)
def test_create_as_admin_for_user(self):
"""
Ensure we can subscribe another user to a retirement wait_queue as
an admin user.
"""
self.client.force_authenticate(user=self.admin)
data = {
'retirement': reverse(
'retirement:retirement-detail', args=[self.retirement.id]
),
'user': reverse('user-detail', args=[self.user.id]),
}
response = self.client.post(
reverse('retirement:waitqueue-list'),
data,
format='json',
)
self.assertEqual(
response.status_code,
status.HTTP_201_CREATED,
response.content,
)
content = {
'list_size': 2,
'retirement': 'http://testserver/retirement/retirements/' +
str(self.retirement.id),
'user': ''.join(['http://testserver/users/', str(self.user.id)]),
}
response_data = json.loads(response.content)
del response_data['id']
del response_data['url']
del response_data['created_at']
self.assertEqual(
response_data,
content
)
def test_create_not_authenticated(self):
"""
        Ensure we can't subscribe to a retirement waitqueue if the user has no
permission.
"""
data = {
'retirement': reverse(
'retirement:retirement-detail', args=[self.retirement.id]
),
'user': reverse('user-detail', args=[self.user.id]),
}
response = self.client.post(
reverse('retirement:waitqueue-list'),
data,
format='json',
)
content = {
'detail': 'Authentication credentials were not provided.'
}
self.assertEqual(json.loads(response.content), content)
self.assertEqual(response.status_code, status.HTTP_401_UNAUTHORIZED)
def test_create_duplicate(self):
"""
Ensure we can't subscribe to a retirement waitqueue twice.
"""
self.client.force_authenticate(user=self.admin)
data = {
'retirement': reverse(
'retirement:retirement-detail', args=[self.retirement.id]
),
'user': reverse('user-detail', args=[self.user2.id]),
}
response = self.client.post(
reverse('retirement:waitqueue-list'),
data,
format='json',
)
content = {
"non_field_errors": [
"The fields user, retirement must make a unique set."
]
}
self.assertEqual(json.loads(response.content), content)
self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
def test_create_missing_field(self):
"""
        Ensure we can't subscribe to a retirement waitqueue when required fields
are missing.
"""
self.client.force_authenticate(user=self.admin)
data = {}
response = self.client.post(
reverse('retirement:waitqueue-list'),
data,
format='json',
)
content = {
"retirement": ["This field is required."],
"user": ["This field is required."]
}
self.assertEqual(json.loads(response.content), content)
self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
def test_create_invalid_field(self):
"""
Ensure we can't subscribe to a retirement waitqueue with invalid
fields.
"""
self.client.force_authenticate(user=self.admin)
data = {
'retirement': (1,),
'user': "http://testserver/invalid/999"
}
response = self.client.post(
reverse('retirement:waitqueue-list'),
data,
format='json',
)
content = {
'retirement': [
'Incorrect type. Expected URL string, received list.'
],
'user': ['Invalid hyperlink - No URL match.']
}
self.assertEqual(json.loads(response.content), content)
self.assertEqual(response.status_code, status.HTTP_400_BAD_REQUEST)
def test_update(self):
"""
Ensure we can't update a subscription to a retirement waitqueue.
"""
self.client.force_authenticate(user=self.admin)
data = {
'retirement': reverse(
'retirement:retirement-detail', args=[self.retirement.id]
),
'user': reverse('user-detail', args=[self.user2.id]),
}
response = self.client.put(
reverse(
'retirement:waitqueue-detail',
kwargs={'pk': 1},
),
data,
format='json',
)
self.assertEqual(
response.status_code,
status.HTTP_405_METHOD_NOT_ALLOWED
)
def test_partial_update(self):
"""
        Ensure we can't partially update a subscription to a retirement waitqueue.
"""
self.client.force_authenticate(user=self.admin)
data = {
'retirement': reverse(
'retirement:retirement-detail', args=[self.retirement.id]
),
'user': reverse('user-detail', args=[self.user2.id]),
}
response = self.client.put(
reverse(
'retirement:waitqueue-detail',
kwargs={'pk': 1},
),
data,
format='json',
)
self.assertEqual(
response.status_code,
status.HTTP_405_METHOD_NOT_ALLOWED
)
def test_delete(self):
"""
Ensure we can delete a subscription to a retirement waitqueue.
The index determining the next user to be notified should be corrected.
"""
self.client.force_authenticate(user=self.admin)
response = self.client.delete(
reverse(
'retirement:waitqueue-detail',
kwargs={'pk': self.wait_queue_subscription.id},
),
)
self.assertEqual(
response.status_code,
status.HTTP_204_NO_CONTENT,
response.content
)
self.retirement.refresh_from_db()
self.assertEqual(self.retirement.next_user_notified, 2)
def test_list(self):
"""
Ensure we can list subscriptions to retirement waitqueues as an
authenticated user.
"""
self.client.force_authenticate(user=self.user2)
response = self.client.get(
reverse('retirement:waitqueue-list'),
format='json',
)
response_data = json.loads(response.content)
content = {
'count': 1,
'next': None,
'previous': None,
'results': [{
'created_at': response_data['results'][0]['created_at'],
'id': self.wait_queue_subscription.id,
'list_size': 1,
'retirement':
'http://testserver/retirement/retirements/' +
str(self.retirement.id),
'url':
'http://testserver/retirement/wait_queues/' +
str(self.wait_queue_subscription.id),
'user': 'http://testserver/users/' + str(self.user2.id)
}]
}
self.assertEqual(response_data, content)
self.assertEqual(response.status_code, status.HTTP_200_OK)
def test_list_not_authenticated(self):
"""
Ensure we can't list subscriptions to retirement waitqueues as an
unauthenticated user.
"""
response = self.client.get(
reverse('retirement:waitqueue-list'),
format='json',
)
content = {'detail': 'Authentication credentials were not provided.'}
self.assertEqual(json.loads(response.content), content)
self.assertEqual(response.status_code, status.HTTP_401_UNAUTHORIZED)
def test_read(self):
"""
        Ensure we can read a subscription to a retirement as an
authenticated user.
"""
self.client.force_authenticate(user=self.user2)
response = self.client.get(
reverse(
'retirement:waitqueue-detail',
kwargs={'pk': self.wait_queue_subscription.id},
),
)
content = {
'id': self.wait_queue_subscription.id,
'list_size': 1,
'retirement':
'http://testserver/retirement/retirements/' +
str(self.retirement.id),
'url':
'http://testserver/retirement/wait_queues/' +
str(self.wait_queue_subscription.id),
'user': ''.join(['http://testserver/users/', str(self.user2.id)]),
'created_at': json.loads(response.content)['created_at'],
}
self.assertEqual(json.loads(response.content), content)
self.assertEqual(response.status_code, status.HTTP_200_OK)
def test_read_not_authenticated(self):
"""
        Ensure we can't read a subscription to a retirement waitqueue as an
unauthenticated user.
"""
response = self.client.get(
reverse(
'retirement:waitqueue-detail',
kwargs={'pk': 1},
),
format='json',
)
content = {'detail': 'Authentication credentials were not provided.'}
self.assertEqual(json.loads(response.content), content)
self.assertEqual(response.status_code, status.HTTP_401_UNAUTHORIZED)
def test_read_as_admin(self):
"""
        Ensure we can read a subscription to a retirement as an admin
user.
"""
self.client.force_authenticate(user=self.admin)
response = self.client.get(
reverse(
'retirement:waitqueue-detail',
kwargs={'pk': self.wait_queue_subscription.id},
),
)
response_data = json.loads(response.content)
content = {
'id': self.wait_queue_subscription.id,
'list_size': 1,
'retirement':
'http://testserver/retirement/retirements/' +
str(self.retirement.id),
'url':
'http://testserver/retirement/wait_queues/' +
str(self.wait_queue_subscription.id),
'user': ''.join(['http://testserver/users/', str(self.user2.id)]),
'created_at': json.loads(response.content)['created_at'],
}
self.assertEqual(response_data, content)
self.assertEqual(response.status_code, status.HTTP_200_OK)
def test_read_non_existent(self):
"""
Ensure we get not found when asking for a subscription to a retirement
that doesn't exist.
"""
self.client.force_authenticate(user=self.admin)
response = self.client.get(
reverse(
'retirement:waitqueue-detail',
kwargs={'pk': 999},
),
)
content = {'detail': 'Not found.'}
self.assertEqual(json.loads(response.content), content)
self.assertEqual(response.status_code, status.HTTP_404_NOT_FOUND)
| 29.871166 | 79 | 0.556377 |
4a1eeefbd32ca67fc56a7e72e23970f047feba9e | 3,391 | py | Python | corehq/apps/case_importer/tracking/case_upload_tracker.py | dimagilg/commcare-hq | ea1786238eae556bb7f1cbd8d2460171af1b619c | [
"BSD-3-Clause"
] | 1 | 2020-07-14T13:00:23.000Z | 2020-07-14T13:00:23.000Z | corehq/apps/case_importer/tracking/case_upload_tracker.py | dimagilg/commcare-hq | ea1786238eae556bb7f1cbd8d2460171af1b619c | [
"BSD-3-Clause"
] | 94 | 2020-12-11T06:57:31.000Z | 2022-03-15T10:24:06.000Z | corehq/apps/case_importer/tracking/case_upload_tracker.py | dimagilg/commcare-hq | ea1786238eae556bb7f1cbd8d2460171af1b619c | [
"BSD-3-Clause"
] | null | null | null | import time
from django.db import transaction
from memoized import memoized
from corehq.apps.case_importer.exceptions import ImporterRefError
from corehq.apps.case_importer.tracking.exceptions import TimedOutWaitingForCaseUploadRecord
from corehq.apps.case_importer.tracking.filestorage import (
persistent_file_store,
transient_file_store,
)
from corehq.apps.case_importer.tracking.models import (
CaseUploadFormRecord,
CaseUploadRecord,
)
from corehq.apps.case_importer.util import (
get_spreadsheet,
open_spreadsheet_download_ref,
)
class CaseUpload(object):
def __init__(self, upload_id):
self.upload_id = upload_id
@classmethod
def create(cls, file_object, filename, domain):
meta = transient_file_store.write_file(file_object, filename, domain)
return cls(meta.identifier)
@classmethod
def get(cls, upload_id):
return cls(upload_id)
@property
@memoized
def _case_upload_record(self):
return CaseUploadRecord.objects.get(upload_id=self.upload_id)
def wait_for_case_upload_record(self):
for wait_seconds in [1, 2, 5, 10, 20]:
try:
self._case_upload_record
return
except CaseUploadRecord.DoesNotExist:
time.sleep(wait_seconds)
raise TimedOutWaitingForCaseUploadRecord()
def get_tempfile(self):
return transient_file_store.get_tempfile_ref_for_contents(self.upload_id)
def check_file(self):
"""
open a spreadsheet download ref just to test there are no errors opening it
:raise ImporterError subtypes
"""
tempfile = self.get_tempfile()
if not tempfile:
raise ImporterRefError('file not found in cache')
open_spreadsheet_download_ref(tempfile)
def get_spreadsheet(self):
return get_spreadsheet(self.get_tempfile())
def trigger_upload(self, domain, config, comment=None):
"""
Save a CaseUploadRecord and trigger a task that runs the upload
The task triggered by this must call case_upload.wait_for_case_upload_record() before using it
to avoid a race condition.
"""
from corehq.apps.case_importer.tasks import bulk_import_async
original_filename = transient_file_store.get_filename(self.upload_id)
with open(self.get_tempfile(), 'rb') as f:
case_upload_file_meta = persistent_file_store.write_file(f, original_filename, domain)
task = bulk_import_async.delay(config, domain, self.upload_id)
CaseUploadRecord(
domain=domain,
comment=comment,
upload_id=self.upload_id,
task_id=task.task_id,
couch_user_id=config.couch_user_id,
case_type=config.case_type,
upload_file_meta=case_upload_file_meta,
).save()
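    # Illustrative sketch (not part of this module) of the contract described in the
    # docstring above -- the task body is assumed to look roughly like:
    #
    #   def bulk_import_async(config, domain, upload_id):
    #       case_upload = CaseUpload.get(upload_id)
    #       case_upload.wait_for_case_upload_record()  # avoid racing trigger_upload()
    #       ...  # run the import, then store_task_result() / record_form()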
def store_task_result(self, task_status):
self._case_upload_record.save_task_status_json(task_status)
def store_failed_task_result(self):
self._case_upload_record.save_task_status_json_if_failed()
def record_form(self, form_id):
case_upload_record = self._case_upload_record
with transaction.atomic():
form_record = CaseUploadFormRecord(
case_upload_record=case_upload_record, form_id=form_id)
form_record.save()
| 32.92233 | 102 | 0.697139 |
4a1eef2a8312b44cf06346fe3a9ef84088e33669 | 1,802 | py | Python | Chapter07/visualize_faces.py | lebull/Neural-Network-Projects-with-Python | 8242bb18f8ae272491b58947938234ac9b80bb36 | [
"MIT"
] | 269 | 2018-09-11T17:44:59.000Z | 2022-03-26T12:12:33.000Z | Chapter07/visualize_faces.py | blueye4k/Neural-Network-Projects-with-Python | 4f521c399389db412d900a1337f9031b4874295b | [
"MIT"
] | 12 | 2019-07-12T20:24:56.000Z | 2021-06-28T20:39:10.000Z | Chapter07/visualize_faces.py | blueye4k/Neural-Network-Projects-with-Python | 4f521c399389db412d900a1337f9031b4874295b | [
"MIT"
] | 156 | 2018-09-16T05:09:50.000Z | 2022-03-26T07:35:22.000Z | import matplotlib
matplotlib.use("TkAgg")
from keras.preprocessing.image import load_img, img_to_array
import os
import numpy as np
from matplotlib import pyplot as plt
faces_dir = 'att_faces/'
X_train, Y_train = [], []
X_test, Y_test = [], []
subfolders = sorted([file.path for file in os.scandir(faces_dir) if file.is_dir()])
for idx, folder in enumerate(subfolders):
for file in sorted(os.listdir(folder)):
img = load_img(folder+"/"+file, color_mode='grayscale')
img = img_to_array(img).astype('float32')/255
img = img.reshape(img.shape[0], img.shape[1],1)
if idx < 35:
X_train.append(img)
Y_train.append(idx)
else:
X_test.append(img)
Y_test.append(idx-35)
X_train = np.array(X_train)
X_test = np.array(X_test)
Y_train = np.array(Y_train)
Y_test = np.array(Y_test)
subject_idx = 4
fig, ((ax1,ax2,ax3),(ax4,ax5,ax6),(ax7,ax8,ax9)) = plt.subplots(3,3,figsize=(10,10))
subject_img_idx = np.where(Y_train==subject_idx)[0].tolist()
for i, ax in enumerate([ax1,ax2,ax3,ax4,ax5,ax6,ax7,ax8,ax9]):
img = X_train[subject_img_idx[i]]
img = img.reshape(img.shape[0], img.shape[1])
ax.imshow(img, cmap='gray')
ax.grid(False)
ax.set_xticks([])
ax.set_yticks([])
plt.tight_layout()
plt.show()
subjects = range(10)
fig, ((ax1,ax2,ax3),(ax4,ax5,ax6),(ax7,ax8,ax9)) = plt.subplots(3,3,
figsize=(10,12))
subject_img_idx = [np.where(Y_train==i)[0].tolist()[0] for i in subjects]
for i, ax in enumerate([ax1,ax2,ax3,ax4,ax5,ax6,ax7,ax8,ax9]):
img = X_train[subject_img_idx[i]]
img = img.reshape(img.shape[0], img.shape[1])
ax.imshow(img, cmap='gray')
ax.grid(False)
ax.set_xticks([])
ax.set_yticks([])
ax.set_title("Subject {}".format(i))
plt.tight_layout()
plt.show()
| 31.068966 | 84 | 0.657603 |
4a1eef84a6e647688df346244cd155ea73a4008b | 714 | py | Python | check_anagram.py | Praggya17/HacktoberFestContribute | 098cb1012f1f2ed6ca6b3544a7b962b6c49e2643 | [
"MIT"
] | 98 | 2018-10-09T15:42:41.000Z | 2021-10-04T15:25:44.000Z | check_anagram.py | Praggya17/HacktoberFestContribute | 098cb1012f1f2ed6ca6b3544a7b962b6c49e2643 | [
"MIT"
] | 141 | 2018-10-06T16:55:20.000Z | 2021-10-31T18:25:35.000Z | check_anagram.py | Praggya17/HacktoberFestContribute | 098cb1012f1f2ed6ca6b3544a7b962b6c49e2643 | [
"MIT"
] | 885 | 2018-10-06T17:14:44.000Z | 2022-01-29T03:16:21.000Z |
__author__ = "innovative_coder"
from collections import Counter
def check_if_anagram(word1, word2):
counter = Counter()
for c in word1:
counter[c] += 1
for c in word2:
counter[c] -= 1
for values in counter.values():
if values != 0:
return False
return True
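# Note: an equivalent (alternative, not used here) formulation is
# `Counter(word1) == Counter(word2)`, at the cost of building two counters.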
if __name__ == '__main__':
word1 = 'abc'
word2 = 'bca'
assert(check_if_anagram(word1, word2) == True)
word2 = 'bcd'
assert(check_if_anagram(word1, word2) == False)
word1 = ''
word2 = ''
assert(check_if_anagram(word1, word2) == True)
word1 = 'a'
word2 = 'a'
assert(check_if_anagram(word1, word2) == True)
| 18.789474 | 51 | 0.560224 |
4a1ef0d13a2f8719e5fbeb0c9728b2d3dfa366e5 | 3,085 | py | Python | Chapter01/Exercise1.01/bookr/bookr/settings.py | PacktPublishing/Web-Development-Projects-with-Django | 531bc4d58d614888cc81b7fd6f8ec859f5a65217 | [
"MIT"
] | 97 | 2021-03-01T12:54:30.000Z | 2022-03-28T02:57:26.000Z | Chapter01/Exercise1.01/bookr/bookr/settings.py | PacktPublishing/Web-Development-Projects-with-Django | 531bc4d58d614888cc81b7fd6f8ec859f5a65217 | [
"MIT"
] | 81 | 2020-08-27T04:56:04.000Z | 2022-03-12T00:53:40.000Z | Chapter01/Exercise1.01/bookr/bookr/settings.py | PacktPublishing/Web-Development-Projects-with-Django | 531bc4d58d614888cc81b7fd6f8ec859f5a65217 | [
"MIT"
] | 163 | 2020-12-25T14:38:38.000Z | 2022-03-30T10:31:40.000Z | """
Django settings for bookr project.
Generated by 'django-admin startproject' using Django 3.0b1.
For more information on this file, see
https://docs.djangoproject.com/en/dev/topics/settings/
For the full list of settings and their values, see
https://docs.djangoproject.com/en/dev/ref/settings/
"""
import os
# Build paths inside the project like this: os.path.join(BASE_DIR, ...)
BASE_DIR = os.path.dirname(os.path.dirname(os.path.abspath(__file__)))
# Quick-start development settings - unsuitable for production
# See https://docs.djangoproject.com/en/dev/howto/deployment/checklist/
# SECURITY WARNING: keep the secret key used in production secret!
SECRET_KEY = 'ik8#s8=-xx#a1(0lzyp1h3liu3g-roe_0cld+*j9krwg2)@=xu'
# SECURITY WARNING: don't run with debug turned on in production!
DEBUG = True
ALLOWED_HOSTS = []
# Application definition
INSTALLED_APPS = [
'django.contrib.admin',
'django.contrib.auth',
'django.contrib.contenttypes',
'django.contrib.sessions',
'django.contrib.messages',
'django.contrib.staticfiles',
]
MIDDLEWARE = [
'django.middleware.security.SecurityMiddleware',
'django.contrib.sessions.middleware.SessionMiddleware',
'django.middleware.common.CommonMiddleware',
'django.middleware.csrf.CsrfViewMiddleware',
'django.contrib.auth.middleware.AuthenticationMiddleware',
'django.contrib.messages.middleware.MessageMiddleware',
'django.middleware.clickjacking.XFrameOptionsMiddleware',
]
ROOT_URLCONF = 'bookr.urls'
TEMPLATES = [
{
'BACKEND': 'django.template.backends.django.DjangoTemplates',
'DIRS': [],
'APP_DIRS': True,
'OPTIONS': {
'context_processors': [
'django.template.context_processors.debug',
'django.template.context_processors.request',
'django.contrib.auth.context_processors.auth',
'django.contrib.messages.context_processors.messages',
],
},
},
]
WSGI_APPLICATION = 'bookr.wsgi.application'
# Database
# https://docs.djangoproject.com/en/dev/ref/settings/#databases
DATABASES = {
'default': {
'ENGINE': 'django.db.backends.sqlite3',
'NAME': os.path.join(BASE_DIR, 'db.sqlite3'),
}
}
# Password validation
# https://docs.djangoproject.com/en/dev/ref/settings/#auth-password-validators
AUTH_PASSWORD_VALIDATORS = [
{
'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator',
},
{
'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator',
},
]
# Internationalization
# https://docs.djangoproject.com/en/dev/topics/i18n/
LANGUAGE_CODE = 'en-us'
TIME_ZONE = 'UTC'
USE_I18N = True
USE_L10N = True
USE_TZ = True
# Static files (CSS, JavaScript, Images)
# https://docs.djangoproject.com/en/dev/howto/static-files/
STATIC_URL = '/static/'
| 25.495868 | 91 | 0.698541 |
4a1ef1f5126bcfac00e415420880f4a8b4ffb0fd | 2,531 | py | Python | model-optimizer/mo/front/caffe/extractors/pooling.py | mypopydev/dldt | 8cd639116b261adbbc8db860c09807c3be2cc2ca | [
"Apache-2.0"
] | 3 | 2019-07-08T09:03:03.000Z | 2020-09-09T10:34:17.000Z | model-optimizer/mo/front/caffe/extractors/pooling.py | openvino-pushbot/dldt | e607ee70212797cf9ca51dac5b7ac79f66a1c73f | [
"Apache-2.0"
] | 3 | 2020-11-13T18:59:18.000Z | 2022-02-10T02:14:53.000Z | model-optimizer/mo/front/caffe/extractors/pooling.py | openvino-pushbot/dldt | e607ee70212797cf9ca51dac5b7ac79f66a1c73f | [
"Apache-2.0"
] | 1 | 2018-12-14T07:56:02.000Z | 2018-12-14T07:56:02.000Z | """
Copyright (c) 2018 Intel Corporation
Licensed under the Apache License, Version 2.0 (the "License");
you may not use this file except in compliance with the License.
You may obtain a copy of the License at
http://www.apache.org/licenses/LICENSE-2.0
Unless required by applicable law or agreed to in writing, software
distributed under the License is distributed on an "AS IS" BASIS,
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
See the License for the specific language governing permissions and
limitations under the License.
"""
import numpy as np
from mo.front.caffe.extractors.utils import get_spatial_attr
from mo.front.common.extractors.utils import layout_attrs
from mo.front.common.partial_infer.pooling import pool_explicit_padding_infer
def pooling_ext(proto_layer, model_layer):
param = proto_layer.pooling_param
method = 'max'
kernel = [0, 0]
stride = [1, 1]
padding = [0, 0]
global_pooling = False
if hasattr(param, 'global_pooling') and param.global_pooling:
global_pooling = param.global_pooling
else:
kernel = get_spatial_attr(kernel, 'kernel_size', 'kernel', param)
padding = get_spatial_attr(padding, 'pad', 'pad', param)
stride = get_spatial_attr(stride, 'stride', 'stride', param)
if param.pool == 0:
method = 'max'
elif param.pool == 1:
method = 'avg'
else:
raise ValueError('Unknown Pooling Method!')
pooling_convention = 'full' # for Caffe rounding type should be ceil
rt = 'ceil'
if hasattr(param, 'ceil_mode') and not param.ceil_mode:
# If pooling has ceil_mode and ceil_mode is False using floor for rounding shapes in partial_infer
pooling_convention = 'valid'
rt = 'floor'
attrs = {
'type': 'Pooling',
'window': np.array([1, 1, kernel[1], kernel[0]], dtype=np.int64),
'stride': np.array([1, 1, stride[1], stride[0]], dtype=np.int64),
'pad': np.array([[0, 0], [0, 0], [padding[1], padding[1]], [padding[0], padding[0]]], dtype=np.int64),
'pad_spatial_shape': np.array([[padding[1], padding[1]], [padding[0], padding[0]]], dtype=np.int64),
'pool_method': method,
'exclude_pad': 'false',
'infer': pool_explicit_padding_infer,
'global_pool': global_pooling,
'output_spatial_shape': None,
'rounding_type': rt
}
attrs.update(layout_attrs())
attrs['pooling_convention'] = pooling_convention
return attrs
| 35.152778 | 110 | 0.672066 |
4a1ef34b246d0b2d5000e435846d79fb666bfdcd | 26,130 | py | Python | filmatyk/filters.py | Noiredd/Filmatyk | a09637d3d2c572bebf5e6ae004c16917c247e9fa | [
"MIT"
] | 2 | 2020-10-09T13:35:57.000Z | 2021-08-22T15:17:25.000Z | filmatyk/filters.py | Noiredd/Filmatyk | a09637d3d2c572bebf5e6ae004c16917c247e9fa | [
"MIT"
] | 34 | 2019-01-31T21:31:31.000Z | 2020-09-01T12:17:51.000Z | filmatyk/filters.py | Noiredd/Filmatyk | a09637d3d2c572bebf5e6ae004c16917c247e9fa | [
"MIT"
] | null | null | null | from calendar import monthrange
import datetime
from PIL import Image, ImageTk
import tkinter as tk
from tkinter import ttk
class FilterMachine(object):
# Holds multiple filters and returns a callable that can be simply passed to a
# "filter" function on the set of Items that the Presenter holds. The machine
# remembers all filters, so when either of them triggers an update, the whole
# callable is updated, and Presenter's filtersUpdate() is executed.
def __init__(self, callback):
self.filterObjs = []
self.filterFuns = []
# flags for performance improvement: construct a callable only from those
# filters that are currently active; only reset those too; additionally,
# ignore repeated resets (speeds up startup SIGNIFICANTLY)
self.filterFlags = []
self.filterMap = {} # maps filter ID's to their positions on the lists
self.callback = callback # to notify the Presenter about any changes
self.ignoreCallback = False # see resetAllFilters for the meaning of it
def registerFilter(self, filter_object:object):
filter_object.setCallback(self.updateCallback)
self.filterObjs.append(filter_object)
self.filterFuns.append(filter_object.getFunction())
self.filterFlags.append(False)
self.filterMap[filter_object.getID()] = len(self.filterObjs) - 1
def resetAllFilters(self, force=False):
# disable calling back to the Presenter until all of the work is done
self.ignoreCallback = True
for filter, flag in zip(self.filterObjs, self.filterFlags):
if flag or force:
filter.reset()
self.filterFlags = [False for flag in self.filterFlags] # clear all
# and now call back
self.ignoreCallback = False
self.callback()
def updateCallback(self, filter_id:int, new_function, reset=False):
filter_pos = self.filterMap[filter_id]
self.filterFuns[filter_pos] = new_function
# don't call back if the filter is dormant and has requested a reset
if reset and not self.filterFlags[filter_pos]:
return
# otherwise, proceed; set the flag on update, clear on reset
self.filterFlags[filter_pos] = not reset
if not self.ignoreCallback:
self.callback()
def populateChoices(self, items:list):
for filter in self.filterObjs:
filter.populateChoices(items)
self.resetAllFilters()
def getFiltering(self):
# returns a callable that executes all of the filters
funs = [fun for fun, flag in zip(self.filterFuns, self.filterFlags) if flag]
if not funs:
funs = [lambda x: True]
return lambda item: all([fun(item) for fun in funs])
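# Minimal usage sketch (hypothetical caller code; only FilterMachine's own API is
# taken from this module, the rest is assumed) of the flow described above:
#
#   machine = FilterMachine(callback=presenter.filtersUpdate)
#   machine.registerFilter(TitleFilter(root))
#   machine.registerFilter(YearFilter(root))
#   machine.populateChoices(items)
#   visible = [item for item in items if machine.getFiltering()(item)]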
class Filter(object):
# Filters return callables that, executed on Items, return a boolean value
# informing that a given Item passes or does not pass some criteria. These
# criteria are internal to the filter objects, and can be modified by user.
# A filter object has to be registered with the Presenter and instantiated in
# its GUI (the object controls its own widgets). It takes a look into the DB
# to collect all possible values to populate its widgets (e.g. all directors).
# When the user manipulates the filter's widgets, the object calls Presenter
# via the supplied callback, returning the updated callable the the Presenter
# can now use to filter its data.
# A derived filter has to:
# have a buildUI function to draw the interface (using self.main as root!)
# define self.function
# ensure that whenever the parameters change, notifyMachine is called
# filters have IDs so that machines can recognize them on callbacks
NEXT_ID = 0
@classmethod
def __getNewID(cls):
id = Filter.NEXT_ID
Filter.NEXT_ID += 1
return id
# by default any filter is inactive and everything shall pass it
@staticmethod
def DEFAULT(x):
return True
def __init__(self, root):
# automatically assign the next free ID
self.ID = self.__getNewID()
# construct the GUI aspect
self.main = tk.Frame(root)
self.buildUI()
# callback takes 2 pos args: an ID (int) and a function (callable)
# and 1 keyword arg: "reset"
self.machineCallback = lambda x: x # machine sets that during registering
# end result of a filter: a callable
self.function = self.DEFAULT
def setCallback(self, callback):
self.machineCallback = callback
def buildUI(self):
# derived-class-defined code for UI construction
pass
def populateChoices(self, items):
# derived-class-defined code for updating internal filter data from items
pass
# execute this every time the user modifies filter settings
def notifyMachine(self):
self.machineCallback(self.ID, self.function)
# execute this when the filter was reset
def reset(self):
self._reset()
def _reset(self):
self.function = self.DEFAULT
self.machineCallback(self.ID, self.function, reset=True)
def getID(self):
return self.ID
def getFunction(self):
return self.function
# TK interface for GUI placement
def pack(self, **kw):
self.main.pack(**kw)
def grid(self, **kw):
self.main.grid(**kw)
class TitleFilter(Filter):
icon_path = 'search.png'
def __init__(self, root):
self.title_in = tk.StringVar()
self.function = self.filterTitle
super(TitleFilter, self).__init__(root)
def reset(self):
self.title_in.set('')
self._reset()
def buildUI(self):
self.main.grid_columnconfigure(1, weight=1)
# search icon
self.icon = ImageTk.PhotoImage(Image.open(self.icon_path))
icon_place = tk.Label(self.main)
icon_place.configure(image=self.icon)
icon_place.grid(row=0, column=0, sticky=tk.W)
self.nameEntry = tk.Entry(master=self.main, textvariable=self.title_in)
self.nameEntry.grid(row=0, column=1, sticky=tk.EW)
self.nameEntry.bind('<Key>', self._enterKey)
ttk.Button(master=self.main, text='X', width=3, command=self.reset).grid(row=0, column=2)
def _enterKey(self, event=None):
# Clear the entry when user hits escape
if event:
if event.keysym == 'Escape':
self.reset()
# Wait before updating (see ListboxFilter.waitAndUpdate)
self.main.after(50, self._update)
def _update(self, event=None):
search_string = self.title_in.get().lower()
def filterTitle(item):
title = item.getRawProperty('title')
return search_string in title.lower()
self.function = filterTitle
self.notifyMachine()
def filterTitle(self, item):
title = item.getRawProperty('title')
    return self.title_in.get().lower() in title.lower()
class YearFilter(Filter):
default_years = [1, 9999]
def __init__(self, root):
self.year_from = tk.StringVar()
self.year_to = tk.StringVar()
self.all_years = self.default_years
super(YearFilter, self).__init__(root)
def reset(self):
self.year_from.set(str(self.all_years[0]))
self.year_to.set(str(self.all_years[-1]))
self._reset()
def buildUI(self):
m = self.main
tk.Label(m, text='Rok produkcji:').grid(row=0, column=0, columnspan=5, sticky=tk.NW)
tk.Label(m, text='Od:').grid(row=1, column=0, sticky=tk.NW)
tk.Label(m, text='Do:').grid(row=1, column=2, sticky=tk.NW)
self.yFrom = yFrom = tk.Spinbox(m, width=5, textvariable=self.year_from, command=self._updateFrom)
yFrom.bind('<KeyRelease>', self._updateFrom)
yFrom.grid(row=1, column=1, sticky=tk.NW)
self.yTo = yTo = tk.Spinbox(m, width=5, textvariable=self.year_to, command=self._updateTo)
yTo.bind('<KeyRelease>', self._updateTo)
yTo.grid(row=1, column=3, sticky=tk.NW)
ttk.Button(m, text='Reset', width=5, command=self.reset).grid(row=1, column=4, sticky=tk.NE)
m.grid_columnconfigure(4, weight=1) # for even placement of the reset button
def populateChoices(self, items:list):
all_years = set()
for item in items:
year = item.getRawProperty('year')
if not year:
continue
all_years.add(year)
self.all_years = sorted(list(all_years))
if len(self.all_years) == 0:
self.all_years = self.default_years
self.yFrom.configure(values=self.all_years)
self.yTo.configure(values=self.all_years)
self.reset()
def _updateTo(self, event=None):
self._update(to=True, event=event)
def _updateFrom(self, event=None):
self._update(to=False, event=event)
def _update(self, to, event=None):
try:
yearFrom = int(self.year_from.get())
except ValueError:
yearFrom = self.all_years[0]
try:
yearTo = int(self.year_to.get())
except ValueError:
yearTo = self.all_years[-1]
# reject nonsensical input (e.g. if user types "199", about to hit "5")
if yearFrom > 2999:
yearFrom = self.all_years[0]
if yearTo < 1000:
yearTo = self.all_years[-1]
# automatically align the opposite limit if the combination is nonsensical
if yearFrom > yearTo:
if to: # yearTo was modified -- pull yearFrom down with it
yearFrom = yearTo
self.year_from.set(str(yearFrom))
else: # yearFrom was modified -- pull yearTo up with it
yearTo = yearFrom
self.year_to.set(str(yearTo))
def yearFilter(item):
year = item.getRawProperty('year')
if year >= yearFrom and year <= yearTo:
return True
else:
return False
self.function = yearFilter
self.notifyMachine()
class ListboxFilter(Filter):
PROPERTY = '' #derived classes must override this
def __init__(self, root):
self.all_options = []
super(ListboxFilter, self).__init__(root)
def makeListbox(self, where, selectmode, **grid_args):
frame = tk.Frame(where)
# exportselection is necessary, otherwise multiple Listboxes break each other
self.box = tk.Listbox(frame, height=10, selectmode=selectmode, exportselection=0)
self.box.bind('<1>', self.waitAndUpdate)
self.box.pack(side=tk.LEFT)
scroll = ttk.Scrollbar(frame, command=self.box.yview)
scroll.pack(side=tk.RIGHT, fill=tk.Y)
self.box.configure(yscrollcommand=scroll.set)
frame.grid(**grid_args)
def populateChoices(self, items:list):
all_options = set()
for item in items:
if isinstance(self.PROPERTY, list):
for prop in self.PROPERTY:
for value in item.getRawProperty(prop):
all_options.add(value)
else:
for value in item.getRawProperty(self.PROPERTY):
all_options.add(value)
self.all_options = sorted(list(all_options))
self.box.delete(0, tk.END)
for option in self.all_options:
self.box.insert(tk.END, option)
def waitAndUpdate(self, e=None):
# without after(), the callback executes *before* GUI has updated selection
self.main.after(50, self._update)
def getSelection(self):
return [self.all_options[i] for i in self.box.curselection()]
def _reset(self):
self.box.selection_clear(0, tk.END)
Filter._reset(self)
class GenreFilter(ListboxFilter):
PROPERTY = 'genres'
def __init__(self, root):
self.mode = tk.IntVar()
self.selected = []
self.filterMap = {
0: self.filterAtLeast,
1: self.filterAll,
2: self.filterExactly
}
super(GenreFilter, self).__init__(root)
def reset(self):
self.mode.set(0)
self.selected = []
self._reset()
def buildUI(self):
m = self.main
tk.Label(m, text='Gatunek:').grid(row=0, column=0, sticky=tk.NW)
self.makeListbox(m, tk.EXTENDED, row=1, column=0)
radios = tk.Frame(m)
radios.grid(row=2, column=0, sticky=tk.NW)
tk.Radiobutton(radios, text='przynajmniej', variable=self.mode, value=0,
command=self._update).pack(anchor=tk.W)
tk.Radiobutton(radios, text='wszystkie', variable=self.mode, value=1,
command=self._update).pack(anchor=tk.W)
tk.Radiobutton(radios, text='dokładnie', variable=self.mode, value=2,
command=self._update).pack(anchor=tk.W)
ttk.Button(m, text='Reset', width=5, command=self.reset).grid(row=2, column=0, sticky=tk.NE)
def _update(self, event=None):
self.selected = self.getSelection()
if len(self.selected) == 0:
self.function = Filter.DEFAULT
else:
self.function = self.filterMap[self.mode.get()]
self.notifyMachine()
def filterAtLeast(self, item):
for genre in self.selected:
if genre in item.getRawProperty(self.PROPERTY):
return True
return False
def filterAll(self, item):
for genre in self.selected:
if genre not in item.getRawProperty(self.PROPERTY):
return False
return True
def filterExactly(self, item):
if len(self.selected) == len(item.getRawProperty(self.PROPERTY)):
return self.filterAll(item)
return False
class CountryFilter(ListboxFilter):
PROPERTY = 'countries'
def __init__(self, root):
self.selected = []
super(CountryFilter, self).__init__(root)
def reset(self):
self.selected = []
self._reset()
def buildUI(self):
m = self.main
tk.Label(m, text='Kraj produkcji:').grid(row=0, column=0, sticky=tk.NW)
self.makeListbox(m, tk.SINGLE, row=1, column=0)
ttk.Button(m, text='Reset', width=5, command=self.reset).grid(row=2, column=0, sticky=tk.SE)
def _update(self, event=None):
self.selected = self.getSelection()
if len(self.selected) == 0:
self.function = Filter.DEFAULT
else:
self.function = self.filterBelongs
self.notifyMachine()
def filterBelongs(self, item):
for country in item.getRawProperty(self.PROPERTY):
if country in self.selected:
return True
return False
class DirectorFilter(ListboxFilter):
PROPERTY = 'directors'
def __init__(self, root):
self.selected = []
super(DirectorFilter, self).__init__(root)
def reset(self):
self.selected = []
self._reset()
def buildUI(self):
m = self.main
tk.Label(m, text='Reżyser:').grid(row=0, column=0, sticky=tk.NW)
self.makeListbox(m, tk.SINGLE, row=1, column=0)
ttk.Button(m, text='Reset', width=5, command=self.reset).grid(row=2, column=0, sticky=tk.SE)
def _update(self, event=None):
self.selected = self.getSelection()
if len(self.selected) == 0:
self.function = Filter.DEFAULT
else:
self.function = self.filterBelongs
self.notifyMachine()
def filterBelongs(self, item):
for director in item.getRawProperty(self.PROPERTY):
if director in self.selected:
return True
return False
class PlatformFilter(ListboxFilter):
PROPERTY = 'platforms'
def __init__(self, root):
self.selected = []
super(PlatformFilter, self).__init__(root)
def reset(self):
self.selected = []
self._reset()
def buildUI(self):
m = self.main
tk.Label(m, text='Platforma:').grid(row=0, column=0, sticky=tk.NW)
self.makeListbox(m, tk.SINGLE, row=1, column=0)
ttk.Button(m, text='Reset', width=5, command=self.reset).grid(row=2, column=0, sticky=tk.SE)
def _update(self, event=None):
self.selected = self.getSelection()
if len(self.selected) == 0:
self.function = Filter.DEFAULT
else:
self.function = self.filterBelongs
self.notifyMachine()
def filterBelongs(self, item):
    for maker in item.getRawProperty(self.PROPERTY):
      if maker in self.selected:
        return True
    return False
class GamemakerFilter(ListboxFilter):
PROPERTY = ['developers', 'producers']
def __init__(self, root):
self.selected = []
super(GamemakerFilter, self).__init__(root)
def reset(self):
self.selected = []
self._reset()
def buildUI(self):
m = self.main
tk.Label(m, text='Twórca:').grid(row=0, column=0, sticky=tk.NW)
self.makeListbox(m, tk.SINGLE, row=1, column=0)
ttk.Button(m, text='Reset', width=5, command=self.reset).grid(row=2, column=0, sticky=tk.SE)
def _update(self, event=None):
self.selected = self.getSelection()
if len(self.selected) == 0:
self.function = Filter.DEFAULT
else:
self.function = self.filterBelongs
self.notifyMachine()
def filterBelongs(self, item):
makers = []
for prop in self.PROPERTY:
for maker in item.getRawProperty(prop):
makers.append(maker)
for maker in makers:
if maker in self.selected:
return True
return False
class RatingFilter(Filter):
def __init__(self, root):
self.rate_from = tk.StringVar()
self.rate_to = tk.StringVar()
super(RatingFilter, self).__init__(root)
def reset(self):
self.rate_from.set('-')
self.rate_to.set('10')
self._reset()
def buildUI(self):
m = self.main
tk.Label(m, text='Moja ocena:').grid(row=0, column=0, columnspan=5, sticky=tk.NW)
tk.Label(m, text='Od:').grid(row=1, column=0, sticky=tk.NW)
tk.Label(m, text='Do:').grid(row=1, column=2, sticky=tk.NW)
values = ['-'] + [str(i) for i in range(1,11)]
rFrom = tk.Spinbox(m, width=5, textvariable=self.rate_from, command=self._updateFrom, values=values)
rFrom.bind('<KeyRelease>', self._updateFrom)
rFrom.grid(row=1, column=1, sticky=tk.NW)
rTo = tk.Spinbox(m, width=5, textvariable=self.rate_to, command=self._updateTo, values=values)
rTo.bind('<KeyRelease>', self._updateTo)
rTo.grid(row=1, column=3, sticky=tk.NW)
ttk.Button(m, text='Reset', width=5, command=self.reset).grid(row=1, column=4, sticky=tk.NE)
m.grid_columnconfigure(4, weight=1)
def _updateTo(self, event=None):
self._update(to=True, event=event)
def _updateFrom(self, event=None):
self._update(to=False, event=event)
def _update(self, to, event=None):
try:
rateFrom = int(self.rate_from.get())
except ValueError:
rateFrom = 0
try:
rateTo = int(self.rate_to.get())
except ValueError:
rateTo = 10
# similar mechanism as in YearFilter
if rateFrom > rateTo:
if to:
rateFrom = rateTo
self.rate_from.set(str(rateFrom))
else:
rateTo = rateFrom
self.rate_to.set(str(rateTo))
def ratingFilter(item):
rating = item.getRawProperty('rating')
if rating >= rateFrom and rating <= rateTo:
return True
else:
return False
self.function = ratingFilter
self.notifyMachine()
class DateFilter(Filter):
current_year = datetime.date.today().year
def __init__(self, root):
self.from_year = tk.IntVar()
self.from_month = tk.IntVar()
self.from_day = tk.IntVar()
self.to_year = tk.IntVar()
self.to_month = tk.IntVar()
self.to_day = tk.IntVar()
self.all_years = [self.current_year]
super(DateFilter, self).__init__(root)
def reset(self):
dayzero = datetime.date(
year=self.all_years[0],
month=1,
day=1
)
today = datetime.date.today()
self._setDates(dateFrom=dayzero, dateTo=today)
self._reset()
def buildUI(self):
m = self.main
tk.Label(m, text='Data ocenienia:').grid(row=0, column=0, columnspan=4, sticky=tk.NW)
tk.Label(m, text='Od:').grid(row=1, column=0, sticky=tk.NW)
tk.Label(m, text='Do:').grid(row=2, column=0, sticky=tk.NW)
self.fyInput = fyInput = ttk.Combobox(
master=m,
state='readonly',
width=4,
textvariable=self.from_year
)
fyInput.bind('<<ComboboxSelected>>', self._updateFrom)
fyInput.grid(row=1, column=1, sticky=tk.NW)
self.tyInput = tyInput = ttk.Combobox(
master=m,
state='readonly',
width=4,
textvariable=self.to_year
)
tyInput.bind('<<ComboboxSelected>>', self._updateTo)
tyInput.grid(row=2, column=1, sticky=tk.NW)
months = [i+1 for i in range(12)]
self.fmInput = fmInput = ttk.Combobox(
master=m,
state='readonly',
width=2,
textvariable=self.from_month,
values=months
)
fmInput.bind('<<ComboboxSelected>>', self._updateFrom)
fmInput.grid(row=1, column=2, sticky=tk.NW)
self.tmInput = tmInput = ttk.Combobox(
master=m,
state='readonly',
width=2,
textvariable=self.to_month,
values=months
)
tmInput.bind('<<ComboboxSelected>>', self._updateTo)
tmInput.grid(row=2, column=2, sticky=tk.NW)
self.fdInput = fdInput = ttk.Combobox(
master=m,
state='readonly',
width=2,
textvariable=self.from_day
)
fdInput.bind('<<ComboboxSelected>>', self._updateFrom)
fdInput.grid(row=1, column=3, sticky=tk.NW)
self.tdInput = tdInput = ttk.Combobox(
master=m,
state='readonly',
width=2,
textvariable=self.to_day
)
tdInput.bind('<<ComboboxSelected>>', self._updateTo)
tdInput.grid(row=2, column=3, sticky=tk.NW)
ttk.Button(m, text='Reset', width=5, command=self.reset).grid(row=1, column=4, rowspan=2, sticky=tk.E)
m.grid_columnconfigure(4, weight=2)
# shortcut buttons
sc = tk.Frame(m)
tk.Frame(sc, height=10).grid(row=0, column=0) # separator
tk.Label(sc, text='Ostatni:').grid(row=1, column=0, sticky=tk.W)
ttk.Button(sc, text='rok', width=4, command=self._thisYear).grid(row=1, column=1)
ttk.Button(sc, text='msc', width=4, command=self._thisMonth).grid(row=1, column=2)
ttk.Button(sc, text='tdzn', width=4, command=self._thisWeek).grid(row=1, column=3)
tk.Label(sc, text='Poprzedni:').grid(row=2, column=0, sticky=tk.W)
ttk.Button(sc, text='rok', width=4, command=self._lastYear).grid(row=2, column=1)
ttk.Button(sc, text='msc', width=4, command=self._lastMonth).grid(row=2, column=2)
ttk.Button(sc, text='tdzn', width=4, command=self._lastWeek).grid(row=2, column=3)
sc.grid(row=3, column=0, columnspan=5, sticky=tk.NW)
def populateChoices(self, items:list):
all_years = set()
for item in items:
item_date = item.getRawProperty('dateOf')
if not item_date:
continue
all_years.add(item_date.year)
all_years.add(self.current_year)
self.all_years = list(range(min(all_years), max(all_years) + 1))
self.fyInput.configure(values=self.all_years)
self.tyInput.configure(values=self.all_years)
self.reset()
def _thisYear(self):
dateTo = datetime.date.today()
delta = datetime.timedelta(days=365)
dateFrom = dateTo - delta
self._setDates(dateFrom=dateFrom, dateTo=dateTo)
self._makeUpdate(dateFrom, dateTo)
def _thisMonth(self):
dateTo = datetime.date.today()
delta = datetime.timedelta(days=31)
dateFrom = dateTo - delta
self._setDates(dateFrom=dateFrom, dateTo=dateTo)
self._makeUpdate(dateFrom, dateTo)
def _thisWeek(self):
dateTo = datetime.date.today()
delta = datetime.timedelta(days=7)
dateFrom = dateTo - delta
self._setDates(dateFrom=dateFrom, dateTo=dateTo)
self._makeUpdate(dateFrom, dateTo)
# Change this into "previous" (unit), so that it's not an absolute change but
# a relative one wrt. the currently set filtering (i.e. someone picks a range
# of dates two months ago, clicks "previous year" and this gives them a range
# one year prior to that).
def _lastYear(self):
delta = datetime.timedelta(days=365)
dateTo = datetime.date.today() - delta
dateFrom = dateTo - delta
self._setDates(dateFrom=dateFrom, dateTo=dateTo)
self._makeUpdate(dateFrom, dateTo)
def _lastMonth(self):
delta = datetime.timedelta(days=31)
dateTo = datetime.date.today() - delta
dateFrom = dateTo - delta
self._setDates(dateFrom=dateFrom, dateTo=dateTo)
self._makeUpdate(dateFrom, dateTo)
def _lastWeek(self):
delta = datetime.timedelta(days=7)
dateTo = datetime.date.today() - delta
dateFrom = dateTo - delta
self._setDates(dateFrom=dateFrom, dateTo=dateTo)
self._makeUpdate(dateFrom, dateTo)
def _updateTo(self, event=None):
self._update(to=True)
def _updateFrom(self, event=None):
self._update(to=False)
def _setDates(self, dateFrom=None, dateTo=None):
if dateFrom:
self.from_year.set(dateFrom.year)
self.from_month.set(dateFrom.month)
self.from_day.set(dateFrom.day)
max_days = monthrange(dateFrom.year, dateFrom.month)[1]
self.fdInput.configure(values=[i+1 for i in range(max_days)])
if dateTo:
self.to_year.set(dateTo.year)
self.to_month.set(dateTo.month)
self.to_day.set(dateTo.day)
max_days = monthrange(dateTo.year, dateTo.month)[1]
self.tdInput.configure(values=[i+1 for i in range(max_days)])
def _tryCorrectDate(self, year, month, day):
""" Constructs a date object, limiting days to maximum per month. """
max_day = monthrange(year, month)[1]
correct_day = min(day, max_day)
return datetime.date(year=year, month=month, day=correct_day)
def _getDates(self):
dateFrom = self._tryCorrectDate(
year=self.from_year.get(),
month=self.from_month.get(),
day=self.from_day.get()
)
dateTo = self._tryCorrectDate(
year=self.to_year.get(),
month=self.to_month.get(),
day=self.to_day.get()
)
return dateFrom, dateTo
def _update(self, to):
""" Constructs correct dates and calls back to the machine.
Each combobox from a group ("from" date, "to" date) calls this function
informing it which group has been acted upon. Date component values are
obtained from the widgets and automatically corrected for day per month
situation. Possible impossible date range is resolved and the corrected
dates are set back on the widget before being passed to _makeUpdate for
reporting back to the machine.
"""
# Get dates and correct them for the right number of days per month
dateFrom, dateTo = self._getDates()
# Handle the possible "from" after "to" conflict
if dateFrom > dateTo:
# For now simply set the same date on the other control. It's not trivial
# to do it intelligently, that is: to determine by which amount to adjust
# the other date.
if to:
dateFrom = dateTo
else:
dateTo = dateFrom
# Set the corrected dates on the GUI
self._setDates(dateFrom=dateFrom, dateTo=dateTo)
# Issue the filter update
self._makeUpdate(dateFrom=dateFrom, dateTo=dateTo)
def _makeUpdate(self, dateFrom, dateTo):
def dateFilter(item):
date = item.getRawProperty('dateOf')
if date >= dateFrom and date <= dateTo:
return True
else:
return False
self.function = dateFilter
self.notifyMachine()
| 37.651297 | 106 | 0.678951 |
4a1ef43bea402747df1d0d058f12c31be185ea9f | 1,626 | py | Python | locan/tests/locan_io/test_elyra_io.py | super-resolution/Locan | 94ed7759f7d7ceddee7c7feaabff80010cfedf30 | [
"BSD-3-Clause"
] | 8 | 2021-11-25T20:05:49.000Z | 2022-03-27T17:45:00.000Z | locan/tests/locan_io/test_elyra_io.py | super-resolution/Locan | 94ed7759f7d7ceddee7c7feaabff80010cfedf30 | [
"BSD-3-Clause"
] | 4 | 2021-12-15T22:39:20.000Z | 2022-03-11T17:35:34.000Z | locan/tests/locan_io/test_elyra_io.py | super-resolution/Locan | 94ed7759f7d7ceddee7c7feaabff80010cfedf30 | [
"BSD-3-Clause"
] | 1 | 2022-03-22T19:53:13.000Z | 2022-03-22T19:53:13.000Z | from io import StringIO
import locan.constants
from locan.locan_io import load_Elyra_file
from locan.locan_io.locdata.elyra_io import load_Elyra_header
def test_get_correct_column_names_from_Elyra_header():
columns = load_Elyra_header(
path=locan.ROOT_DIR / "tests/test_data/Elyra_dstorm_data.txt"
)
assert columns == [
"original_index",
"frame",
"frames_number",
"frames_missing",
"position_x",
"position_y",
"uncertainty",
"intensity",
"local_background_sigma",
"chi_square",
"psf_half_width",
"channel",
"slice_z",
]
file_like = StringIO(
"Index First Frame Number Frames Frames Missing Position X [nm] Position Y [nm]\n"
"1 1 1 0 15850.6 23502.1"
)
columns = load_Elyra_header(path=file_like)
assert columns == [
"original_index",
"frame",
"frames_number",
"frames_missing",
"position_x",
"position_y",
]
def test_loading_Elyra_file():
dat = load_Elyra_file(path=locan.ROOT_DIR / "tests/test_data/Elyra_dstorm_data.txt")
# loading is not limited by nrows=10 to ensure correct treatment of file appendix and NUL character.
assert len(dat) == 999
file_like = StringIO(
"Index\tFirst Frame\tNumber Frames\tFrames Missing\tPosition X [nm]\tPosition Y [nm]\n"
"1\t1\t1\t0\t15850.6\t23502.1"
)
dat = load_Elyra_file(path=file_like)
# loading is not limited by nrows=10 to ensure correct treatment of file appendix and NUL character.
assert len(dat) == 1
| 29.563636 | 104 | 0.646371 |
4a1ef54fb50013881aa832f83674ac66ecccd9bc | 5,568 | py | Python | tensorflow/python/ops/histogram_ops.py | zhangyujing/tensorflow | c7a04561fb8972fb64907acc5f10f3c6d4cef9f2 | [
"Apache-2.0"
] | 13 | 2018-07-23T18:53:35.000Z | 2021-11-18T19:56:45.000Z | tensorflow/python/ops/histogram_ops.py | zhangyujing/tensorflow | c7a04561fb8972fb64907acc5f10f3c6d4cef9f2 | [
"Apache-2.0"
] | 13 | 2020-01-28T22:20:14.000Z | 2022-03-11T23:20:14.000Z | tensorflow/python/ops/histogram_ops.py | zhangyujing/tensorflow | c7a04561fb8972fb64907acc5f10f3c6d4cef9f2 | [
"Apache-2.0"
] | 13 | 2018-09-07T13:28:38.000Z | 2020-07-17T15:06:24.000Z | # Copyright 2015 The TensorFlow Authors. All Rights Reserved.
#
# Licensed under the Apache License, Version 2.0 (the "License");
# you may not use this file except in compliance with the License.
# You may obtain a copy of the License at
#
# http://www.apache.org/licenses/LICENSE-2.0
#
# Unless required by applicable law or agreed to in writing, software
# distributed under the License is distributed on an "AS IS" BASIS,
# WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
# See the License for the specific language governing permissions and
# limitations under the License.
# ==============================================================================
# pylint: disable=g-short-docstring-punctuation
"""Histograms.
Please see @{$python/histogram_ops} guide.
@@histogram_fixed_width_bins
@@histogram_fixed_width
"""
from __future__ import absolute_import
from __future__ import division
from __future__ import print_function
from tensorflow.python.framework import dtypes
from tensorflow.python.framework import ops
from tensorflow.python.ops import array_ops
from tensorflow.python.ops import clip_ops
from tensorflow.python.ops import gen_math_ops
from tensorflow.python.ops import math_ops
from tensorflow.python.util.tf_export import tf_export
@tf_export('histogram_fixed_width_bins')
def histogram_fixed_width_bins(values,
value_range,
nbins=100,
dtype=dtypes.int32,
name=None):
"""Bins the given values for use in a histogram.
Given the tensor `values`, this operation returns a rank 1 `Tensor`
representing the indices of a histogram into which each element
of `values` would be binned. The bins are equal width and
determined by the arguments `value_range` and `nbins`.
Args:
values: Numeric `Tensor`.
value_range: Shape [2] `Tensor` of same `dtype` as `values`.
values <= value_range[0] will be mapped to hist[0],
values >= value_range[1] will be mapped to hist[-1].
nbins: Scalar `int32 Tensor`. Number of histogram bins.
dtype: dtype for returned histogram.
name: A name for this operation (defaults to 'histogram_fixed_width').
Returns:
A `Tensor` holding the indices of the binned values whose shape matches
`values`.
Examples:
```python
# Bins will be: (-inf, 1), [1, 2), [2, 3), [3, 4), [4, inf)
nbins = 5
value_range = [0.0, 5.0]
new_values = [-1.0, 0.0, 1.5, 2.0, 5.0, 15]
with tf.get_default_session() as sess:
indices = tf.histogram_fixed_width_bins(new_values, value_range, nbins=5)
variables.global_variables_initializer().run()
sess.run(indices) => [0, 0, 1, 2, 4]
```
"""
with ops.name_scope(name, 'histogram_fixed_width_bins',
[values, value_range, nbins]):
values = ops.convert_to_tensor(values, name='values')
shape = array_ops.shape(values)
values = array_ops.reshape(values, [-1])
value_range = ops.convert_to_tensor(value_range, name='value_range')
nbins = ops.convert_to_tensor(nbins, dtype=dtypes.int32, name='nbins')
nbins_float = math_ops.cast(nbins, values.dtype)
# Map tensor values that fall within value_range to [0, 1].
scaled_values = math_ops.truediv(
values - value_range[0],
value_range[1] - value_range[0],
name='scaled_values')
# map tensor values within the open interval value_range to {0,.., nbins-1},
# values outside the open interval will be zero or less, or nbins or more.
indices = math_ops.floor(nbins_float * scaled_values, name='indices')
# Clip edge cases (e.g. value = value_range[1]) or "outliers."
indices = math_ops.cast(
clip_ops.clip_by_value(indices, 0, nbins_float - 1), dtypes.int32)
return array_ops.reshape(indices, shape)
@tf_export('histogram_fixed_width')
def histogram_fixed_width(values,
value_range,
nbins=100,
dtype=dtypes.int32,
name=None):
"""Return histogram of values.
Given the tensor `values`, this operation returns a rank 1 histogram counting
the number of entries in `values` that fell into every bin. The bins are
equal width and determined by the arguments `value_range` and `nbins`.
Args:
values: Numeric `Tensor`.
value_range: Shape [2] `Tensor` of same `dtype` as `values`.
values <= value_range[0] will be mapped to hist[0],
values >= value_range[1] will be mapped to hist[-1].
nbins: Scalar `int32 Tensor`. Number of histogram bins.
dtype: dtype for returned histogram.
name: A name for this operation (defaults to 'histogram_fixed_width').
Returns:
A 1-D `Tensor` holding histogram of values.
Examples:
```python
# Bins will be: (-inf, 1), [1, 2), [2, 3), [3, 4), [4, inf)
nbins = 5
value_range = [0.0, 5.0]
new_values = [-1.0, 0.0, 1.5, 2.0, 5.0, 15]
with tf.get_default_session() as sess:
hist = tf.histogram_fixed_width(new_values, value_range, nbins=5)
variables.global_variables_initializer().run()
sess.run(hist) => [2, 1, 1, 0, 2]
```
"""
with ops.name_scope(name, 'histogram_fixed_width',
[values, value_range, nbins]) as name:
# pylint: disable=protected-access
return gen_math_ops._histogram_fixed_width(
values, value_range, nbins, dtype=dtype, name=name)
# pylint: enable=protected-access
| 37.621622 | 80 | 0.670259 |
4a1ef58d3a2ee542968f254eac485875ff61c7b2 | 1,394 | py | Python | python/sorting/quicksort.py | educauchy/algorithms | 5e69f70595eceb28fe4f115a70e0e6f197e4564b | [
"MIT"
] | null | null | null | python/sorting/quicksort.py | educauchy/algorithms | 5e69f70595eceb28fe4f115a70e0e6f197e4564b | [
"MIT"
] | null | null | null | python/sorting/quicksort.py | educauchy/algorithms | 5e69f70595eceb28fe4f115a70e0e6f197e4564b | [
"MIT"
] | null | null | null | from typing import List, Union
arr_type = List[Union[float, int, str]]
def partition(x: arr_type, min: int = None, max: int = None) -> int:
"""
Return index of the pivot element
:param x: Subarray
:param min: Minimum index of subarray x
:param max: Maximum index of subarray x
:return:
"""
    pvt = x[max]  # pivot: the last element of the subarray
    i = min - 1  # index of the last element known to be <= pivot
    for j in range(min, max):  # scan every element except the pivot itself
        if x[j] <= pvt:  # current element belongs in the "<= pivot" region
            i += 1  # grow that region by one slot
            x[i], x[j] = x[j], x[i]  # move the current element into it
    x[i+1], x[max] = x[max], x[i+1]  # place the pivot just after the "<= pivot" region
return i+1
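# Added worked example (illustration only, not from the original file):
#     data = [3, 1, 2]
#     partition(data, 0, 2)  # returns 1, and data becomes [1, 2, 3]
# i.e. the pivot 2 ends up at index 1 with the smaller element on its left.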
def quicksort(x: arr_type, min: int = None, max: int = None) -> arr_type:
if min < max:
q = partition(x, min, max)
quicksort(x, 0, q-1)
quicksort(x, q+1, max)
return x
if __name__ == '__main__':
arr = [2, 5, 0, 5, 1, 2, 6, 8, 3, 11, 3, -1, 5]
quicksort(arr, 0, len(arr)-1)
print(arr)
arr = [3, 7, 8, 5, 2, 1, 9, 5, 4]
quicksort(arr, 0, len(arr)-1)
print(arr)
arr = [10, 7, 8, 9, 1, 5]
quicksort(arr, 0, len(arr)-1)
print(arr)
| 32.418605 | 108 | 0.526542 |
4a1ef668e994528ca4017aa8967a7731fbd62535 | 10,462 | py | Python | src/lib/zothero/cache.py | yarray/zothero | d8b2a2160e35ab2a5d503a7bbf5aa39c538174bc | [
"MIT"
] | 372 | 2017-12-20T22:56:25.000Z | 2022-03-30T22:43:52.000Z | src/lib/zothero/cache.py | yarray/zothero | d8b2a2160e35ab2a5d503a7bbf5aa39c538174bc | [
"MIT"
] | 65 | 2017-12-20T23:03:42.000Z | 2022-03-30T13:07:18.000Z | src/lib/zothero/cache.py | yarray/zothero | d8b2a2160e35ab2a5d503a7bbf5aa39c538174bc | [
"MIT"
] | 26 | 2018-05-23T07:59:52.000Z | 2022-03-21T04:37:16.000Z | # encoding: utf-8
#
# Copyright (c) 2016 Dean Jackson <[email protected]>
#
# MIT Licence. See http://opensource.org/licenses/MIT
#
# Created on 2016-11-13
#
"""Simple key-value store based on sqlite3.
Data is stored via `Store` sub-objects assigned to each table.
"""
from __future__ import print_function, absolute_import
from contextlib import contextmanager
import logging
import os
import re
import sqlite3
import time
log = logging.getLogger(__name__)
log.addHandler(logging.NullHandler())
# Create a new database
SQL_SCHEMA = u"""
CREATE TABLE `dbinfo` (
`id` INTEGER PRIMARY KEY AUTOINCREMENT NOT NULL,
`version` INTEGER NOT NULL
);
INSERT INTO `dbinfo` VALUES (1, 1);
"""
# Add a new table
SQL_TABLE = u"""
CREATE TABLE `{name}` (
`key` TEXT PRIMARY KEY,
`value` BLOB NOT NULL,
`updated` INTEGER DEFAULT 0
);
"""
# Convenience constants; currently unused
FOREVER = 0
ONE_MINUTE = 60
ONE_HOUR = 3600
ONE_DAY = 86400
ONE_WEEK = ONE_DAY * 7
ONE_YEAR = ONE_DAY * 365
TWO_YEARS = ONE_YEAR * 2
THREE_YEARS = ONE_YEAR * 3
def _nullop(value):
"""Do-nothing handler. Simply returns ``value``."""
return value
class Store(object):
"""Key-value store based on an sqlite3 table.
Instantiate these via `Cache.open(name)`.
Attributes:
cache (Cache): `Cache` object holding this store's database.
convert_in (callable): Called on input before storage.
convert_out (callable): Called on output before returning it.
name (str): Name of store (and database table).
"""
def __init__(self, name, cache, convert_in=None, convert_out=None):
"""Create new `Store`.
Args:
name (str): Name of store (and database table).
cache (Cache): `Cache` object managing the database.
convert_in (callable, optional): Called on input before storage.
convert_out (callable, optional): Called on output before return.
"""
self.name = name
self.cache = cache
self.convert_in = convert_in or _nullop
self.convert_out = convert_out or _nullop
@property
def conn(self):
"""Database connection."""
return self.cache.conn
@contextmanager
def cursor(self):
"""Context manager providing database cursor."""
with self.conn as c:
yield c.cursor()
def keys(self):
"""Iterate over all store keys.
Yields:
unicode: Store keys.
"""
sql = u"""
SELECT `key` FROM `{table}` WHERE 1
""".format(table=self.name)
for row in self.conn.execute(sql):
yield row['key']
def get(self, key, default=None):
"""Return value for `key` or `default`.
Passes result through `self.convert_out()` before returning.
Args:
key (str): Database key.
default (obj, optional): What to return if `key` is absent.
Returns:
obj: Object deserialised from the database.
"""
key = self._validate_key(key)
sql = u"""
SELECT `value` FROM `{table}` WHERE key = ?
""".format(table=self.name)
r = self.conn.execute(sql, (key,)).fetchone()
if r:
return self.convert_out(r['value'])
return default
def set(self, key, value):
"""Set value for key, passing `value` through `self.convert_in()`.
Args:
key (str): Database key.
value (obj): Object to store in database.
"""
key = self._validate_key(key)
value = self.convert_in(value)
with self.cursor() as c:
sql = u"""
UPDATE `{table}`
SET `value` = ?, `updated` = ?
WHERE key = ?
""".format(table=self.name)
c.execute(sql, (value, time.time(), key))
if c.rowcount > 0:
log.debug(u'[%s] updated `%s` -> %r', self.name, key, value)
return
with self.cursor() as c:
sql = u"""
INSERT INTO `{table}`
(`key`, `value`, `updated`)
VALUES (?, ?, ?)
""".format(table=self.name)
c.execute(sql, (key, value, time.time()))
if c.rowcount > 0:
log.debug(u'[%s] inserted `%s` -> %r', self.name, key, value)
return
log.error("[%s] couldn't save value for key %r", self.name, key)
def delete(self, key):
"""Remove item from store."""
sql = u"""
DELETE FROM `{table}` WHERE `key` = ?
""".format(table=self.name)
with self.cursor() as c:
c.execute(sql, (key,))
if c.rowcount:
return True
return False
def updated(self, key=None):
"""Timestamp of last time ``key`` was updated.
Args:
key (unicode, optional): Key of item to query. If no key
is specified, returns the last time any entry was
updated.
Returns:
float: UNIX timestamp of last update, or ``0.0`` if key
doesn't exit.
"""
if key:
sql = u"""
SELECT `updated` FROM `{table}` WHERE `key` = ?
""".format(table=self.name)
row = self.conn.execute(sql, (key,)).fetchone()
if row:
return row['updated']
return 0.0
# Return latest updated
sql = u"""
SELECT MAX(`updated`) AS `updated` FROM `{table}`
""".format(table=self.name)
row = self.conn.execute(sql).fetchone()
return row['updated'] if row['updated'] else 0.0
def _validate_key(self, key):
"""Coerce `key` to Unicode or raise `ValueError`.
Args:
key (str or unicode): String key.
Raises:
TypeError: Raised if `key` isn't a string.
Returns:
unicode: Unicode `key`.
"""
if isinstance(key, str):
key = unicode(key, 'utf-8')
elif not isinstance(key, unicode):
raise TypeError(
"`key` must be `str` or `unicode`, not `{}`".format(
key.__class__.__name__)
)
return key
class Cache(object):
"""Key-value store manager.
Attributes:
filepath (str): Path to cache sqlite file.
invalid_names (tuple): Names not permitted for Stores
(i.e. bad table names).
"""
invalid_names = ('dbinfo', 'sqlite_sequence', 'sqlite_master')
def __init__(self, filepath):
"""Open/create and open cache at `filepath`.
Args:
filepath (str): Path of cache sqlite database.
"""
self.filepath = filepath
self._conn = None
self.conn
@property
def conn(self):
"""Connection to database."""
if not self._conn:
conn = sqlite3.connect(self.filepath)
conn.row_factory = sqlite3.Row
with conn as c:
try:
c.execute(u'SELECT * FROM `dbinfo`')
except sqlite3.OperationalError:
log.debug('[cache] initialising %r...', self.filepath)
c.executescript(SQL_SCHEMA)
self._conn = conn
return self._conn
@contextmanager
def cursor(self):
"""Context manager providing database cursor."""
with self.conn as c:
yield c.cursor()
def open(self, name, convert_in=None, convert_out=None):
"""Open a `Store` with `name` and using the specified converters.
Args:
name (str): The name of the Store/database table.
convert_in (callable, optional): Serialise database values.
convert_out (callable, optional): Deserialise database values.
Returns:
Store: `Store` object.
"""
# log.debug('self.caches=%r', self.caches)
log.debug('[cache] opening store %r...', name)
if name not in self.caches:
log.info('[cache] creating table `%s`...', name)
self._add_table(name)
return Store(name, self, convert_in, convert_out)
def clear(self, name=None):
"""Clear Stores.
If no `name` is specified, the entire cache is deleted.
Args:
name (str, optional): Name of a specific store.
Raises:
ValueError: Raised if specified Store does not exit.
"""
if name is None: # Delete whole cache
try:
os.unlink(self.filepath)
except OSError:
pass
return
elif name in self.caches:
sql = u'DROP TABLE `{}`'.format(name)
with self.conn as c:
c.execute(sql)
return
else:
raise ValueError('store not found : {!r}'.format(name))
@property
def caches(self):
"""Synonym for `stores`."""
return self.stores
@property
def stores(self):
"""List of Stores in this Cache.
Returns:
list: String names of Stores.
"""
sql = u"SELECT name FROM `sqlite_master` WHERE type='table'"
rows = self.conn.execute(sql)
return [r['name'] for r in rows
if r['name'] not in self.invalid_names]
def _add_table(self, name):
"""Add new table to database, verifying name first.
Name should contain only lowercase ASCII letters, digits and
underscore (_). May not start with a digit.
Args:
name (str): Name of the table.
Raises:
ValueError: Raised if `name` is not permitted.
"""
if name.lower() in self.invalid_names:
raise ValueError('name is reserved: %r' % name.lower())
if not name or \
not re.match(r'[a-z][a-z0-9_]+', name) \
or len(name) > 100:
raise ValueError(
'invalid name: %r. Name must be 1-100 characters, '
'a-z and _ only.' % name.lower()
)
sql = SQL_TABLE.format(name=name)
with self.conn as c:
c.executescript(sql)
log.debug(u'[cache] added table `%s`', name)
log.debug(u'self.caches=%r', self.caches)
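# --- Added usage sketch (not part of the original module). It exercises only the
# Cache/Store API defined above; the file name 'example_cache.sqlite3' and the
# store name 'entries' are assumptions made for illustration.
if __name__ == '__main__':
    cache = Cache('example_cache.sqlite3')  # creates the file and schema on first use
    store = cache.open('entries')           # one Store wraps one sqlite table
    store.set(u'greeting', u'hello')
    print(store.get(u'greeting'))           # -> hello
    print(list(store.keys()))               # -> [u'greeting']
    store.delete(u'greeting')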
| 26.688776 | 77 | 0.543395 |
4a1ef6b03fb9e9470450de023a60d4b0f816b7da | 5,441 | py | Python | train.py | Mindy123456/pytorch-CycleGAN-and-pix2pix | ef54699e2d4a37b95d4c009e80b880e4c908aa59 | [
"BSD-3-Clause"
] | null | null | null | train.py | Mindy123456/pytorch-CycleGAN-and-pix2pix | ef54699e2d4a37b95d4c009e80b880e4c908aa59 | [
"BSD-3-Clause"
] | null | null | null | train.py | Mindy123456/pytorch-CycleGAN-and-pix2pix | ef54699e2d4a37b95d4c009e80b880e4c908aa59 | [
"BSD-3-Clause"
] | null | null | null | """General-purpose training script for image-to-image translation.
This script works for various models (with option '--model': e.g., pix2pix, cyclegan, colorization) and
different datasets (with option '--dataset_mode': e.g., aligned, unaligned, single, colorization).
You need to specify the dataset ('--dataroot'), experiment name ('--name'), and model ('--model').
It first creates model, dataset, and visualizer given the option.
It then does standard network training. During the training, it also visualize/save the images, print/save the loss plot, and save models.
The script supports continue/resume training. Use '--continue_train' to resume your previous training.
Example:
Train a CycleGAN model:
python train.py --dataroot ./datasets/maps --name maps_cyclegan --model cycle_gan
Train a pix2pix model:
python train.py --dataroot ./datasets/facades --name facades_pix2pix --model pix2pix --direction BtoA
See options/base_options.py and options/train_options.py for more training options.
See training and test tips at: https://github.com/junyanz/pytorch-CycleGAN-and-pix2pix/blob/master/docs/tips.md
See frequently asked questions at: https://github.com/junyanz/pytorch-CycleGAN-and-pix2pix/blob/master/docs/qa.md
"""
import time
from options.train_options import TrainOptions
from data import create_dataset
from models import create_model
# from util.visualizer import Visualizer
if __name__ == '__main__':
opt = TrainOptions().parse() # get training options
dataset = create_dataset(opt) # create a dataset given opt.dataset_mode and other options
dataset_size = len(dataset) # get the number of images in the dataset.
print('The number of training images = %d' % dataset_size)
model = create_model(opt) # create a model given opt.model and other options
model.setup(opt) # regular setup: load and print networks; create schedulers
# visualizer = Visualizer(opt) # create a visualizer that display/save images and plots
total_iters = 0 # the total number of training iterations
start_time = time.time()
for epoch in range(opt.epoch_count, opt.n_epochs + opt.n_epochs_decay + 1): # outer loop for different epochs; we save the model by <epoch_count>, <epoch_count>+<save_latest_freq>
epoch_start_time = time.time() # timer for entire epoch
iter_data_time = time.time() # timer for data loading per iteration
epoch_iter = 0 # the number of training iterations in current epoch, reset to 0 every epoch
# visualizer.reset() # reset the visualizer: make sure it saves the results to HTML at least once every epoch
model.update_learning_rate() # update learning rates in the beginning of every epoch.
for i, data in enumerate(dataset): # inner loop within one epoch
iter_start_time = time.time() # timer for computation per iteration
# if total_iters % opt.print_freq == 0:
# t_data = iter_start_time - iter_data_time
# t_data = iter_start_time - iter_data_time # replace 2 lines above
# print('t_data: %f' % t_data)
total_iters += opt.batch_size
epoch_iter += opt.batch_size
model.set_input(data) # unpack data from dataset and apply preprocessing
model.optimize_parameters() # calculate loss functions, get gradients, update network weights
print('total_iters: %d' % total_iters)
# if total_iters % opt.display_freq == 0: # display images on visdom and save images to a HTML file
# save_result = total_iters % opt.update_html_freq == 0
# model.compute_visuals()
# visualizer.display_current_results(model.get_current_visuals(), epoch, save_result)
# if total_iters % opt.print_freq == 0: # print training losses and save logging information to the disk
# losses = model.get_current_losses()
# t_comp = (time.time() - iter_start_time) / opt.batch_size
# visualizer.print_current_losses(epoch, epoch_iter, losses, t_comp, t_data)
# if opt.display_id > 0:
# visualizer.plot_current_losses(epoch, float(epoch_iter) / dataset_size, losses)
if total_iters % opt.save_latest_freq == 0: # cache our latest model every <save_latest_freq> iterations
print('saving the latest model (epoch %d, total_iters %d)' % (epoch, total_iters))
save_suffix = 'iter_%d' % total_iters if opt.save_by_iter else 'latest'
model.save_networks(save_suffix)
iter_data_time = time.time()
if epoch % opt.save_epoch_freq == 0: # cache our model every <save_epoch_freq> epochs
print('saving the model at the end of epoch %d, iters %d' % (epoch, total_iters))
model.save_networks('latest')
model.save_networks(epoch)
print('End of epoch %d / %d \t Time Taken: %d sec' % (epoch, opt.n_epochs + opt.n_epochs_decay, time.time() - epoch_start_time))
end_time = time.time()
total_time = end_time - start_time
train_epoch = opt.n_epochs + opt.n_epochs_decay + 1 - opt.epoch_count
print('Train with %d epoch from epoch %d for %d sec, batch_size = %d ' % (train_epoch, opt.epoch_count, total_time, opt.batch_size)) | 66.353659 | 186 | 0.680022 |
4a1ef6dd51a9350821334e2084c1942e6f455987 | 1,582 | py | Python | catkin_ws/src/udemy_opencv/contours_detection.py | hariudkmr/Intro_to_Ros | 05b02117ad3a48c17aaeb6473fec36074090c7b0 | [
"MIT"
] | 1 | 2021-01-22T16:36:03.000Z | 2021-01-22T16:36:03.000Z | catkin_ws/src/udemy_opencv/contours_detection.py | hariudkmr/Intro_to_Ros | 05b02117ad3a48c17aaeb6473fec36074090c7b0 | [
"MIT"
] | null | null | null | catkin_ws/src/udemy_opencv/contours_detection.py | hariudkmr/Intro_to_Ros | 05b02117ad3a48c17aaeb6473fec36074090c7b0 | [
"MIT"
] | null | null | null | #!/usr/bin/env python
import numpy as np
import cv2
def read_image(image_name, show):
image = cv2.imread(image_name, cv2.IMREAD_COLOR)
if show:
cv2.imshow("RGB Image", image)
return image
def convert_rgb_to_gray(image, show):
gray_image = cv2.cvtColor(image, cv2.COLOR_BGR2GRAY)
if show:
cv2.imshow("Gray Image", gray_image)
return gray_image
def convert_gray_to_binary(image, adaptive, show):
if adaptive:
binary_image = cv2.adaptiveThreshold(
image, 255, cv2.ADAPTIVE_THRESH_GAUSSIAN_C, cv2.THRESH_BINARY_INV, 5, 2)
else:
        _, binary_image = cv2.threshold(image, 127, 255, cv2.THRESH_BINARY_INV)  # cv2.threshold returns (retval, image)
if show:
cv2.imshow("Binary Image", binary_image)
return binary_image
def get_contours(binary_image):
_, contours, _ = cv2.findContours(
binary_image, cv2.RETR_TREE, cv2.CHAIN_APPROX_SIMPLE)
return contours
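# Added note (not in the original file): the 3-value unpacking in get_contours
# matches the OpenCV 3.x API; OpenCV 4.x returns only (contours, hierarchy), so
# this helper assumes a 3.x installation.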
def draw_contours(image, contours, image_name):
index = -1
thickness = 2
color = (255, 0, 255)
cv2.drawContours(image, contours, index, color, thickness)
cv2.imshow(image_name, image)
def main():
image_name = "images/105.jpg"
rgb_image = read_image(image_name, True)
gray_image = convert_rgb_to_gray(rgb_image, True)
binary_image = convert_gray_to_binary(gray_image, True, True)
contours = get_contours(binary_image)
draw_contours(rgb_image, contours, "RGB_Contours")
draw_contours(binary_image, contours, "Binary_Contours")
cv2.waitKey(0)
cv2.destroyAllWindows()
if __name__ == '__main__':
main()
| 25.934426 | 84 | 0.699747 |
4a1ef86b2dc6fb392ce8e4d4764481cb272277b2 | 3,499 | py | Python | sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_crm_entity_dataset_py3.py | tzhanl/azure-sdk-for-python | 18cd03f4ab8fd76cc0498f03e80fbc99f217c96e | [
"MIT"
] | 1 | 2021-06-02T08:01:35.000Z | 2021-06-02T08:01:35.000Z | sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_crm_entity_dataset_py3.py | tzhanl/azure-sdk-for-python | 18cd03f4ab8fd76cc0498f03e80fbc99f217c96e | [
"MIT"
] | 1 | 2020-03-06T05:57:16.000Z | 2020-03-06T05:57:16.000Z | sdk/datafactory/azure-mgmt-datafactory/azure/mgmt/datafactory/models/dynamics_crm_entity_dataset_py3.py | tzhanl/azure-sdk-for-python | 18cd03f4ab8fd76cc0498f03e80fbc99f217c96e | [
"MIT"
] | null | null | null | # coding=utf-8
# --------------------------------------------------------------------------
# Copyright (c) Microsoft Corporation. All rights reserved.
# Licensed under the MIT License. See License.txt in the project root for
# license information.
#
# Code generated by Microsoft (R) AutoRest Code Generator.
# Changes may cause incorrect behavior and will be lost if the code is
# regenerated.
# --------------------------------------------------------------------------
from .dataset_py3 import Dataset
class DynamicsCrmEntityDataset(Dataset):
"""The Dynamics CRM entity dataset.
All required parameters must be populated in order to send to Azure.
:param additional_properties: Unmatched properties from the message are
     deserialized to this collection
:type additional_properties: dict[str, object]
:param description: Dataset description.
:type description: str
:param structure: Columns that define the structure of the dataset. Type:
array (or Expression with resultType array), itemType: DatasetDataElement.
:type structure: object
:param schema: Columns that define the physical type schema of the
dataset. Type: array (or Expression with resultType array), itemType:
DatasetSchemaDataElement.
:type schema: object
:param linked_service_name: Required. Linked service reference.
:type linked_service_name:
~azure.mgmt.datafactory.models.LinkedServiceReference
:param parameters: Parameters for dataset.
:type parameters: dict[str,
~azure.mgmt.datafactory.models.ParameterSpecification]
:param annotations: List of tags that can be used for describing the
Dataset.
:type annotations: list[object]
:param folder: The folder that this Dataset is in. If not specified,
Dataset will appear at the root level.
:type folder: ~azure.mgmt.datafactory.models.DatasetFolder
:param type: Required. Constant filled by server.
:type type: str
:param entity_name: The logical name of the entity. Type: string (or
Expression with resultType string).
:type entity_name: object
"""
_validation = {
'linked_service_name': {'required': True},
'type': {'required': True},
}
_attribute_map = {
'additional_properties': {'key': '', 'type': '{object}'},
'description': {'key': 'description', 'type': 'str'},
'structure': {'key': 'structure', 'type': 'object'},
'schema': {'key': 'schema', 'type': 'object'},
'linked_service_name': {'key': 'linkedServiceName', 'type': 'LinkedServiceReference'},
'parameters': {'key': 'parameters', 'type': '{ParameterSpecification}'},
'annotations': {'key': 'annotations', 'type': '[object]'},
'folder': {'key': 'folder', 'type': 'DatasetFolder'},
'type': {'key': 'type', 'type': 'str'},
'entity_name': {'key': 'typeProperties.entityName', 'type': 'object'},
}
def __init__(self, *, linked_service_name, additional_properties=None, description: str=None, structure=None, schema=None, parameters=None, annotations=None, folder=None, entity_name=None, **kwargs) -> None:
super(DynamicsCrmEntityDataset, self).__init__(additional_properties=additional_properties, description=description, structure=structure, schema=schema, linked_service_name=linked_service_name, parameters=parameters, annotations=annotations, folder=folder, **kwargs)
self.entity_name = entity_name
self.type = 'DynamicsCrmEntity'
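# --- Added usage sketch (not part of the generated file). The linked service
# name and entity name below are illustrative assumptions; LinkedServiceReference
# is the reference type named in the docstring above.
#
#     from azure.mgmt.datafactory.models import LinkedServiceReference
#     dataset = DynamicsCrmEntityDataset(
#         linked_service_name=LinkedServiceReference(
#             reference_name='exampleDynamicsCrmLinkedService'),
#         entity_name='account')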
| 47.931507 | 274 | 0.672192 |
4a1ef8923893c60879227c868e758170458acae3 | 1,046 | py | Python | sonar/line.py | augustin-barillec/sonar | 5d565bbdb924109163ab0cba355350484f545143 | [
"MIT"
] | null | null | null | sonar/line.py | augustin-barillec/sonar | 5d565bbdb924109163ab0cba355350484f545143 | [
"MIT"
] | null | null | null | sonar/line.py | augustin-barillec/sonar | 5d565bbdb924109163ab0cba355350484f545143 | [
"MIT"
] | null | null | null | from .utils import line_shift
from .coordinate import xy_def, ij_to_xy, xy_to_ij, f_to_xy, xy_to_f
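# Added explanatory note (not part of the original module): xy_line below keeps
# every grid cell whose four corners do not all lie strictly on the same side of
# the line through m1_xy and m2_xy -- i.e. the cells that the line crosses or
# touches -- assuming line_shift returns a signed offset of a point from the line.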
def xy_line(m1_xy, m2_xy):
if m1_xy == m2_xy:
raise ValueError('The two points must be distinct')
res = []
for x, y in xy_def:
dl = line_shift(m1_xy, m2_xy, (x, y))
dr = line_shift(m1_xy, m2_xy, (x+1, y))
ur = line_shift(m1_xy, m2_xy, (x+1, y+1))
ul = line_shift(m1_xy, m2_xy, (x, y+1))
if not ((dl > 0 and dr > 0 and ur > 0 and ul > 0) or (dl < 0 and dr < 0 and ur < 0 and ul < 0)):
res.append((x, y))
if m1_xy[0] != m2_xy[0]:
res = sorted(res, key=lambda m_xy: m_xy[0])
else:
res = sorted(res, key=lambda m_xy: m_xy[1])
return res
def ij_line(m1_ij, m2_ij):
m1_xy = ij_to_xy(m1_ij[0], m1_ij[1])
m2_xy = ij_to_xy(m2_ij[0], m2_ij[1])
return [xy_to_ij(x, y) for x, y in xy_line(m1_xy, m2_xy)]
def f_line(m1_f, m2_f):
m1_xy = f_to_xy(m1_f)
m2_xy = f_to_xy(m2_f)
return [xy_to_f(x, y) for x, y in xy_line(m1_xy, m2_xy)]
| 31.69697 | 104 | 0.587954 |
4a1ef8a8053db3626652dec4a194dda50a513d70 | 2,431 | py | Python | cfg.py | BobiAce/OCR_TEST | 05117db82287c92cf99df573d686f13438f0675f | [
"MIT"
] | null | null | null | cfg.py | BobiAce/OCR_TEST | 05117db82287c92cf99df573d686f13438f0675f | [
"MIT"
] | null | null | null | cfg.py | BobiAce/OCR_TEST | 05117db82287c92cf99df573d686f13438f0675f | [
"MIT"
] | null | null | null | import os
train_task_id = '3T736'
initial_epoch = 8
epoch_num = 50
lr = 1e-3
decay = 5e-4
clipvalue = 0.5 # default 0.5, 0 means no clip
patience = 8
load_weights = True
lambda_inside_score_loss = 4.0
lambda_side_vertex_code_loss = 1.0
lambda_side_vertex_coord_loss = 1.0
total_img = 10048
validation_split_ratio = 0.1
max_train_img_size = int(train_task_id[-3:])
max_predict_img_size = int(train_task_id[-3:]) # 2400
assert max_train_img_size in [256, 384, 512, 640, 736], \
    'max_train_img_size must be in [256, 384, 512, 640, 736]'
if max_train_img_size == 256:
batch_size = 8
elif max_train_img_size == 384:
batch_size = 4
elif max_train_img_size == 512:
batch_size = 4
else:
batch_size = 4
steps_per_epoch = total_img * (1 - validation_split_ratio) // batch_size
validation_steps = total_img * validation_split_ratio // batch_size
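# Added worked example (illustration only): with the values above --
# total_img = 10048, validation_split_ratio = 0.1, batch_size = 4 (because
# max_train_img_size is 736) -- the two expressions evaluate to
#     steps_per_epoch  = 10048 * 0.9 // 4 = 2260.0
#     validation_steps = 10048 * 0.1 // 4 = 251.0
# both floats, since the float multiplication happens before the floor division.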
# data_dir = '/home/jinbo/PycharmPro/DF-competition/OCR_data/mtwi_2018/mtwi_2018_train/'
data_dir = 'dataset/'
origin_image_dir_name = 'train_mtwi_img/'
origin_txt_dir_name = 'train_mtwi_txt/'
train_image_dir_name = 'images_%s/' % train_task_id
train_label_dir_name = 'labels_%s/' % train_task_id
show_gt_image_dir_name = 'show_gt_images_%s/' % train_task_id
show_act_image_dir_name = 'show_act_images_%s/' % train_task_id
gen_origin_img = True
draw_gt_quad = False
draw_act_quad = False
val_fname = 'val_%s.txt' % train_task_id
train_fname = 'train_%s.txt' % train_task_id
# in paper it's 0.3, maybe to large to this problem
shrink_ratio = 0.2
# pixels between 0.2 and 0.6 are side pixels
shrink_side_ratio = 0.6
epsilon = 1e-4
num_channels = 3
feature_layers_range = range(5, 1, -1)
# feature_layers_range = range(3, 0, -1)
feature_layers_num = len(feature_layers_range)
# pixel_size = 4
pixel_size = 2 ** feature_layers_range[-1]
locked_layers = False
if not os.path.exists('model'):
os.mkdir('model')
if not os.path.exists('saved_model'):
os.mkdir('saved_model')
pretrained_model = 'saved_model/east_model_weights_3T512.h5'
model_weights_path = 'model/weights_%s.{epoch:03d}-{val_loss:.3f}.h5' \
% train_task_id
saved_model_file_path = 'saved_model/east_model_%s.h5' % train_task_id
saved_model_weights_file_path = 'saved_model/east_model_weights_%s.h5'\
% train_task_id
pixel_threshold = 0.9
side_vertex_pixel_threshold = 0.9
trunc_threshold = 0.1
predict_cut_text_line = False
predict_write2txt = True
| 31.571429 | 88 | 0.749074 |
4a1ef8b918e7652610bf30b83a5b63a0c23cee24 | 332 | py | Python | other/dingding/dingtalk/api/rest/OapiEduSchoolInitRequest.py | hth945/pytest | 83e2aada82a2c6a0fdd1721320e5bf8b8fd59abc | [
"Apache-2.0"
] | null | null | null | other/dingding/dingtalk/api/rest/OapiEduSchoolInitRequest.py | hth945/pytest | 83e2aada82a2c6a0fdd1721320e5bf8b8fd59abc | [
"Apache-2.0"
] | null | null | null | other/dingding/dingtalk/api/rest/OapiEduSchoolInitRequest.py | hth945/pytest | 83e2aada82a2c6a0fdd1721320e5bf8b8fd59abc | [
"Apache-2.0"
] | null | null | null | '''
Created by auto_sdk on 2020.10.26
'''
from dingtalk.api.base import RestApi
class OapiEduSchoolInitRequest(RestApi):
def __init__(self,url=None):
RestApi.__init__(self,url)
self.campus = None
self.operator = None
def getHttpMethod(self):
return 'POST'
def getapiname(self):
return 'dingtalk.oapi.edu.school.init'
| 20.75 | 40 | 0.743976 |
4a1ef8b93fa3f551f9eba7671c8188b7b2e0b868 | 5,511 | py | Python | datasetinsights/stats/visualization/keypoints_plot.py | Unity-Technologies/dataset-insights | 62849b1412e3e63a2f4a6f56bd27272cb4c1dfc1 | [
"Apache-2.0"
] | 3 | 2020-07-22T02:35:20.000Z | 2020-07-22T19:45:57.000Z | datasetinsights/stats/visualization/keypoints_plot.py | Unity-Technologies/dataset-insights | 62849b1412e3e63a2f4a6f56bd27272cb4c1dfc1 | [
"Apache-2.0"
] | 8 | 2020-07-24T22:23:51.000Z | 2020-08-03T23:02:13.000Z | datasetinsights/stats/visualization/keypoints_plot.py | Unity-Technologies/dataset-insights | 62849b1412e3e63a2f4a6f56bd27272cb4c1dfc1 | [
"Apache-2.0"
] | 2 | 2020-07-22T19:33:28.000Z | 2020-07-31T17:44:50.000Z | """ Helper keypoints library to plot keypoint joints and skeletons with a
simple Python API.
"""
def _get_color_from_color_node(color):
"""Gets the color from the color node in the template.
Args:
color (tuple): The color's channel values expressed in a range from 0..1
Returns: The color for the node.
"""
r = int(color["r"] * 255)
g = int(color["g"] * 255)
b = int(color["b"] * 255)
a = int(color["a"] * 255)
return r, g, b, a
def _get_color_for_bone(bone):
"""Gets the color for the bone from the template. A bone is a visual
connection between two keypoints in the keypoint list of the figure.
bone
{
joint1: <int> Index into the keypoint list for the first joint.
joint2: <int> Index into the keypoint list for the second joint.
color {
r: <float> Value (0..1) of the red channel.
g: <float> Value (0..1) of the green channel.
b: <float> Value (0..1) of the blue channel.
a: <float> Value (0..1) of the alpha channel.
}
}
Args:
bone: The active bone.
Returns: The color of the bone.
"""
if "color" in bone:
return _get_color_from_color_node(bone["color"])
else:
return 255, 0, 255, 255
def _get_color_for_keypoint(template, keypoint):
"""Gets the color for the keypoint from the template. A keypoint is a
location of interest inside of a figure. Keypoints are connected
together with bones. The configuration of keypoint locations and bone
connections are defined in a template file.
keypoint_template {
template_id: <str> The UUID of the template.
template_name: <str> Human readable name of the template.
key_points [ <List> List of joints defined in this template
{
label: <str> The label of the joint.
index: <int> The index of the joint.
color {
r: <float> Value (0..1) for the red channel.
g: <float> Value (0..1) for the green channel.
b: <float> Value (0..1) for the blue channel.
a: <float> Value (0..1) for the alpha channel.
}
}, ...
]
skeleton [ <List> List of skeletal connections
{
joint1: <int> The first joint of the connection.
joint2: <int> The second joint of the connection.
color {
r: <float> Value (0..1) for the red channel.
g: <float> Value (0..1) for the green channel.
b: <float> Value (0..1) for the blue channel.
a: <float> Value (0..1) for the alpha channel.
}
}, ...
]
}
Args:
template: The active template.
keypoint: The active keypoint.
Returns: The color for the keypoint.
"""
node = template["key_points"][keypoint["index"]]
if "color" in node:
return _get_color_from_color_node(node["color"])
else:
return 0, 0, 255, 255
def draw_keypoints_for_figure(image, figure, draw, templates, visual_width=6):
"""Draws keypoints for a figure on an image.
keypoints {
label_id: <int> Integer identifier of the label.
instance_id: <str> UUID of the instance.
template_guid: <str> UUID of the keypoint template.
pose: <str> String label for current pose.
keypoints [
{
index: <int> Index of keypoint in template.
x: <float> X subpixel coordinate of keypoint.
y: <float> Y subpixel coordinate of keypoint
state: <int> 0: keypoint does not exist,
1: keypoint exists but is not visible,
2: keypoint exists and is visible.
}, ...
]
}
Args:
image (PIL Image): a PIL image.
figure: The figure to draw.
draw (PIL ImageDraw): PIL image draw interface.
templates (list): a list of keypoint templates.
visual_width (int): the visual width of the joints.
Returns: a PIL image with keypoints for a figure drawn on it.
"""
# find the template for this
for template in templates:
if template["template_id"] == figure["template_guid"]:
break
else:
return image
# load the spec
skeleton = template["skeleton"]
for bone in skeleton:
j1 = figure["keypoints"][bone["joint1"]]
j2 = figure["keypoints"][bone["joint2"]]
if j1["state"] == 2 and j2["state"] == 2:
x1 = int(j1["x"])
y1 = int(j1["y"])
x2 = int(j2["x"])
y2 = int(j2["y"])
color = _get_color_for_bone(bone)
draw.line((x1, y1, x2, y2), fill=color, width=visual_width)
for k in figure["keypoints"]:
state = k["state"]
if state == 2:
x = k["x"]
y = k["y"]
color = _get_color_for_keypoint(template, k)
half_width = visual_width / 2
draw.ellipse(
(
x - half_width,
y - half_width,
x + half_width,
y + half_width,
),
fill=color,
outline=color,
)
return image
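# --- Added usage sketch (not part of the original module). The template and
# figure dictionaries are minimal hand-built examples that follow the structures
# described in the docstrings above; the PIL image/draw setup is an assumption
# about how callers typically supply `image` and `draw`.
if __name__ == "__main__":
    from PIL import Image, ImageDraw

    example_template = {
        "template_id": "t-0",
        "template_name": "two-joint example",
        "key_points": [{"label": "a", "index": 0}, {"label": "b", "index": 1}],
        "skeleton": [{"joint1": 0, "joint2": 1}],
    }
    example_figure = {
        "template_guid": "t-0",
        "keypoints": [
            {"index": 0, "x": 10.0, "y": 10.0, "state": 2},
            {"index": 1, "x": 50.0, "y": 40.0, "state": 2},
        ],
    }
    canvas = Image.new("RGBA", (64, 64))
    drawer = ImageDraw.Draw(canvas)
    draw_keypoints_for_figure(canvas, example_figure, drawer, [example_template])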
| 31.491429 | 80 | 0.535112 |
4a1ef8fcc6cfb8ff18a119bf774b684151e2c83f | 25,311 | py | Python | custom/icds_reports/sqldata/exports/children.py | tobiasmcnulty/commcare-hq | 234aa1fba98a96de1b625bbd70b2066fc877eed1 | [
"BSD-3-Clause"
] | 1 | 2020-07-14T13:00:23.000Z | 2020-07-14T13:00:23.000Z | custom/icds_reports/sqldata/exports/children.py | tobiasmcnulty/commcare-hq | 234aa1fba98a96de1b625bbd70b2066fc877eed1 | [
"BSD-3-Clause"
] | null | null | null | custom/icds_reports/sqldata/exports/children.py | tobiasmcnulty/commcare-hq | 234aa1fba98a96de1b625bbd70b2066fc877eed1 | [
"BSD-3-Clause"
] | null | null | null | from sqlagg.base import AliasColumn
from sqlagg.columns import SumWhen, SumColumn, SimpleColumn
from corehq.apps.reports.sqlreport import DatabaseColumn, AggregateColumn
from custom.icds_reports.sqldata.base import IcdsSqlData
from custom.icds_reports.utils.mixins import ExportableMixin
from custom.icds_reports.utils import wasting_severe_column, wasting_moderate_column, \
wasting_normal_column, stunting_severe_column, stunting_moderate_column, stunting_normal_column, percent, \
hfa_recorded_in_month_column, wfh_recorded_in_month_column, get_age_condition, phone_number_function
class ChildrenExport(ExportableMixin, IcdsSqlData):
title = 'Children'
table_name = 'agg_child_health_monthly'
def __init__(self, config=None, loc_level=1, show_test=False, beta=False):
super(ChildrenExport, self).__init__(config, loc_level, show_test, beta)
self.config.update({
'age_0': '0',
'age_6': '6',
'age_12': '12',
'age_24': '24',
'age_36': '36',
'age_48': '48',
'age_60': '60',
'age_72': '72',
})
@property
def get_columns_by_loc_level(self):
columns = [
DatabaseColumn('State', SimpleColumn('state_name'))
]
if self.loc_level > 1:
columns.append(DatabaseColumn('District', SimpleColumn('district_name'), slug='district_name'))
if self.loc_level > 2:
columns.append(DatabaseColumn('Block', SimpleColumn('block_name'), slug='block_name'))
if self.loc_level > 3:
columns.append(DatabaseColumn('Supervisor', SimpleColumn('supervisor_name'), slug='supervisor_name'))
if self.loc_level > 4:
columns.append(DatabaseColumn('AWC', SimpleColumn('awc_name'), slug='awc_name'))
columns.append(DatabaseColumn(
'AWW Phone Number',
SimpleColumn('contact_phone_number'),
format_fn=phone_number_function,
slug='contact_phone_number')
)
return columns
@property
def columns(self):
columns = self.get_columns_by_loc_level
agg_columns = [
AggregateColumn(
'Weighing efficiency (in month)',
percent,
[
SumWhen(
whens=[["age_tranche != :age_72", 'nutrition_status_weighed']], else_=0,
alias='nutrition_status_weighed'
),
SumWhen(
whens=[["age_tranche != :age_72", 'wer_eligible']], else_=0,
alias='wer_eligible'
)
],
slug='percent_weight_efficiency'
),
AggregateColumn(
'Height measurement efficiency (in month)',
percent,
[
SumWhen(
whens=[["age_tranche != :age_72", 'height_measured_in_month']], else_=0,
alias='height_measured_in_month_efficiency'
),
SumWhen(
whens=[["age_tranche != :age_72", 'height_eligible']], else_=0,
alias='height_eligible',
)
],
slug='height_measurement'
),
DatabaseColumn(
'Total number of unweighed children (0-5 Years)',
SumWhen(
whens=[["age_tranche != :age_72", 'nutrition_status_unweighed']], else_=0,
alias='nutrition_status_unweighed'
),
slug='total_number_unweighed'
),
AggregateColumn(
'Percentage of severely underweight children',
percent,
[
SumWhen(
whens=[["age_tranche != :age_72", 'nutrition_status_severely_underweight']], else_=0,
alias='nutrition_status_severely_underweight'
),
AliasColumn('nutrition_status_weighed'),
],
slug='percent_severe_underweight'
),
AggregateColumn(
'Percentage of moderately underweight children',
percent,
[
SumWhen(
whens=[["age_tranche != :age_72", 'nutrition_status_moderately_underweight']], else_=0,
alias='nutrition_status_moderately_underweight'
),
AliasColumn('nutrition_status_weighed'),
],
slug='percent_moderate_underweight'
),
AggregateColumn(
'Percentage of normal weight-for-age children',
percent,
[
SumWhen(
whens=[["age_tranche != :age_72", 'nutrition_status_normal']], else_=0,
alias='nutrition_status_normal'
),
AliasColumn('nutrition_status_weighed'),
],
slug='percent_normal_weight'
),
AggregateColumn(
'Percentage of children with severe wasting',
percent,
[
SumWhen(
whens=[[get_age_condition(self.beta), wasting_severe_column(self.beta)]],
alias='wasting_severe'
),
SumWhen(
whens=[[get_age_condition(self.beta), wfh_recorded_in_month_column(self.beta)]],
alias='weighed_and_height_measured_in_month'
),
],
slug='percent_severe_wasting'
),
AggregateColumn(
'Percentage of children with moderate wasting',
percent,
[
SumWhen(
whens=[[get_age_condition(self.beta), wasting_moderate_column(self.beta)]],
alias='wasting_moderate'
),
AliasColumn('weighed_and_height_measured_in_month')
],
slug='percent_moderate_wasting'
),
AggregateColumn(
'Percentage of children with normal weight-for-height',
percent,
[
SumWhen(
whens=[[get_age_condition(self.beta), wasting_normal_column(self.beta)]],
alias='wasting_normal'
),
AliasColumn('weighed_and_height_measured_in_month')
],
slug='percent_normal_wasting'
),
AggregateColumn(
'Percentage of children with severe stunting',
percent,
[
SumWhen(
whens=[[get_age_condition(self.beta), stunting_severe_column(self.beta)]],
alias='stunting_severe'
),
SumWhen(
whens=[[get_age_condition(self.beta), hfa_recorded_in_month_column(self.beta)]],
alias='height_measured_in_month'
),
],
slug='percent_severe_stunting'
),
AggregateColumn(
'Percentage of children with moderate stunting',
percent,
[
SumWhen(
whens=[[get_age_condition(self.beta), stunting_moderate_column(self.beta)]],
alias='stunting_moderate'
),
AliasColumn('height_measured_in_month')
],
slug='percent_moderate_stunting'
),
AggregateColumn(
'Percentage of children with normal height-for-age',
percent,
[
SumWhen(
whens=[[get_age_condition(self.beta), stunting_normal_column(self.beta)]],
alias='stunting_normal'
),
AliasColumn('height_measured_in_month')
],
slug='percent_normal_stunting'
),
AggregateColumn(
'Percent of newborns with low birth weight',
percent,
[
SumColumn('low_birth_weight_in_month',
alias='low_birth_weight_in_month'),
SumColumn('weighed_and_born_in_month',
alias='weighed_and_born_in_month')
],
slug='newborn_low_birth_weight'
),
AggregateColumn(
'Percentage of children breastfed at birth',
percent,
[
SumColumn('bf_at_birth',
alias='bf_at_birth'),
SumColumn('born_in_month',
alias='born_in_month')
],
slug='percent_breastfed_at_birth'
),
AggregateColumn(
'Percentage of children exclusively breastfeeding',
percent,
[
SumColumn('ebf_in_month',
alias='ebf_in_month'),
SumColumn('ebf_eligible',
alias='ebf_eligible')
],
slug='percent_ebf'
),
AggregateColumn(
'Percentage of children initiated complementary feeding (in the past 30 days)',
percent,
[
SumColumn('cf_initiation_in_month',
alias='cf_initiation_in_month'),
SumColumn('cf_initiation_eligible',
alias='cf_initiation_eligible')
],
slug='percent_initiated_on_cf'
),
AggregateColumn(
'Percentage of children initiated appropriate complementary feeding',
percent,
[
SumColumn('cf_in_month',
alias='cf_in_month'),
SumColumn('cf_eligible',
alias='cf_eligible')
],
slug='percent_appropriate_cf'
),
AggregateColumn(
'Percentage of children receiving complementary feeding with adequate diet diversity',
percent,
[
SumColumn('cf_diet_diversity',
alias='cf_diet_diversity'),
AliasColumn('cf_eligible')
],
slug='percent_cf_diet_diversity'
),
AggregateColumn(
'Percentage of children receiving complementary feeding with adequate diet quantity',
percent,
[
SumColumn('cf_diet_quantity',
alias='cf_diet_quantity'),
AliasColumn('cf_eligible')
],
slug='percent_cf_diet_quanity'
),
AggregateColumn(
"Percentage of children receiving complementary feeding "
"with appropriate handwashing before feeding",
percent,
[
SumColumn('cf_handwashing',
alias='cf_handwashing'),
AliasColumn('cf_eligible')
],
slug='percent_cf_handwashing_before_feeding'
),
]
agg_columns.insert(0, DatabaseColumn('Total no. of children weighed',
AliasColumn('nutrition_status_weighed'),
slug='nutrition_status_weighed'))
agg_columns.insert(1, DatabaseColumn('Total no. of children eligible to be weighed',
AliasColumn('wer_eligible'),
slug='wer_eligible'))
agg_columns.insert(3, DatabaseColumn('Total no. of children whose height was measured',
AliasColumn('height_measured_in_month_efficiency'),
slug='height_measured_in_month_efficiency'))
agg_columns.insert(4, DatabaseColumn('Total no. of children eligible for measuring height',
AliasColumn('height_eligible'),
slug='height_eligible'))
agg_columns.insert(7, DatabaseColumn('No. of severely underweight children',
AliasColumn('nutrition_status_severely_underweight'),
slug='nutrition_status_severely_underweight'))
agg_columns.insert(8, DatabaseColumn('Total No. of children weighed',
AliasColumn('nutrition_status_weighed'),
slug='nutrition_status_weighed'))
agg_columns.insert(10, DatabaseColumn('No. of moderately underweight children ',
AliasColumn('nutrition_status_moderately_underweight'),
slug='nutrition_status_moderately_underweight'))
agg_columns.insert(11, DatabaseColumn('Total No. of children weighed',
AliasColumn('nutrition_status_weighed'),
slug='nutrition_status_weighed'))
agg_columns.insert(13, DatabaseColumn('No. of children with normal weight for age',
AliasColumn('nutrition_status_normal'),
slug='nutrition_status_normal'))
agg_columns.insert(14, DatabaseColumn('Total No. of children weighed',
AliasColumn('nutrition_status_weighed'),
slug='nutrition_status_weighed'))
agg_columns.insert(16, DatabaseColumn('No. of Children with severe wasting',
AliasColumn('wasting_severe'),
slug='wasting_severe'))
agg_columns.insert(17, DatabaseColumn('Total number of children whose height and weight is measured',
AliasColumn('weighed_and_height_measured_in_month'),
slug='weighed_and_height_measured_in_month'))
agg_columns.insert(19, DatabaseColumn('No. of moderately wasted children',
AliasColumn('wasting_moderate'),
slug='wasting_moderate'))
agg_columns.insert(20, DatabaseColumn('Total number of children whose height and weight is measured',
AliasColumn('weighed_and_height_measured_in_month'),
slug='weighed_and_height_measured_in_month'))
agg_columns.insert(22, DatabaseColumn('No. of children with normal weight-for-height',
AliasColumn('wasting_normal'),
slug='wasting_normal'))
agg_columns.insert(23, DatabaseColumn('Total no. of children whose height and weight is measured',
AliasColumn('weighed_and_height_measured_in_month'),
slug='weighed_and_height_measured_in_month'))
agg_columns.insert(25, DatabaseColumn('No. of severely stunted children',
AliasColumn('stunting_severe'),
slug='stunting_severe'))
agg_columns.insert(26, DatabaseColumn('Total no. of children whose height has been measured',
AliasColumn('height_measured_in_month'),
slug='height_measured_in_month'))
agg_columns.insert(28, DatabaseColumn('No. of moderately stunted children',
AliasColumn('stunting_moderate'),
slug='stunting_moderate'))
agg_columns.insert(29, DatabaseColumn('Total no. of children whose height has been measured',
AliasColumn('height_measured_in_month'),
slug='height_measured_in_month'))
agg_columns.insert(31, DatabaseColumn('No. of children with normal height for age',
AliasColumn('stunting_normal'),
slug='stunting_normal'))
agg_columns.insert(32, DatabaseColumn('Total no. of children whose height has been measured',
AliasColumn('height_measured_in_month'),
slug='height_measured_in_month'))
agg_columns.insert(34, DatabaseColumn('No. of newborns with low birth weight',
AliasColumn('low_birth_weight_in_month'),
slug='low_birth_weight_in_month'))
agg_columns.insert(35, DatabaseColumn('Total no. of children born and weighed in the current month',
AliasColumn('weighed_and_born_in_month'),
slug='weighed_and_born_in_month'))
agg_columns.insert(37, AggregateColumn('No. of children between 1-2 years old who completed 1'
' year immunization',
lambda x, y: ((x or 0) + (y or 0)),
[
SumWhen(
whens=[["age_tranche <= :age_24",
'fully_immunized_on_time']],
alias='fully_immunized_on_time_num'
),
SumWhen(
whens=[["age_tranche <= :age_24",
'fully_immunized_late']],
alias='fully_immunized_late_num'
)
], slug='num_immun_children'))
agg_columns.insert(38, DatabaseColumn('Total no. of children from age >12 months and <= 24',
SumWhen(
whens=[["age_tranche <= :age_24",
'fully_immunized_eligible']],
alias='fully_immunized_eligible_num'
),
slug='fully_immunized_eligible'))
agg_columns.insert(39, AggregateColumn('Percentage of children between 1-2 years who completed'
' 1 year immunizations',
lambda x, y, z:
'%.2f%%' % (((x or 0) + (y or 0)) * 100 / float(z or 1)),
[
SumWhen(
whens=[["age_tranche <= :age_24",
'fully_immunized_on_time']],
alias='fully_immunized_on_time'
),
SumWhen(
whens=[["age_tranche <= :age_24",
'fully_immunized_late']],
alias='fully_immunized_late'
),
SumWhen(
whens=[["age_tranche <= :age_24",
'fully_immunized_eligible']],
alias='fully_immunized_eligible'
)
],
slug='percent_completed_1year_immunizations'
))
agg_columns.insert(40, DatabaseColumn('No. of children breastfed at birth',
AliasColumn('bf_at_birth'),
slug='bf_at_birth'))
agg_columns.insert(41, DatabaseColumn('Total no. of children enrolled in ICDS-CAS system and born in last month',
AliasColumn('born_in_month'),
slug='born_in_month'))
agg_columns.insert(43, DatabaseColumn('No. of children exclusively breastfed',
AliasColumn('ebf_in_month'),
slug='ebf_in_month'))
agg_columns.insert(44, DatabaseColumn('Total number of children (0-6 months) of age enrolled in ICDS-CAS system',
AliasColumn('ebf_eligible'),
slug='ebf_eligible'))
agg_columns.insert(46, DatabaseColumn('No. of children initiated complementary feeding (in the past 30 days)',
AliasColumn('cf_initiation_in_month'),
slug='cf_initiation_in_month'))
agg_columns.insert(47, DatabaseColumn('Total no. of children (6-8 ) months of age enrolled with ICDS-CAS',
AliasColumn('cf_initiation_eligible'),
slug='cf_initiation_eligible'))
agg_columns.insert(49, DatabaseColumn('No. of children initiated appropriate complementary feeding',
AliasColumn('cf_in_month'),
slug='cf_in_month'))
agg_columns.insert(50, DatabaseColumn('No.of children (6-24) months of age enrolled with ICDS-CAS',
AliasColumn('cf_eligible'),
slug='cf_eligible'))
agg_columns.insert(52, DatabaseColumn('No.of children receiving complementary feeding with adequate diet diversity',
AliasColumn('cf_diet_diversity'),
slug='cf_diet_diversity'))
agg_columns.insert(53, DatabaseColumn('Total number of children (6 months - 2 yrs) of age enrolled with ICDS-CAS',
AliasColumn('cf_eligible'),
slug='cf_eligible'))
agg_columns.insert(55, DatabaseColumn('No. of children initiated complementary feeding with adequate diet quantity',
AliasColumn('cf_diet_quantity'),
slug='cf_diet_quantity'))
agg_columns.insert(56, DatabaseColumn('No.of children (6-24) months of age enrolled with ICDS-CAS',
AliasColumn('cf_eligible'),
slug='cf_eligible'))
agg_columns.insert(58, DatabaseColumn('Total Number of children receiving complementary feeding '
'with appropriate handwashing before feeding',
AliasColumn('cf_handwashing'),
slug='cf_handwashing'))
agg_columns.insert(59, DatabaseColumn('No.of children (6-24) months of age enrolled with ICDS-CAS',
AliasColumn('cf_eligible'),
slug='cf_eligible'))
return columns + agg_columns
| 53.17437 | 124 | 0.459721 |